diff --git a/docs/openapi.json b/docs/openapi.json index e432a84..1ab18a6 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -20,7 +20,9 @@ "paths": { "/v1/acp": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_acp_servers", "responses": { "200": { @@ -38,7 +40,9 @@ }, "/v1/acp/{server_id}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_acp", "parameters": [ { @@ -88,7 +92,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_acp", "parameters": [ { @@ -198,7 +204,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_acp", "parameters": [ { @@ -220,7 +228,9 @@ }, "/v1/agents": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_agents", "parameters": [ { @@ -270,7 +280,9 @@ }, "/v1/agents/{agent}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_agent", "parameters": [ { @@ -339,7 +351,9 @@ }, "/v1/agents/{agent}/install": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_agent_install", "parameters": [ { @@ -398,7 +412,9 @@ }, "/v1/config/mcp": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_config_mcp", "parameters": [ { @@ -444,7 +460,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_config_mcp", "parameters": [ { @@ -483,7 +501,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_config_mcp", "parameters": [ { @@ -514,7 +534,9 @@ }, "/v1/config/skills": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_config_skills", "parameters": [ { @@ -560,7 +582,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_config_skills", "parameters": [ { @@ -599,7 +623,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_config_skills", "parameters": [ { @@ -630,7 +656,9 @@ }, 
"/v1/fs/entries": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_fs_entries", "parameters": [ { @@ -663,7 +691,9 @@ }, "/v1/fs/entry": { "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_fs_entry", "parameters": [ { @@ -702,7 +732,9 @@ }, "/v1/fs/file": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_fs_file", "parameters": [ { @@ -722,7 +754,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_fs_file", "parameters": [ { @@ -762,7 +796,9 @@ }, "/v1/fs/mkdir": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_mkdir", "parameters": [ { @@ -791,7 +827,9 @@ }, "/v1/fs/move": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_move", "requestBody": { "content": { @@ -819,7 +857,9 @@ }, "/v1/fs/stat": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_fs_stat", "parameters": [ { @@ -848,7 +888,9 @@ }, "/v1/fs/upload-batch": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_upload_batch", "parameters": [ { @@ -889,7 +931,9 @@ }, "/v1/health": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_health", "responses": { "200": { @@ -907,7 +951,9 @@ }, "/v1/processes": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "List all managed processes.", "description": "Returns a list of all processes (running and exited) currently tracked\nby the runtime, sorted by process ID.", "operationId": "get_v1_processes", @@ -935,7 +981,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Create a long-lived managed process.", "description": "Spawns a new process with the given command and arguments. Supports both\npipe-based and PTY (tty) modes. 
Returns the process descriptor on success.", "operationId": "post_v1_processes", @@ -995,7 +1043,9 @@ }, "/v1/processes/config": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Get process runtime configuration.", "description": "Returns the current runtime configuration for the process management API,\nincluding limits for concurrency, timeouts, and buffer sizes.", "operationId": "get_v1_processes_config", @@ -1023,7 +1073,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Update process runtime configuration.", "description": "Replaces the runtime configuration for the process management API.\nValidates that all values are non-zero and clamps default timeout to max.", "operationId": "post_v1_processes_config", @@ -1073,7 +1125,9 @@ }, "/v1/processes/run": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Run a one-shot command.", "description": "Executes a command to completion and returns its stdout, stderr, exit code,\nand duration. Supports configurable timeout and output size limits.", "operationId": "post_v1_processes_run", @@ -1123,7 +1177,9 @@ }, "/v1/processes/{id}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Get a single process by ID.", "description": "Returns the current state of a managed process including its status,\nPID, exit code, and creation/exit timestamps.", "operationId": "get_v1_process", @@ -1172,7 +1228,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Delete a process record.", "description": "Removes a stopped process from the runtime. Returns 409 if the process\nis still running; stop or kill it first.", "operationId": "delete_v1_process", @@ -1226,7 +1284,9 @@ }, "/v1/processes/{id}/input": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Write input to a process.", "description": "Sends data to a process's stdin (pipe mode) or PTY writer (tty mode).\nData can be encoded as base64, utf8, or text. 
Returns 413 if the decoded\npayload exceeds the configured `maxInputBytesPerRequest` limit.", "operationId": "post_v1_process_input", @@ -1307,7 +1367,9 @@ }, "/v1/processes/{id}/kill": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Send SIGKILL to a process.", "description": "Sends SIGKILL to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", "operationId": "post_v1_process_kill", @@ -1370,7 +1432,9 @@ }, "/v1/processes/{id}/logs": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Fetch process logs.", "description": "Returns buffered log entries for a process. Supports filtering by stream\ntype, tail count, and sequence-based resumption. When `follow=true`,\nreturns an SSE stream that replays buffered entries then streams live output.", "operationId": "get_v1_process_logs", @@ -1468,7 +1532,9 @@ }, "/v1/processes/{id}/stop": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Send SIGTERM to a process.", "description": "Sends SIGTERM to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", "operationId": "post_v1_process_stop", @@ -1531,7 +1597,9 @@ }, "/v1/processes/{id}/terminal/resize": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Resize a process terminal.", "description": "Sets the PTY window size (columns and rows) for a tty-mode process and\nsends SIGWINCH so the child process can adapt.", "operationId": "post_v1_process_terminal_resize", @@ -1612,7 +1680,9 @@ }, "/v1/processes/{id}/terminal/ws": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Open an interactive WebSocket terminal session.", "description": "Upgrades the connection to a WebSocket for bidirectional PTY I/O. Accepts\n`access_token` query param for browser-based auth (WebSocket API cannot\nsend custom headers). 
Streams raw PTY output as binary frames and accepts\nJSON control frames for input, resize, and close.", "operationId": "get_v1_process_terminal_ws", @@ -1689,7 +1759,9 @@ "schemas": { "AcpEnvelope": { "type": "object", - "required": ["jsonrpc"], + "required": [ + "jsonrpc" + ], "properties": { "error": { "nullable": true @@ -1723,7 +1795,11 @@ }, "AcpServerInfo": { "type": "object", - "required": ["serverId", "agent", "createdAtMs"], + "required": [ + "serverId", + "agent", + "createdAtMs" + ], "properties": { "agent": { "type": "string" @@ -1739,7 +1815,9 @@ }, "AcpServerListResponse": { "type": "object", - "required": ["servers"], + "required": [ + "servers" + ], "properties": { "servers": { "type": "array", @@ -1830,7 +1908,12 @@ }, "AgentInfo": { "type": "object", - "required": ["id", "installed", "credentialsAvailable", "capabilities"], + "required": [ + "id", + "installed", + "credentialsAvailable", + "capabilities" + ], "properties": { "capabilities": { "$ref": "#/components/schemas/AgentCapabilities" @@ -1873,7 +1956,11 @@ }, "AgentInstallArtifact": { "type": "object", - "required": ["kind", "path", "source"], + "required": [ + "kind", + "path", + "source" + ], "properties": { "kind": { "type": "string" @@ -1909,7 +1996,10 @@ }, "AgentInstallResponse": { "type": "object", - "required": ["already_installed", "artifacts"], + "required": [ + "already_installed", + "artifacts" + ], "properties": { "already_installed": { "type": "boolean" @@ -1924,7 +2014,9 @@ }, "AgentListResponse": { "type": "object", - "required": ["agents"], + "required": [ + "agents" + ], "properties": { "agents": { "type": "array", @@ -1957,7 +2049,9 @@ }, "FsActionResponse": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -1966,7 +2060,9 @@ }, "FsDeleteQuery": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -1988,7 +2084,12 @@ }, "FsEntry": { 
"type": "object", - "required": ["name", "path", "entryType", "size"], + "required": [ + "name", + "path", + "entryType", + "size" + ], "properties": { "entryType": { "$ref": "#/components/schemas/FsEntryType" @@ -2012,11 +2113,17 @@ }, "FsEntryType": { "type": "string", - "enum": ["file", "directory"] + "enum": [ + "file", + "directory" + ] }, "FsMoveRequest": { "type": "object", - "required": ["from", "to"], + "required": [ + "from", + "to" + ], "properties": { "from": { "type": "string" @@ -2032,7 +2139,10 @@ }, "FsMoveResponse": { "type": "object", - "required": ["from", "to"], + "required": [ + "from", + "to" + ], "properties": { "from": { "type": "string" @@ -2044,7 +2154,9 @@ }, "FsPathQuery": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -2053,7 +2165,11 @@ }, "FsStat": { "type": "object", - "required": ["path", "entryType", "size"], + "required": [ + "path", + "entryType", + "size" + ], "properties": { "entryType": { "$ref": "#/components/schemas/FsEntryType" @@ -2083,7 +2199,10 @@ }, "FsUploadBatchResponse": { "type": "object", - "required": ["paths", "truncated"], + "required": [ + "paths", + "truncated" + ], "properties": { "paths": { "type": "array", @@ -2098,7 +2217,10 @@ }, "FsWriteResponse": { "type": "object", - "required": ["path", "bytesWritten"], + "required": [ + "path", + "bytesWritten" + ], "properties": { "bytesWritten": { "type": "integer", @@ -2112,7 +2234,9 @@ }, "HealthResponse": { "type": "object", - "required": ["status"], + "required": [ + "status" + ], "properties": { "status": { "type": "string" @@ -2121,7 +2245,10 @@ }, "McpConfigQuery": { "type": "object", - "required": ["directory", "mcpName"], + "required": [ + "directory", + "mcpName" + ], "properties": { "directory": { "type": "string" @@ -2135,7 +2262,10 @@ "oneOf": [ { "type": "object", - "required": ["command", "type"], + "required": [ + "command", + "type" + ], "properties": { "args": { "type": 
"array", @@ -2169,13 +2299,18 @@ }, "type": { "type": "string", - "enum": ["local"] + "enum": [ + "local" + ] } } }, { "type": "object", - "required": ["url", "type"], + "required": [ + "url", + "type" + ], "properties": { "bearerTokenEnvVar": { "type": "string", @@ -2223,7 +2358,9 @@ }, "type": { "type": "string", - "enum": ["remote"] + "enum": [ + "remote" + ] }, "url": { "type": "string" @@ -2237,7 +2374,11 @@ }, "ProblemDetails": { "type": "object", - "required": ["type", "title", "status"], + "required": [ + "type", + "title", + "status" + ], "properties": { "detail": { "type": "string", @@ -2263,7 +2404,14 @@ }, "ProcessConfig": { "type": "object", - "required": ["maxConcurrentProcesses", "defaultRunTimeoutMs", "maxRunTimeoutMs", "maxOutputBytes", "maxLogBytesPerProcess", "maxInputBytesPerRequest"], + "required": [ + "maxConcurrentProcesses", + "defaultRunTimeoutMs", + "maxRunTimeoutMs", + "maxOutputBytes", + "maxLogBytesPerProcess", + "maxInputBytesPerRequest" + ], "properties": { "defaultRunTimeoutMs": { "type": "integer", @@ -2295,7 +2443,9 @@ }, "ProcessCreateRequest": { "type": "object", - "required": ["command"], + "required": [ + "command" + ], "properties": { "args": { "type": "array", @@ -2326,7 +2476,15 @@ }, "ProcessInfo": { "type": "object", - "required": ["id", "command", "args", "tty", "interactive", "status", "createdAtMs"], + "required": [ + "id", + "command", + "args", + "tty", + "interactive", + "status", + "createdAtMs" + ], "properties": { "args": { "type": "array", @@ -2377,7 +2535,9 @@ }, "ProcessInputRequest": { "type": "object", - "required": ["data"], + "required": [ + "data" + ], "properties": { "data": { "type": "string" @@ -2390,7 +2550,9 @@ }, "ProcessInputResponse": { "type": "object", - "required": ["bytesWritten"], + "required": [ + "bytesWritten" + ], "properties": { "bytesWritten": { "type": "integer", @@ -2400,7 +2562,9 @@ }, "ProcessListResponse": { "type": "object", - "required": ["processes"], + "required": [ + 
"processes" + ], "properties": { "processes": { "type": "array", @@ -2412,7 +2576,13 @@ }, "ProcessLogEntry": { "type": "object", - "required": ["sequence", "stream", "timestampMs", "data", "encoding"], + "required": [ + "sequence", + "stream", + "timestampMs", + "data", + "encoding" + ], "properties": { "data": { "type": "string" @@ -2464,7 +2634,11 @@ }, "ProcessLogsResponse": { "type": "object", - "required": ["processId", "stream", "entries"], + "required": [ + "processId", + "stream", + "entries" + ], "properties": { "entries": { "type": "array", @@ -2482,11 +2656,18 @@ }, "ProcessLogsStream": { "type": "string", - "enum": ["stdout", "stderr", "combined", "pty"] + "enum": [ + "stdout", + "stderr", + "combined", + "pty" + ] }, "ProcessRunRequest": { "type": "object", - "required": ["command"], + "required": [ + "command" + ], "properties": { "args": { "type": "array", @@ -2522,7 +2703,14 @@ }, "ProcessRunResponse": { "type": "object", - "required": ["timedOut", "stdout", "stderr", "stdoutTruncated", "stderrTruncated", "durationMs"], + "required": [ + "timedOut", + "stdout", + "stderr", + "stdoutTruncated", + "stderrTruncated", + "durationMs" + ], "properties": { "durationMs": { "type": "integer", @@ -2564,11 +2752,17 @@ }, "ProcessState": { "type": "string", - "enum": ["running", "exited"] + "enum": [ + "running", + "exited" + ] }, "ProcessTerminalResizeRequest": { "type": "object", - "required": ["cols", "rows"], + "required": [ + "cols", + "rows" + ], "properties": { "cols": { "type": "integer", @@ -2584,7 +2778,10 @@ }, "ProcessTerminalResizeResponse": { "type": "object", - "required": ["cols", "rows"], + "required": [ + "cols", + "rows" + ], "properties": { "cols": { "type": "integer", @@ -2600,11 +2797,16 @@ }, "ServerStatus": { "type": "string", - "enum": ["running", "stopped"] + "enum": [ + "running", + "stopped" + ] }, "ServerStatusInfo": { "type": "object", - "required": ["status"], + "required": [ + "status" + ], "properties": { "status": { "$ref": 
"#/components/schemas/ServerStatus" @@ -2619,7 +2821,10 @@ }, "SkillSource": { "type": "object", - "required": ["type", "source"], + "required": [ + "type", + "source" + ], "properties": { "ref": { "type": "string", @@ -2646,7 +2851,9 @@ }, "SkillsConfig": { "type": "object", - "required": ["sources"], + "required": [ + "sources" + ], "properties": { "sources": { "type": "array", @@ -2658,7 +2865,10 @@ }, "SkillsConfigQuery": { "type": "object", - "required": ["directory", "skillName"], + "required": [ + "directory", + "skillName" + ], "properties": { "directory": { "type": "string" @@ -2676,4 +2886,4 @@ "description": "ACP proxy v1 API" } ] -} +} \ No newline at end of file diff --git a/foundry/AGENT-HANDOFF.md b/foundry/AGENT-HANDOFF.md new file mode 100644 index 0000000..20bade7 --- /dev/null +++ b/foundry/AGENT-HANDOFF.md @@ -0,0 +1,179 @@ +# Foundry Agent Handoff + +## Baseline + +- Repo: `rivet-dev/sandbox-agent` +- Branch: `columbus-v2` +- Last pushed commit: `3174fe73` (`feat(foundry): checkpoint actor and workspace refactor`) +- Progress/spec tracker: [FOUNDRY-CHANGES.md](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/FOUNDRY-CHANGES.md) + +## What is already landed + +These spec slices are already implemented and pushed: + +- Item `1`: backend actor rename `auth-user` -> `user` +- Item `2`: Better Auth mapping comments +- Item `5`: task raw SQL cleanup into migrations +- Item `6`: `history` -> `audit-log` +- Item `7`: default model moved to user-scoped app state +- Item `20`: admin action prefixing +- Item `23`: dead `getTaskEnriched` / `enrichTaskRecord` removal +- Item `25`: `Workbench` -> `Workspace` rename across backend/shared/client/frontend +- Item `26`: branch rename deleted +- Organization realtime was already collapsed to full-snapshot `organizationUpdated` +- Task realtime was already aligned to `taskUpdated` + +## Known blocker + +Spec item `3` is only partially done. 
The singleton constraint for the Better Auth `user` table is still blocked. + +- File: [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) +- Reason: Better Auth still depends on external string `user.id`, so a literal singleton `CHECK (id = 1)` on that table is not a safe mechanical change. + +## Important current state + +There are uncommitted edits on top of the pushed checkpoint. Another agent should start from the current worktree, not just `origin/columbus-v2`. + +Current dirty files: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/client/src/mock/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/mock/backend-client.ts) + +These files are the current hot path for the unfinished structural work. 
+ +## What is partially in place but not finished + +### User-owned task UI state + +The user actor already has the schema and CRUD surface for per-user task/session UI state: + +- [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) + `user_task_state` +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + `getTaskState`, `upsertTaskState`, `deleteTaskState` + +But the task actor and UI are still reading/writing the old task-global fields: + +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) + still contains `task_runtime.active_session_id` and session `unread` / `draft_*` +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) + still derives unread/draft/active-session from task-local rows +- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx) + still treats `activeSessionId` as frontend-local and uses task-level unread/draft state + +So items `21`, `22`, `24`, and part of `19` are only half-done. 
+ +### Coordinator ownership + +The current architecture still violates the intended coordinator pattern: + +- Organization still owns `taskLookup` and `taskSummaries` + - [foundry/packages/backend/src/actors/organization/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/db/schema.ts) +- Organization still resolves `taskId -> repoId` + - [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- Task still pushes summary updates to organization instead of repository + - [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- Repository still does not own a `tasks` projection table yet + - [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) + +So items `9`, `13`, and `15` are still open. + +### Queue-only mutations + +Task actor workspace commands already go through queue sends. 
Other actors still do not fully follow the queue-only mutation rule: + +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/organization/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/app-shell.ts) + +So items `4`, `10`, and `11` are still open. + +### Dynamic model/agent data + +The frontend/client still hardcode model groups: + +- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts) +- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts) +- [foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts) + `WorkspaceModelId` is still a hardcoded union + +The repo already has the API source of truth available through the TypeScript SDK: + +- [sdks/typescript/src/client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/sdks/typescript/src/client.ts) + `SandboxAgent.listAgents({ config: true })` +- [server/packages/sandbox-agent/src/router.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router.rs) + `/v1/agents` +- 
[server/packages/sandbox-agent/src/router/support.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router/support.rs) + `fallback_config_options` + +So item `8` is still open. + +### GitHub sync chunking/progress + +GitHub data sync is still a delete-and-replace flow: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) + `replaceRepositories`, `replaceBranches`, `replaceMembers`, `replacePullRequests`, and full-sync flow +- [foundry/packages/backend/src/actors/github-data/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/db/schema.ts) + no generation/progress columns yet +- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts) + no structured sync progress field yet + +So item `16` is still open. + +## Recommended next order + +If another agent picks this up, this is the safest order: + +1. Finish items `21`, `22`, `24`, `19` together. + Reason: user-owned task UI state is already half-wired, and task schema cleanup depends on the same files. + +2. Finish items `9`, `13`, `15` together. + Reason: coordinator ownership, repo-owned task projections, and PR/task unification are the same refactor seam. + +3. Finish item `16`. + Reason: GitHub sync chunking is mostly isolated to `github-data` plus app-shell/shared snapshot wiring. + +4. Finish item `8`. + Reason: dynamic model/agent data is largely independent once user default model is already user-scoped. + +5. Finish items `4`, `10`, `11`, `12`, `18`, final event audit. + +6. Do item `17` last. 
+ +## Concrete file hotspots for the next agent + +Backend: + +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) +- [foundry/packages/backend/src/actors/task/workflow/common.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/common.ts) +- [foundry/packages/backend/src/actors/task/workflow/commands.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/commands.ts) +- [foundry/packages/backend/src/actors/task/workflow/init.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/init.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + +Shared/client/frontend: + +- 
[foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts) +- [foundry/packages/shared/src/contracts.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/contracts.ts) +- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts) +- [foundry/packages/client/src/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/backend-client.ts) +- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts) +- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx) +- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts) +- [foundry/packages/frontend/src/features/tasks/status.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/features/tasks/status.ts) + +## Notes that matter + +- The pushed checkpoint is useful, but it is not the full current state. There are uncommitted edits in the hot-path backend files listed above. +- The current tree already contains a partially added `user_task_state` path. Do not duplicate that work; finish the migration by removing the old task-owned fields and rewiring readers/writers. +- The current task actor still reads mutable fields from `c.state` such as `repoRemote`, `branchName`, `title`, `task`, `sandboxProviderId`, and `agentType`. That is part of item `19`. +- The current frontend still synthesizes PR-only rows into fake tasks. 
That should go away as part of repo-owned task projection / PR unification. diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index e347a60..268b04c 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -56,6 +56,8 @@ Use `pnpm` workspaces and Turborepo. - mock frontend changes: `just foundry-mock` or restart with `just foundry-mock-down && just foundry-mock` - local frontend-only work outside Docker: restart `pnpm --filter @sandbox-agent/foundry-frontend dev` or `just foundry-dev-mock` as appropriate - The backend does **not** hot reload. Bun's `--hot` flag causes the server to re-bind on a different port (e.g. 6421 instead of 6420), breaking all client connections while the container still exposes the original port. After backend code changes, restart the backend container: `just foundry-dev-down && just foundry-dev`. +- The dev server has debug logging enabled by default (`RIVET_LOG_LEVEL=debug`, `FOUNDRY_LOG_LEVEL=debug`) via `compose.dev.yaml`. Error stacks and timestamps are also enabled. +- The frontend client uses JSON encoding for RivetKit in development (`import.meta.env.DEV`) for easier debugging. Production uses the default encoding. ## Railway Logs @@ -73,13 +75,14 @@ Use `pnpm` workspaces and Turborepo. - All backend interaction (actor calls, metadata/health checks, backend HTTP endpoint access) must go through the dedicated client library in `packages/client`. - Outside `packages/client`, do not call backend endpoints directly (for example `fetch(.../v1/rivet...)`), except in black-box E2E tests that intentionally exercise raw transport behavior. - GUI state should update in realtime (no manual refresh buttons). Prefer RivetKit push reactivity and actor-driven events; do not add polling/refetch for normal product flows. -- Keep the mock workbench types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up. 
+- Keep the mock workspace types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up. - Keep frontend route/state coverage current in code and tests; there is no separate page-inventory doc to maintain. - If Foundry uses a shared component from `@sandbox-agent/react`, make changes in `sdks/react` instead of copying or forking that component into Foundry. - When changing shared React components in `sdks/react` for Foundry, verify they still work in the Sandbox Agent Inspector before finishing. -- When making UI changes, verify the live flow with `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish. +- When making UI changes, verify the live flow with the Chrome DevTools MCP or `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish. - When asked for screenshots, capture all relevant affected screens and modal states, not just a single viewport. Include empty, populated, success, and blocked/error states when they are part of the changed flow. - If a screenshot catches a transition frame, blank modal, or otherwise misleading state, retake it before reporting it. +- When verifying UI in the browser, attempt to sign in by navigating to `/signin` and clicking "Continue with GitHub". If the browser lands on the GitHub login page (github.com/login) and you don't have credentials, stop and ask the user to complete the sign-in. Do not assume the session is invalid just because you see the Foundry sign-in page — always attempt the OAuth flow first. 
## Realtime Data Architecture @@ -99,7 +102,7 @@ Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or - **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors. - **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it. - **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). -- The expensive fan-out (querying every repository/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. +- There is no fan-out on the read path. The organization actor owns all task summaries locally. ### Subscription manager @@ -141,6 +144,15 @@ The client subscribes to `app` always, `organization` when entering an organizat - Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. If you need git data, either read stored GitHub metadata or run the command inside a sandbox. - The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. 
+## React Hook Dependency Safety + +- **Never use unstable references as `useEffect`/`useMemo`/`useCallback` dependencies.** React compares dependencies by reference, not value. Expressions like `?? []`, `?? {}`, `.map(...)`, `.filter(...)`, or object/array literals create new references every render, causing infinite re-render loops when used as dependencies. +- If the upstream value may be `undefined`/`null` and you need a fallback, do one of the following: + - Use the raw upstream value as the dependency and apply the fallback inside the effect body: `useEffect(() => { doThing(value ?? []); }, [value]);` + - Derive a stable primitive key: `const key = JSON.stringify(value ?? []);` then depend on `key` + - Memoize: `const stable = useMemo(() => value ?? [], [value]);` +- When reviewing code, treat any `?? []`, `?? {}`, or inline `.map()/.filter()` in a dependency array as a bug. + ## UI System - Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation. @@ -165,6 +177,7 @@ The client subscribes to `app` always, `organization` when entering an organizat - If the system reaches an unexpected state, raise an explicit error with actionable context. - Do not fail silently, swallow errors, or auto-ignore inconsistent data. - Prefer fail-fast behavior over hidden degradation when correctness is uncertain. +- **Never use bare `catch {}` or `catch { }` blocks.** Every catch must at minimum log the error with `logActorWarning` or `console.warn`. Silent catches hide bugs and make debugging impossible. If a catch is intentionally degrading (e.g. returning empty data when a sandbox is expired), it must still log so operators can see what happened. Use `catch (error) { logActorWarning(..., { error: resolveErrorMessage(error) }); }` or equivalent. ## RivetKit Dependency Policy @@ -205,8 +218,9 @@ For all Rivet/RivetKit implementation: - Do not add custom backend REST endpoints (no `/v1/*` shim layer).
- We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them. - Keep strict single-writer ownership: each table/row has exactly one actor writer. -- Parent actors (`organization`, `repository`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. +- Parent actors (`organization`, `task`, `sandbox-instance`) use command-only loops with no timeout. - Periodic syncing lives in dedicated child actors with one timeout cadence each. +- **Task actors must be created lazily** — never during sync or bulk operations. PR sync writes virtual entries to the org's local `taskIndex`/`taskSummaries` tables. The task actor is created on first user interaction via `getOrCreate`. See `packages/backend/CLAUDE.md` "Lazy Task Actor Creation" for details. - Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. - Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. - `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`. @@ -227,8 +241,8 @@ Action handlers must return fast. The pattern: Examples: - `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events. -- `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. -- `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. 
Client observes `pending_provision` → `ready` via task events. +- `sendWorkspaceMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. +- `createWorkspaceSession` → `wait: true` (returns `{ sessionId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise. @@ -240,11 +254,11 @@ All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any ### Task creation: resolve metadata before creating the actor -When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (repository) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the organization actor *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. - Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. - Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index. -- The repository actor already has the task index and GitHub-backed default branch metadata. Resolve the branch name there without local git fetches. +- The organization actor owns the task index and reads GitHub-backed default branch metadata from the github-data actor. Resolve the branch name there without local git fetches. - Do not defer naming to a background provision workflow. 
Do not poll for names to become available. - The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. - Actor handle policy: @@ -320,9 +334,9 @@ Each entry must include: - Friction/issue - Attempted fix/workaround and outcome -## History Events +## Audit Log Events -Log notable workflow changes to `events` so `hf history` remains complete: +Log notable workflow changes to `events` so the audit log remains complete: - create - attach @@ -331,6 +345,8 @@ Log notable workflow changes to `events` so `hf history` remains complete: - status transitions - PR state transitions +When adding new task/workspace commands, always add a corresponding audit log event. + ## Validation After Changes Always run and fix failures: diff --git a/foundry/FOUNDRY-CHANGES.md b/foundry/FOUNDRY-CHANGES.md new file mode 100644 index 0000000..2bd76d2 --- /dev/null +++ b/foundry/FOUNDRY-CHANGES.md @@ -0,0 +1,1456 @@ +# Foundry Planned Changes + +## How to use this document + +Work through items checking boxes as you go. Some items have dependencies — do not start an item until its dependencies are checked off. After each item, run `pnpm -w typecheck && pnpm -w build && pnpm -w test` to validate. If an item includes a "CLAUDE.md update" section, apply it in the same change. Commit after each item passes validation. + +## Progress Log + +- 2026-03-14 10: Initial architecture mapping complete. + - Confirmed the current hot spots match the spec: `auth-user` is still mutation-by-action, `history` is still a separate actor with an `append` action wrapper, organization still owns `taskLookup`/`taskSummaries`, and the `Workbench*` surface is still shared across backend/client/frontend. + - Started foundational rename and migration planning for items `1`, `6`, and `25` because they drive most of the later fallout. +- 2026-03-14 11: Audit-log rename slice landed. 
+ - Renamed the backend actor from `history` to `audit-log`, switched the queue name to `auditLog.command.append`, and removed the `append` action wrapper. + - Updated task/repository/organization call sites to send directly to the audit-log queue or read through the renamed audit-log handle. +- 2026-03-14 12: Foundational naming and dead-surface cleanup landed. + - Renamed the backend auth actor surface from `authUser` to `user`, including actor registration, key helpers, handles, and Better Auth service routing. + - Deleted the dead `getTaskEnriched` / `enrichTaskRecord` fan-out path and changed organization task reads to go straight to the task actor. + - Renamed admin-only GitHub rebuild/reload actions with the `admin*` prefix across backend, client, and frontend. + - Collapsed organization realtime to full-snapshot `organizationUpdated` events and aligned task events to `type: "taskUpdated"`. +- 2026-03-14 13: Task schema migration cleanup landed. + - Removed the task actor's runtime `CREATE TABLE IF NOT EXISTS` / `ALTER TABLE` helpers from `task/workbench.ts` and `task/workflow/init.ts`. + - Updated the checked-in task migration artifacts so the schema-defined task/session/runtime columns are created directly by migrations. +- 2026-03-14 14: Item 3 blocker documented. + - The spec's requested literal singleton `CHECK (id = 1)` on the Better Auth `user` table conflicts with the existing Better Auth adapter contract, which relies on external string `user.id`. + - Proceeding safely will require a design adjustment for that table rather than a straight mechanical migration. +- 2026-03-14 15: Better Auth mapping comments landed. + - Added Better Auth vs custom Foundry table/action comments in the user and organization actor schema/action surfaces so the adapter-constrained paths are explicit. +- 2026-03-15 09: Branch rename surface deleted and stale organization subscription fixed. 
+ - Removed the remaining branch-rename surface from the client, mock backend, frontend UI, and repository action layer. There are no remaining `renameBranch` / `renameWorkbenchBranch` references in Foundry. + - Fixed the remote backend client to listen for `organizationUpdated` on the organization connection instead of the dead `workspaceUpdated` event name. +- 2026-03-15 10: Backend workspace rename landed. + - Renamed the backend task UI/workflow surface from `workbench` to `workspace`, including the task actor file, queue topic family, organization proxy actions, and the task session table name (`task_workspace_sessions`). + - Backend actor code no longer contains `Workbench` / `workbench` references, so the remaining shared/client/frontend rename can align to a stable backend target. +- 2026-03-15 11: Default model moved to user-scoped app state. + - Removed `defaultModel` from the organization schema/snapshot and stored it on the user profile instead, exposed through the app snapshot as a user preference. + - Wired `setAppDefaultModel` through the backend/app clients and changed the model picker to persist the starred/default model instead of resetting local React state on reload. +- 2026-03-15 11: Workspace surface completed across Foundry packages. + - Renamed the shared/client/frontend surface from `Workbench` to `Workspace`, including `workspace.ts`, workspace client/model files, DTO/type names, backend-client method names, frontend view-model imports, and the affected e2e/test files. + - Verified that Foundry backend/shared/client/frontend packages no longer contain `Workbench` / `workbench` references. +- 2026-03-15 11: Singleton constraints tightened where safe. + - Added `CHECK (id = 1)` enforcement for `github_meta`, `repo_meta`, `organization_profile`, and `user_profiles`, and updated the affected code paths/migrations to use row id `1`. + - The Better Auth `user` table remains blocked by the adapter contract, so item `3` is still open overall. 
+- 2026-03-15 12: Confirmed blocker for later user-table singleton work. + - Item `3` conflicts with the current Better Auth adapter contract for the `user` table: the adapter depends on the external string `user.id`, while the spec also asks for a literal singleton `CHECK (id = 1)` on that same table. + - That cannot be applied mechanically without redesigning the Better Auth adapter contract or introducing a separate surrogate identity column. I have not forced that change yet. +- 2026-03-15 13: Task/repository durable-state cleanup and auth-scoped workspace reads landed. + - Removed the remaining task/repository actor durable-state duplication: task `createState` now holds only `(organizationId, repoId, taskId)`, repository `createState` now holds only `(organizationId, repoId)`, task initialization seeds SQLite from the initialize queue payload, and task record reads fetch `repoRemote` through repository metadata instead of stale actor state. + - Removed the repository creation-time `remoteUrl` dependency from actor handles/callers and changed repository metadata to backfill/persist `remoteUrl` from GitHub data when needed. + - Wired Better Auth session ids through the remote client workspace/task-detail reads and through the task workflow queue handlers so user-scoped workspace state is no longer dropped on the floor by the organization/task proxy path. +- 2026-03-15 14: Coordinator routing boundary tightened. + - Removed the organization actor's fallback `taskId -> repoId` scan across repositories; task proxy actions now require `repoId` and route directly to the repository/task coordinator path the client already uses. + - Updated backend architecture notes to reflect the live repo-owned task projection (`tasks`) and the removal of the old organization-owned `taskLookup` / `taskSummaries` indexes. +- 2026-03-15 15: Workspace session-selection and dead task-status cleanup landed.
+ - Surfaced viewer-scoped `activeSessionId` through workspace task summary/detail DTOs, threaded it through the backend/client/mock surfaces, and added a dedicated workspace `select_session` mutation so session-tab selection now persists in `user_task_state` instead of living only in frontend local state. + - Removed dead task `diffStat` and sandbox `statusMessage` fields from the live workspace/task contracts and backend writes, and updated stale frontend/mock/e2e consumers to stop reading them. +- 2026-03-15 16: GitHub sync progress is now live on the organization topic. + - Added persisted GitHub sync phase/generation/progress fields to the github-data actor meta row and the organization profile projection, and exposed them through `organizationUpdated` snapshots so workspace consumers no longer wait on stale app-topic state during repo imports. + - Chunked branch and pull-request fetches by repository batches, added generation markers to imported GitHub rows, switched sync refreshes to upsert+sweep instead of delete-then-replace, and updated the workspace shell/dev panel to show live sync phase progress from the organization subscription. +- 2026-03-15 17: Foundry-local model lists now route through shared Sandbox Agent config resources. + - Removed the remaining duplicated hardcoded model tables from the frontend/client workspace view-model layer and switched backend default-model / agent-inference fallbacks to the shared catalog helpers in `shared/src/models.ts`. + - Updated mock/default app state to stop seeding deleted `claude-sonnet-4` / `claude-opus-4` ids, and aligned the user-profile default-model migration fallback with the shared catalog default. +- 2026-03-15 17: Shared model catalog moved off the old fixed union. 
+ - Replaced the shared `WorkspaceModelId` closed union with string ids, introduced a shared model catalog derived from the sandbox-agent agent-config resources, and switched the client/frontend picker label helpers to consume that catalog instead of maintaining separate hardcoded `MODEL_GROUPS` arrays. + - Updated backend default-model and model→agent fallback logic to use the shared catalog/default id, and relaxed e2e env parsing so new sandbox-agent model ids can flow through without patching Foundry first. +- 2026-03-15 18: Workspace task status collapsed to a single live field. + - Removed the duplicate `runtimeStatus` field from workspace task/detail DTOs and all current backend/client/frontend consumers, so workspace task `status` is now the only task-state field on that surface. + - Removed the remaining synthetic `"new"` task status from the live workspace path; mock task creation now starts in the first concrete init state instead of exposing a frontend-only status. +- 2026-03-15 19: GitHub sync now persists branch and PR batches as they are fetched. + - The branch and pull-request phases now upsert each fetched repository batch immediately and only sweep stale rows after the phase completes, instead of buffering the full dataset in memory until the end of the sync. + - This aligns chunked progress reporting with chunked persistence and tightens recovery behavior for large repository imports. +- 2026-03-15 20: Repository-owned task projection artifacts are now aligned with runtime. + - Removed the last stale `task_lookup` Drizzle artifacts from the organization actor so the checked-in schema snapshots match the live repository-owned `tasks` projection. + - There are no remaining org/repo runtime references to the old org-side task lookup table. +- 2026-03-15 21: Legacy task/runtime fields are fully gone from the live Foundry surface. 
+ - Confirmed the old task-table/runtime fields from item `21` are removed across backend/shared/client/frontend, and renamed the last leftover `agentTypeForModel()` helper to the neutral `sandboxAgentIdForModel()`. + - Deleted the final dead frontend diff-stat formatter/test that only referenced already-removed task diff state. +- 2026-03-15 22: Task status tracking is now fully collapsed to the canonical task status enum. + - With the earlier backend `statusMessage` removal plus this turn's workspace contract cleanup, the workspace/task surface now derives all task status UI from the canonical backend `status` enum. + - There are no remaining live workspace `runtimeStatus` or synthetic `"new"` task-state branches. +- 2026-03-15 23: Per-user workspace UI state is fully sourced from the user actor overlay. + - Confirmed the shared task actor no longer stores per-user `activeSessionId`, unread, or draft columns; those values are persisted in `user_task_state` and only projected back into workspace DTOs for the current viewer. + - The remaining active-session/unread/draft references in client/frontend code are consumer fields of that user-scoped overlay, not shared task-actor storage. +- 2026-03-15 24: Subscription topics are now fully normalized to single-snapshot events. + - Confirmed the shared realtime contracts now expose one full replacement event per topic (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`, `processesUpdated`) with matching wire event names and type fields. + - The client subscription manager already treats organization/task topics as full-snapshot refreshes, so there are no remaining multi-variant organization events or `taskDetailUpdated` name mismatches in live code. +- 2026-03-15 25: Sidebar PR/task split dead branches trimmed further. + - Removed the remaining dead `pr:`-id sidebar branch and switched the workspace sidebar to the real `pullRequest.isDraft` field instead of stale `pullRequest.status` reads. 
+ - This does not finish item `15`, but it reduces the remaining synthetic PR/task split surface in the frontend. +- 2026-03-15 26: User-actor mutations now flow through a dedicated workflow queue. + - Added [user/workflow.ts](/home/nathan/sandbox-agent/foundry/packages/backend/src/actors/user/workflow.ts) plus shared query helpers, wired the user actor up with explicit queue names, and moved auth/profile/session/task-state mutations behind workflow handlers instead of direct action bodies. +- 2026-03-15 27: Organization GitHub/shell/billing mutations now route through workflow queues. + - Added shared organization queue definitions in `organization/queues.ts`, taught the organization workflow to handle the remaining GitHub projection, org-profile, and billing mutation commands, and switched the app-shell, Better Auth, GitHub-data actor, and org-isolation test to send queue messages instead of calling direct org mutation actions. + - Deleted the dead organization shell mutation actions that no longer had callers (`applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`), which moves items `4`, `10`, and `12` forward even though the broader org action split is still open. +- 2026-03-15 28: Organization action split trimmed more of the monolith and removed dead event types. + - Moved `starSandboxAgentRepo` into `organization/actions/onboarding.ts` and the admin GitHub reload actions into `organization/actions/github.ts`, so `organization/actions.ts` is carrying fewer unrelated app-shell responsibilities. + - Deleted the dead backend-only `actors/events.ts` type file after confirming nothing in Foundry still imports those old task/PR event interfaces. +- 2026-03-15 29: Repo overview branch rows now carry a single PR object. 
+ - Replaced the repo-overview branch DTO's scalar PR fields (`prNumber`, `prState`, `prUrl`, `reviewStatus`, `reviewer`) with `pullRequest: WorkspacePullRequestSummary | null`, and updated repository overview assembly plus the organization dashboard to consume that unified PR shape. + - This does not finish item `15`, but it removes another synthetic PR-only read surface and makes the repo overview align better with the task summary PR model. +- 2026-03-15 30: Repo overview stopped falling back to raw GitHub PR rows. + - Changed repository overview assembly to read PR metadata only from the repo-owned task projection instead of rejoining live GitHub PR rows on read, so the dashboard is one step closer to treating PRs as task data rather than a separate UI entity. +- 2026-03-15 31: GitHub organization-shell repair now uses the org workflow queue. + - Converted `syncOrganizationShellFromGithub` from a direct org action into a workflow-backed mutation command and updated the GitHub org sync path to send `organization.command.github.organization_shell.sync_from_github` instead of calling the action directly. + - Updated Better Auth adapter writes and task user-overlay writes to send directly to the user workflow queue, which partially lands item `4` and sets up item `11` for the user actor. +- 2026-03-15 27: Workflow layout standardized and queue-only write paths expanded. + - Split the remaining inline actor workflows into dedicated files for `audit-log`, `repository`, `github-data`, and `organization`, and moved user read actions into `user/actions/*` with Better Auth-prefixed action names. + - Removed the task actor's public mutation action wrappers entirely, moved organization/repository/github-data/task coordination onto direct queue sends, and made repository metadata reads stop mutating `repo_meta` on cache misses. +- 2026-03-15 28: PR-only admin/UI seams trimmed and PR branches now claim real tasks. 
+ - Removed the remaining dedicated "reload pull requests" / "reload pull request" admin hooks from the backend/client/frontend surfaces and deleted the sidebar PR-only context action. + - Repository PR refresh now lazily creates a branch-owned task when a pull request arrives for an unclaimed branch, so PR-only branches stop living purely as a side table in GitHub sync flows. +- 2026-03-15 29: Organization Better Auth writes now use workflow queues. + - Split the organization actor's Better Auth routing and verification reads into `organization/actions/better-auth.ts`, moved `APP_SHELL_ORGANIZATION_ID` to `organization/constants.ts`, and renamed the org Better Auth read surface to the `betterAuth*` form. + - Added dedicated organization workflow queue handlers for session/email/account index writes plus verification CRUD, and updated `services/better-auth.ts` to send those mutations directly to organization queues instead of calling mutation actions. +- 2026-03-15 30: Shared model routing metadata is now centralized. + - Extended the shared model catalog with explicit `agentKind` and `sandboxAgentId` metadata, changed `WorkspaceAgentKind` to a dynamic string, and switched backend task session creation to resolve sandbox agent ids through the shared catalog instead of hardcoded `Codex` vs `Claude` branching. + - Updated the mock app/workspace and frontend model picker/new-task flows to consume the shared catalog/default model instead of forcing stale `Claude`/`Codex` fallbacks or a baked-in `gpt-5.3-codex` create-task default. +- 2026-03-15 31: Dead GitHub-data PR reload surface removed and fixture PR shapes aligned. + - Deleted the unused GitHub-data `reloadPullRequest` workflow command plus the dead `listOpenPullRequests` / `getPullRequestForBranch` action surface that no longer has live Foundry callers. 
+ - Fixed the stale client `workspace-model.ts` pull-request fixtures to use the live `WorkspacePullRequestSummary` shape, which removes the last targeted client type errors in the touched slice. +- 2026-03-15 32: Organization action splitting continued past Better Auth. + - Moved the app snapshot/default-model/org-profile actions into `organization/actions/organization.ts`, onboarding actions into `organization/actions/onboarding.ts`, and app-level GitHub token/import actions into `organization/actions/github.ts`, then composed those files at the actor boundary. + - `organization/app-shell.ts` now exports shared helpers for those domains and no longer directly defines the moved action handlers, shrinking the remaining monolith and advancing item `10`. +- 2026-03-15 33: Task PR detail now reads the repository-owned task projection. + - Removed duplicate scalar PR fields from `TaskRecord` and `WorkspaceTaskDetail`, switched the remaining frontend/client consumers to the canonical `pullRequest` object, and trimmed stale mock/test scaffolding that still populated those dead fields. + - Replaced the task actor's PR lookup path with a repository projection read (`getProjectedTaskSummary`) so task detail/summary no longer ask the repo actor to re-query GitHub PR rows by branch. +- 2026-03-15 34: Workspace model catalogs now come from the live sandbox-agent API. + - Added a shared normalizer for `/v1/agents?config=true` payloads, exposed sandbox-scoped `listWorkspaceModelGroups()` from the task sandbox actor, and switched backend workspace session creation to resolve sandbox agent ids from the live sandbox catalog instead of only the checked-in default tables. + - Updated the frontend workspace model picker to query the active sandbox for model groups and use that live catalog for labels/options, while keeping the shared default catalog only as a fallback when no sandbox is available yet or the sandbox-agent connection is unavailable. 
+- 2026-03-15 35: Backend-only organization snapshot refresh is now queue-backed. + - Added `organization.command.snapshot.broadcast` to the organization workflow, switched repository and app-import callers to send that queue message instead of calling the organization actor's `refreshOrganizationSnapshot` action directly, and removed the direct action wrapper. + - Deleted the dead `adminReconcileWorkspaceState` organization action/interface entry after confirming nothing in Foundry still calls it. +- 2026-03-15 36: Dead backend actor export cleanup continued. + - Removed the stale `export * from "./events.js"` line from `backend/src/actors/index.ts`, which was left behind after deleting the dead backend event type file. + - This keeps the backend actor barrel aligned with the live file set and advances the final dead-code/event audit. +- 2026-03-15 34: Item 17 removed from this checklist; do not leave started items half-finished. + - By request, item `17` (`Type all actor context parameters — remove c: any`) is deferred out of this Foundry task and should not block completion here. + - Process note for the remaining checklist work: once an item is started, finish that item to completion before opening a different partial seam. Item `15` is the current priority under that rule. +- 2026-03-15 35: Task/PR unification now routes live PR changes through repository-owned task summaries only. + - GitHub PR sync and webhook handling now send concrete PR summaries directly to the repository coordinator, which lazily creates a real branch-owned task when needed and persists PR metadata on the task projection instead of re-querying raw `github_pull_requests` rows from repository reads. + - Cleared the last stale scalar PR test references (`prUrl`, `reviewStatus`, `reviewer`) so the remaining Foundry surfaces consistently use the canonical `pullRequest` object. 
+- 2026-03-15 36: Organization action entrypoints are now fully organized under `actions/`, and the public mutation surface is queue-only. + - Moved organization task/workspace proxy actions plus `createTaskMutation` into `organization/actions/tasks.ts`, added `organization/actions/app.ts` so every composed org action bundle now lives under `organization/actions/*`, and removed dead `app-shell` exports that no longer had external callers. + - Audited the remaining public organization actor actions and confirmed the write paths go through organization/repository/task/github-data workflow queues instead of direct mutation actions, which closes item `4` and item `10`. +- 2026-03-15 37: Organization dead-code audit completed. + - Removed the leftover exported-only Better Auth predicate helper from `organization/actions/better-auth.ts`; it is now module-private because nothing outside that file uses it. + - Audited the remaining organization actor surface and confirmed the live public reads/writes still in use are the composed `actions/*` bundles plus workflow mutation helpers. There are no remaining dead org action exports from the pre-refactor monolith. +- 2026-03-15 38: Final dead-event and dead-surface audit completed for the in-scope Foundry refactor. + - Confirmed the live Foundry realtime topics each have a single event type (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`), and the deleted legacy event names (`workspaceUpdated`, `taskSummaryUpdated`, `taskDetailUpdated`, `pullRequestUpdated`, `pullRequestRemoved`) no longer exist in live Foundry code. + - Re-audited the major removed compatibility seams (`Workbench`, branch rename, PR-only sidebar ids, duplicate runtime task status, `getTaskEnriched`, organization-owned task lookup tables) and found no remaining live references beyond expected domain strings like GitHub webhook event names or CLI `pr` labels. 
+- 2026-03-15 39: Item 15 was finished for real by moving PR ownership into the task actor. + - Added task-local `pull_request_json` storage, switched task detail/summary reads to the task DB, and added `task.command.pull_request.sync` so GitHub/repository flows update PR metadata through the task coordinator instead of overlaying it in the repository projection. + - The mock right sidebar now trusts the canonical `task.pullRequest.url` field instead of rebuilding a PR URL from repo name + PR number. +- 2026-03-15 40: Better Auth user singleton constraint is now enforced without breaking the adapter contract. + - The user actor's `user` table now uses an integer singleton primary key with `CHECK (id = 1)` plus a separate `auth_user_id` column for Better Auth's external string identity. + - Updated the user actor query/join/mutation helpers so Better Auth still reads and writes logical `user.id` as the external string id while SQLite enforces the singleton row invariant locally. + +No backwards compatibility — delete old code, don't deprecate. If something is removed, remove it everywhere (backend, client, shared types, frontend, tests, mocks). + +### Suggested execution order (respects dependencies) + +**Wave 1 — no dependencies, can be done in any order:** +1, 2, 3, 4, 5, 6, 13, 16, 20, 21, 23, 25 + +**Wave 2 — depends on wave 1:** +7 (after 1), 9 (after 13), 10 (after 1+6), 11 (after 4), 22 (after 1), 24 (after 21), 26 (after 25) + +**Wave 3 — depends on wave 2:** +8 (after 7+25), 12 (after 10), 15 (after 9+13), 19 (after 21+24) + +**Wave 4 — depends on wave 3:** +14 (after 15) + +**Final:** +18 (after everything), final audit pass (after everything) + +### Index + +- [x] 1. Rename Auth User actor → User actor +- [x] 2. Add Better Auth mapping comments to user/org actor tables +- [x] 3. Enforce `id = 1` CHECK constraint on single-row tables +- [x] 4. Move all mutation actions to queue messages +- [x] 5. Migrate task actor raw SQL to Drizzle migrations +- [x] 6. 
Rename History actor → Audit Log actor +- [x] 7. Move starred/default model to user actor settings *(depends on: 1)* +- [x] 8. Replace hardcoded model/agent lists with sandbox-agent API data *(depends on: 7, 25)* +- [x] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table *(depends on: 13)* +- [x] 10. Reorganize user and org actor actions into `actions/` folders *(depends on: 1, 6)* +- [x] 11. Standardize workflow file structure across all actors *(depends on: 4)* +- [x] 12. Audit and remove dead code in organization actor *(depends on: 10)* +- [x] 13. Enforce coordinator pattern and fix ownership violations +- [x] 14. Standardize one event per subscription topic *(depends on: 15)* +- [x] 15. Unify tasks and pull requests — PRs are just task data *(depends on: 9, 13)* +- [x] 16. Chunk GitHub data sync and publish progress +- [x] 18. Final pass: remove all dead code *(depends on: all other items)* +- [x] 19. Remove duplicate data between `c.state` and SQLite *(depends on: 21, 24)* +- [x] 20. Prefix admin/recovery actions with `admin` +- [x] 21. Remove legacy/session-scoped fields from task table +- [x] 22. Move per-user UI state from task actor to user actor *(depends on: 1)* +- [x] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) +- [x] 24. Clean up task status tracking *(depends on: 21)* +- [x] 25. Remove "Workbench" prefix from all types, functions, files, tables +- [x] 26. Delete branch rename (branches immutable after creation) *(depends on: 25)* +- [x] Final audit pass: dead events scan *(depends on: all other items)* + +Deferred follow-up outside this checklist: + +- 17. Type all actor context parameters — remove `c: any` *(removed from this task's scope by request)* + +--- + +## [ ] 1. Rename Auth User actor → User actor + +**Rationale:** The actor is already a single per-user actor storing all user data. The "Auth" prefix is unnecessary. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/`** → rename directory to `user/` + - `index.ts` — rename export `authUser` → `user`, display name `"Auth User"` → `"User"` + - `db/schema.ts`, `db/db.ts`, `db/migrations.ts`, `db/drizzle.config.ts` — update any auth-prefixed references +- **`foundry/packages/backend/src/actors/keys.ts`** — `authUserKey()` → `userKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateAuthUser` → `getOrCreateUser`, `getAuthUser` → `getUser`, `selfAuthUser` → `selfUser` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all `authUser` references +- **Action names** — consider dropping "Auth" prefix from `createAuthRecord`, `findOneAuthRecord`, `updateAuthRecord`, `deleteAuthRecord`, `countAuthRecords`, etc. + +--- + +## [ ] 2. Add Better Auth mapping comments to user/org actor tables, actions, and queues + +**Rationale:** The user and organization actors contain a mix of Better Auth-driven and custom Foundry code. Tables, actions, and queues that exist to serve Better Auth's adapter need comments so developers know which pieces are constrained by Better Auth's schema/contract and which are ours to change freely. + +### Table mapping + +| Actor | Table | Better Auth? 
| Notes | +|---|---|---|---| +| user | `user` | Yes — 1:1 `user` model | All fields from Better Auth | +| user | `session` | Yes — 1:1 `session` model | All fields from Better Auth | +| user | `account` | Yes — 1:1 `account` model | All fields from Better Auth | +| user | `user_profiles` | No — custom Foundry | GitHub login, role, eligible orgs, starter repo status | +| user | `session_state` | No — custom Foundry | Active organization per session | +| org | `auth_verification` | Yes — Better Auth `verification` model | Lives on org actor because verification happens before user exists | +| org | `auth_session_index` | No — custom routing index | Maps session tokens → user actor IDs for Better Auth adapter routing | +| org | `auth_email_index` | No — custom routing index | Maps emails → user actor IDs for Better Auth adapter routing | +| org | `auth_account_index` | No — custom routing index | Maps OAuth accounts → user actor IDs for Better Auth adapter routing | + +### Action/queue mapping (user actor) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| `createAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to create user/session/account records | +| `findOneAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter for single-record lookups with joins | +| `findManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for multi-record queries | +| `updateAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to update records | +| `updateManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk updates | +| `deleteAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to delete records | +| `deleteManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk deletes | +| `countAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for count queries | +| `getAppAuthState` | No — custom Foundry | Aggregates auth state for frontend consumption | +| `upsertUserProfile` | No — custom Foundry | Manages Foundry-specific user profile data | +| `upsertSessionState` | No — custom Foundry | Manages Foundry-specific session state | + +### Action/queue mapping (organization actor app-shell) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| App-shell auth index CRUD actions | Yes — Better Auth adapter routing | Maintain lookup indexes so the adapter can route by session/email/account to the correct user actor | +| `auth_verification` CRUD | Yes — Better Auth `verification` model | Used for email verification and password resets | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add doc comments to each table: + - `user`, `session`, `account`: "Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database" + - `user_profiles`, `session_state`: "Custom Foundry table — not part of Better Auth" +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — add doc comments to each action/queue: + - Better Auth adapter actions: "Better Auth adapter — called by the Better Auth adapter in better-auth.ts. Schema constrained by Better Auth." + - Custom actions: "Custom Foundry action — not part of Better Auth" +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — add doc comments to `auth_verification` (Better Auth core), and the three index tables (Better Auth adapter routing) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — add doc comments to auth index actions marking them as Better Auth adapter routing infrastructure + +--- + +## [x] 3. Enforce `id = 1` CHECK constraint on all single-row actor tables + +**Rationale:** When an actor instance represents a single entity, tables that hold exactly one row should enforce this at the DB level with a `CHECK (id = 1)` constraint. The task actor already does this correctly; other actors don't. 
+ +### Tables needing the constraint + +| Actor | Table | Current enforcement | Fix needed | +|---|---|---|---| +| auth-user (→ user) | `user` | None | Add `CHECK (id = 1)`, use integer PK | +| auth-user (→ user) | `user_profiles` | None | Add `CHECK (id = 1)`, use integer PK | +| github-data | `github_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| organization | `organization_profile` | None | Add `CHECK (id = 1)`, use integer PK | +| repository | `repo_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| task | `task` | CHECK constraint | Already correct | +| task | `task_runtime` | CHECK constraint | Already correct | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — change `user` and `user_profiles` tables to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — update queries to use `id = 1` pattern +- **`foundry/packages/backend/src/services/better-auth.ts`** — update adapter to use fixed `id = 1` +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add CHECK constraint to `github_meta` (already uses `id=1` in code) +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — change `organization_profile` to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update queries to use `id = 1` +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add CHECK constraint to `repo_meta` (already uses `id=1` in code) +- All affected actors — regenerate `db/migrations.ts` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Single-row tables (tables that hold exactly one record per actor instance, e.g. metadata or profile tables) must use an integer primary key with a `CHECK (id = 1)` constraint to enforce the singleton invariant at the database level. 
Follow the pattern established in the task actor's `task` and `task_runtime` tables." + +--- + +## [x] 4. Move all mutation actions to queue messages + +**Rationale:** Actions should be read-only (queries). All mutations (INSERT/UPDATE/DELETE) should go through queue messages processed by workflow handlers. This ensures single-writer consistency and aligns with the actor model. No actor currently does this correctly — the history actor has the mutation in the workflow handler, but the `append` action wraps a `wait: true` queue send, which is the same anti-pattern (callers should send to the queue directly). + +### Violations by actor + +**User actor (auth-user)** — `auth-user/index.ts` — 7 mutation actions: +- `createAuthRecord` (INSERT, line 164) +- `updateAuthRecord` (UPDATE, line 205) +- `updateManyAuthRecords` (UPDATE, line 219) +- `deleteAuthRecord` (DELETE, line 234) +- `deleteManyAuthRecords` (DELETE, line 243) +- `upsertUserProfile` (UPSERT, line 283) +- `upsertSessionState` (UPSERT, line 331) + +**GitHub Data actor** — `github-data/index.ts` — 7 mutation actions: +- `fullSync` (batch INSERT/DELETE/UPDATE, line 686) +- `reloadOrganization` (batch, line 690) +- `reloadAllPullRequests` (batch, line 694) +- `reloadRepository` (INSERT/UPDATE, line 698) +- `reloadPullRequest` (INSERT/DELETE/UPDATE, line 763) +- `clearState` (batch DELETE, line 851) +- `handlePullRequestWebhook` (INSERT/UPDATE/DELETE, line 879) + +**Organization actor — `actions.ts`** — 5 mutation actions: +- `applyTaskSummaryUpdate` (UPSERT, line 464) +- `removeTaskSummary` (DELETE, line 476) +- `applyGithubRepositoryProjection` (UPSERT, line 521) +- `applyGithubDataProjection` (INSERT/UPDATE/DELETE, line 547) +- `recordGithubWebhookReceipt` (UPDATE, line 620) + +**Organization actor — `app-shell.ts`** — 37 mutation actions: + +Better Auth index mutations (11): +- `authUpsertSessionIndex` (UPSERT) +- `authDeleteSessionIndex` (DELETE) +- `authUpsertEmailIndex` (UPSERT) +- `authDeleteEmailIndex` 
(DELETE) +- `authUpsertAccountIndex` (UPSERT) +- `authDeleteAccountIndex` (DELETE) +- `authCreateVerification` (INSERT) +- `authUpdateVerification` (UPDATE) +- `authUpdateManyVerification` (UPDATE) +- `authDeleteVerification` (DELETE) +- `authDeleteManyVerification` (DELETE) + +Organization profile/state mutations (13): +- `updateOrganizationShellProfile` (UPDATE on organizationProfile) +- `markOrganizationSyncStarted` (UPDATE on organizationProfile) +- `applyOrganizationSyncCompleted` (UPDATE on organizationProfile) +- `markOrganizationSyncFailed` (UPDATE on organizationProfile) +- `applyOrganizationStripeCustomer` (UPDATE on organizationProfile) +- `applyOrganizationStripeSubscription` (UPSERT on organizationProfile) +- `applyOrganizationFreePlan` (UPDATE on organizationProfile) +- `setOrganizationBillingPaymentMethod` (UPDATE on organizationProfile) +- `setOrganizationBillingStatus` (UPDATE on organizationProfile) +- `upsertOrganizationInvoice` (UPSERT on invoices) +- `recordOrganizationSeatUsage` (UPSERT on seatAssignments) +- `applyGithubInstallationCreated` (UPDATE on organizationProfile) +- `applyGithubInstallationRemoved` (UPDATE on organizationProfile) + +App-level mutations that delegate + mutate (13): +- `skipAppStarterRepo` (calls upsertUserProfile) +- `starAppStarterRepo` (calls upsertUserProfile + child mutation) +- `selectAppOrganization` (calls setActiveOrganization) +- `triggerAppRepoImport` (calls markOrganizationSyncStarted) +- `createAppCheckoutSession` (calls applyOrganizationFreePlan + applyOrganizationStripeCustomer) +- `finalizeAppCheckoutSession` (calls applyOrganizationStripeCustomer) +- `cancelAppScheduledRenewal` (calls setOrganizationBillingStatus) +- `resumeAppSubscription` (calls setOrganizationBillingStatus) +- `recordAppSeatUsage` (calls recordOrganizationSeatUsage) +- `handleAppStripeWebhook` (calls multiple org mutations) +- `handleAppGithubWebhook` (calls org mutations + github-data mutations) +- `syncOrganizationShellFromGithub` 
(multiple DB operations) +- `applyGithubRepositoryChanges` (calls applyGithubRepositoryProjection) + +**Task actor workbench** — `task/workbench.ts` — 14 mutation actions: +- `renameWorkbenchTask` (UPDATE, line 970) +- `renameWorkbenchBranch` (UPDATE, line 988) +- `createWorkbenchSession` (INSERT, line 1039) +- `renameWorkbenchSession` (UPDATE, line 1125) +- `setWorkbenchSessionUnread` (UPDATE, line 1136) +- `updateWorkbenchDraft` (UPDATE, line 1143) +- `changeWorkbenchModel` (UPDATE, line 1152) +- `sendWorkbenchMessage` (UPDATE, line 1205) +- `stopWorkbenchSession` (UPDATE, line 1255) +- `syncWorkbenchSessionStatus` (UPDATE, line 1265) +- `closeWorkbenchSession` (UPDATE, line 1331) +- `markWorkbenchUnread` (UPDATE, line 1363) +- `publishWorkbenchPr` (UPDATE, line 1375) +- `revertWorkbenchFile` (UPDATE, line 1403) + +**Repository actor** — `repository/actions.ts` — 5 mutation actions/helpers: +- `createTask` → calls `createTaskMutation()` (INSERT on taskIndex + creates task actor) +- `registerTaskBranch` → calls `registerTaskBranchMutation()` (INSERT/UPDATE on taskIndex) +- `reinsertTaskIndexRow()` (INSERT/UPDATE, called from `getTaskEnriched`) +- `deleteStaleTaskIndexRow()` (DELETE) +- `persistRemoteUrl()` (INSERT/UPDATE on repoMeta, called from `getRepoOverview`) + +### History (audit log) actor — `append` action must also be removed + +The history actor's workflow handler is correct (mutation in queue handler), but the `append` action (line 77) is a `wait: true` wrapper around the queue send — same anti-pattern. Delete the `append` action. Callers (the `appendHistory()` helper in `task/workflow/common.ts`) should send directly to the `auditLog.command.append` queue with `wait: false` (audit log writes are fire-and-forget, no need to block the caller). 
+ +### Reference patterns (queue handlers only, no action wrappers) +- **Task actor core** — initialize, attach, push, sync, merge, archive, kill all use queue messages directly + +### Migration approach + +This is NOT about wrapping queue sends inside actions. The mutation actions must be **removed entirely** and replaced with queue messages that callers (including `packages/client`) send directly. + +Each actor needs: +1. Define queue message types for each mutation +2. Move mutation logic from action handlers into workflow/queue handlers +3. **Delete the mutation actions** — do not wrap them +4. Update `packages/client` to send queue messages directly to the actor instead of calling the old action +5. Update any inter-actor callers (e.g. `better-auth.ts`, `app-shell.ts`, other actors) to send queue messages instead of calling actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Actions must be read-only. All database mutations (INSERT, UPDATE, DELETE, UPSERT) must be queue messages processed by workflow handlers. Callers (client, other actors, services) send messages directly to the queue — do not wrap queue sends inside actions. Follow the pattern established in the task workflow actor's queue handlers." + +--- + +## [ ] 5. Migrate task actor raw SQL to Drizzle migrations + +**Rationale:** The task actor uses raw `db.execute()` with `ALTER TABLE ... ADD COLUMN` in `workbench.ts` and `workflow/init.ts` instead of proper Drizzle migrations. All actor DBs should use the standard Drizzle migration pattern. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/workbench.ts`** (lines 24-56) — remove `ALTER TABLE` raw SQL, add columns to `db/schema.ts` and generate a proper migration +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** (lines 12-15) — same treatment +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — add the missing columns that are currently added via `ALTER TABLE` +- **`foundry/packages/backend/src/actors/task/db/migrations.ts`** — regenerate with new migration + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor databases must use Drizzle ORM with proper schema definitions and generated migrations. No raw SQL (`db.execute()`, `ALTER TABLE`, etc.). Schema changes must go through `schema.ts` + migration generation." + +--- + +## [ ] 6. Rename History actor → Audit Log actor + +**Rationale:** The actor functions as a comprehensive audit log tracking task lifecycle events. "Audit Log" better describes its purpose. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/history/`** → rename directory to `audit-log/` + - `index.ts` — rename export `history` → `auditLog`, display name `"History"` → `"Audit Log"`, queue `history.command.append` → `auditLog.command.append` + - Internal types: `HistoryInput` → `AuditLogInput`, `AppendHistoryCommand` → `AppendAuditLogCommand`, `ListHistoryParams` → `ListAuditLogParams` +- **`foundry/packages/backend/src/actors/keys.ts`** — `historyKey()` → `auditLogKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateHistory` → `getOrCreateAuditLog`, `selfHistory` → `selfAuditLog` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/shared/src/contracts.ts`** — `HistoryEvent` → `AuditLogEvent` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — `history()` action → `auditLog()`, update imports +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `getOrCreateHistory` calls +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — `appendHistory()` → `appendAuditLog()` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — update imports and calls + +### Coverage gaps to fix + +The audit log only covers 9 of ~24 significant events (37.5%). The entire `task/workbench.ts` file has zero logging. 
Add audit log calls for: + +**High priority (missing lifecycle events):** +- `task.switch` — in `task/workflow/index.ts` handleSwitchActivity +- `task.session.created` — in `task/workbench.ts` createWorkbenchSession +- `task.session.closed` — in `task/workbench.ts` closeWorkbenchSession +- `task.session.stopped` — in `task/workbench.ts` stopWorkbenchSession + +**Medium priority (missing user actions):** +- `task.session.renamed` — renameWorkbenchSession +- `task.message.sent` — sendWorkbenchMessage +- `task.model.changed` — changeWorkbenchModel +- `task.title.changed` — renameWorkbenchTask +- `task.branch.renamed` — renameWorkbenchBranch +- `task.pr.published` — publishWorkbenchPr +- `task.file.reverted` — revertWorkbenchFile + +**Low priority / debatable:** +- `task.draft.updated`, `task.session.unread`, `task.derived.refreshed`, `task.transcript.refreshed` + +### CLAUDE.md updates needed + +- **`foundry/packages/backend/CLAUDE.md`** — rename `HistoryActor` → `AuditLogActor` in actor hierarchy, add maintenance rule: "Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of all significant operations." +- **`foundry/CLAUDE.md`** — rename "History Events" section → "Audit Log Events", update the list to include all events above, add note: "When adding new task/workbench commands, always add a corresponding audit log event." + +--- + +## [ ] 7. Move starred/default model to user actor settings + +**Dependencies:** item 1 + +**Rationale:** The starred/default model preference is currently broken — the frontend stores it in local React state that resets on reload. The org actor's `organizationProfile` table has a `defaultModel` column but there's no action to update it and it's the wrong scope anyway. This is a per-user preference, not an org setting. 
+ +### Current state (broken) + +- **Frontend** (`mock-layout.tsx` line 313) — `useState("claude-sonnet-4")` — local state, lost on reload +- **Model picker UI** (`model-picker.tsx`) — has star icons + `onSetDefault` callback, but it only updates local state +- **Org actor** (`organization/db/schema.ts` line 43) — `defaultModel` column exists but nothing writes to it +- **No backend persistence** — starred model is not saved anywhere + +### Changes needed + +1. **Add `user_settings` table to user actor** (or add `defaultModel` column to `user_profiles`): + - `defaultModel` (text) — the user's starred/preferred model + - File: `foundry/packages/backend/src/actors/auth-user/db/schema.ts` + +2. **Add queue message to user actor** to update the default model: + - File: `foundry/packages/backend/src/actors/auth-user/index.ts` + +3. **Remove `defaultModel` from org actor** `organizationProfile` table (wrong scope): + - File: `foundry/packages/backend/src/actors/organization/db/schema.ts` + +4. **Update frontend** to read starred model from user settings (via `app` subscription) and send queue message on star click: + - File: `foundry/packages/frontend/src/components/mock-layout/model-picker.tsx` + - File: `foundry/packages/frontend/src/components/mock-layout.tsx` + +5. **Update shared types** — move `defaultModel` from `FoundryOrganizationSettings` to user settings type: + - File: `foundry/packages/shared/src/app-shell.ts` + +6. **Update client** to send the queue message to user actor: + - File: `foundry/packages/client/` + +--- + +## [ ] 8. Replace hardcoded model/agent lists with sandbox-agent API data + +**Dependencies:** items 7, 25 + +**Rationale:** The frontend hardcodes 8 models in a static list and ignores the sandbox-agent API's `GET /v1/agents` endpoint which already exposes the full agent config — models, modes, and reasoning/thought levels per agent. The frontend should consume this API 1:1 instead of maintaining its own stale copy. 
+ +### Current state (hardcoded) + +- **`foundry/packages/frontend/src/components/mock-layout/view-model.ts`** (lines 20-39) — hardcoded `MODEL_GROUPS` with 8 models +- **`foundry/packages/client/src/workbench-model.ts`** (lines 18-37) — identical hardcoded `MODEL_GROUPS` copy +- **`foundry/packages/shared/src/workbench.ts`** (lines 5-13) — `WorkbenchModelId` hardcoded union type +- No modes or thought/reasoning levels exposed in UI at all +- No API calls to discover available models + +### What the sandbox-agent API already provides (`GET /v1/agents`) + +Per agent, the API returns: +- **models** — full list with display names (Claude: 4, Codex: 6, Cursor: 35+, OpenCode: 239) +- **modes** — execution modes (Claude: 5, Codex: 3, OpenCode: 2) +- **thought_level** — reasoning levels (Codex: low/medium/high/xhigh, Mock: low/medium/high) +- **capabilities** — plan_mode, reasoning, status support +- **credentialsAvailable** / **installed** — agent availability + +### Changes needed + +1. **Remove hardcoded model lists** from: + - `foundry/packages/frontend/src/components/mock-layout/view-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/client/src/workbench-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/shared/src/workbench.ts` — replace `WorkbenchModelId` union type with `string` (dynamic from API) + +2. **Backend: fetch and cache agent config from sandbox-agent API** + - Add an action or startup flow that calls `GET /v1/agents?config=true` on the sandbox-agent API + - Cache the result (agent list + models + modes + thought levels) in the appropriate actor + - Expose it to the frontend via the existing subscription/event system + +3. 
**Frontend: consume API-driven config** + - Model picker reads available models from backend-provided agent config, not hardcoded list + - Expose modes selector per agent + - Expose thought/reasoning level selector for agents that support it (Codex, Mock) + - Group models by agent as the API does (not by arbitrary provider grouping) + +4. **Update shared types** — make model/mode/thought_level types dynamic strings rather than hardcoded unions: + - `foundry/packages/shared/src/workbench.ts` + +5. **No backwards compatibility needed** — we're cleaning up, not preserving old behavior + +--- + +## [ ] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table on org actor + +**Dependencies:** item 13 + +**Rationale:** `taskLookup` (taskId → repoId) is a strict subset of `taskSummaries` (which also has repoId + title, status, branch, PR, sessions). There's no reason for two tables with the same primary key. Flatten into one `tasks` table. + +### Current state + +- **`taskLookup`** — `taskId` (PK), `repoId` — used only for taskId → repoId resolution +- **`taskSummaries`** — `taskId` (PK), `repoId`, `title`, `status`, `repoName`, `updatedAtMs`, `branch`, `pullRequestJson`, `sessionsSummaryJson` — materialized sidebar data + +### Changes needed + +1. **Merge into single `tasks` table** in `foundry/packages/backend/src/actors/organization/db/schema.ts`: + - Drop `taskLookup` table + - Rename `taskSummaries` → `tasks` + - Keep all columns from `taskSummaries` (already includes `repoId`) + +2. **Update all references**: + - `foundry/packages/backend/src/actors/organization/actions.ts` — replace `taskLookup` queries with `tasks` table lookups + - `foundry/packages/backend/src/actors/organization/app-shell.ts` — if it references either table + - Any imports of the old table names from schema + +3. **Regenerate migrations** — `foundry/packages/backend/src/actors/organization/db/migrations.ts` + +--- + +## [x] 10. 
Reorganize user and organization actor actions into `actions/` folders + +**Dependencies:** items 1, 6 + +**Rationale:** Both actors cram too many concerns into single files. The organization actor has `app-shell.ts` (1,947 lines) + `actions.ts` mixing Better Auth, Stripe, GitHub, onboarding, workbench proxying, and org state. The user actor mixes Better Auth adapter CRUD with custom Foundry actions. Split into `actions/` folders grouped by domain, with `betterAuth` prefix on all Better Auth actions. + +### User actor → `user/actions/` + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthCreateRecord`, `betterAuthFindOneRecord`, `betterAuthFindManyRecords`, `betterAuthUpdateRecord`, `betterAuthUpdateManyRecords`, `betterAuthDeleteRecord`, `betterAuthDeleteManyRecords`, `betterAuthCountRecords` + all helper functions (`tableFor`, `columnFor`, `normalizeValue`, `clauseToExpr`, `buildWhere`, `applyJoinToRow`, `applyJoinToRows`) | Currently in `index.ts` | +| `actions/user.ts` | `getAppAuthState`, `upsertUserProfile`, `upsertSessionState` | Currently in `index.ts` | + +### Organization actor → `organization/actions/` + +**Delete `app-shell.ts`** — split its ~50 actions + helpers across these files: + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthFindSessionIndex`, `betterAuthUpsertSessionIndex`, `betterAuthDeleteSessionIndex`, `betterAuthFindEmailIndex`, `betterAuthUpsertEmailIndex`, `betterAuthDeleteEmailIndex`, `betterAuthFindAccountIndex`, `betterAuthUpsertAccountIndex`, `betterAuthDeleteAccountIndex`, `betterAuthCreateVerification`, `betterAuthFindOneVerification`, `betterAuthFindManyVerification`, `betterAuthUpdateVerification`, `betterAuthUpdateManyVerification`, `betterAuthDeleteVerification`, `betterAuthDeleteManyVerification`, `betterAuthCountVerification` + auth clause builder helpers | Currently in `app-shell.ts` | +| `actions/stripe.ts` | `createAppCheckoutSession`, 
`finalizeAppCheckoutSession`, `createAppBillingPortalSession`, `cancelAppScheduledRenewal`, `resumeAppSubscription`, `recordAppSeatUsage`, `handleAppStripeWebhook`, `applyOrganizationStripeCustomer`, `applyOrganizationStripeSubscription`, `applyOrganizationFreePlan`, `setOrganizationBillingPaymentMethod`, `setOrganizationBillingStatus`, `upsertOrganizationInvoice`, `recordOrganizationSeatUsage` | Currently in `app-shell.ts` | +| `actions/github.ts` | `resolveAppGithubToken`, `beginAppGithubInstall`, `triggerAppRepoImport`, `handleAppGithubWebhook`, `syncOrganizationShellFromGithub`, `syncGithubOrganizations`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`, `reloadGithubOrganization`, `reloadGithubPullRequests`, `reloadGithubRepository`, `reloadGithubPullRequest`, `applyGithubRepositoryProjection`, `applyGithubDataProjection`, `recordGithubWebhookReceipt`, `refreshTaskSummaryForGithubBranch` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/onboarding.ts` | `skipAppStarterRepo`, `starAppStarterRepo`, `starSandboxAgentRepo`, `selectAppOrganization` | Currently in `app-shell.ts` | +| `actions/organization.ts` | `getAppSnapshot`, `getOrganizationShellState`, `getOrganizationShellStateIfInitialized`, `updateOrganizationShellProfile`, `updateAppOrganizationProfile`, `markOrganizationSyncStarted`, `applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `useOrganization`, `getOrganizationSummary`, `reconcileWorkbenchState` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/tasks.ts` | `createTask`, `createWorkbenchTask`, `listTasks`, `getTask`, `switchTask`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `applyOpenPullRequestUpdate`, `removeOpenPullRequest`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` | Currently in `actions.ts` | +| `actions/workbench.ts` | `markWorkbenchUnread`, `renameWorkbenchTask`, 
`renameWorkbenchBranch`, `createWorkbenchSession`, `renameWorkbenchSession`, `setWorkbenchSessionUnread`, `updateWorkbenchDraft`, `changeWorkbenchModel`, `sendWorkbenchMessage`, `stopWorkbenchSession`, `closeWorkbenchSession`, `publishWorkbenchPr`, `revertWorkbenchFile` | Currently in `actions.ts` (proxy calls to task actor) | +| `actions/repos.ts` | `listRepos`, `getRepoOverview` | Currently in `actions.ts` | +| `actions/history.ts` | `history` (→ `auditLog` after rename) | Currently in `actions.ts` | + +Also move: +- `APP_SHELL_ORGANIZATION_ID` constant → `organization/constants.ts` +- `runOrganizationWorkflow` → `organization/workflow.ts` +- Private helpers (`buildAppSnapshot`, `assertAppOrganization`, `collectAllTaskSummaries`, etc.) → colocate with the action file that uses them + +### Files to update + +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all action name references to use `betterAuth` prefix +- **`foundry/packages/backend/src/actors/organization/index.ts`** — import and spread action objects from `actions/` files instead of `app-shell.ts` + `actions.ts` +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** (or `user/index.ts`) — import actions from `actions/` files + +--- + +## [ ] 11. Standardize workflow file structure across all actors + +**Dependencies:** item 4 + +**Rationale:** Workflow logic is inconsistently placed — inline in `index.ts`, in `actions.ts`, or in a `workflow/` directory. Standardize: every actor with a workflow gets a `workflow.ts` file. If the workflow is large, use `workflow/{index,...}.ts`. 
+ +### Changes per actor + +| Actor | Current location | New location | Notes | +|---|---|---|---| +| user (auth-user) | None | `workflow.ts` (new) | Needs a workflow for mutations (item 4) | +| github-data | Inline in `index.ts` (~57 lines) | `workflow.ts` | Extract `runGithubDataWorkflow` + handler | +| history (→ audit-log) | Inline in `index.ts` (~18 lines) | `workflow.ts` | Extract `runHistoryWorkflow` + `appendHistoryRow` | +| organization | In `actions.ts` (~51 lines) | `workflow.ts` | Extract `runOrganizationWorkflow` + queue handlers | +| repository | In `actions.ts` (~42 lines) | `workflow.ts` | Extract `runRepositoryWorkflow` + queue handlers | +| task | `workflow/` directory (926 lines) | `workflow/` directory — already correct | Keep as-is: `workflow/index.ts`, `workflow/queue.ts`, `workflow/common.ts`, `workflow/init.ts`, `workflow/commands.ts`, `workflow/push.ts` | +| sandbox | None (wrapper) | N/A | No custom workflow needed | + +### Pattern + +- **Small workflows** (< ~200 lines): single `workflow.ts` file +- **Large workflows** (> ~200 lines): `workflow/index.ts` holds the main loop, other files hold step groups: + - `workflow/index.ts` — main loop + handler dispatch + - `workflow/queue.ts` — queue name definitions (if many) + - `workflow/{group}.ts` — step/activity functions grouped by domain + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Every actor with a message queue must have its workflow logic in a dedicated `workflow.ts` file (or `workflow/index.ts` for complex actors). Do not inline workflow logic in `index.ts` or `actions.ts`. Actions are read-only handlers; workflow handlers process queue messages and perform mutations." + +--- + +## [ ] 12. Audit and remove dead code in organization actor + +**Dependencies:** item 10 + +**Rationale:** The organization actor has ~50+ actions across `app-shell.ts` and `actions.ts`. Likely some are unused or vestigial. 
Audit all actions and queues for dead code and remove anything that has no callers. + +### Scope + +- All actions in `organization/actions.ts` and `organization/app-shell.ts` +- All queue message types and their handlers +- Helper functions that may no longer be called +- Shared types in `packages/shared` that only served removed actions + +### Approach + +- Trace each action/queue from caller → handler to confirm it's live +- Remove any action with no callers (client, other actors, services, HTTP endpoints) +- Remove any queue handler with no senders +- Remove associated types and helpers + +--- + +## [ ] 13. Enforce coordinator pattern and fix ownership violations + +**Rationale:** The actor hierarchy follows a coordinator pattern: org → repo → task → session. The coordinator owns the index/summary of its children, handles create/destroy, and children push updates up to their coordinator. Several violations exist where levels are skipped. + +### Coordinator hierarchy (add to CLAUDE.md) + +``` +Organization (coordinator for repos) +├── Repository (coordinator for tasks) +│ └── Task (coordinator for sessions) +│ └── Session +``` + +**Rules:** +- The coordinator owns the index/summary table for its direct children +- The coordinator handles create/destroy of its direct children +- Children push summary updates UP to their direct coordinator (not skipping levels) +- Read paths go through the coordinator, not direct cross-level access +- No backwards compatibility needed — we're cleaning up + +### Violations to fix + +#### V1: Task index tables on wrong actor (HIGH) + +`taskLookup` and `taskSummaries` (item 9 merges these into `tasks`) are on the **organization** actor but should be on the **repository** actor, since repo is the coordinator for tasks. 
+ +**Fix:** +- Move the merged `tasks` table (from item 9) to `repository/db/schema.ts` +- Repository owns task summaries, not organization +- Organization gets a `repoSummaries` table instead (repo count, latest activity, etc.) — the repo pushes its summary up to org + +#### V2: Tasks push summaries directly to org, skipping repo (HIGH) + +Task actors call `organization.applyTaskSummaryUpdate()` directly (line 464 in `actions.ts`), bypassing the repository coordinator. + +**Fix:** +- Task pushes summary to `repository.applyTaskSummaryUpdate()` instead +- Repository updates its `tasks` table, then pushes a repo summary up to organization +- Organization never receives task-level updates directly + +#### V3: Org resolves taskId → repoId from its own table (MEDIUM) + +`resolveRepoId(c, taskId)` in `organization/actions.ts` queries `taskLookup` directly. Used by `switchTask`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` (7 actions). + +**Fix:** +- Remove `resolveRepoId()` from org actor +- Org must know the `repoId` from the caller (frontend already knows which repo a task belongs to) or query the repo actor +- Update all 7 proxy actions to require `repoId` in their input instead of looking it up + +#### V4: Duplicate task creation bookkeeping at org level (MEDIUM) + +`createTaskMutation` in org actor calls `repository.createTask()`, then independently inserts `taskLookup` and seeds `taskSummaries`. Repository already inserts its own `taskIndex` row. 
+ +**Fix:** +- Org calls `repository.createTask()` — that's it +- Repository handles all task index bookkeeping internally +- Repository pushes the new task summary back up to org as part of its repo summary update + +### Files to change + +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — remove `taskLookup` and `taskSummaries`, add `repoSummaries` if needed +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add merged `tasks` table (task summaries) +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove `resolveRepoId()`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `refreshTaskSummaryForGithubBranch`; update proxy actions to require `repoId` in input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — add `applyTaskSummaryUpdate` action (receives from task), push repo summary to org +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — change summary push target from org → repo +- **`foundry/packages/shared/src/contracts.ts`** — update input types to include `repoId` where needed +- **`foundry/packages/client/`** — update calls to pass `repoId` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add coordinator pattern rules: + ``` + ## Coordinator Pattern + + The actor hierarchy follows a strict coordinator pattern: + - Organization = coordinator for repositories + - Repository = coordinator for tasks + - Task = coordinator for sessions + + Rules: + - Each coordinator owns the index/summary table for its direct children. + - Only the coordinator handles create/destroy of its direct children. + - Children push summary updates to their direct coordinator only (never skip levels). + - Cross-level access (e.g. org directly querying task state) is not allowed — go through the coordinator. + - Proxy actions at higher levels (e.g. org.pushTask) must delegate to the correct coordinator, not bypass it. 
+ ``` + +--- + +## [ ] 14. Standardize one event per subscription topic across all actors + +**Dependencies:** item 15 + +**Rationale:** Each subscription topic should have exactly one event type carrying the full replacement snapshot. The organization topic currently violates this with 7 subtypes. Additionally, event naming is inconsistent across actors. Standardize all of them. + +### Current state + +| Topic | Wire event name | Event type field | Subtypes | Issue | +|---|---|---|---|---| +| `app` | `appUpdated` | `type: "appUpdated"` | 1 | Name is fine | +| `organization` | `organizationUpdated` | 7 variants | **7** | Needs consolidation | +| `task` | `taskUpdated` | `type: "taskDetailUpdated"` | 1 | Wire name ≠ type name | +| `session` | `sessionUpdated` | `type: "sessionUpdated"` | 1 | Fine | +| `sandboxProcesses` | `processesUpdated` | `type: "processesUpdated"` | 1 | Fine | + +### Target state + +Every topic gets exactly one event. Wire event name = type field = `{topic}Updated`. Each carries the full snapshot for that topic. + +| Topic | Event name | Payload | +|---|---|---| +| `app` | `appUpdated` | `FoundryAppSnapshot` | +| `organization` | `organizationUpdated` | `OrganizationSummarySnapshot` | +| `task` | `taskUpdated` | `WorkbenchTaskDetail` | +| `session` | `sessionUpdated` | `WorkbenchSessionDetail` | +| `sandboxProcesses` | `processesUpdated` | `SandboxProcessSnapshot[]` | + +### Organization — consolidate 7 subtypes into 1 + +Remove the discriminated union. Replace all 7 subtypes: +- `taskSummaryUpdated`, `taskRemoved`, `repoAdded`, `repoUpdated`, `repoRemoved`, `pullRequestUpdated`, `pullRequestRemoved` + +With a single `organizationUpdated` event carrying the full `OrganizationSummarySnapshot`. The client replaces its cached state — same pattern as every other topic. + +### Task — fix event type name mismatch + +Wire event is `taskUpdated` but the type field says `taskDetailUpdated`. Rename to `taskUpdated` everywhere for consistency. 
+ +### Files to change + +- **`foundry/packages/shared/src/realtime-events.ts`** — replace `OrganizationEvent` union with single event type; rename `TaskEvent.type` from `taskDetailUpdated` → `taskUpdated` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update all 7 `c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", ... })` calls to emit single event with full snapshot +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same for any broadcasts here +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename `taskDetailUpdated` → `taskUpdated` in broadcast calls +- **`foundry/packages/client/src/subscription/topics.ts`** — simplify `applyEvent` for organization topic (no more discriminated union handling); update task event type name +- **`foundry/packages/client/src/subscription/mock-manager.ts`** — update mock event handling +- **`foundry/packages/frontend/`** — update any direct references to event type names + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Each subscription topic must have exactly one event type. The event carries the full replacement snapshot for that topic — no discriminated unions, no partial patches, no subtypes. Event name must match the pattern `{topic}Updated` (e.g. `organizationUpdated`, `taskUpdated`). When state changes, broadcast the full snapshot; the client replaces its cached state." + +--- + +## [x] 15. Unify tasks and pull requests — PRs are just task data + +**Dependencies:** items 9, 13 + +**Rationale:** From the client's perspective, tasks and PRs are the same thing — a branch with work on it. The frontend already merges them into one sorted list, converting PRs to synthetic task objects with `pr:{prId}` IDs. The distinction is artificial. A "task" should represent any branch, and the task actor lazily wraps it. PR metadata is just data the task holds. 
+ +### Current state (separate entities) + +- **Tasks**: stored in task actor SQLite, surfaced via `WorkbenchTaskSummary`, events via `taskSummaryUpdated` +- **PRs**: stored in GitHub data actor (`githubPullRequests` table), surfaced via `WorkbenchOpenPrSummary`, events via `pullRequestUpdated`/`pullRequestRemoved` +- **Frontend hack**: converts PRs to fake task objects with `pr:{prId}` IDs, merges into one list +- **Filtering logic**: org actor silently swallows `pullRequestUpdated` if a task claims the same branch — fragile coupling +- **Two separate types**: `WorkbenchTaskSummary` and `WorkbenchOpenPrSummary` with overlapping fields + +### Target state (unified) + +- **One entity**: a "task" represents a branch. Task actors are lazily created when needed (user creates one, or a PR arrives for an unclaimed branch). +- **PR data lives on the task**: the task actor stores PR metadata (number, title, state, url, isDraft, authorLogin, etc.) as part of its state, not as a separate entity +- **One type**: `WorkbenchTaskSummary` includes full PR fields (nullable). No separate `WorkbenchOpenPrSummary`. +- **One event**: `organizationUpdated` carries task summaries that include PR data. No separate PR events. +- **No synthetic IDs**: every item in the sidebar is a real task with a real taskId + +### Changes needed + +1. **Remove `WorkbenchOpenPrSummary` type** from `packages/shared/src/workbench.ts` — merge its fields into `WorkbenchTaskSummary` +2. **Expand task's `pullRequest` field** from `{ number, status }` to full PR metadata (number, title, state, url, headRefName, baseRefName, isDraft, authorLogin, updatedAtMs) +3. **Remove `openPullRequests` from `OrganizationSummarySnapshot`** — all items are tasks now +4. **Remove PR-specific events** from `realtime-events.ts`: `pullRequestUpdated`, `pullRequestRemoved` +5. **Remove PR-specific actions** from organization actor: `applyOpenPullRequestUpdate`, `removeOpenPullRequest` +6. 
**Remove branch-claiming filter logic** in org actor (the `if task claims branch, skip PR` check) +7. **GitHub data actor PR sync**: when PRs arrive (webhook or sync), create/update a task for that branch lazily via the repository coordinator +8. **Task actor**: store PR metadata in its DB (new columns or table), update when GitHub data pushes changes +9. **Frontend**: remove `toOpenPrTaskModel` conversion, remove `pr:` ID prefix hack, remove separate `openPullRequests` state — sidebar is just tasks +10. **Repository actor**: when a PR arrives for a branch with no task, lazily create a task actor for it (lightweight, no sandbox needed) + +### Implications for coordinator pattern (item 13) + +This reinforces: repo is the coordinator for tasks. When GitHub data detects a new PR for a branch, it tells the repo coordinator, which creates/updates the task. The task holds the PR data and pushes its summary to the repo coordinator. + +### No backwards compatibility needed + +The `authSessionIndex`, `authEmailIndex`, `authAccountIndex`, and `authVerification` tables stay on the org actor. They're routing indexes needed by the Better Auth adapter to resolve user identity before the user actor can be accessed (e.g. session token → userId lookup). Already covered in item 2 for adding comments explaining this. + +--- + +## [ ] 16. Chunk GitHub data sync and publish progress + +**Rationale:** `runFullSync` in the github-data actor fetches everything at once (all repos, branches, members, PRs), replaces all tables atomically, and has a 5-minute timeout. For large orgs this will timeout or lose all data mid-sync (replace pattern deletes everything first). Needs to be chunked with incremental progress. + +### Current state (broken for large orgs) + +- `runFullSync()` (`github-data/index.ts` line 486-538): + 1. Fetches ALL repos, branches, members, PRs in 4 sequential calls + 2. `replaceRepositories/Branches/Members/PullRequests` — deletes all rows then inserts all new rows + 3. 
Single 5-minute timeout wraps the entire operation + 4. No progress reporting to the client — just "Syncing GitHub data..." → "Synced N repositories" + 5. If it fails mid-sync, data is partially deleted with no recovery + +### Changes needed + +1. **Chunk the sync by repository** — sync repos first (paginated from GitHub API), then for each repo chunk, sync its branches and PRs. Members can be a separate chunk. + +2. **Incremental upsert, not replace** — don't delete-then-insert. Use upsert per row so partial sync doesn't lose data. Mark rows with a sync generation ID; after full sync completes, delete rows from previous generations. + +3. **Run in a loop, not a single step** — each chunk is a separate workflow step with its own timeout. If one chunk fails, previous chunks are persisted. + +4. **Publish progress per chunk** — after each chunk completes: + - Update `github_meta` with progress (e.g. `syncedRepos: 15/42`) + - Push progress to the organization actor + - Organization broadcasts to clients so the UI shows progress (e.g. "Syncing repositories... 15/42") + +5. **Initial sync uses the same chunked approach** — `github-data-initial-sync` step should kick off the chunked loop, not call `runFullSync` directly + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`**: + - Refactor `runFullSync` into chunked loop + - Replace `replaceRepositories/Branches/Members/PullRequests` with upsert + generation sweep + - Add progress metadata to `github_meta` table + - Publish progress to org actor after each chunk +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add sync generation column to all tables, add progress fields to `github_meta` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** (or `app-shell.ts`) — handle sync progress updates and broadcast to clients +- **`foundry/packages/shared/src/app-shell.ts`** — add sync progress fields to `FoundryGithubState` (e.g. 
`syncProgress: { current: number; total: number } | null`) +- **`foundry/packages/frontend/`** — show sync progress in UI (e.g. "Syncing repositories... 15/42") + +--- + +# Deferred follow-up outside this task + +## 17. Type all actor context parameters — remove `c: any` + +**Rationale:** 272+ instances of `c: any`, `ctx: any`, `loopCtx: any` across all actor code. This eliminates type safety for DB access, state access, broadcasts, and queue operations. All context parameters should use RivetKit's proper context types. + +### Scope (by file, approximate count) + +| File | `any` contexts | +|---|---| +| `organization/app-shell.ts` | ~108 | +| `organization/actions.ts` | ~56 | +| `task/workbench.ts` | ~53 | +| `github-data/index.ts` | ~23 | +| `repository/actions.ts` | ~22 | +| `sandbox/index.ts` | ~21 | +| `handles.ts` | ~19 | +| `task/workflow/commands.ts` | ~10 | +| `task/workflow/init.ts` | ~4 | +| `auth-user/index.ts` | ~2 | +| `history/index.ts` | ~2 | +| `task/workflow/index.ts` | ~2 | +| `task/workflow/common.ts` | ~2 | +| `task/workflow/push.ts` | ~1 | +| `polling.ts` | ~1 | + +### Changes needed + +1. **Determine correct RivetKit context types** — check RivetKit exports for `ActionContext`, `ActorContextOf`, `WorkflowContext`, `LoopContext`, or equivalent. Reference `polling.ts` which already defines typed contexts (`PollingActorContext`, `WorkflowPollingActorContext`). + +2. **Define per-actor context types** — each actor has its own state shape and DB schema, so the context type should be specific (e.g. `ActionContext` or similar). + +3. **Replace all `c: any`** with the proper typed context across every file listed above. + +4. **Type workflow/loop contexts** — `ctx: any` in workflow functions and `loopCtx: any` in loop callbacks need proper types too. + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor context parameters (`c`, `ctx`, `loopCtx`) must be properly typed using RivetKit's context types. 
Never use `any` for actor contexts. Each actor should define or derive its context type from the actor definition." + +--- + +## [ ] 18. Final pass: remove all dead code + +**Dependencies:** all other items (do this last, after 17) + +**Rationale:** After completing all changes above, many actions, queues, SQLite tables, workflow steps, shared types, and helper functions will be orphaned. Do a full scan to find and remove everything that's dead. + +### Scope + +Scan the entire foundry codebase for: +- **Dead actions** — actions with no callers (client, other actors, services, HTTP endpoints) +- **Dead queues** — queue message types with no senders +- **Dead SQLite tables** — tables with no reads or writes +- **Dead workflow steps** — step names that are no longer referenced +- **Dead shared types** — types in `packages/shared` that are no longer imported +- **Dead helper functions** — private functions with no callers +- **Dead imports** — unused imports across all files + +### When to do this + +After all items 1–17 are complete. Not before — removing code while other items are in progress will create conflicts. + +--- + +## [ ] 19. Remove duplicate data between `c.state` and SQLite + +**Dependencies:** items 21, 24 + +**Rationale:** Several actors store the same data in both `c.state` (RivetKit durable state) and their SQLite tables. Mutable fields that exist in both can silently diverge — `c.state` becomes stale when the SQLite copy is updated. Per the existing CLAUDE.md rule, `c.state` should hold only small scalars/identifiers; anything queryable or mutable belongs in SQLite. + +### Duplicates found + +**Task actor** — `c.state` (`createState` in `task/index.ts` lines 124-139) vs `task`/`taskRuntime` tables: + +| Field | In SQLite? | Mutable? 
| Verdict | +|---|---|---|---| +| `organizationId` | No | No | **KEEP** — identity field | +| `repoId` | No | No | **KEEP** — identity field | +| `taskId` | No | No | **KEEP** — identity field | +| `repoRemote` | No (but org `repos` table has it) | No | **DELETE** — not needed on task, read from repo/org | +| `branchName` | Yes (`task.branch_name`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `title` | Yes (`task.title`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `task` (description) | Yes (`task.task`) | No | **REMOVE from c.state** — redundant | +| `sandboxProviderId` | Yes (`task.sandbox_provider_id`) | No | **REMOVE from c.state** — redundant | +| `agentType` | Yes (`task.agent_type`) | Yes | **DELETE entirely** — session-specific (item 21) | +| `explicitTitle` | No | No | **MOVE to SQLite** — creation metadata | +| `explicitBranchName` | No | No | **MOVE to SQLite** — creation metadata | +| `initialPrompt` | No | No | **DELETE entirely** — dead code, session-specific (item 21) | +| `initialized` | No | Yes | **DELETE entirely** — dead code, `status` already tracks init progress | +| `previousStatus` | No | No | **DELETE entirely** — never set, never read | + +**Repository actor** — `c.state` (`createState` in `repository/index.ts`) vs `repoMeta` table: + +| Field | Mutable? | Risk | +|---|---|---| +| `remoteUrl` | No | Low — redundant but safe | + +### Fix + +Remove all duplicated fields from `c.state`. Keep only identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). Read mutable data from SQLite. + +**Task actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + taskId: input.taskId, +}) +``` + +Fields already in SQLite (`branchName`, `title`, `task`, `sandboxProviderId`) — remove from `c.state`, read from SQLite only. 
Fields not yet in SQLite (`explicitTitle`, `explicitBranchName`) — add to `task` table, remove from `c.state`. Dead code to delete entirely: `agentType`, `initialPrompt` (item 21), `initialized`, `previousStatus`, `repoRemote`. + +**Repository actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, +}) +``` + +`remoteUrl` is removed from repo actor `c.state` entirely. The repo actor reads `remoteUrl` from its own `repoMeta` SQLite table when needed. The org actor already stores `remoteUrl` in its `repos` table (source of truth from GitHub data). The `getOrCreateRepository()` helper in `handles.ts` currently requires `remoteUrl` as a parameter and passes it as `createWithInput` — this parameter must be removed. Every call site in `organization/actions.ts` and `organization/app-shell.ts` currently does a DB lookup for `remoteUrl` just to pass it to `getOrCreateRepository()` — all of those lookups go away. On actor creation, the repo actor should populate its `repoMeta.remoteUrl` by querying the org actor or github-data actor, not by receiving it as a create input. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/index.ts`** — trim `createState`, update all `c.state.*` reads for removed fields to read from SQLite instead +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/task/workflow/*.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/repository/index.ts`** — trim `createState`, remove `remoteUrl` from input type +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update all `c.state.remoteUrl` reads to query `repoMeta` table; remove `persistRemoteUrl()` helper +- **`foundry/packages/backend/src/actors/handles.ts`** — remove `remoteUrl` parameter from `getOrCreateRepository()` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove all `remoteUrl` lookups done solely to pass to `getOrCreateRepository()` (~10 call sites) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same cleanup for app-shell call sites + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Never duplicate data between `c.state` and SQLite. `c.state` holds only immutable identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). All mutable data and anything queryable must live exclusively in SQLite. If a field can change after actor creation, it must not be in `c.state`." + +--- + +## [ ] 20. Prefix all admin/recovery actions with `admin` + +**Rationale:** Several actions are admin-only recovery/rebuild operations but their names don't distinguish them from normal product flows. Prefix with `admin` so it's immediately clear these are not part of regular user flows. 
+ +### Actions to rename + +**Organization actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `reconcileWorkbenchState` | `adminReconcileWorkbenchState` | Full fan-out rebuild of task summary projection | +| `reloadGithubOrganization` | `adminReloadGithubOrganization` | Manual trigger to refetch all org GitHub data | +| `reloadGithubPullRequests` | `adminReloadGithubPullRequests` | Manual trigger to refetch all PR data | +| `reloadGithubRepository` | `adminReloadGithubRepository` | Manual trigger to refetch single repo | +| `reloadGithubPullRequest` | `adminReloadGithubPullRequest` | Manual trigger to refetch single PR | + +**GitHub Data actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `fullSync` | `adminFullSync` | Full replace of all GitHub data — recovery operation | +| `reloadOrganization` | `adminReloadOrganization` | Triggers full sync manually | +| `reloadAllPullRequests` | `adminReloadAllPullRequests` | Triggers full sync manually | +| `clearState` | `adminClearState` | Deletes all GitHub data — recovery from lost access | + +**NOT renamed** (these are triggered by webhooks/normal flows, not manual admin actions): +- `reloadRepository` — called by push/create/delete webhooks (incremental, normal flow) +- `reloadPullRequest` — called by PR webhooks (incremental, normal flow) +- `handlePullRequestWebhook` — webhook handler (normal flow) +- `syncGithubOrganizations` — called during OAuth callback (normal flow, though also used for repair) + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`** — rename actions +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — rename actions +- **`foundry/packages/client/src/backend-client.ts`** — update method names +- **`foundry/packages/frontend/`** — update any references to renamed actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Admin-only actions (recovery, rebuild, 
manual resync, state reset) must be prefixed with `admin` (e.g. `adminReconcileState`, `adminClearState`). This makes it clear they are not part of normal product flows and should not be called from regular client code paths." + +--- + +## [ ] 21. Remove legacy/session-scoped fields from task table + +**Rationale:** The `task` table has fields that either belong on the session, are redundant with data from other actors, or are dead code from the removed local git clone. These should be cleaned up. + +### Fields to remove from `task` table and `c.state` + +**`agentType`** — Legacy from when task = 1 session. Only used for `defaultModelForAgent(c.state.agentType)` to pick the default model when creating a new session. Sessions already have their own `model` column in `taskWorkbenchSessions`. The default model for new sessions should come from user settings (see item 16 — starred model stored in user actor). Remove `agentType` from task table, `c.state`, `createState`, `TaskRecord`, and all `defaultModelForAgent()` call sites. Replace with user settings lookup. + +**`initialPrompt`** — Stored on `c.state` at task creation but **never read anywhere**. Completely dead code. This is also session-specific, not task-specific — the initial prompt belongs on the first session, not the task. Remove from `c.state`, `createState` input type, and `CreateTaskCommand`/`CreateTaskInput` types. Remove from `repository/actions.ts` create flow. + +**`prSubmitted`** — Redundant boolean set when `submitPullRequest` runs. PR state already flows from GitHub webhooks → github-data actor → branch name lookup. This boolean can go stale (PR closed and reopened, PR deleted, etc.). Remove entirely — PR existence is derivable from github-data by branch name (already how `enrichTaskRecord` and `buildTaskSummary` work). + +### Dead fields on `taskRuntime` table + +**`provisionStage`** — Values: `"queued"`, `"ready"`, `"error"`. 
Redundant with `status` — `init_complete` implies ready, `error` implies error. Never read in business logic. Delete. + +**`provisionStageUpdatedAt`** — Timestamp for `provisionStage` changes. Never read anywhere. Delete. + +### Dead fields on `TaskRecord` (in `workflow/common.ts`) + +These are always hardcoded to `null` — remnants of the removed local git clone: + +- `diffStat` — was populated from `branches` table (deleted) +- `hasUnpushed` — was populated from `branches` table (deleted) +- `conflictsWithMain` — was populated from `branches` table (deleted) +- `parentBranch` — was populated from `branches` table (deleted) + +Remove from `TaskRecord` type, `getCurrentRecord()`, and all consumers (contracts, mock client, tests, frontend). + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `agentType` and `prSubmitted` columns from `task` table; remove `provisionStage` and `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `agentType`, `initialPrompt`, `initialized`, `previousStatus`, `repoRemote` from `createState` and input type +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `defaultModelForAgent()`, `agentTypeForModel()`, update session creation to use user settings for default model; remove `prSubmitted` set in `submitPullRequest` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `getCurrentRecord()` and `TaskRecord` construction +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `agentType` from task row inserts +- **`foundry/packages/shared/src/contracts.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `prUrl`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `TaskRecord` schema (note: `prUrl` and `prAuthor` should stay if still populated by `enrichTaskRecord`, or 
move to the unified task/PR model from item 15) +- **`foundry/packages/client/src/mock/backend-client.ts`** — update mock to remove dead fields +- **`foundry/packages/client/test/view-model.test.ts`** — update test fixtures +- **`foundry/packages/frontend/src/features/tasks/model.test.ts`** — update test fixtures +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove any references to `agentType` in task creation input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `enrichTaskRecord()` to stop setting dead fields + +--- + +## [ ] 22. Move per-user UI state from task actor to user actor + +**Dependencies:** item 1 + +**Rationale:** The task actor stores UI-facing state that is user-specific, not task-global. With multiplayer (multiple users viewing the same task), this breaks — each user has their own active session, their own unread state, their own drafts. These must live on the user actor, keyed by `(taskId, sessionId)`, not on the shared task actor. + +### Per-user state currently on the task actor (wrong) + +**`taskRuntime.activeSessionId`** — Which session the user is "looking at." Used to: +- Determine which session's status drives the task-level status (running/idle) — this is wrong, the task status should reflect ALL sessions, not one user's active tab +- Return a "current" session in `attachTask` responses — this is per-user +- Migration path for legacy single-session tasks in `ensureWorkbenchSeeded` + +This should move to the user actor as `activeSessionId` per `(userId, taskId)`. + +**`taskWorkbenchSessions.unread`** — Per-user unread state stored globally on the session. If user A reads a session, user B's unread state is also cleared. Move to user actor keyed by `(userId, taskId, sessionId)`. + +**`taskWorkbenchSessions.draftText` / `draftAttachmentsJson` / `draftUpdatedAt`** — Per-user draft state stored globally. If user A starts typing a draft, it overwrites user B's draft. 
Move to user actor keyed by `(userId, taskId, sessionId)`. + +### What stays on the task actor (correct — task-global state) + +- `taskRuntime.activeSandboxId` — which sandbox is running (global to the task) +- `taskRuntime.activeSwitchTarget` / `activeCwd` — sandbox connection state (global) +- `taskRuntime.statusMessage` — provisioning/runtime status (global) +- `taskWorkbenchSessions.model` — which model the session uses (global) +- `taskWorkbenchSessions.status` — session runtime status (global) +- `taskWorkbenchSessions.transcriptJson` — session transcript (global) + +### Fix + +Add a `userTaskState` table to the user actor: + +```typescript +export const userTaskState = sqliteTable("user_task_state", { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), // per-user active tab + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), +}, (table) => ({ + pk: primaryKey(table.taskId, table.sessionId), +})); +``` + +Remove `activeSessionId` from `taskRuntime`. Remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions`. + +The task-level status should be derived from ALL sessions (e.g., task is "running" if ANY session is running), not from one user's `activeSessionId`. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add `userTaskState` table +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `activeSessionId` from `taskRuntime`; remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions` +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove all `activeSessionId` reads/writes; remove draft/unread mutation functions; task status derivation should check all sessions +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `activeSessionId` from `getCurrentRecord()` +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `activeSessionId` references in `attachTask` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `activeSessionId` initialization +- **`foundry/packages/client/`** — draft/unread/activeSession operations route to user actor instead of task actor +- **`foundry/packages/frontend/`** — update subscription to fetch per-user state from user actor + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Per-user UI state (active session tab, unread counts, draft text, draft attachments) must live on the user actor, not on shared task/session actors. Task actors hold only task-global state visible to all users. This is critical for multiplayer correctness — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts." + +--- + +## [ ] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) + +**Rationale:** `getTaskEnriched` is dead code with zero callers from the client. It's also the worst fan-out pattern in the codebase: org → repo actor → task actor (`.get()`) → github-data actor (`listPullRequestsForRepository` fetches ALL PRs, then `.find()`s by branch name). 
This is exactly the pattern the coordinator model eliminates — task detail comes from `getTaskDetail` on the task actor, sidebar data comes from materialized `taskSummaries` on the org actor. + +### What to delete + +- **`enrichTaskRecord()`** — `repository/actions.ts:117-143`. Fetches all PRs for a repo to find one by branch name. Dead code. +- **`getTaskEnriched` action** — `repository/actions.ts:432-450`. Only caller of `enrichTaskRecord`. Dead code. +- **`getTaskEnriched` org proxy** — `organization/actions.ts:838-849`. Only caller of the repo action. Dead code. +- **`GetTaskEnrichedCommand` type** — wherever defined. + +### Files to change + +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — delete `enrichTaskRecord()` and `getTaskEnriched` action +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — delete `getTaskEnriched` proxy action + +--- + +## [ ] 24. Clean up task status tracking + +**Dependencies:** item 21 + +**Rationale:** Task status tracking is spread across `c.state`, the `task` SQLite table, and the `taskRuntime` table with redundant and dead fields. Consolidate to a single `status` enum on the `task` table. Remove `statusMessage` — human-readable status text should be derived on the client from the `status` enum, not stored on the backend. + +### Fields to delete + +| Field | Location | Why | +|---|---|---| +| `initialized` | `c.state` | Dead code — never read. `status` already tracks init progress. | +| `previousStatus` | `c.state` | Dead code — never set, never read. | +| `statusMessage` | `taskRuntime` table | Client concern — the client should derive display text from the `status` enum. The backend should not store UI copy. | +| `provisionStage` | `taskRuntime` table | Redundant — `status` already encodes provision progress (`init_bootstrap_db` → `init_enqueue_provision` → `init_complete`). | +| `provisionStageUpdatedAt` | `taskRuntime` table | Dead — never read. 
| + +### What remains + +- **`status`** on the `task` table — the single canonical state machine enum. Values: `init_bootstrap_db`, `init_enqueue_provision`, `init_complete`, `running`, `idle`, `error`, `archive_*`, `kill_*`, `archived`, `killed`. + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `initialized`, `previousStatus` from `createState` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `statusMessage` parameter from `setTaskState()`, remove it from `getCurrentRecord()` query +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from taskRuntime inserts/updates; remove `ensureTaskRuntimeCacheColumns()` raw ALTER TABLE for these columns +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `statusMessage` from handler updates +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — remove `statusMessage` updates +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `statusMessage` from `buildTaskDetail()`, remove `ensureTaskRuntimeCacheColumns()` for these columns +- **`foundry/packages/shared/src/workbench.ts`** — remove `statusMessage` from `WorkbenchTaskDetail` +- **`foundry/packages/frontend/`** — derive display text from `status` enum instead of reading `statusMessage` + +--- + +## [ ] 25. Remove "Workbench" prefix from all types, functions, files, and tables + +**Rationale:** "Workbench" is not a real concept in the system. It's a namespace prefix applied to every type, function, file, and table name. The actual entities are Task, Session, Repository, Sandbox, Transcript, Draft, etc. — "Workbench" adds zero information and obscures what things actually are. 
+ +### Rename strategy + +Drop "Workbench" everywhere. If the result collides with an existing name (e.g., auth `Session`), use the domain prefix (e.g., `TaskSession` vs auth `Session`). + +### Type renames (`shared/src/workbench.ts`) + +| Before | After | +|---|---| +| `WorkbenchTaskStatus` | `TaskStatus` (already exists as base, merge) | +| `WorkbenchAgentKind` | `AgentKind` | +| `WorkbenchModelId` | `ModelId` | +| `WorkbenchSessionStatus` | `SessionStatus` | +| `WorkbenchTranscriptEvent` | `TranscriptEvent` | +| `WorkbenchComposerDraft` | `ComposerDraft` | +| `WorkbenchSessionSummary` | `SessionSummary` | +| `WorkbenchSessionDetail` | `SessionDetail` | +| `WorkbenchFileChange` | `FileChange` | +| `WorkbenchFileTreeNode` | `FileTreeNode` | +| `WorkbenchLineAttachment` | `LineAttachment` | +| `WorkbenchHistoryEvent` | `HistoryEvent` | +| `WorkbenchDiffLineKind` | `DiffLineKind` | +| `WorkbenchParsedDiffLine` | `ParsedDiffLine` | +| `WorkbenchPullRequestSummary` | `PullRequestSummary` | +| `WorkbenchOpenPrSummary` | `OpenPrSummary` | +| `WorkbenchSandboxSummary` | `SandboxSummary` | +| `WorkbenchTaskSummary` | `TaskSummary` | +| `WorkbenchTaskDetail` | `TaskDetail` | +| `WorkbenchRepositorySummary` | `RepositorySummary` | +| `WorkbenchSession` | `TaskSession` (avoids auth `Session` collision) | +| `WorkbenchTask` | `TaskSnapshot` (avoids `task` table collision) | +| `WorkbenchRepo` | `RepoSnapshot` | +| `WorkbenchRepositorySection` | `RepositorySection` | +| `TaskWorkbenchSnapshot` | `DashboardSnapshot` | +| `WorkbenchModelOption` | `ModelOption` | +| `WorkbenchModelGroup` | `ModelGroup` | +| `TaskWorkbenchSelectInput` | `SelectTaskInput` | +| `TaskWorkbenchCreateTaskInput` | `CreateTaskInput` | +| `TaskWorkbenchRenameInput` | `RenameTaskInput` | +| `TaskWorkbenchSendMessageInput` | `SendMessageInput` | +| `TaskWorkbenchSessionInput` | `SessionInput` | +| `TaskWorkbenchRenameSessionInput` | `RenameSessionInput` | +| `TaskWorkbenchChangeModelInput` | 
`ChangeModelInput` | +| `TaskWorkbenchUpdateDraftInput` | `UpdateDraftInput` | +| `TaskWorkbenchSetSessionUnreadInput` | `SetSessionUnreadInput` | +| `TaskWorkbenchDiffInput` | `DiffInput` | +| `TaskWorkbenchCreateTaskResponse` | `CreateTaskResponse` | +| `TaskWorkbenchAddSessionResponse` | `AddSessionResponse` | + +### File renames + +| Before | After | +|---|---| +| `shared/src/workbench.ts` | `shared/src/types.ts` (or split into `task.ts`, `session.ts`, etc.) | +| `backend/src/actors/task/workbench.ts` | `backend/src/actors/task/sessions.ts` (already planned in item 7) | +| `client/src/workbench-client.ts` | `client/src/task-client.ts` | +| `client/src/workbench-model.ts` | `client/src/model.ts` | +| `client/src/remote/workbench-client.ts` | `client/src/remote/task-client.ts` | +| `client/src/mock/workbench-client.ts` | `client/src/mock/task-client.ts` | + +### Table rename + +| Before | After | +|---|---| +| `task_workbench_sessions` | `task_sessions` | + +### Function renames (backend — drop "Workbench" infix) + +All functions in `backend/src/actors/task/workbench.ts`: +- `createWorkbenchSession` → `createSession` +- `closeWorkbenchSession` → `closeSession` +- `changeWorkbenchModel` → `changeModel` +- `sendWorkbenchMessage` → `sendMessage` +- `stopWorkbenchSession` → `stopSession` +- `renameWorkbenchBranch` → deleted (see item 26) +- `renameWorkbenchTask` → `renameTask` +- `renameWorkbenchSession` → `renameSession` +- `revertWorkbenchFile` → `revertFile` +- `publishWorkbenchPr` → `publishPr` +- `updateWorkbenchDraft` → `updateDraft` +- `setWorkbenchSessionUnread` → `setSessionUnread` +- `markWorkbenchUnread` → `markUnread` +- `syncWorkbenchSessionStatus` → `syncSessionStatus` +- `ensureWorkbenchSeeded` → `ensureSessionSeeded` + +### Queue/command type renames (backend) + +- `TaskWorkbenchValueCommand` → `TaskValueCommand` +- `TaskWorkbenchSessionTitleCommand` → `SessionTitleCommand` +- `TaskWorkbenchSessionUnreadCommand` → `SessionUnreadCommand` + +### Scope + 
+~420 occurrences across shared (35+ types), backend (200+ refs), client (324 refs), frontend (96 refs). Mechanical find-and-replace once the rename map is settled. + +### Files to change + +- **`foundry/packages/shared/src/workbench.ts`** — rename file, rename all exported types +- **`foundry/packages/shared/src/index.ts`** — update re-export path +- **`foundry/packages/shared/src/app-shell.ts`** — update `WorkbenchModelId` → `ModelId` import +- **`foundry/packages/shared/src/realtime-events.ts`** — update all `Workbench*` type imports +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename file + all functions +- **`foundry/packages/backend/src/actors/task/index.ts`** — update imports and action registrations +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — rename `taskWorkbenchSessions` → `taskSessions` +- **`foundry/packages/backend/src/actors/task/workflow/`** — update all workbench references +- **`foundry/packages/backend/src/actors/organization/`** — update type imports and action names +- **`foundry/packages/backend/src/actors/repository/`** — update type imports +- **`foundry/packages/client/src/`** — rename files + update all type/function references +- **`foundry/packages/frontend/src/`** — update all type imports + +### CLAUDE.md update + +Update `foundry/packages/backend/CLAUDE.md` coordinator hierarchy diagram: `taskWorkbenchSessions` → `taskSessions`. + +--- + +## [ ] 26. Delete branch rename (branches immutable after creation) + +**Dependencies:** item 25 + +**Rationale:** Branch name is assigned once at task creation and never changes. Branch rename is unused in the frontend UI and SDK, adds ~80 lines of code, and creates a transactional consistency risk (git rename succeeds but index update fails). 
+ +### Delete + +- **`task/workbench.ts`** — delete `renameWorkbenchBranch()` (~50 lines) +- **`task/index.ts`** — delete `renameWorkbenchBranch` action +- **`task/workflow/queue.ts`** — remove `"task.command.workbench.rename_branch"` queue type +- **`task/workflow/index.ts`** — remove `"task.command.workbench.rename_branch"` handler +- **`organization/actions.ts`** — delete `renameWorkbenchBranch` proxy action +- **`repository/actions.ts`** — delete `registerTaskBranch` action (only caller was rename flow) +- **`client/src/workbench-client.ts`** — remove `renameBranch` from interface +- **`client/src/remote/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/mock/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/backend-client.ts`** — delete `renameWorkbenchBranch` from interface + implementation +- **`client/src/mock/backend-client.ts`** — delete `renameWorkbenchBranch` implementation +- **`frontend/src/components/mock-layout.tsx`** — remove `renameBranch` from client interface, delete `onRenameBranch` callbacks and all `renameBranch` wiring (~8 refs) +- **`shared/src/workbench.ts`** — delete `TaskWorkbenchRenameInput` (if only used by branch rename; check if task title rename shares it) + +### Keep + +- `deriveFallbackTitle()` + `sanitizeBranchName()` + `resolveCreateFlowDecision()` — initial branch derivation at creation +- `registerTaskBranchMutation()` — used during task creation for `onBranch` path +- `renameWorkbenchTask()` — title rename is independent, stays +- `taskIndex` table — still the coordinator index for branch→task mapping + +--- + +## [ ] Final audit pass (run after all items above are complete) + +### Dead code scan + +Already tracked in item 18: once all changes are complete, do a full scan to find dead actions, queues, SQLite tables, and workflow steps that need to be removed. 
+ +### Dead events audit + +Scan all event types emitted by actors (in `packages/shared/src/realtime-events.ts` and anywhere actors call `c.broadcast()` or similar). Cross-reference against all client subscribers (in `packages/client/` and `packages/frontend/`). Remove any events that are emitted but never subscribed to by any client. This includes events that may have been superseded by the consolidated single-topic-per-actor pattern (item 14). diff --git a/foundry/docker/frontend.dev.Dockerfile b/foundry/docker/frontend.dev.Dockerfile index 3b0d8e4..dd74dd0 100644 --- a/foundry/docker/frontend.dev.Dockerfile +++ b/foundry/docker/frontend.dev.Dockerfile @@ -8,4 +8,4 @@ RUN npm install -g pnpm@10.28.2 WORKDIR /app -CMD ["bash", "-lc", "pnpm install --force --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"] +CMD ["bash", "-lc", "pnpm install --frozen-lockfile --filter @sandbox-agent/foundry-frontend... 
&& cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"] diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md index 432bc85..ae4257e 100644 --- a/foundry/packages/backend/CLAUDE.md +++ b/foundry/packages/backend/CLAUDE.md @@ -5,14 +5,12 @@ Keep the backend actor tree aligned with this shape unless we explicitly decide to change it: ```text -OrganizationActor -├─ HistoryActor(organization-scoped global feed) +OrganizationActor (direct coordinator for tasks) +├─ AuditLogActor (organization-scoped global feed) ├─ GithubDataActor -├─ RepositoryActor(repo) -│ └─ TaskActor(task) -│ ├─ TaskSessionActor(session) × N -│ │ └─ SessionStatusSyncActor(session) × 0..1 -│ └─ Task-local workbench state +├─ TaskActor(task) +│ ├─ taskSessions → session metadata/transcripts +│ └─ taskSandboxes → sandbox instance index └─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N ``` @@ -28,53 +26,125 @@ Children push updates **up** to their direct coordinator only. 
Coordinators broa ### Coordinator hierarchy and index tables ```text -OrganizationActor (coordinator for repos + auth users) +OrganizationActor (coordinator for tasks + auth users) │ │ Index tables: -│ ├─ repos → RepositoryActor index (repo catalog) -│ ├─ taskLookup → TaskActor index (taskId → repoId routing) -│ ├─ taskSummaries → TaskActor index (materialized sidebar projection) -│ ├─ authSessionIndex → AuthUserActor index (session token → userId) -│ ├─ authEmailIndex → AuthUserActor index (email → userId) -│ └─ authAccountIndex → AuthUserActor index (OAuth account → userId) +│ ├─ taskIndex → TaskActor index (taskId → repoId + branchName) +│ ├─ taskSummaries → TaskActor materialized sidebar projection +│ ├─ authSessionIndex → UserActor index (session token → userId) +│ ├─ authEmailIndex → UserActor index (email → userId) +│ └─ authAccountIndex → UserActor index (OAuth account → userId) │ -├─ RepositoryActor (coordinator for tasks) +├─ TaskActor (coordinator for sessions + sandboxes) │ │ │ │ Index tables: -│ │ └─ taskIndex → TaskActor index (taskId → branchName) +│ │ ├─ taskWorkspaceSessions → Session index (session metadata + transcript) +│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) │ │ -│ └─ TaskActor (coordinator for sessions + sandboxes) -│ │ -│ │ Index tables: -│ │ ├─ taskWorkbenchSessions → Session index (session metadata, transcript, draft) -│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) -│ │ -│ └─ SandboxInstanceActor (leaf) +│ └─ SandboxInstanceActor (leaf) │ -├─ HistoryActor (organization-scoped audit log, not a coordinator) +├─ AuditLogActor (organization-scoped audit log, not a coordinator) └─ GithubDataActor (GitHub API cache, not a coordinator) ``` When adding a new index table, annotate it in the schema file with a doc comment identifying it as a coordinator index and which child actor it indexes (see existing examples). 
+## Lazy Task Actor Creation — CRITICAL + +**Task actors must NEVER be created during GitHub sync or bulk operations.** Creating hundreds of task actors simultaneously causes OOM crashes. An org can have 200+ PRs; spawning an actor per PR kills the process. + +### The two creation points + +There are exactly **two** places that may create a task actor: + +1. **`createTaskMutation`** in `task-mutations.ts` — the only backend code that calls `getOrCreateTask`. Triggered by explicit user action ("New Task" button). One actor at a time. + +2. **`backend-client.ts` client helper** — calls `client.task.getOrCreate(...)`. This is the lazy materialization point: when a user clicks a virtual task in the sidebar, the client creates the actor, and it self-initializes in `getCurrentRecord()` (`workflow/common.ts`) by reading branch/title from the org's `getTaskIndexEntry` action. + +### The rule + +**Never use `getOrCreateTask` inside a sync loop, webhook handler, or any bulk operation.** That's what caused the OOM — 186 actors spawned simultaneously during PR sync. + +`getOrCreateTask` IS allowed in: +- `createTaskMutation` — explicit user "New Task" action +- `requireWorkspaceTask` — user-initiated actions (createSession, sendMessage, etc.) that may hit a virtual task +- `getTask` action on the org — called by sandbox actor and client, needs to materialize virtual tasks +- `backend-client.ts` client helper — lazy materialization when user views a task + +### Virtual tasks (PR-driven) + +During PR sync, `refreshTaskSummaryForBranchMutation` is called for every changed PR (via github-data's `emitPullRequestChangeEvents`). It writes **virtual task entries** to the org actor's local `taskIndex` + `taskSummaries` tables only. No task actor is spawned. No cross-actor calls to task actors. + +When the user interacts with a virtual task (clicks it, creates a session): +1. 
Client or org actor calls `getOrCreate` on the task actor key → actor is created with empty DB +2. Any action on the actor calls `getCurrentRecord()` → sees empty DB → reads branch/title from org's `getTaskIndexEntry` → calls `initBootstrapDbActivity` + `initCompleteActivity` → task is now real + +### Call sites to watch + +- `refreshTaskSummaryForBranchMutation` — called in bulk during sync. Must ONLY write to org local tables. Never create task actors or call task actor actions. +- `emitPullRequestChangeEvents` in github-data — iterates all changed PRs. Must remain fire-and-forget with no actor fan-out. + ## Ownership Rules -- `OrganizationActor` is the organization coordinator and lookup/index owner. -- `HistoryActor` is organization-scoped. There is one organization-level history feed. -- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes. +- `OrganizationActor` is the organization coordinator, direct coordinator for tasks, and lookup/index owner. It owns the task index, task summaries, and repo catalog. +- `AuditLogActor` is organization-scoped. There is one organization-level audit log feed. - `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized. - `TaskActor` can have many sessions. - `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time. -- Session unread state and draft prompts are backend-owned workbench state, not frontend-local state. -- Branch rename is a real git operation, not just metadata. +- Session unread state and draft prompts are backend-owned workspace state, not frontend-local state. +- Branch names are immutable after task creation. Do not implement branch-rename flows. - `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. - The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. 
Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. - When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. - Only a coordinator creates/destroys its children. Do not create child actors from outside the coordinator. -- Children push state changes up to their direct coordinator only — never skip levels (e.g., task pushes to repo, not directly to org, unless org is the direct coordinator for that index). +- Children push state changes up to their direct coordinator only. Task actors push summary updates directly to the organization actor. - Read paths must use the coordinator's local index tables. Do not fan out to child actors on the hot read path. - Never build "enriched" read actions that chain through multiple actors (e.g., coordinator → child actor → sibling actor). If data from multiple actors is needed for a read, it should already be materialized in the coordinator's index tables via push updates. If it's not there, fix the write path to push it — do not add a fan-out read path. +## Drizzle Migration Maintenance + +After changing any actor's `db/schema.ts`, you **must** regenerate the corresponding migration so the runtime creates the tables that match the schema. Forgetting this step causes `no such table` errors at runtime. + +1. **Generate a new drizzle migration.** Run from `packages/backend`: + ```bash + npx drizzle-kit generate --config=./src/actors/<actor>/db/drizzle.config.ts + ``` + If the interactive prompt is unavailable (e.g. in a non-TTY), manually create a new `.sql` file under `./src/actors/<actor>/db/drizzle/` and add the corresponding entry to `meta/_journal.json`. + +2. 
**Regenerate the compiled `migrations.ts`.** Run from the foundry root: + ```bash + npx tsx packages/backend/src/actors/_scripts/generate-actor-migrations.ts + ``` + +3. **Verify insert/upsert calls.** Every column with `.notNull()` (and no `.default(...)`) must be provided a value in all `insert()` and `onConflictDoUpdate()` calls. Missing a NOT NULL column causes a runtime constraint violation, not a type error. + +4. **Nuke RivetKit state in dev** after migration changes to start fresh: + ```bash + docker compose -f compose.dev.yaml down + docker volume rm foundry_foundry_rivetkit_storage + docker compose -f compose.dev.yaml up -d + ``` + +Actors with drizzle migrations: `organization`, `audit-log`, `task`. Other actors (`user`, `github-data`) use inline migrations without drizzle. + +## Workflow Step Nesting — FORBIDDEN + +**Never call `c.step()` / `ctx.step()` from inside another step's `run` callback.** RivetKit workflow steps cannot be nested. Doing so causes the runtime error: *"Cannot start a new workflow entry while another is in progress."* + +This means: +- Functions called from within a step `run` callback must NOT use `c.step()`, `c.loop()`, `c.sleep()`, or `c.queue.next()`. +- If a mutation function needs to be called both from a step and standalone, it must only do plain DB/API work — no workflow primitives. The workflow step wrapping belongs in the workflow file, not in the mutation. +- Helper wrappers that conditionally call `c.step()` (like a `runSyncStep` pattern) are dangerous — if the caller is already inside a step, the nested `c.step()` will crash at runtime with no compile-time warning. + +**Rule of thumb:** Workflow primitives (`step`, `loop`, `sleep`, `queue.next`) may only appear at the top level of a workflow function or inside a `loop` callback — never inside a step's `run`. + +## SQLite Constraints + +- Single-row tables must use an integer primary key with `CHECK (id = 1)` to enforce the singleton invariant at the database level. 
+- Follow the task actor pattern for metadata/profile rows and keep the fixed row id in code as `1`, not a string sentinel. + ## Multiplayer Correctness Per-user UI state must live on the user actor, not on shared task/session actors. This is critical for multiplayer — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts. @@ -85,6 +155,49 @@ Per-user UI state must live on the user actor, not on shared task/session actors Do not store per-user preferences, selections, or ephemeral UI state on shared actors. If a field's value should differ between two users looking at the same task, it belongs on the user actor. +## Audit Log Maintenance + +Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of significant operations. + +## Debugging Actors + +### RivetKit Inspector UI + +The RivetKit inspector UI at `http://localhost:6420/ui/` is the most reliable way to debug actor state in local development. The inspector HTTP API (`/inspector/workflow-history`) has a known bug where it returns empty `{}` even when the workflow has entries — always cross-check with the UI. + +**Useful inspector URL pattern (fill in the actor name, actor id, and tab):** +``` +http://localhost:6420/ui/?u=http%3A%2F%2F127.0.0.1%3A6420&ns=default&r=default&n=[%22<actor-name>%22]&actorId=<actor-id>&tab=<tab> +``` + +Tabs: `workflow`, `database`, `state`, `queue`, `connections`, `metadata`. 
+ +**To find actor IDs:** +```bash +curl -s 'http://127.0.0.1:6420/actors?name=organization' +``` + +**To query actor DB via bun (inside container, substituting the actor id):** +```bash +docker compose -f compose.dev.yaml exec -T backend bun -e ' + var Database = require("bun:sqlite"); + var db = new Database("/root/.local/share/foundry/rivetkit/databases/<actor-id>.db", { readonly: true }); + console.log(JSON.stringify(db.query("SELECT name FROM sqlite_master WHERE type=?").all("table"))); +' +``` + +**To call actor actions via inspector (substituting the actor id and action name):** +```bash +curl -s -X POST 'http://127.0.0.1:6420/gateway/<actor-id>/inspector/action/<action>' \ + -H 'Content-Type: application/json' -d '{"args":[{}]}' +``` + +### Known inspector API bugs + +- `GET /inspector/workflow-history` may return `{"history":{}}` even when workflow has run. Use the UI's Workflow tab instead. +- `GET /inspector/queue` is reliable for checking pending messages. +- `GET /inspector/state` is reliable for checking actor state. + ## Maintenance - Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change. 
diff --git a/foundry/packages/backend/src/actors/auth-user/db/db.ts b/foundry/packages/backend/src/actors/audit-log/db/db.ts similarity index 69% rename from foundry/packages/backend/src/actors/auth-user/db/db.ts rename to foundry/packages/backend/src/actors/audit-log/db/db.ts index b434338..d808ec0 100644 --- a/foundry/packages/backend/src/actors/auth-user/db/db.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const authUserDb = db({ schema, migrations }); +export const auditLogDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts new file mode 100644 index 0000000..da5e904 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/audit-log/db/drizzle", + schema: "./src/actors/audit-log/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/history/db/drizzle/0000_fluffy_kid_colt.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql similarity index 100% rename from foundry/packages/backend/src/actors/history/db/drizzle/0000_fluffy_kid_colt.sql rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql new file mode 100644 index 0000000..9ada559 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql @@ -0,0 +1 @@ +ALTER TABLE `events` ADD COLUMN `repo_id` text; diff --git 
a/foundry/packages/backend/src/actors/history/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json similarity index 100% rename from foundry/packages/backend/src/actors/history/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json similarity index 64% rename from foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json index 940b4e6..cf2910c 100644 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json @@ -1,48 +1,31 @@ { "version": "6", "dialect": "sqlite", - "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", - "prevId": "00000000-0000-0000-0000-000000000000", + "id": "a1b2c3d4-0001-4000-8000-000000000001", + "prevId": "e592c829-141f-4740-88b7-09cf957a4405", "tables": { - "repo_meta": { - "name": "repo_meta", + "events": { + "name": "events", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, - "autoincrement": false + "autoincrement": true }, - "remote_url": { - "name": "remote_url", + "repo_id": { + "name": "repo_id", "type": "text", "primaryKey": false, - "notNull": true, + "notNull": false, "autoincrement": false }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "task_index": { - "name": "task_index", - "columns": { "task_id": { "name": "task_id", "type": "text", - 
"primaryKey": true, - "notNull": true, + "primaryKey": false, + "notNull": false, "autoincrement": false }, "branch_name": { @@ -52,15 +35,22 @@ "notNull": false, "autoincrement": false }, - "created_at": { - "name": "created_at", - "type": "integer", + "kind": { + "name": "kind", + "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, - "updated_at": { - "name": "updated_at", + "payload_json": { + "name": "payload_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", "type": "integer", "primaryKey": false, "notNull": true, diff --git a/foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json similarity index 59% rename from foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json index 93cf8ce..0393be2 100644 --- a/foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json @@ -8,6 +8,13 @@ "when": 1773376223815, "tag": "0000_fluffy_kid_colt", "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773376223816, + "tag": "0001_add_repo_id", + "breakpoints": true } ] } diff --git a/foundry/packages/backend/src/actors/history/db/migrations.ts b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts similarity index 78% rename from foundry/packages/backend/src/actors/history/db/migrations.ts rename to foundry/packages/backend/src/actors/audit-log/db/migrations.ts index 766c225..5bf9b5a 100644 --- a/foundry/packages/backend/src/actors/history/db/migrations.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_fluffy_kid_colt", breakpoints: true, }, + { + idx: 1, + when: 1773376223816, 
+ tag: "0001_add_repo_id", + breakpoints: true, + }, ], } as const; @@ -24,6 +30,8 @@ export default { \`payload_json\` text NOT NULL, \`created_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE \`events\` ADD COLUMN \`repo_id\` text; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/history/db/schema.ts b/foundry/packages/backend/src/actors/audit-log/db/schema.ts similarity index 77% rename from foundry/packages/backend/src/actors/history/db/schema.ts rename to foundry/packages/backend/src/actors/audit-log/db/schema.ts index 80eb7f4..d275dd4 100644 --- a/foundry/packages/backend/src/actors/history/db/schema.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/schema.ts @@ -2,10 +2,11 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; export const events = sqliteTable("events", { id: integer("id").primaryKey({ autoIncrement: true }), + repoId: text("repo_id"), taskId: text("task_id"), branchName: text("branch_name"), kind: text("kind").notNull(), - // Structured by the history event kind definitions in application code. + // Structured by the audit-log event kind definitions in application code. 
payloadJson: text("payload_json").notNull(), createdAt: integer("created_at").notNull(), }); diff --git a/foundry/packages/backend/src/actors/audit-log/index.ts b/foundry/packages/backend/src/actors/audit-log/index.ts new file mode 100644 index 0000000..e189011 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/index.ts @@ -0,0 +1,98 @@ +// @ts-nocheck +import { and, desc, eq } from "drizzle-orm"; +import { actor } from "rivetkit"; +import type { AuditLogEvent } from "@sandbox-agent/foundry-shared"; +import { auditLogDb } from "./db/db.js"; +import { events } from "./db/schema.js"; + +export interface AuditLogInput { + organizationId: string; +} + +export interface AppendAuditLogCommand { + kind: string; + repoId?: string; + taskId?: string; + branchName?: string; + payload: Record; +} + +export interface ListAuditLogParams { + repoId?: string; + branch?: string; + taskId?: string; + limit?: number; +} + +/** + * Organization-scoped audit log. One per org, not one per repo. + * + * The org is the coordinator for all tasks across repos, and we frequently need + * to query the full audit trail across repos (e.g. org-wide activity feed, + * compliance). A per-repo audit log would require fan-out reads every time. + * Keeping it org-scoped gives us a single queryable feed with optional repoId + * filtering when callers want a narrower view. + */ +export const auditLog = actor({ + db: auditLogDb, + options: { + name: "Audit Log", + icon: "database", + }, + createState: (_c, input: AuditLogInput) => ({ + organizationId: input.organizationId, + }), + actions: { + async append(c, body: AppendAuditLogCommand): Promise<{ ok: true }> { + const now = Date.now(); + await c.db + .insert(events) + .values({ + repoId: body.repoId ?? null, + taskId: body.taskId ?? null, + branchName: body.branchName ?? 
null, + kind: body.kind, + payloadJson: JSON.stringify(body.payload), + createdAt: now, + }) + .run(); + return { ok: true }; + }, + + async list(c, params?: ListAuditLogParams): Promise { + const whereParts = []; + if (params?.repoId) { + whereParts.push(eq(events.repoId, params.repoId)); + } + if (params?.taskId) { + whereParts.push(eq(events.taskId, params.taskId)); + } + if (params?.branch) { + whereParts.push(eq(events.branchName, params.branch)); + } + + const base = c.db + .select({ + id: events.id, + repoId: events.repoId, + taskId: events.taskId, + branchName: events.branchName, + kind: events.kind, + payloadJson: events.payloadJson, + createdAt: events.createdAt, + }) + .from(events); + + const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base) + .orderBy(desc(events.createdAt)) + .limit(params?.limit ?? 100) + .all(); + + return rows.map((row) => ({ + ...row, + organizationId: c.state.organizationId, + repoId: row.repoId ?? null, + })); + }, + }, +}); diff --git a/foundry/packages/backend/src/actors/auth-user/db/schema.ts b/foundry/packages/backend/src/actors/auth-user/db/schema.ts deleted file mode 100644 index b87567a..0000000 --- a/foundry/packages/backend/src/actors/auth-user/db/schema.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { integer, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; - -export const authUsers = sqliteTable("user", { - id: text("id").notNull().primaryKey(), - name: text("name").notNull(), - email: text("email").notNull(), - emailVerified: integer("email_verified").notNull(), - image: text("image"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const authSessions = sqliteTable( - "session", - { - id: text("id").notNull().primaryKey(), - token: text("token").notNull(), - userId: text("user_id").notNull(), - expiresAt: integer("expires_at").notNull(), - ipAddress: text("ip_address"), - userAgent: text("user_agent"), - createdAt: 
integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - }, - (table) => ({ - tokenIdx: uniqueIndex("session_token_idx").on(table.token), - }), -); - -export const authAccounts = sqliteTable( - "account", - { - id: text("id").notNull().primaryKey(), - accountId: text("account_id").notNull(), - providerId: text("provider_id").notNull(), - userId: text("user_id").notNull(), - accessToken: text("access_token"), - refreshToken: text("refresh_token"), - idToken: text("id_token"), - accessTokenExpiresAt: integer("access_token_expires_at"), - refreshTokenExpiresAt: integer("refresh_token_expires_at"), - scope: text("scope"), - password: text("password"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - }, - (table) => ({ - providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId), - }), -); - -export const userProfiles = sqliteTable("user_profiles", { - userId: text("user_id").notNull().primaryKey(), - githubAccountId: text("github_account_id"), - githubLogin: text("github_login"), - roleLabel: text("role_label").notNull(), - eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(), - starterRepoStatus: text("starter_repo_status").notNull(), - starterRepoStarredAt: integer("starter_repo_starred_at"), - starterRepoSkippedAt: integer("starter_repo_skipped_at"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const sessionState = sqliteTable("session_state", { - sessionId: text("session_id").notNull().primaryKey(), - activeOrganizationId: text("active_organization_id"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); diff --git a/foundry/packages/backend/src/actors/auth-user/index.ts b/foundry/packages/backend/src/actors/auth-user/index.ts deleted file mode 100644 index a77635a..0000000 --- 
a/foundry/packages/backend/src/actors/auth-user/index.ts +++ /dev/null @@ -1,353 +0,0 @@ -import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm"; -import { actor } from "rivetkit"; -import { authUserDb } from "./db/db.js"; -import { authAccounts, authSessions, authUsers, sessionState, userProfiles } from "./db/schema.js"; - -const tables = { - user: authUsers, - session: authSessions, - account: authAccounts, - userProfiles, - sessionState, -} as const; - -function tableFor(model: string) { - const table = tables[model as keyof typeof tables]; - if (!table) { - throw new Error(`Unsupported auth user model: ${model}`); - } - return table as any; -} - -function columnFor(table: any, field: string) { - const column = table[field]; - if (!column) { - throw new Error(`Unsupported auth user field: ${field}`); - } - return column; -} - -function normalizeValue(value: unknown): unknown { - if (value instanceof Date) { - return value.getTime(); - } - if (Array.isArray(value)) { - return value.map((entry) => normalizeValue(entry)); - } - return value; -} - -function clauseToExpr(table: any, clause: any) { - const column = columnFor(table, clause.field); - const value = normalizeValue(clause.value); - - switch (clause.operator) { - case "ne": - return value === null ? isNotNull(column) : ne(column, value as any); - case "lt": - return lt(column, value as any); - case "lte": - return lte(column, value as any); - case "gt": - return gt(column, value as any); - case "gte": - return gte(column, value as any); - case "in": - return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "not_in": - return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "contains": - return like(column, `%${String(value ?? "")}%`); - case "starts_with": - return like(column, `${String(value ?? 
"")}%`); - case "ends_with": - return like(column, `%${String(value ?? "")}`); - case "eq": - default: - return value === null ? isNull(column) : eq(column, value as any); - } -} - -function buildWhere(table: any, where: any[] | undefined) { - if (!where || where.length === 0) { - return undefined; - } - - let expr = clauseToExpr(table, where[0]); - for (const clause of where.slice(1)) { - const next = clauseToExpr(table, clause); - expr = clause.connector === "OR" ? or(expr, next) : and(expr, next); - } - return expr; -} - -function applyJoinToRow(c: any, model: string, row: any, join: any) { - if (!row || !join) { - return row; - } - - if (model === "session" && join.user) { - return c.db - .select() - .from(authUsers) - .where(eq(authUsers.id, row.userId)) - .get() - .then((user: any) => ({ ...row, user: user ?? null })); - } - - if (model === "account" && join.user) { - return c.db - .select() - .from(authUsers) - .where(eq(authUsers.id, row.userId)) - .get() - .then((user: any) => ({ ...row, user: user ?? null })); - } - - if (model === "user" && join.account) { - return c.db - .select() - .from(authAccounts) - .where(eq(authAccounts.userId, row.id)) - .all() - .then((accounts: any[]) => ({ ...row, account: accounts })); - } - - return Promise.resolve(row); -} - -async function applyJoinToRows(c: any, model: string, rows: any[], join: any) { - if (!join || rows.length === 0) { - return rows; - } - - if (model === "session" && join.user) { - const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; - const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : []; - const userMap = new Map(users.map((user: any) => [user.id, user])); - return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); - } - - if (model === "account" && join.user) { - const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; - const users = userIds.length > 0 ? 
await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : []; - const userMap = new Map(users.map((user: any) => [user.id, user])); - return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); - } - - if (model === "user" && join.account) { - const userIds = rows.map((row) => row.id); - const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : []; - const accountsByUserId = new Map(); - for (const account of accounts) { - const entries = accountsByUserId.get(account.userId) ?? []; - entries.push(account); - accountsByUserId.set(account.userId, entries); - } - return rows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] })); - } - - return rows; -} - -export const authUser = actor({ - db: authUserDb, - options: { - name: "Auth User", - icon: "shield", - actionTimeout: 60_000, - }, - createState: (_c, input: { userId: string }) => ({ - userId: input.userId, - }), - actions: { - async createAuthRecord(c, input: { model: string; data: Record }) { - const table = tableFor(input.model); - await c.db - .insert(table) - .values(input.data as any) - .run(); - return await c.db - .select() - .from(table) - .where(eq(columnFor(table, "id"), input.data.id as any)) - .get(); - }, - - async findOneAuthRecord(c, input: { model: string; where: any[]; join?: any }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get(); - return await applyJoinToRow(c, input.model, row ?? 
null, input.join); - }, - - async findManyAuthRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - let query: any = c.db.select().from(table); - if (predicate) { - query = query.where(predicate); - } - if (input.sortBy?.field) { - const column = columnFor(table, input.sortBy.field); - query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column)); - } - if (typeof input.limit === "number") { - query = query.limit(input.limit); - } - if (typeof input.offset === "number") { - query = query.offset(input.offset); - } - const rows = await query.all(); - return await applyJoinToRows(c, input.model, rows, input.join); - }, - - async updateAuthRecord(c, input: { model: string; where: any[]; update: Record }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("updateAuthRecord requires a where clause"); - } - await c.db - .update(table) - .set(input.update as any) - .where(predicate) - .run(); - return await c.db.select().from(table).where(predicate).get(); - }, - - async updateManyAuthRecords(c, input: { model: string; where: any[]; update: Record }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("updateManyAuthRecords requires a where clause"); - } - await c.db - .update(table) - .set(input.update as any) - .where(predicate) - .run(); - const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get(); - return row?.value ?? 
0; - }, - - async deleteAuthRecord(c, input: { model: string; where: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("deleteAuthRecord requires a where clause"); - } - await c.db.delete(table).where(predicate).run(); - }, - - async deleteManyAuthRecords(c, input: { model: string; where: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("deleteManyAuthRecords requires a where clause"); - } - const rows = await c.db.select().from(table).where(predicate).all(); - await c.db.delete(table).where(predicate).run(); - return rows.length; - }, - - async countAuthRecords(c, input: { model: string; where?: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - const row = predicate - ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get() - : await c.db.select({ value: sqlCount() }).from(table).get(); - return row?.value ?? 0; - }, - - async getAppAuthState(c, input: { sessionId: string }) { - const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get(); - if (!session) { - return null; - } - const [user, profile, currentSessionState, accounts] = await Promise.all([ - c.db.select().from(authUsers).where(eq(authUsers.id, session.userId)).get(), - c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(), - c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(), - c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(), - ]); - return { - session, - user, - profile: profile ?? null, - sessionState: currentSessionState ?? 
null, - accounts, - }; - }, - - async upsertUserProfile( - c, - input: { - userId: string; - patch: { - githubAccountId?: string | null; - githubLogin?: string | null; - roleLabel?: string; - eligibleOrganizationIdsJson?: string; - starterRepoStatus?: string; - starterRepoStarredAt?: number | null; - starterRepoSkippedAt?: number | null; - }; - }, - ) { - const now = Date.now(); - await c.db - .insert(userProfiles) - .values({ - userId: input.userId, - githubAccountId: input.patch.githubAccountId ?? null, - githubLogin: input.patch.githubLogin ?? null, - roleLabel: input.patch.roleLabel ?? "GitHub user", - eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]", - starterRepoStatus: input.patch.starterRepoStatus ?? "pending", - starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null, - starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: userProfiles.userId, - set: { - ...(input.patch.githubAccountId !== undefined ? { githubAccountId: input.patch.githubAccountId } : {}), - ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}), - ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}), - ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}), - ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}), - ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}), - ...(input.patch.starterRepoSkippedAt !== undefined ? 
{ starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}), - updatedAt: now, - }, - }) - .run(); - - return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get(); - }, - - async upsertSessionState(c, input: { sessionId: string; activeOrganizationId: string | null }) { - const now = Date.now(); - await c.db - .insert(sessionState) - .values({ - sessionId: input.sessionId, - activeOrganizationId: input.activeOrganizationId, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: sessionState.sessionId, - set: { - activeOrganizationId: input.activeOrganizationId, - updatedAt: now, - }, - }) - .run(); - - return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(); - }, - }, -}); diff --git a/foundry/packages/backend/src/actors/events.ts b/foundry/packages/backend/src/actors/events.ts deleted file mode 100644 index 4a514ad..0000000 --- a/foundry/packages/backend/src/actors/events.ts +++ /dev/null @@ -1,104 +0,0 @@ -import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared"; - -export interface TaskCreatedEvent { - organizationId: string; - repoId: string; - taskId: string; - sandboxProviderId: SandboxProviderId; - branchName: string; - title: string; -} - -export interface TaskStatusEvent { - organizationId: string; - repoId: string; - taskId: string; - status: TaskStatus; - message: string; -} - -export interface RepositorySnapshotEvent { - organizationId: string; - repoId: string; - updatedAt: number; -} - -export interface AgentStartedEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} - -export interface AgentIdleEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} - -export interface AgentErrorEvent { - organizationId: string; - repoId: string; - taskId: string; - message: string; -} - -export interface PrCreatedEvent { - organizationId: string; - repoId: 
string; - taskId: string; - prNumber: number; - url: string; -} - -export interface PrClosedEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - merged: boolean; -} - -export interface PrReviewEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - reviewer: string; - status: string; -} - -export interface CiStatusChangedEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - status: string; -} - -export type TaskStepName = "auto_commit" | "push" | "pr_submit"; -export type TaskStepStatus = "started" | "completed" | "skipped" | "failed"; - -export interface TaskStepEvent { - organizationId: string; - repoId: string; - taskId: string; - step: TaskStepName; - status: TaskStepStatus; - message: string; -} - -export interface BranchSwitchedEvent { - organizationId: string; - repoId: string; - taskId: string; - branchName: string; -} - -export interface SessionAttachedEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts index 87cc76f..6584968 100644 --- a/foundry/packages/backend/src/actors/github-data/db/migrations.ts +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -18,6 +18,12 @@ const journal = { tag: "0002_github_branches", breakpoints: true, }, + { + idx: 3, + when: 1773907200000, + tag: "0003_sync_progress", + breakpoints: true, + }, ], } as const; @@ -32,7 +38,8 @@ export default { \`installation_id\` integer, \`last_sync_label\` text NOT NULL, \`last_sync_at\` integer, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`github_meta_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint CREATE TABLE \`github_repositories\` ( @@ -78,6 +85,22 @@ CREATE TABLE \`github_pull_requests\` ( \`commit_sha\` text 
NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0003: `ALTER TABLE \`github_meta\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`sync_phase\` text; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`processed_repository_count\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`total_repository_count\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_repositories\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_members\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_pull_requests\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_branches\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts index fe37863..a11ac9a 100644 --- a/foundry/packages/backend/src/actors/github-data/db/schema.ts +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -1,15 +1,24 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; -export const githubMeta = sqliteTable("github_meta", { - id: integer("id").primaryKey(), - connectedAccount: text("connected_account").notNull(), - installationStatus: text("installation_status").notNull(), - syncStatus: text("sync_status").notNull(), - installationId: integer("installation_id"), - lastSyncLabel: text("last_sync_label").notNull(), - lastSyncAt: integer("last_sync_at"), - updatedAt: integer("updated_at").notNull(), -}); +export const githubMeta = sqliteTable( + "github_meta", + { + id: integer("id").primaryKey(), + connectedAccount: 
text("connected_account").notNull(), + installationStatus: text("installation_status").notNull(), + syncStatus: text("sync_status").notNull(), + installationId: integer("installation_id"), + lastSyncLabel: text("last_sync_label").notNull(), + lastSyncAt: integer("last_sync_at"), + syncGeneration: integer("sync_generation").notNull(), + syncPhase: text("sync_phase"), + processedRepositoryCount: integer("processed_repository_count").notNull(), + totalRepositoryCount: integer("total_repository_count").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("github_meta_singleton_id_check", sql`${table.id} = 1`)], +); export const githubRepositories = sqliteTable("github_repositories", { repoId: text("repo_id").notNull().primaryKey(), @@ -17,6 +26,7 @@ export const githubRepositories = sqliteTable("github_repositories", { cloneUrl: text("clone_url").notNull(), private: integer("private").notNull(), defaultBranch: text("default_branch").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -25,6 +35,7 @@ export const githubBranches = sqliteTable("github_branches", { repoId: text("repo_id").notNull(), branchName: text("branch_name").notNull(), commitSha: text("commit_sha").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -35,6 +46,7 @@ export const githubMembers = sqliteTable("github_members", { email: text("email"), role: text("role"), state: text("state").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -51,5 +63,6 @@ export const githubPullRequests = sqliteTable("github_pull_requests", { baseRefName: text("base_ref_name").notNull(), authorLogin: text("author_login"), isDraft: integer("is_draft").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); diff --git 
a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts index 08c815d..a7d65a0 100644 --- a/foundry/packages/backend/src/actors/github-data/index.ts +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -1,16 +1,20 @@ // @ts-nocheck -import { eq } from "drizzle-orm"; -import { actor, queue } from "rivetkit"; -import { workflow, Loop } from "rivetkit/workflow"; +import { eq, inArray } from "drizzle-orm"; +import { actor } from "rivetkit"; import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; import { getOrCreateOrganization, getTask } from "../handles.js"; import { repoIdFromRemote } from "../../services/repo.js"; import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +// actions called directly (no queue) import { githubDataDb } from "./db/db.js"; import { githubBranches, githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; +// workflow.ts is no longer used — commands are actions now const META_ROW_ID = 1; +const SYNC_REPOSITORY_BATCH_SIZE = 10; + +type GithubSyncPhase = "discovering_repositories" | "syncing_repositories" | "syncing_branches" | "syncing_members" | "syncing_pull_requests"; interface GithubDataInput { organizationId: string; @@ -70,6 +74,8 @@ interface ClearStateInput { label: string; } +// sendOrganizationCommand removed — org actions called directly + interface PullRequestWebhookInput { connectedAccount: string; installationStatus: FoundryOrganization["github"]["installationStatus"]; @@ -93,6 +99,19 @@ interface PullRequestWebhookInput { }; } +interface GithubMetaState { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + syncStatus: FoundryOrganization["github"]["syncStatus"]; + installationId: number | null; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; 
+ syncPhase: GithubSyncPhase | null; + processedRepositoryCount: number; + totalRepositoryCount: number; +} + function normalizePrStatus(input: { state: string; isDraft?: boolean; merged?: boolean }): "OPEN" | "DRAFT" | "CLOSED" | "MERGED" { const state = input.state.trim().toUpperCase(); if (input.merged || state === "MERGED") return "MERGED"; @@ -106,6 +125,7 @@ function pullRequestSummaryFromRow(row: any) { repoId: row.repoId, repoFullName: row.repoFullName, number: row.number, + status: Boolean(row.isDraft) ? "draft" : "ready", title: row.title, state: row.state, url: row.url, @@ -117,7 +137,18 @@ function pullRequestSummaryFromRow(row: any) { }; } -async function readMeta(c: any) { +function chunkItems(items: T[], size: number): T[][] { + if (items.length === 0) { + return []; + } + const chunks: T[][] = []; + for (let index = 0; index < items.length; index += size) { + chunks.push(items.slice(index, index + size)); + } + return chunks; +} + +export async function readMeta(c: any): Promise { const row = await c.db.select().from(githubMeta).where(eq(githubMeta.id, META_ROW_ID)).get(); return { connectedAccount: row?.connectedAccount ?? "", @@ -126,10 +157,14 @@ async function readMeta(c: any) { installationId: row?.installationId ?? null, lastSyncLabel: row?.lastSyncLabel ?? "Waiting for first import", lastSyncAt: row?.lastSyncAt ?? null, + syncGeneration: row?.syncGeneration ?? 0, + syncPhase: (row?.syncPhase ?? null) as GithubSyncPhase | null, + processedRepositoryCount: row?.processedRepositoryCount ?? 0, + totalRepositoryCount: row?.totalRepositoryCount ?? 
0, }; } -async function writeMeta(c: any, patch: Partial>>) { +async function writeMeta(c: any, patch: Partial) { const current = await readMeta(c); const next = { ...current, @@ -145,6 +180,10 @@ async function writeMeta(c: any, patch: Partial): Promise { + const meta = await writeMeta(c, patch); + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.commandApplyGithubSyncProgress({ + connectedAccount: meta.connectedAccount, + installationStatus: meta.installationStatus, + installationId: meta.installationId, + syncStatus: meta.syncStatus, + lastSyncLabel: meta.lastSyncLabel, + lastSyncAt: meta.lastSyncAt, + syncGeneration: meta.syncGeneration, + syncPhase: meta.syncPhase, + processedRepositoryCount: meta.processedRepositoryCount, + totalRepositoryCount: meta.totalRepositoryCount, + }); + return meta; +} + async function getOrganizationContext(c: any, overrides?: FullSyncInput) { + // Try to read the org profile for fallback values, but don't require it. + // Webhook-triggered syncs can arrive before the user signs in and creates the + // org profile row. The webhook callers already pass the necessary overrides + // (connectedAccount, installationId, githubLogin, kind), so we can proceed + // without the profile as long as overrides cover the required fields. const organizationHandle = await getOrCreateOrganization(c, c.state.organizationId); const organizationState = await organizationHandle.getOrganizationShellStateIfInitialized({}); - if (!organizationState) { - throw new Error(`Organization ${c.state.organizationId} is not initialized`); + + // If the org profile doesn't exist and overrides don't provide enough context, fail. + if (!organizationState && !overrides?.connectedAccount) { + throw new Error(`Organization ${c.state.organizationId} is not initialized and no override context was provided`); } + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); return { - kind: overrides?.kind ?? 
organizationState.snapshot.kind, - githubLogin: overrides?.githubLogin ?? organizationState.githubLogin, - connectedAccount: overrides?.connectedAccount ?? organizationState.snapshot.github.connectedAccount ?? organizationState.githubLogin, - installationId: overrides?.installationId ?? organizationState.githubInstallationId ?? null, + kind: overrides?.kind ?? organizationState?.snapshot.kind, + githubLogin: overrides?.githubLogin ?? organizationState?.githubLogin, + connectedAccount: overrides?.connectedAccount ?? organizationState?.snapshot.github.connectedAccount ?? organizationState?.githubLogin, + installationId: overrides?.installationId ?? organizationState?.githubInstallationId ?? null, installationStatus: overrides?.installationStatus ?? - organizationState.snapshot.github.installationStatus ?? - (organizationState.snapshot.kind === "personal" ? "connected" : "reconnect_required"), + organizationState?.snapshot.github.installationStatus ?? + (organizationState?.snapshot.kind === "personal" ? "connected" : "reconnect_required"), accessToken: overrides?.accessToken ?? auth?.githubToken ?? null, }; } -async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number) { - await c.db.delete(githubRepositories).run(); +async function upsertRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number, syncGeneration: number) { for (const repository of repositories) { await c.db .insert(githubRepositories) @@ -194,14 +262,35 @@ async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[ cloneUrl: repository.cloneUrl, private: repository.private ? 1 : 0, defaultBranch: repository.defaultBranch, + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replaceBranches(c: any, branches: GithubBranchRecord[], updatedAt: number) { - await c.db.delete(githubBranches).run(); +async function sweepRepositories(c: any, syncGeneration: number) { + const rows = await c.db.select({ repoId: githubRepositories.repoId, syncGeneration: githubRepositories.syncGeneration }).from(githubRepositories).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).run(); + } +} + +async function upsertBranches(c: any, branches: GithubBranchRecord[], updatedAt: number, syncGeneration: number) { for (const branch of branches) { await c.db .insert(githubBranches) @@ -210,14 +299,34 @@ async function replaceBranches(c: any, branches: GithubBranchRecord[], updatedAt repoId: branch.repoId, branchName: branch.branchName, commitSha: branch.commitSha, + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubBranches.branchId, + set: { + repoId: branch.repoId, + branchName: branch.branchName, + commitSha: branch.commitSha, + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replaceMembers(c: any, members: GithubMemberRecord[], updatedAt: number) { - await c.db.delete(githubMembers).run(); +async function sweepBranches(c: any, syncGeneration: number) { + const rows = await c.db.select({ branchId: githubBranches.branchId, syncGeneration: githubBranches.syncGeneration }).from(githubBranches).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubBranches).where(eq(githubBranches.branchId, row.branchId)).run(); + } +} + +async function upsertMembers(c: any, members: GithubMemberRecord[], updatedAt: number, syncGeneration: number) { for (const member of members) { await c.db .insert(githubMembers) @@ -228,14 
+337,36 @@ async function replaceMembers(c: any, members: GithubMemberRecord[], updatedAt: email: member.email ?? null, role: member.role ?? null, state: member.state ?? "active", + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubMembers.memberId, + set: { + login: member.login, + displayName: member.name || member.login, + email: member.email ?? null, + role: member.role ?? null, + state: member.state ?? "active", + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord[]) { - await c.db.delete(githubPullRequests).run(); +async function sweepMembers(c: any, syncGeneration: number) { + const rows = await c.db.select({ memberId: githubMembers.memberId, syncGeneration: githubMembers.syncGeneration }).from(githubMembers).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubMembers).where(eq(githubMembers.memberId, row.memberId)).run(); + } +} + +async function upsertPullRequests(c: any, pullRequests: GithubPullRequestRecord[], syncGeneration: number) { for (const pullRequest of pullRequests) { await c.db .insert(githubPullRequests) @@ -252,19 +383,51 @@ async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord baseRefName: pullRequest.baseRefName, authorLogin: pullRequest.authorLogin ?? null, isDraft: pullRequest.isDraft ? 1 : 0, + syncGeneration, updatedAt: pullRequest.updatedAt, }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + repoId: pullRequest.repoId, + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 
1 : 0, + syncGeneration, + updatedAt: pullRequest.updatedAt, + }, + }) .run(); } } -async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string) { +async function sweepPullRequests(c: any, syncGeneration: number) { + const rows = await c.db.select({ prId: githubPullRequests.prId, syncGeneration: githubPullRequests.syncGeneration }).from(githubPullRequests).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, row.prId)).run(); + } +} + +async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string, pullRequest: ReturnType | null) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + if (!repositoryRecord) { + return; + } const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.refreshTaskSummaryForGithubBranch({ repoId, branchName }); + void organization.commandRefreshTaskSummaryForBranch({ repoId, branchName, pullRequest, repoName: repositoryRecord.fullName ?? 
undefined }).catch(() => {}); } async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: any[]) { - const organization = await getOrCreateOrganization(c, c.state.organizationId); const beforeById = new Map(beforeRows.map((row) => [row.prId, row])); const afterById = new Map(afterRows.map((row) => [row.prId, row])); @@ -283,24 +446,24 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (!changed) { continue; } - await organization.applyOpenPullRequestUpdate({ - pullRequest: pullRequestSummaryFromRow(row), - }); - await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, pullRequestSummaryFromRow(row)); } for (const [prId, row] of beforeById) { if (afterById.has(prId)) { continue; } - await organization.removeOpenPullRequest({ prId }); - await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, null); } } async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).get(); + if (!repositoryRecord) { + return; + } const organization = await getOrCreateOrganization(c, c.state.organizationId); - const match = await organization.findTaskForGithubBranch({ + const match = await organization.findTaskForBranch({ repoId: row.repoId, branchName: row.headRefName, }); @@ -309,7 +472,7 @@ async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { } try { const task = getTask(c, c.state.organizationId, row.repoId, match.taskId); - await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }); + void task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }).catch(() => {}); } catch { // Best-effort only. Task summary refresh will still clear the PR state. 
} @@ -361,8 +524,7 @@ async function resolveMembers(c: any, context: Awaited>, repositories: GithubRepositoryRecord[], ): Promise { @@ -445,20 +607,13 @@ async function listRepositoryBranchesForContext( })); } -async function resolveBranches( - _c: any, - context: Awaited>, - repositories: GithubRepositoryRecord[], -): Promise { - return (await Promise.all(repositories.map((repository) => listRepositoryBranchesForContext(context, repository)))).flat(); -} - async function refreshRepositoryBranches( c: any, context: Awaited>, repository: GithubRepositoryRecord, updatedAt: number, ): Promise { + const currentMeta = await readMeta(c); const nextBranches = await listRepositoryBranchesForContext(context, repository); await c.db .delete(githubBranches) @@ -473,6 +628,7 @@ async function refreshRepositoryBranches( repoId: branch.repoId, branchName: branch.branchName, commitSha: branch.commitSha, + syncGeneration: currentMeta.syncGeneration, updatedAt, }) .run(); @@ -483,132 +639,254 @@ async function readAllPullRequestRows(c: any) { return await c.db.select().from(githubPullRequests).all(); } -async function runFullSync(c: any, input: FullSyncInput = {}) { - const startedAt = Date.now(); - const beforeRows = await readAllPullRequestRows(c); - const context = await getOrganizationContext(c, input); +/** Config returned by fullSyncSetup, passed to subsequent sync phases. */ +export interface FullSyncConfig { + syncGeneration: number; + startedAt: number; + totalRepositoryCount: number; + connectedAccount: string; + installationStatus: string; + installationId: number | null; + beforePrRows: any[]; +} - await writeMeta(c, { +async function readRepositoriesFromDb(c: any): Promise { + const rows = await c.db.select().from(githubRepositories).all(); + return rows.map((r: any) => ({ + fullName: r.fullName, + cloneUrl: r.cloneUrl, + private: Boolean(r.private), + defaultBranch: r.defaultBranch, + })); +} + +/** + * Phase 1: Discover repositories and persist them. 
+ * Returns the config needed by all subsequent phases, or null if nothing to do. + */ +export async function fullSyncSetup(c: any, input: FullSyncInput = {}): Promise { + const startedAt = Date.now(); + const beforePrRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + const context = await getOrganizationContext(c, input); + const syncGeneration = currentMeta.syncGeneration + 1; + + await publishSyncProgress(c, { connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, syncStatus: "syncing", lastSyncLabel: input.label?.trim() || "Syncing GitHub data...", + syncGeneration, + syncPhase: "discovering_repositories", + processedRepositoryCount: 0, + totalRepositoryCount: 0, }); const repositories = await resolveRepositories(c, context); - const branches = await resolveBranches(c, context, repositories); - const members = await resolveMembers(c, context); - const pullRequests = await resolvePullRequests(c, context, repositories); + const totalRepositoryCount = repositories.length; - await replaceRepositories(c, repositories, startedAt); - await replaceBranches(c, branches, startedAt); - await replaceMembers(c, members, startedAt); - await replacePullRequests(c, pullRequests); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubDataProjection({ + await publishSyncProgress(c, { connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, - syncStatus: "synced", - lastSyncLabel: repositories.length > 0 ? `Synced ${repositories.length} repositories` : "No repositories available", - lastSyncAt: startedAt, - repositories, + syncStatus: "syncing", + lastSyncLabel: totalRepositoryCount > 0 ? 
`Importing ${totalRepositoryCount} repositories...` : "No repositories available", + syncGeneration, + syncPhase: "syncing_repositories", + processedRepositoryCount: totalRepositoryCount, + totalRepositoryCount, }); - const meta = await writeMeta(c, { - connectedAccount: context.connectedAccount, - installationStatus: context.installationStatus, - installationId: context.installationId, - syncStatus: "synced", - lastSyncLabel: repositories.length > 0 ? `Synced ${repositories.length} repositories` : "No repositories available", - lastSyncAt: startedAt, - }); - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); + await upsertRepositories(c, repositories, startedAt, syncGeneration); return { - ...meta, - repositoryCount: repositories.length, - memberCount: members.length, - pullRequestCount: afterRows.length, + syncGeneration, + startedAt, + totalRepositoryCount, + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + beforePrRows, }; } -const GITHUB_DATA_QUEUE_NAMES = ["githubData.command.syncRepos"] as const; +/** + * Phase 2 (per-batch): Fetch and upsert branches for one batch of repos. + * Returns true when all batches have been processed. + */ +export async function fullSyncBranchBatch(c: any, config: FullSyncConfig, batchIndex: number): Promise { + const repos = await readRepositoriesFromDb(c); + const batches = chunkItems(repos, SYNC_REPOSITORY_BATCH_SIZE); + if (batchIndex >= batches.length) return true; -async function runGithubDataWorkflow(ctx: any): Promise { - // Initial sync: if this actor was just created and has never synced, - // kick off the first full sync automatically. 
- await ctx.step({ - name: "github-data-initial-sync", - timeout: 5 * 60_000, - run: async () => { - const meta = await readMeta(ctx); - if (meta.syncStatus !== "pending") { - return; // Already synced or syncing — skip initial sync - } - try { - await runFullSync(ctx, { label: "Importing repository catalog..." }); - } catch (error) { - // Best-effort initial sync. Write the error to meta so the client - // sees the failure and can trigger a manual retry. - const currentMeta = await readMeta(ctx); - const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); - await organization.markOrganizationSyncFailed({ - message: error instanceof Error ? error.message : "GitHub import failed", - installationStatus: currentMeta.installationStatus, - }); - } - }, + const batch = batches[batchIndex]!; + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const batchBranches = (await Promise.all(batch.map((repo) => listRepositoryBranchesForContext(context, repo)))).flat(); + await upsertBranches(c, batchBranches, config.startedAt, config.syncGeneration); + + const processedCount = Math.min((batchIndex + 1) * SYNC_REPOSITORY_BATCH_SIZE, repos.length); + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: `Synced branches for ${processedCount} of ${repos.length} repositories`, + syncGeneration: config.syncGeneration, + syncPhase: "syncing_branches", + processedRepositoryCount: processedCount, + totalRepositoryCount: repos.length, }); - // Command loop for explicit sync requests (reload, re-import, etc.) 
- await ctx.loop("github-data-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-github-data-command", { - names: [...GITHUB_DATA_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } + return false; +} - try { - if (msg.name === "githubData.command.syncRepos") { - await loopCtx.step({ - name: "github-data-sync-repos", - timeout: 5 * 60_000, - run: async () => { - const body = msg.body as FullSyncInput; - await runFullSync(loopCtx, body); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - await msg.complete({ error: message }).catch(() => {}); - } +/** + * Phase 3: Resolve, upsert, and sweep members. + */ +export async function fullSyncMembers(c: any, config: FullSyncConfig): Promise { + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: "Syncing GitHub members...", + syncGeneration: config.syncGeneration, + syncPhase: "syncing_members", + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); - return Loop.continue(undefined); + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const members = await resolveMembers(c, context); + await upsertMembers(c, members, config.startedAt, config.syncGeneration); + await sweepMembers(c, config.syncGeneration); +} + +/** + * Phase 4 (per-batch): Fetch and upsert pull requests for one batch of repos. + * Returns true when all batches have been processed. 
+ */ +export async function fullSyncPullRequestBatch(c: any, config: FullSyncConfig, batchIndex: number): Promise { + const repos = await readRepositoriesFromDb(c); + const batches = chunkItems(repos, SYNC_REPOSITORY_BATCH_SIZE); + if (batchIndex >= batches.length) return true; + + const batch = batches[batchIndex]!; + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const batchPRs = await listPullRequestsForRepositories(context, batch); + await upsertPullRequests(c, batchPRs, config.syncGeneration); + + const processedCount = Math.min((batchIndex + 1) * SYNC_REPOSITORY_BATCH_SIZE, repos.length); + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: `Synced pull requests for ${processedCount} of ${repos.length} repositories`, + syncGeneration: config.syncGeneration, + syncPhase: "syncing_pull_requests", + processedRepositoryCount: processedCount, + totalRepositoryCount: repos.length, + }); + + return false; +} + +/** + * Phase 5: Sweep stale data, publish final state, emit PR change events. + */ +export async function fullSyncFinalize(c: any, config: FullSyncConfig): Promise { + await sweepBranches(c, config.syncGeneration); + await sweepPullRequests(c, config.syncGeneration); + await sweepRepositories(c, config.syncGeneration); + + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "synced", + lastSyncLabel: config.totalRepositoryCount > 0 ? 
`Synced ${config.totalRepositoryCount} repositories` : "No repositories available", + lastSyncAt: config.startedAt, + syncGeneration: config.syncGeneration, + syncPhase: null, + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, config.beforePrRows, afterRows); +} + +/** + * Error handler: publish error sync state when a full sync fails. + */ +/** + * Single-shot full sync: runs all phases (setup, branches, members, PRs, finalize) + * using native JS loops. This must NOT use workflow primitives (step/loop/sleep) + * because it runs inside a workflow step. See workflow.ts for context on why + * sub-loops cause HistoryDivergedError. + */ +export async function runFullSync(c: any, input: FullSyncInput = {}): Promise { + const config = await fullSyncSetup(c, input); + + // Branches — native loop over batches + for (let i = 0; ; i++) { + const done = await fullSyncBranchBatch(c, config, i); + if (done) break; + } + + // Members + await fullSyncMembers(c, config); + + // Pull requests — native loop over batches + for (let i = 0; ; i++) { + const done = await fullSyncPullRequestBatch(c, config, i); + if (done) break; + } + + // Finalize + await fullSyncFinalize(c, config); +} + +export async function fullSyncError(c: any, error: unknown): Promise { + const currentMeta = await readMeta(c); + const message = error instanceof Error ? 
error.message : "GitHub import failed"; + await publishSyncProgress(c, { + connectedAccount: currentMeta.connectedAccount, + installationStatus: currentMeta.installationStatus, + installationId: currentMeta.installationId, + syncStatus: "error", + lastSyncLabel: message, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, }); } export const githubData = actor({ db: githubDataDb, - queues: Object.fromEntries(GITHUB_DATA_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "GitHub Data", icon: "github", - actionTimeout: 5 * 60_000, + actionTimeout: 10 * 60_000, }, createState: (_c, input: GithubDataInput) => ({ organizationId: input.organizationId, }), - run: workflow(runGithubDataWorkflow), actions: { async getSummary(c) { const repositories = await c.db.select().from(githubRepositories).all(); @@ -649,9 +927,13 @@ export const githubData = actor({ }; }, - async listPullRequestsForRepository(c, input: { repoId: string }) { - const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); - return rows.map(pullRequestSummaryFromRow); + async listOpenPullRequests(c) { + const rows = await c.db + .select() + .from(githubPullRequests) + .where(inArray(githubPullRequests.state, ["OPEN", "DRAFT"])) + .all(); + return rows.map((row) => pullRequestSummaryFromRow(row)); }, async listBranchesForRepository(c, input: { repoId: string }) { @@ -664,309 +946,215 @@ export const githubData = actor({ .sort((left, right) => left.branchName.localeCompare(right.branchName)); }, - async listOpenPullRequests(c) { - const rows = await c.db.select().from(githubPullRequests).all(); - return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs); - }, - - async getPullRequestForBranch(c, input: { repoId: string; branchName: string }) { - const rows = await 
c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); - const match = rows.find((candidate) => candidate.headRefName === input.branchName) ?? null; - if (!match) { - return null; - } - return { - number: match.number, - status: match.isDraft ? ("draft" as const) : ("ready" as const), - }; - }, - - async fullSync(c, input: FullSyncInput = {}) { - return await runFullSync(c, input); - }, - - async reloadOrganization(c) { - return await runFullSync(c, { label: "Reloading GitHub organization..." }); - }, - - async reloadAllPullRequests(c) { - return await runFullSync(c, { label: "Reloading GitHub pull requests..." }); - }, - - async reloadRepository(c, input: { repoId: string }) { - const context = await getOrganizationContext(c); - const current = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); - if (!current) { - throw new Error(`Unknown GitHub repository: ${input.repoId}`); - } - const { appShell } = getActorRuntimeContext(); - const repository = - context.installationId != null - ? await appShell.github.getInstallationRepository(context.installationId, current.fullName) - : context.accessToken - ? await appShell.github.getUserRepository(context.accessToken, current.fullName) - : null; - if (!repository) { - throw new Error(`Unable to reload repository: ${current.fullName}`); - } - - const updatedAt = Date.now(); - await c.db - .insert(githubRepositories) - .values({ - repoId: input.repoId, - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private ? 1 : 0, - defaultBranch: repository.defaultBranch, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubRepositories.repoId, - set: { - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private ? 
1 : 0, - defaultBranch: repository.defaultBranch, - updatedAt, - }, - }) - .run(); - await refreshRepositoryBranches( - c, - context, - { - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private, - defaultBranch: repository.defaultBranch, - }, - updatedAt, - ); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubRepositoryProjection({ - repoId: input.repoId, - remoteUrl: repository.cloneUrl, - }); - return { - repoId: input.repoId, - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private, - defaultBranch: repository.defaultBranch, - }; - }, - - async reloadPullRequest(c, input: { repoId: string; prNumber: number }) { - const repository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); - if (!repository) { - throw new Error(`Unknown GitHub repository: ${input.repoId}`); - } - const context = await getOrganizationContext(c); - const { appShell } = getActorRuntimeContext(); - const pullRequest = - context.installationId != null - ? await appShell.github.getInstallationPullRequest(context.installationId, repository.fullName, input.prNumber) - : context.accessToken - ? 
await appShell.github.getUserPullRequest(context.accessToken, repository.fullName, input.prNumber) - : null; - if (!pullRequest) { - throw new Error(`Unable to reload pull request #${input.prNumber} for ${repository.fullName}`); - } - - const beforeRows = await readAllPullRequestRows(c); - const updatedAt = Date.now(); - const nextState = normalizePrStatus(pullRequest); - const prId = `${input.repoId}#${input.prNumber}`; - if (nextState === "CLOSED" || nextState === "MERGED") { - await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); - } else { - await c.db - .insert(githubPullRequests) - .values({ - prId, - repoId: input.repoId, - repoFullName: repository.fullName, - number: pullRequest.number, - title: pullRequest.title, - body: pullRequest.body ?? null, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 1 : 0, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubPullRequests.prId, - set: { - title: pullRequest.title, - body: pullRequest.body ?? null, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - }) - .run(); - } - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); - const closed = afterRows.find((row) => row.prId === prId); - if (!closed && (nextState === "CLOSED" || nextState === "MERGED")) { - const previous = beforeRows.find((row) => row.prId === prId); - if (previous) { - await autoArchiveTaskForClosedPullRequest(c, { - ...previous, - state: nextState, - }); + async syncRepos(c, body: any) { + try { + await runFullSync(c, body); + return { ok: true }; + } catch (error) { + try { + await fullSyncError(c, error); + } catch { + /* best effort */ } + throw error; } - return pullRequestSummaryFromRow( - afterRows.find((row) => row.prId === prId) ?? { - prId, - repoId: input.repoId, - repoFullName: repository.fullName, - number: input.prNumber, - title: pullRequest.title, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - ); }, - async clearState(c, input: ClearStateInput) { - const beforeRows = await readAllPullRequestRows(c); - await c.db.delete(githubPullRequests).run(); - await c.db.delete(githubBranches).run(); - await c.db.delete(githubRepositories).run(); - await c.db.delete(githubMembers).run(); - await writeMeta(c, { - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "pending", - lastSyncLabel: input.label, - lastSyncAt: null, - }); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubDataProjection({ - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "pending", - lastSyncLabel: input.label, - lastSyncAt: null, - repositories: [], - }); - await emitPullRequestChangeEvents(c, beforeRows, []); + async reloadRepository(c, body: { repoId: string }) { + return await reloadRepositoryMutation(c, body); }, - async handlePullRequestWebhook(c, input: PullRequestWebhookInput) { - const beforeRows = await readAllPullRequestRows(c); - const repoId = repoIdFromRemote(input.repository.cloneUrl); - const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); - const updatedAt = Date.now(); - const state = normalizePrStatus(input.pullRequest); - const prId = `${repoId}#${input.pullRequest.number}`; + async clearState(c, body: any) { + await clearStateMutation(c, body); + return { ok: true }; + }, - await c.db - .insert(githubRepositories) - .values({ - repoId, - fullName: input.repository.fullName, - cloneUrl: input.repository.cloneUrl, - private: input.repository.private ? 1 : 0, - defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? 
"main", - updatedAt, - }) - .onConflictDoUpdate({ - target: githubRepositories.repoId, - set: { - fullName: input.repository.fullName, - cloneUrl: input.repository.cloneUrl, - private: input.repository.private ? 1 : 0, - defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", - updatedAt, - }, - }) - .run(); - - if (state === "CLOSED" || state === "MERGED") { - await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); - } else { - await c.db - .insert(githubPullRequests) - .values({ - prId, - repoId, - repoFullName: input.repository.fullName, - number: input.pullRequest.number, - title: input.pullRequest.title, - body: input.pullRequest.body ?? null, - state, - url: input.pullRequest.url, - headRefName: input.pullRequest.headRefName, - baseRefName: input.pullRequest.baseRefName, - authorLogin: input.pullRequest.authorLogin ?? null, - isDraft: input.pullRequest.isDraft ? 1 : 0, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubPullRequests.prId, - set: { - title: input.pullRequest.title, - body: input.pullRequest.body ?? null, - state, - url: input.pullRequest.url, - headRefName: input.pullRequest.headRefName, - baseRefName: input.pullRequest.baseRefName, - authorLogin: input.pullRequest.authorLogin ?? null, - isDraft: input.pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - }) - .run(); - } - - await writeMeta(c, { - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "synced", - lastSyncLabel: "GitHub webhook received", - lastSyncAt: updatedAt, - }); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubRepositoryProjection({ - repoId, - remoteUrl: input.repository.cloneUrl, - }); - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); - if (state === "CLOSED" || state === "MERGED") { - const previous = beforeRows.find((row) => row.prId === prId); - if (previous) { - await autoArchiveTaskForClosedPullRequest(c, { - ...previous, - state, - }); - } - } + async handlePullRequestWebhook(c, body: any) { + await handlePullRequestWebhookMutation(c, body); + return { ok: true }; }, }, }); + +export async function reloadRepositoryMutation(c: any, input: { repoId: string }) { + const context = await getOrganizationContext(c); + const current = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!current) { + throw new Error(`Unknown GitHub repository: ${input.repoId}`); + } + const { appShell } = getActorRuntimeContext(); + const repository = + context.installationId != null + ? await appShell.github.getInstallationRepository(context.installationId, current.fullName) + : context.accessToken + ? await appShell.github.getUserRepository(context.accessToken, current.fullName) + : null; + if (!repository) { + throw new Error(`Unable to reload repository: ${current.fullName}`); + } + + const updatedAt = Date.now(); + const currentMeta = await readMeta(c); + await c.db + .insert(githubRepositories) + .values({ + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + await refreshRepositoryBranches( + c, + context, + { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }, + updatedAt, + ); + + return { + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }; +} + +export async function clearStateMutation(c: any, input: ClearStateInput) { + const beforeRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubBranches).run(); + await c.db.delete(githubRepositories).run(); + await c.db.delete(githubMembers).run(); + await writeMeta(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "pending", + lastSyncLabel: input.label, + lastSyncAt: null, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + await emitPullRequestChangeEvents(c, beforeRows, []); +} + +export async function handlePullRequestWebhookMutation(c: any, input: PullRequestWebhookInput) { + const beforeRows = await readAllPullRequestRows(c); + const repoId = repoIdFromRemote(input.repository.cloneUrl); + const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + const updatedAt = Date.now(); + const currentMeta = await readMeta(c); 
+ const state = normalizePrStatus(input.pullRequest); + const prId = `${repoId}#${input.pullRequest.number}`; + + await c.db + .insert(githubRepositories) + .values({ + repoId, + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + + if (state === "CLOSED" || state === "MERGED") { + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); + } else { + await c.db + .insert(githubPullRequests) + .values({ + prId, + repoId, + repoFullName: input.repository.fullName, + number: input.pullRequest.number, + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 
1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + } + + await publishSyncProgress(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "synced", + lastSyncLabel: "GitHub webhook received", + lastSyncAt: updatedAt, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + if (state === "CLOSED" || state === "MERGED") { + const previous = beforeRows.find((row) => row.prId === prId); + if (previous) { + await autoArchiveTaskForClosedPullRequest(c, { + ...previous, + state, + }); + } + } +} diff --git a/foundry/packages/backend/src/actors/github-data/workflow.ts b/foundry/packages/backend/src/actors/github-data/workflow.ts new file mode 100644 index 0000000..3497381 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/workflow.ts @@ -0,0 +1,81 @@ +// @ts-nocheck +import { logActorWarning, resolveErrorMessage } from "../logging.js"; + +// Dynamic imports to break circular dependency: index.ts imports workflow.ts, +// and workflow.ts needs functions from index.ts. +async function getIndexModule() { + return await import("./index.js"); +} + +export const GITHUB_DATA_QUEUE_NAMES = [ + "githubData.command.syncRepos", + "githubData.command.reloadRepository", + "githubData.command.clearState", + "githubData.command.handlePullRequestWebhook", +] as const; + +export type GithubDataQueueName = (typeof GITHUB_DATA_QUEUE_NAMES)[number]; + +export function githubDataWorkflowQueueName(name: GithubDataQueueName): GithubDataQueueName { + return name; +} + +/** + * Plain run handler (no workflow engine). Drains the queue using `c.queue.iter()` + * with completable messages. 
This avoids the RivetKit bug where actors created + * from another actor's workflow context never start their `run: workflow(...)`. + */ +export async function runGithubDataCommandLoop(c: any): Promise { + for await (const msg of c.queue.iter({ names: [...GITHUB_DATA_QUEUE_NAMES], completable: true })) { + try { + if (msg.name === "githubData.command.syncRepos") { + try { + const { runFullSync } = await getIndexModule(); + await runFullSync(c, msg.body); + await msg.complete({ ok: true }); + } catch (error) { + const { fullSyncError } = await getIndexModule(); + try { + await fullSyncError(c, error); + } catch { + /* best effort */ + } + const message = error instanceof Error ? error.message : String(error); + await msg.complete({ error: message }).catch(() => {}); + } + continue; + } + + if (msg.name === "githubData.command.reloadRepository") { + const { reloadRepositoryMutation } = await getIndexModule(); + const result = await reloadRepositoryMutation(c, msg.body); + await msg.complete(result); + continue; + } + + if (msg.name === "githubData.command.clearState") { + const { clearStateMutation } = await getIndexModule(); + await clearStateMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === "githubData.command.handlePullRequestWebhook") { + const { handlePullRequestWebhookMutation } = await getIndexModule(); + await handlePullRequestWebhookMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + logActorWarning("githubData", "unknown queue message", { queueName: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("githubData", "github-data command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + } +} diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index 
bd17fb0..2cc83d9 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,4 +1,4 @@ -import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js"; +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey, userKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); @@ -10,28 +10,14 @@ export async function getOrCreateOrganization(c: any, organizationId: string) { }); } -export async function getOrCreateAuthUser(c: any, userId: string) { - return await actorClient(c).authUser.getOrCreate(authUserKey(userId), { +export async function getOrCreateUser(c: any, userId: string) { + return await actorClient(c).user.getOrCreate(userKey(userId), { createWithInput: { userId }, }); } -export function getAuthUser(c: any, userId: string) { - return actorClient(c).authUser.get(authUserKey(userId)); -} - -export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) { - return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), { - createWithInput: { - organizationId, - repoId, - remoteUrl, - }, - }); -} - -export function getRepository(c: any, organizationId: string, repoId: string) { - return actorClient(c).repository.get(repositoryKey(organizationId, repoId)); +export function getUser(c: any, userId: string) { + return actorClient(c).user.get(userKey(userId)); } export function getTask(c: any, organizationId: string, repoId: string, taskId: string) { @@ -44,11 +30,10 @@ export async function getOrCreateTask(c: any, organizationId: string, repoId: st }); } -export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) { - return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), { +export async function getOrCreateAuditLog(c: any, organizationId: string) { + return await 
actorClient(c).auditLog.getOrCreate(auditLogKey(organizationId), { createWithInput: { organizationId, - repoId, }, }); } @@ -75,8 +60,8 @@ export async function getOrCreateTaskSandbox(c: any, organizationId: string, san }); } -export function selfHistory(c: any) { - return actorClient(c).history.getForId(c.actorId); +export function selfAuditLog(c: any) { + return actorClient(c).auditLog.getForId(c.actorId); } export function selfTask(c: any) { @@ -87,12 +72,8 @@ export function selfOrganization(c: any) { return actorClient(c).organization.getForId(c.actorId); } -export function selfRepository(c: any) { - return actorClient(c).repository.getForId(c.actorId); -} - -export function selfAuthUser(c: any) { - return actorClient(c).authUser.getForId(c.actorId); +export function selfUser(c: any) { + return actorClient(c).user.getForId(c.actorId); } export function selfGithubData(c: any) { diff --git a/foundry/packages/backend/src/actors/history/db/drizzle.config.ts b/foundry/packages/backend/src/actors/history/db/drizzle.config.ts deleted file mode 100644 index 3b1d8bd..0000000 --- a/foundry/packages/backend/src/actors/history/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/history/db/drizzle", - schema: "./src/actors/history/db/schema.ts", -}); diff --git a/foundry/packages/backend/src/actors/history/index.ts b/foundry/packages/backend/src/actors/history/index.ts deleted file mode 100644 index fa1373b..0000000 --- a/foundry/packages/backend/src/actors/history/index.ts +++ /dev/null @@ -1,115 +0,0 @@ -// @ts-nocheck -import { and, desc, eq } from "drizzle-orm"; -import { actor, queue } from "rivetkit"; -import { Loop, workflow } from "rivetkit/workflow"; -import type { HistoryEvent } from "@sandbox-agent/foundry-shared"; -import { selfHistory } from "../handles.js"; -import { historyDb } from "./db/db.js"; -import { events } from "./db/schema.js"; - -export 
interface HistoryInput { - organizationId: string; - repoId: string; -} - -export interface AppendHistoryCommand { - kind: string; - taskId?: string; - branchName?: string; - payload: Record; -} - -export interface ListHistoryParams { - branch?: string; - taskId?: string; - limit?: number; -} - -const HISTORY_QUEUE_NAMES = ["history.command.append"] as const; - -async function appendHistoryRow(loopCtx: any, body: AppendHistoryCommand): Promise { - const now = Date.now(); - await loopCtx.db - .insert(events) - .values({ - taskId: body.taskId ?? null, - branchName: body.branchName ?? null, - kind: body.kind, - payloadJson: JSON.stringify(body.payload), - createdAt: now, - }) - .run(); -} - -async function runHistoryWorkflow(ctx: any): Promise { - await ctx.loop("history-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-history-command", { - names: [...HISTORY_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - if (msg.name === "history.command.append") { - await loopCtx.step("append-history-row", async () => appendHistoryRow(loopCtx, msg.body as AppendHistoryCommand)); - await msg.complete({ ok: true }); - } - - return Loop.continue(undefined); - }); -} - -export const history = actor({ - db: historyDb, - queues: { - "history.command.append": queue(), - }, - options: { - name: "History", - icon: "database", - }, - createState: (_c, input: HistoryInput) => ({ - organizationId: input.organizationId, - repoId: input.repoId, - }), - actions: { - async append(c, command: AppendHistoryCommand): Promise { - const self = selfHistory(c); - await self.send("history.command.append", command, { wait: true, timeout: 15_000 }); - }, - - async list(c, params?: ListHistoryParams): Promise { - const whereParts = []; - if (params?.taskId) { - whereParts.push(eq(events.taskId, params.taskId)); - } - if (params?.branch) { - whereParts.push(eq(events.branchName, params.branch)); - } - - const base = c.db - 
.select({ - id: events.id, - taskId: events.taskId, - branchName: events.branchName, - kind: events.kind, - payloadJson: events.payloadJson, - createdAt: events.createdAt, - }) - .from(events); - - const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base) - .orderBy(desc(events.createdAt)) - .limit(params?.limit ?? 100) - .all(); - - return rows.map((row) => ({ - ...row, - organizationId: c.state.organizationId, - repoId: c.state.repoId, - })); - }, - }, - run: workflow(runHistoryWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 2f9e566..52bb914 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -1,9 +1,8 @@ -import { authUser } from "./auth-user/index.js"; +import { user } from "./user/index.js"; import { setup } from "rivetkit"; import { githubData } from "./github-data/index.js"; import { task } from "./task/index.js"; -import { history } from "./history/index.js"; -import { repository } from "./repository/index.js"; +import { auditLog } from "./audit-log/index.js"; import { taskSandbox } from "./sandbox/index.js"; import { organization } from "./organization/index.js"; import { logger } from "../logging.js"; @@ -21,23 +20,20 @@ export const registry = setup({ baseLogger: logger, }, use: { - authUser, + user, organization, - repository, task, taskSandbox, - history, + auditLog, githubData, }, }); export * from "./context.js"; -export * from "./events.js"; -export * from "./auth-user/index.js"; +export * from "./audit-log/index.js"; +export * from "./user/index.js"; export * from "./github-data/index.js"; export * from "./task/index.js"; -export * from "./history/index.js"; export * from "./keys.js"; -export * from "./repository/index.js"; export * from "./sandbox/index.js"; export * from "./organization/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts 
b/foundry/packages/backend/src/actors/keys.ts index 59e669e..03bd014 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -4,24 +4,21 @@ export function organizationKey(organizationId: string): ActorKey { return ["org", organizationId]; } -export function authUserKey(userId: string): ActorKey { +export function userKey(userId: string): ActorKey { return ["org", "app", "user", userId]; } -export function repositoryKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId]; -} - export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "task", taskId]; + return ["org", organizationId, "task", repoId, taskId]; } export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "history"]; +/** One audit log per org (not per repo) — see audit-log/index.ts for rationale. 
*/ +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; } export function githubDataKey(organizationId: string): ActorKey { diff --git a/foundry/packages/backend/src/actors/organization/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts index 70da62b..436765c 100644 --- a/foundry/packages/backend/src/actors/organization/actions.ts +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -1,78 +1,29 @@ // @ts-nocheck import { desc, eq } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; import type { - CreateTaskInput, - HistoryEvent, - HistoryQueryInput, - ListTasksInput, - SandboxProviderId, - RepoOverview, RepoRecord, - StarSandboxAgentRepoInput, - StarSandboxAgentRepoResult, - SwitchResult, - TaskRecord, - TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, - WorkbenchOpenPrSummary, - WorkbenchRepositorySummary, - WorkbenchSessionSummary, - WorkbenchTaskSummary, + WorkspaceRepositorySummary, + WorkspaceTaskSummary, OrganizationEvent, + OrganizationGithubSummary, OrganizationSummarySnapshot, OrganizationUseInput, } from "@sandbox-agent/foundry-shared"; -import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateRepository, selfOrganization } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; -import { defaultSandboxProviderId } from "../../sandbox-config.js"; -import { repoIdFromRemote } from "../../services/repo.js"; -import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; -import { organizationProfile, taskLookup, repos, taskSummaries } from 
"./db/schema.js"; -import { agentTypeForModel } from "../task/workbench.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { organizationAppActions } from "./app-shell.js"; +import { getOrCreateGithubData } from "../handles.js"; +import { organizationProfile, taskSummaries } from "./db/schema.js"; +import { organizationAppActions } from "./actions/app.js"; +import { organizationBetterAuthActions } from "./actions/better-auth.js"; +import { organizationOnboardingActions } from "./actions/onboarding.js"; +import { organizationGithubActions } from "./actions/github.js"; +import { organizationShellActions } from "./actions/organization.js"; +import { organizationTaskActions } from "./actions/tasks.js"; interface OrganizationState { organizationId: string; } -interface GetTaskInput { - organizationId: string; - taskId: string; -} - -interface TaskProxyActionInput extends GetTaskInput { - reason?: string; -} - -interface RepoOverviewInput { - organizationId: string; - repoId: string; -} - -const ORGANIZATION_QUEUE_NAMES = ["organization.command.createTask", "organization.command.syncGithubSession"] as const; -const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; - -type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; - -export { ORGANIZATION_QUEUE_NAMES }; - -export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { - return name; -} - -const ORGANIZATION_PROFILE_ROW_ID = "profile"; +const ORGANIZATION_PROFILE_ROW_ID = 1; function assertOrganization(c: { state: OrganizationState }, organizationId: string): void { if (organizationId !== c.state.organizationId) { @@ -80,64 +31,6 @@ function assertOrganization(c: { state: OrganizationState }, organizationId: str } } -async function resolveRepoId(c: any, taskId: string): Promise { - const row = await c.db.select({ repoId: taskLookup.repoId }).from(taskLookup).where(eq(taskLookup.taskId, taskId)).get(); - - if (!row) { - throw new 
Error(`Unknown task: ${taskId} (not in lookup)`); - } - - return row.repoId; -} - -async function upsertTaskLookupRow(c: any, taskId: string, repoId: string): Promise { - await c.db - .insert(taskLookup) - .values({ - taskId, - repoId, - }) - .onConflictDoUpdate({ - target: taskLookup.taskId, - set: { repoId }, - }) - .run(); -} - -function parseJsonValue(value: string | null | undefined, fallback: T): T { - if (!value) { - return fallback; - } - - try { - return JSON.parse(value) as T; - } catch { - return fallback; - } -} - -async function collectAllTaskSummaries(c: any): Promise { - const repoRows = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); - - const all: TaskSummary[] = []; - for (const row of repoRows) { - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); - const snapshot = await repository.listTaskSummaries({ includeArchived: true }); - all.push(...snapshot); - } catch (error) { - logActorWarning("organization", "failed collecting tasks for repo", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - all.sort((a, b) => b.updatedAt - a.updatedAt); - return all; -} - function repoLabelFromRemote(remoteUrl: string): string { try { const url = new URL(remoteUrl.startsWith("http") ? 
remoteUrl : `https://${remoteUrl}`); @@ -152,34 +45,43 @@ function repoLabelFromRemote(remoteUrl: string): string { return remoteUrl; } -function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepositorySummary { - const repoTasks = taskRows.filter((task) => task.repoId === repoRow.repoId); - const latestActivityMs = repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), repoRow.updatedAt); - +function buildGithubSummary(profile: any, importedRepoCount: number): OrganizationGithubSummary { return { - id: repoRow.repoId, - label: repoLabelFromRemote(repoRow.remoteUrl), - taskCount: repoTasks.length, - latestActivityMs, + connectedAccount: profile?.githubConnectedAccount ?? "", + installationStatus: profile?.githubInstallationStatus ?? "install_required", + syncStatus: profile?.githubSyncStatus ?? "pending", + importedRepoCount, + lastSyncLabel: profile?.githubLastSyncLabel ?? "Waiting for first import", + lastSyncAt: profile?.githubLastSyncAt ?? null, + lastWebhookAt: profile?.githubLastWebhookAt ?? null, + lastWebhookEvent: profile?.githubLastWebhookEvent ?? "", + syncGeneration: profile?.githubSyncGeneration ?? 0, + syncPhase: profile?.githubSyncPhase ?? null, + processedRepositoryCount: profile?.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: profile?.githubTotalRepositoryCount ?? 0, }; } -function taskSummaryRowFromSummary(taskSummary: WorkbenchTaskSummary) { - return { - taskId: taskSummary.id, - repoId: taskSummary.repoId, - title: taskSummary.title, - status: taskSummary.status, - repoName: taskSummary.repoName, - updatedAtMs: taskSummary.updatedAtMs, - branch: taskSummary.branch, - pullRequestJson: JSON.stringify(taskSummary.pullRequest), - sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary), - }; -} +/** + * Reads the organization sidebar snapshot from local tables only — no fan-out + * to child actors. 
Task summaries are organization-owned and updated via push + * from task actors. + */ +async function getOrganizationSummarySnapshot(c: any): Promise { + const profile = await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)).get(); -function taskSummaryFromRow(row: any): WorkbenchTaskSummary { - return { + // Fetch repos + open PRs from github-data actor (single actor, not fan-out) + let repoRows: Array<{ repoId: string; fullName: string; cloneUrl: string; private: boolean; defaultBranch: string }> = []; + let openPullRequests: any[] = []; + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + [repoRows, openPullRequests] = await Promise.all([githubData.listRepositories({}), githubData.listOpenPullRequests({})]); + } catch { + // github-data actor may not exist yet + } + + const summaryRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + const summaries = summaryRows.map((row) => ({ id: row.taskId, repoId: row.repoId, title: row.title, @@ -187,219 +89,60 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary { repoName: row.repoName, updatedAtMs: row.updatedAtMs, branch: row.branch ?? 
null, - pullRequest: parseJsonValue(row.pullRequestJson, null), - sessionsSummary: parseJsonValue(row.sessionsSummaryJson, []), - }; -} - -async function listOpenPullRequestsSnapshot(c: any, taskRows: WorkbenchTaskSummary[]): Promise { - const githubData = getGithubData(c, c.state.organizationId); - const openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []); - const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`)); - - return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`)); -} - -async function reconcileWorkbenchProjection(c: any): Promise { - const repoRows = await c.db - .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - - const taskRows: WorkbenchTaskSummary[] = []; - for (const row of repoRows) { - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); - const summaries = await repository.listTaskSummaries({ includeArchived: true }); - for (const summary of summaries) { - try { - await upsertTaskLookupRow(c, summary.taskId, row.repoId); - const task = getTask(c, c.state.organizationId, row.repoId, summary.taskId); - const taskSummary = await task.getTaskSummary({}); - taskRows.push(taskSummary); - await c.db - .insert(taskSummaries) - .values(taskSummaryRowFromSummary(taskSummary)) - .onConflictDoUpdate({ - target: taskSummaries.taskId, - set: taskSummaryRowFromSummary(taskSummary), - }) - .run(); - } catch (error) { - logActorWarning("organization", "failed collecting task summary during reconciliation", { - organizationId: c.state.organizationId, - repoId: row.repoId, - taskId: summary.taskId, - error: resolveErrorMessage(error), - }); - } - } - } catch (error) { - logActorWarning("organization", "failed collecting repo during 
workbench reconciliation", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - taskRows.sort((left, right) => right.updatedAtMs - left.updatedAtMs); - return { - organizationId: c.state.organizationId, - repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), - taskSummaries: taskRows, - openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows), - }; -} - -async function requireWorkbenchTask(c: any, taskId: string) { - const repoId = await resolveRepoId(c, taskId); - return getTask(c, c.state.organizationId, repoId, taskId); -} - -/** - * Reads the organization sidebar snapshot from the organization actor's local SQLite - * plus the org-scoped GitHub actor for open PRs. Task actors still push - * summary updates into `task_summaries`, so the hot read path stays bounded. - */ -async function getOrganizationSummarySnapshot(c: any): Promise { - const repoRows = await c.db - .select({ - repoId: repos.repoId, - remoteUrl: repos.remoteUrl, - updatedAt: repos.updatedAt, - }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - const taskRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); - const summaries = taskRows.map(taskSummaryFromRow); + pullRequest: row.pullRequestJson + ? (() => { + try { + return JSON.parse(row.pullRequestJson); + } catch { + return null; + } + })() + : null, + sessionsSummary: row.sessionsSummaryJson + ? 
(() => { + try { + return JSON.parse(row.sessionsSummaryJson); + } catch { + return []; + } + })() + : [], + })); return { organizationId: c.state.organizationId, - repos: repoRows.map((row) => buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), + github: buildGithubSummary(profile, repoRows.length), + repos: repoRows + .map((repo) => { + const repoTasks = summaries.filter((t) => t.repoId === repo.repoId); + const latestTaskMs = repoTasks.reduce((latest, t) => Math.max(latest, t.updatedAtMs), 0); + return { + id: repo.repoId, + label: repoLabelFromRemote(repo.cloneUrl), + taskCount: repoTasks.length, + latestActivityMs: latestTaskMs || Date.now(), + }; + }) + .sort((a, b) => b.latestActivityMs - a.latestActivityMs), taskSummaries: summaries, - openPullRequests: await listOpenPullRequestsSnapshot(c, summaries), + openPullRequests, }; } -async function broadcastRepoSummary( - c: any, - type: "repoAdded" | "repoUpdated", - repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, -): Promise { - const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all(); - const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow)); - c.broadcast("organizationUpdated", { type, repo } satisfies OrganizationEvent); -} - -async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - assertOrganization(c, input.organizationId); - - const { config } = getActorRuntimeContext(); - const sandboxProviderId = input.sandboxProviderId ?? 
defaultSandboxProviderId(config); - - const repoId = input.repoId; - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${repoId}`); - } - const remoteUrl = repoRow.remoteUrl; - - const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, remoteUrl); - - const created = await repository.createTask({ - task: input.task, - sandboxProviderId, - agentType: input.agentType ?? null, - explicitTitle: input.explicitTitle ?? null, - explicitBranchName: input.explicitBranchName ?? null, - onBranch: input.onBranch ?? null, - }); - - await c.db - .insert(taskLookup) - .values({ - taskId: created.taskId, - repoId, - }) - .onConflictDoUpdate({ - target: taskLookup.taskId, - set: { repoId }, - }) - .run(); - - try { - const task = getTask(c, c.state.organizationId, repoId, created.taskId); - await organizationActions.applyTaskSummaryUpdate(c, { - taskSummary: await task.getTaskSummary({}), - }); - } catch (error) { - logActorWarning("organization", "failed seeding task summary after task creation", { - organizationId: c.state.organizationId, - repoId, - taskId: created.taskId, - error: resolveErrorMessage(error), - }); - } - - return created; -} - -export async function runOrganizationWorkflow(ctx: any): Promise { - await ctx.loop("organization-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-organization-command", { - names: [...ORGANIZATION_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "organization.command.createTask") { - const result = await loopCtx.step({ - name: "organization-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "organization.command.syncGithubSession") 
{ - await loopCtx.step({ - name: "organization-sync-github-session", - timeout: 60_000, - run: async () => { - const { syncGithubOrganizations } = await import("./app-shell.js"); - await syncGithubOrganizations(loopCtx, msg.body as { sessionId: string; accessToken: string }); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("organization", "organization workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("organization", "organization workflow failed completing error response", { - queueName: msg.name, - error: resolveErrorMessage(completeError), - }); - }); - } - - return Loop.continue(undefined); - }); +export async function refreshOrganizationSnapshotMutation(c: any): Promise { + c.broadcast("organizationUpdated", { + type: "organizationUpdated", + snapshot: await getOrganizationSummarySnapshot(c), + } satisfies OrganizationEvent); } export const organizationActions = { + ...organizationBetterAuthActions, + ...organizationGithubActions, + ...organizationOnboardingActions, + ...organizationShellActions, ...organizationAppActions, + ...organizationTaskActions, async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> { assertOrganization(c, input.organizationId); return { organizationId: c.state.organizationId }; @@ -407,482 +150,98 @@ export const organizationActions = { async listRepos(c: any, input: OrganizationUseInput): Promise { assertOrganization(c, input.organizationId); - - const rows = await c.db - .select({ - repoId: repos.repoId, - remoteUrl: repos.remoteUrl, - createdAt: repos.createdAt, - updatedAt: repos.updatedAt, - }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - - return rows.map((row) => ({ - organizationId: c.state.organizationId, - repoId: row.repoId, - 
remoteUrl: row.remoteUrl, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - })); - }, - - async createTask(c: any, input: CreateTaskInput): Promise { - const self = selfOrganization(c); - return expectQueueResponse( - await self.send(organizationWorkflowQueueName("organization.command.createTask"), input, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { - assertOrganization(c, input.organizationId); - const { driver } = getActorRuntimeContext(); - const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - await driver.github.starRepository(SANDBOX_AGENT_REPO, { - githubToken: auth?.githubToken ?? null, - }); - return { - repo: SANDBOX_AGENT_REPO, - starredAt: Date.now(), - }; - }, - - /** - * Called by task actors when their summary-level state changes. - * This is the write path for the local materialized projection; clients read - * the projection via `getOrganizationSummary`, but only task actors should push - * rows into it. 
- */ - async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise { - await c.db - .insert(taskSummaries) - .values(taskSummaryRowFromSummary(input.taskSummary)) - .onConflictDoUpdate({ - target: taskSummaries.taskId, - set: taskSummaryRowFromSummary(input.taskSummary), - }) - .run(); - c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies OrganizationEvent); - }, - - async removeTaskSummary(c: any, input: { taskId: string }): Promise { - await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies OrganizationEvent); - }, - - async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); - const existing = summaries.find((summary) => summary.branch === input.branchName); - return { taskId: existing?.taskId ?? 
null }; - }, - - async refreshTaskSummaryForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); - const matches = summaries.filter((summary) => summary.branch === input.branchName); - - for (const summary of matches) { - try { - const task = getTask(c, c.state.organizationId, input.repoId, summary.taskId); - await organizationActions.applyTaskSummaryUpdate(c, { - taskSummary: await task.getTaskSummary({}), - }); - } catch (error) { - logActorWarning("organization", "failed refreshing task summary for GitHub branch", { - organizationId: c.state.organizationId, - repoId: input.repoId, - branchName: input.branchName, - taskId: summary.taskId, - error: resolveErrorMessage(error), - }); - } + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + remoteUrl: row.cloneUrl, + createdAt: row.updatedAt ?? Date.now(), + updatedAt: row.updatedAt ?? 
Date.now(), + })); + } catch { + return []; } }, - async applyOpenPullRequestUpdate(c: any, input: { pullRequest: WorkbenchOpenPrSummary }): Promise { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.pullRequest.repoId)).all(); - if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) { - return; - } - c.broadcast("organizationUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies OrganizationEvent); - }, - - async removeOpenPullRequest(c: any, input: { prId: string }): Promise { - c.broadcast("organizationUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies OrganizationEvent); - }, - - async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise { - const now = Date.now(); - const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - await c.db - .insert(repos) - .values({ - repoId: input.repoId, - remoteUrl: input.remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl: input.remoteUrl, - updatedAt: now, - }, - }) - .run(); - await broadcastRepoSummary(c, existing ? 
"repoUpdated" : "repoAdded", { - repoId: input.repoId, - remoteUrl: input.remoteUrl, - updatedAt: now, - }); - }, - - async applyGithubDataProjection( - c: any, - input: { - connectedAccount: string; - installationStatus: string; - installationId: number | null; - syncStatus: string; - lastSyncLabel: string; - lastSyncAt: number | null; - repositories: Array<{ fullName: string; cloneUrl: string; private: boolean }>; - }, - ): Promise { - const existingRepos = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }).from(repos).all(); - const existingById = new Map(existingRepos.map((repo) => [repo.repoId, repo])); - const nextRepoIds = new Set(); - const now = Date.now(); - - for (const repository of input.repositories) { - const repoId = repoIdFromRemote(repository.cloneUrl); - nextRepoIds.add(repoId); - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl: repository.cloneUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl: repository.cloneUrl, - updatedAt: now, - }, - }) - .run(); - await broadcastRepoSummary(c, existingById.has(repoId) ? 
"repoUpdated" : "repoAdded", { - repoId, - remoteUrl: repository.cloneUrl, - updatedAt: now, - }); - } - - for (const repo of existingRepos) { - if (nextRepoIds.has(repo.repoId)) { - continue; - } - await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run(); - c.broadcast("organizationUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies OrganizationEvent); - } - - const profile = await c.db - .select({ id: organizationProfile.id }) - .from(organizationProfile) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .get(); - if (profile) { - await c.db - .update(organizationProfile) - .set({ - githubConnectedAccount: input.connectedAccount, - githubInstallationStatus: input.installationStatus, - githubSyncStatus: input.syncStatus, - githubInstallationId: input.installationId, - githubLastSyncLabel: input.lastSyncLabel, - githubLastSyncAt: input.lastSyncAt, - updatedAt: now, - }) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .run(); - } - }, - - async recordGithubWebhookReceipt( - c: any, - input: { - organizationId: string; - event: string; - action?: string | null; - receivedAt?: number; - }, - ): Promise { - assertOrganization(c, input.organizationId); - - const profile = await c.db - .select({ id: organizationProfile.id }) - .from(organizationProfile) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .get(); - if (!profile) { - return; - } - - await c.db - .update(organizationProfile) - .set({ - githubLastWebhookAt: input.receivedAt ?? Date.now(), - githubLastWebhookEvent: input.action ? 
`${input.event}.${input.action}` : input.event, - }) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .run(); - }, - async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise { assertOrganization(c, input.organizationId); return await getOrganizationSummarySnapshot(c); }, - - async reconcileWorkbenchState(c: any, input: OrganizationUseInput): Promise { - assertOrganization(c, input.organizationId); - return await reconcileWorkbenchProjection(c); - }, - - async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { - // Step 1: Create the task record (wait: true — local state mutations only). - const created = await organizationActions.createTask(c, { - organizationId: c.state.organizationId, - repoId: input.repoId, - task: input.task, - ...(input.title ? { explicitTitle: input.title } : {}), - ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}), - ...(input.model ? { agentType: agentTypeForModel(input.model) } : {}), - }); - - // Step 2: Enqueue session creation + initial message (wait: false). - // The task workflow creates the session record and sends the message in - // the background. The client observes progress via push events on the - // task subscription topic. 
- const task = await requireWorkbenchTask(c, created.taskId); - await task.createWorkbenchSessionAndSend({ - model: input.model, - text: input.task, - }); - - return { taskId: created.taskId }; - }, - - async markWorkbenchUnread(c: any, input: TaskWorkbenchSelectInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.markWorkbenchUnread({}); - }, - - async renameWorkbenchTask(c: any, input: TaskWorkbenchRenameInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchTask(input); - }, - - async renameWorkbenchBranch(c: any, input: TaskWorkbenchRenameInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchBranch(input); - }, - - async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - const task = await requireWorkbenchTask(c, input.taskId); - return await task.createWorkbenchSession({ ...(input.model ? 
{ model: input.model } : {}) }); - }, - - async renameWorkbenchSession(c: any, input: TaskWorkbenchRenameSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchSession(input); - }, - - async setWorkbenchSessionUnread(c: any, input: TaskWorkbenchSetSessionUnreadInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.setWorkbenchSessionUnread(input); - }, - - async updateWorkbenchDraft(c: any, input: TaskWorkbenchUpdateDraftInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.updateWorkbenchDraft(input); - }, - - async changeWorkbenchModel(c: any, input: TaskWorkbenchChangeModelInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.changeWorkbenchModel(input); - }, - - async sendWorkbenchMessage(c: any, input: TaskWorkbenchSendMessageInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.sendWorkbenchMessage(input); - }, - - async stopWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.stopWorkbenchSession(input); - }, - - async closeWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.closeWorkbenchSession(input); - }, - - async publishWorkbenchPr(c: any, input: TaskWorkbenchSelectInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.publishWorkbenchPr({}); - }, - - async revertWorkbenchFile(c: any, input: TaskWorkbenchDiffInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.revertWorkbenchFile(input); - }, - - async reloadGithubOrganization(c: any): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadOrganization({}); - }, - - async reloadGithubPullRequests(c: any): Promise { - await 
getOrCreateGithubData(c, c.state.organizationId).reloadAllPullRequests({}); - }, - - async reloadGithubRepository(c: any, input: { repoId: string }): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadRepository(input); - }, - - async reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadPullRequest(input); - }, - - async listTasks(c: any, input: ListTasksInput): Promise { - assertOrganization(c, input.organizationId); - - if (input.repoId) { - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); - return await repository.listTaskSummaries({ includeArchived: true }); - } - - return await collectAllTaskSummaries(c); - }, - - async getRepoOverview(c: any, input: RepoOverviewInput): Promise { - assertOrganization(c, input.organizationId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); - return await repository.getRepoOverview({}); - }, - - async switchTask(c: any, taskId: string): Promise { - const repoId = await resolveRepoId(c, taskId); - const h = getTask(c, c.state.organizationId, repoId, taskId); - const record = await h.get(); - const switched = await h.switch(); - - return { - organizationId: c.state.organizationId, - taskId, - sandboxProviderId: record.sandboxProviderId, - switchTarget: switched.switchTarget, - }; - }, - - async history(c: any, input: HistoryQueryInput): Promise { - assertOrganization(c, input.organizationId); - - 
const limit = input.limit ?? 20; - const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all(); - - const allEvents: HistoryEvent[] = []; - - for (const row of repoRows) { - try { - const hist = await getOrCreateHistory(c, c.state.organizationId, row.repoId); - const items = await hist.list({ - branch: input.branch, - taskId: input.taskId, - limit, - }); - allEvents.push(...items); - } catch (error) { - logActorWarning("organization", "history lookup failed for repo", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - allEvents.sort((a, b) => b.createdAt - a.createdAt); - return allEvents.slice(0, limit); - }, - - async getTask(c: any, input: GetTaskInput): Promise { - assertOrganization(c, input.organizationId); - - const repoId = await resolveRepoId(c, input.taskId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, repoRow.remoteUrl); - return await repository.getTaskEnriched({ taskId: input.taskId }); - }, - - async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - return await h.attach({ reason: input.reason }); - }, - - async pushTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.push({ reason: input.reason }); - }, - - async syncTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = 
await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.sync({ reason: input.reason }); - }, - - async mergeTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.merge({ reason: input.reason }); - }, - - async archiveTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.archive({ reason: input.reason }); - }, - - async killTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.kill({ reason: input.reason }); - }, }; + +export async function applyGithubSyncProgressMutation( + c: any, + input: { + connectedAccount: string; + installationStatus: string; + installationId: number | null; + syncStatus: string; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; + syncPhase: string | null; + processedRepositoryCount: number; + totalRepositoryCount: number; + }, +): Promise { + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubConnectedAccount: input.connectedAccount, + githubInstallationStatus: input.installationStatus, + githubSyncStatus: input.syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: input.lastSyncLabel, + githubLastSyncAt: input.lastSyncAt, + githubSyncGeneration: input.syncGeneration, + 
githubSyncPhase: input.syncPhase, + githubProcessedRepositoryCount: input.processedRepositoryCount, + githubTotalRepositoryCount: input.totalRepositoryCount, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); + + await refreshOrganizationSnapshotMutation(c); +} + +export async function recordGithubWebhookReceiptMutation( + c: any, + input: { + organizationId: string; + event: string; + action?: string | null; + receivedAt?: number; + }, +): Promise { + assertOrganization(c, input.organizationId); + + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubLastWebhookAt: input.receivedAt ?? Date.now(), + githubLastWebhookEvent: input.action ? `${input.event}.${input.action}` : input.event, + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/actions/app.ts b/foundry/packages/backend/src/actors/organization/actions/app.ts new file mode 100644 index 0000000..d3cc329 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/app.ts @@ -0,0 +1 @@ +export { organizationAppActions } from "../app-shell.js"; diff --git a/foundry/packages/backend/src/actors/organization/actions/better-auth.ts b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts new file mode 100644 index 0000000..37f34b4 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts @@ -0,0 +1,323 @@ +import { + and, + asc, + count as sqlCount, + desc, + eq, + gt, + gte, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + notInArray, + or, +} from "drizzle-orm"; +import { authAccountIndex, authEmailIndex, authSessionIndex, authVerification } from "../db/schema.js"; +import { 
APP_SHELL_ORGANIZATION_ID } from "../constants.js"; + +function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); + } +} + +function organizationAuthColumn(table: any, field: string): any { + const column = table[field]; + if (!column) { + throw new Error(`Unknown auth table field: ${field}`); + } + return column; +} + +function normalizeAuthValue(value: unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeAuthValue(entry)); + } + return value; +} + +function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { + const column = organizationAuthColumn(table, clause.field); + const value = normalizeAuthValue(clause.value); + switch (clause.operator) { + case "ne": + return value === null ? isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? 
isNull(column) : eq(column, value as any); + } +} + +function organizationBetterAuthWhere(table: any, clauses: any[] | undefined): any { + if (!clauses || clauses.length === 0) { + return undefined; + } + let expr = organizationAuthClause(table, clauses[0]); + for (const clause of clauses.slice(1)) { + const next = organizationAuthClause(table, clause); + expr = clause.connector === "OR" ? or(expr, next) : and(expr, next); + } + return expr; +} + +export async function betterAuthUpsertSessionIndexMutation(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authSessionIndex) + .values({ + sessionId: input.sessionId, + sessionToken: input.sessionToken, + userId: input.userId, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authSessionIndex.sessionId, + set: { + sessionToken: input.sessionToken, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get(); +} + +export async function betterAuthDeleteSessionIndexMutation(c: any, input: { sessionId?: string; sessionToken?: string }) { + assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + await c.db.delete(authSessionIndex).where(predicate!).run(); +} + +export async function betterAuthUpsertEmailIndexMutation(c: any, input: { email: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authEmailIndex) + .values({ + email: input.email, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authEmailIndex.email, + set: { + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); +} + +export async function betterAuthDeleteEmailIndexMutation(c: any, input: { email: string }) { + assertAppOrganization(c); + await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); +} + +export async function betterAuthUpsertAccountIndexMutation( + c: any, + input: { id: string; providerId: string; accountId: string; userId: string }, +) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authAccountIndex) + .values({ + id: input.id, + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authAccountIndex.id, + set: { + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); +} + +export async function betterAuthDeleteAccountIndexMutation(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); + return; + } + if (input.providerId && input.accountId) 
{ + await c.db + .delete(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .run(); + } +} + +export async function betterAuthCreateVerificationMutation(c: any, input: { data: Record }) { + assertAppOrganization(c); + + await c.db.insert(authVerification).values(input.data as any).run(); + return await c.db.select().from(authVerification).where(eq(authVerification.id, input.data.id as string)).get(); +} + +export async function betterAuthUpdateVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return null; + } + await c.db.update(authVerification).set(input.update as any).where(predicate).run(); + return await c.db.select().from(authVerification).where(predicate).get(); +} + +export async function betterAuthUpdateManyVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + await c.db.update(authVerification).set(input.update as any).where(predicate).run(); + const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get(); + return row?.value ?? 
0; +} + +export async function betterAuthDeleteVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return; + } + await c.db.delete(authVerification).where(predicate).run(); +} + +export async function betterAuthDeleteManyVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + const rows = await c.db.select().from(authVerification).where(predicate).all(); + await c.db.delete(authVerification).where(predicate).run(); + return rows.length; +} + +export const organizationBetterAuthActions = { + async betterAuthFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { + assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return null; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + return await c.db.select().from(authSessionIndex).where(predicate!).get(); + }, + + async betterAuthFindEmailIndex(c: any, input: { email: string }) { + assertAppOrganization(c); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); + }, + + async betterAuthFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); + } + if (!input.providerId || !input.accountId) { + return null; + } + return await c.db + .select() + .from(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .get(); + }, + + async betterAuthFindOneVerification(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; + }, + + async betterAuthFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + let query = c.db.select().from(authVerification); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = organizationAuthColumn(authVerification, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + return await query.all(); + }, + + async betterAuthCountVerification(c: any, input: { where?: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(authVerification).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/github.ts b/foundry/packages/backend/src/actors/organization/actions/github.ts new file mode 100644 index 0000000..ff14d7e --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/github.ts @@ -0,0 +1,78 @@ +import { desc } from "drizzle-orm"; +import type { FoundryAppSnapshot } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { authSessionIndex } from "../db/schema.js"; +import { + assertAppOrganization, + buildAppSnapshot, + requireEligibleOrganization, + requireSignedInSession, + markOrganizationSyncStartedMutation, +} from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; + +export const organizationGithubActions = { + async resolveAppGithubToken( + c: any, + input: { organizationId: string; requireRepoScope?: boolean }, + ): Promise<{ accessToken: string; scopes: string[] } | null> { + assertAppOrganization(c); + const auth = getBetterAuthService(); + const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); + + for (const row of rows) { + const authState = await auth.getAuthState(row.sessionId); + if 
(authState?.sessionState?.activeOrganizationId !== input.organizationId) { + continue; + } + + const token = await auth.getAccessTokenForSession(row.sessionId); + if (!token?.accessToken) { + continue; + } + + const scopes = token.scopes; + if (input.requireRepoScope !== false && scopes.length > 0 && !scopes.some((scope) => scope === "repo" || scope.startsWith("repo:"))) { + continue; + } + + return { + accessToken: token.accessToken, + scopes, + }; + } + + return null; + }, + + async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + + const githubData = await getOrCreateGithubData(c, input.organizationId); + const summary = await githubData.getSummary({}); + if (summary.syncStatus === "syncing") { + return await buildAppSnapshot(c, input.sessionId); + } + + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + await organizationHandle.commandMarkSyncStarted({ label: "Importing repository catalog..." }); + await organizationHandle.commandBroadcastSnapshot({}); + + void githubData.syncRepos({ label: "Importing repository catalog..." }).catch(() => {}); + + return await buildAppSnapshot(c, input.sessionId); + }, + + async adminReloadGithubOrganization(c: any): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.syncRepos({ label: "Reloading GitHub organization..." 
}); + }, + + async adminReloadGithubRepository(c: any, input: { repoId: string }): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.reloadRepository(input); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/onboarding.ts b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts new file mode 100644 index 0000000..22153f4 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts @@ -0,0 +1,82 @@ +import { randomUUID } from "node:crypto"; +import type { FoundryAppSnapshot, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { + assertAppOrganization, + buildAppSnapshot, + getOrganizationState, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; + +const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; + +export const organizationOnboardingActions = { + async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "skipped", + starterRepoSkippedAt: Date.now(), + starterRepoStarredAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, 
input.organizationId); + await organization.starSandboxAgentRepo({ + organizationId: input.organizationId, + }); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "starred", + starterRepoStarredAt: Date.now(), + starterRepoSkippedAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); + await getOrCreateGithubData(c, input.organizationId); + return await buildAppSnapshot(c, input.sessionId); + }, + + async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.kind !== "organization") { + return { + url: `${appShell.appUrl}/organizations/${input.organizationId}`, + }; + } + return { + url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), + }; + }, + + async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { + const { driver } = getActorRuntimeContext(); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.starRepository(SANDBOX_AGENT_REPO, { + githubToken: auth?.githubToken ?? 
null, + }); + return { + repo: SANDBOX_AGENT_REPO, + starredAt: Date.now(), + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/organization.ts b/foundry/packages/backend/src/actors/organization/actions/organization.ts new file mode 100644 index 0000000..d38e113 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/organization.ts @@ -0,0 +1,55 @@ +import type { FoundryAppSnapshot, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getOrCreateOrganization } from "../../handles.js"; +// actions called directly (no queue) +import { + assertAppOrganization, + assertOrganizationShell, + buildAppSnapshot, + buildOrganizationState, + buildOrganizationStateIfInitialized, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; +// org queue names removed — using direct actions + +export const organizationShellActions = { + async getAppSnapshot(c: any, input: { sessionId: string }): Promise { + return await buildAppSnapshot(c, input.sessionId); + }, + + async setAppDefaultModel(c: any, input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + defaultModel: input.defaultModel, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async updateAppOrganizationProfile( + c: any, + input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, + ): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.commandUpdateShellProfile({ + 
displayName: input.displayName, + slug: input.slug, + primaryDomain: input.primaryDomain, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async getOrganizationShellState(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationState(c); + }, + + async getOrganizationShellStateIfInitialized(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationStateIfInitialized(c); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts new file mode 100644 index 0000000..73abea2 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts @@ -0,0 +1,543 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; +import type { + RepoOverview, + SandboxProviderId, + TaskRecord, + TaskSummary, + WorkspacePullRequestSummary, + WorkspaceSessionSummary, + WorkspaceTaskSummary, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getGithubData, getOrCreateAuditLog, getOrCreateTask, getTask } from "../../handles.js"; +// task actions called directly (no queue) +import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../../services/create-flow.js"; +// actions return directly (no queue response unwrapping) +import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; + +interface CreateTaskCommand { + repoId: string; + task: string; + sandboxProviderId: SandboxProviderId; + explicitTitle: string | null; + explicitBranchName: string | null; + onBranch: string | null; +} + +interface RegisterTaskBranchCommand 
{ + repoId: string; + taskId: string; + branchName: string; + requireExistingRemote?: boolean; +} + +function isStaleTaskReferenceError(error: unknown): boolean { + const message = resolveErrorMessage(error); + return isActorNotFoundError(error) || message.startsWith("Task not found:"); +} + +function parseJsonValue(value: string | null | undefined, fallback: T): T { + if (!value) { + return fallback; + } + + try { + return JSON.parse(value) as T; + } catch { + return fallback; + } +} + +function taskSummaryRowFromSummary(taskSummary: WorkspaceTaskSummary) { + return { + taskId: taskSummary.id, + repoId: taskSummary.repoId, + title: taskSummary.title, + status: taskSummary.status, + repoName: taskSummary.repoName, + updatedAtMs: taskSummary.updatedAtMs, + branch: taskSummary.branch, + pullRequestJson: JSON.stringify(taskSummary.pullRequest), + sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary), + }; +} + +export function taskSummaryFromRow(repoId: string, row: any): WorkspaceTaskSummary { + return { + id: row.taskId, + repoId, + title: row.title, + status: row.status, + repoName: row.repoName, + updatedAtMs: row.updatedAtMs, + branch: row.branch ?? null, + pullRequest: parseJsonValue(row.pullRequestJson, null), + sessionsSummary: parseJsonValue(row.sessionsSummaryJson, []), + }; +} + +export async function upsertTaskSummary(c: any, taskSummary: WorkspaceTaskSummary): Promise { + await c.db + .insert(taskSummaries) + .values(taskSummaryRowFromSummary(taskSummary)) + .onConflictDoUpdate({ + target: taskSummaries.taskId, + set: taskSummaryRowFromSummary(taskSummary), + }) + .run(); +} + +async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { + try { + await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); + } catch { + // Best effort cleanup only. 
+ } +} + +async function listKnownTaskBranches(c: any, repoId: string): Promise { + const rows = await c.db + .select({ branchName: taskIndex.branchName }) + .from(taskIndex) + .where(and(eq(taskIndex.repoId, repoId), isNotNull(taskIndex.branchName))) + .all(); + return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); +} + +async function resolveGitHubRepository(c: any, repoId: string) { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getRepository({ repoId }).catch(() => null); +} + +async function listGitHubBranches(c: any, repoId: string): Promise> { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.listBranchesForRepository({ repoId }).catch(() => []); +} + +async function resolveRepositoryRemoteUrl(c: any, repoId: string): Promise { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = repository?.cloneUrl?.trim(); + if (!remoteUrl) { + throw new Error(`Missing remote URL for repo ${repoId}`); + } + return remoteUrl; +} + +/** + * The ONLY backend code path that creates a task actor via getOrCreateTask. + * Called when a user explicitly creates a new task (not during sync/webhooks). + * + * All other code must use getTask (handles.ts) which calls .get() and will + * error if the actor doesn't exist. Virtual tasks created during PR sync + * are materialized lazily by the client's getOrCreate in backend-client.ts. + * + * NEVER call this from a sync loop or webhook handler. 
+ */ +export async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { + const organizationId = c.state.organizationId; + const repoId = cmd.repoId; + await resolveRepositoryRemoteUrl(c, repoId); + const onBranch = cmd.onBranch?.trim() || null; + const taskId = randomUUID(); + let initialBranchName: string | null = null; + let initialTitle: string | null = null; + + if (onBranch) { + initialBranchName = onBranch; + initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); + + await registerTaskBranchMutation(c, { + repoId, + taskId, + branchName: onBranch, + requireExistingRemote: true, + }); + } else { + const reservedBranches = await listKnownTaskBranches(c, repoId); + const resolved = resolveCreateFlowDecision({ + task: cmd.task, + explicitTitle: cmd.explicitTitle ?? undefined, + explicitBranchName: cmd.explicitBranchName ?? undefined, + localBranches: [], + taskBranches: reservedBranches, + }); + + initialBranchName = resolved.branchName; + initialTitle = resolved.title; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + repoId, + branchName: resolved.branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoNothing() + .run(); + } + + let taskHandle: Awaited>; + try { + taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { + organizationId, + repoId, + taskId, + }); + } catch (error) { + if (initialBranchName) { + await deleteStaleTaskIndexRow(c, taskId); + } + throw error; + } + + const created = await taskHandle.initialize({ + sandboxProviderId: cmd.sandboxProviderId, + branchName: initialBranchName, + title: initialTitle, + task: cmd.task, + }); + + try { + await upsertTaskSummary(c, await taskHandle.getTaskSummary({})); + await refreshOrganizationSnapshotMutation(c); + } catch (error) { + logActorWarning("organization", "failed seeding task summary after task creation", { + organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } 
+ + const auditLog = await getOrCreateAuditLog(c, organizationId); + void auditLog.append({ + kind: "task.created", + repoId, + taskId, + payload: { + repoId, + sandboxProviderId: cmd.sandboxProviderId, + }, + }); + + try { + const taskSummary = await taskHandle.getTaskSummary({}); + await upsertTaskSummary(c, taskSummary); + } catch (error) { + logActorWarning("organization", "failed seeding organization task projection", { + organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } + + return created; +} + +export async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const branchName = cmd.branchName.trim(); + if (!branchName) { + throw new Error("branchName is required"); + } + + const existingOwner = await c.db + .select({ taskId: taskIndex.taskId }) + .from(taskIndex) + .where(and(eq(taskIndex.branchName, branchName), eq(taskIndex.repoId, cmd.repoId), ne(taskIndex.taskId, cmd.taskId))) + .get(); + + if (existingOwner) { + let ownerMissing = false; + try { + await getTask(c, c.state.organizationId, cmd.repoId, existingOwner.taskId).get(); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + ownerMissing = true; + await deleteStaleTaskIndexRow(c, existingOwner.taskId); + } else { + throw error; + } + } + if (!ownerMissing) { + throw new Error(`branch is already assigned to a different task: ${branchName}`); + } + } + + const branches = await listGitHubBranches(c, cmd.repoId); + const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; + if (cmd.requireExistingRemote && !branchMatch) { + throw new Error(`Remote branch not found: ${branchName}`); + } + + const repository = await resolveGitHubRepository(c, cmd.repoId); + const defaultBranch = repository?.defaultBranch ?? "main"; + const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? 
""; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId: cmd.taskId, + repoId: cmd.repoId, + branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); + + return { branchName, headSha }; +} + +export async function applyTaskSummaryUpdateMutation(c: any, input: { taskSummary: WorkspaceTaskSummary }): Promise { + await upsertTaskSummary(c, input.taskSummary); + await refreshOrganizationSnapshotMutation(c); +} + +export async function removeTaskSummaryMutation(c: any, input: { taskId: string }): Promise { + await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); + await refreshOrganizationSnapshotMutation(c); +} + +/** + * Called for every changed PR during sync and on webhook PR events. + * Runs in a bulk loop — MUST NOT create task actors or make cross-actor calls + * to task actors. Only writes to the org's local taskIndex/taskSummaries tables. + * Task actors are created lazily when the user views the task. + */ +export async function refreshTaskSummaryForBranchMutation( + c: any, + input: { repoId: string; branchName: string; pullRequest?: WorkspacePullRequestSummary | null; repoName?: string }, +): Promise { + const pullRequest = input.pullRequest ?? null; + let rows = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, input.branchName), eq(taskSummaries.repoId, input.repoId))) + .all(); + + if (rows.length === 0 && pullRequest) { + // Create a virtual task entry in the org's local tables only. + // No task actor is spawned — it will be created lazily when the user + // clicks on the task in the sidebar (the "materialize" path). + const taskId = randomUUID(); + const now = Date.now(); + const title = pullRequest.title?.trim() || input.branchName; + const repoName = input.repoName ?? 
`${c.state.organizationId}/${input.repoId}`; + + await c.db + .insert(taskIndex) + .values({ taskId, repoId: input.repoId, branchName: input.branchName, createdAt: now, updatedAt: now }) + .onConflictDoNothing() + .run(); + + await c.db + .insert(taskSummaries) + .values({ + taskId, + repoId: input.repoId, + title, + status: "init_complete", + repoName, + updatedAtMs: pullRequest.updatedAtMs ?? now, + branch: input.branchName, + pullRequestJson: JSON.stringify(pullRequest), + sessionsSummaryJson: "[]", + }) + .onConflictDoNothing() + .run(); + + rows = [{ taskId }]; + } else { + // Update PR data on existing task summaries locally. + // If a real task actor exists, also notify it. + for (const row of rows) { + // Update the local summary with the new PR data + await c.db + .update(taskSummaries) + .set({ + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAtMs: pullRequest?.updatedAtMs ?? Date.now(), + }) + .where(eq(taskSummaries.taskId, row.taskId)) + .run(); + + // Best-effort notify the task actor if it exists (fire-and-forget) + try { + const task = getTask(c, c.state.organizationId, input.repoId, row.taskId); + void task.pullRequestSync({ pullRequest }).catch(() => {}); + } catch { + // Task actor doesn't exist yet — that's fine, it's virtual + } + } + } + + await refreshOrganizationSnapshotMutation(c); +} + +export function sortOverviewBranches( + branches: Array<{ + branchName: string; + commitSha: string; + taskId: string | null; + taskTitle: string | null; + taskStatus: TaskRecord["status"] | null; + pullRequest: WorkspacePullRequestSummary | null; + ciStatus: string | null; + updatedAt: number; + }>, + defaultBranch: string | null, +) { + return [...branches].sort((left, right) => { + if (defaultBranch) { + if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; + if (right.branchName === defaultBranch && left.branchName !== defaultBranch) return 1; + } + if (Boolean(left.taskId) !== 
Boolean(right.taskId)) { + return left.taskId ? -1 : 1; + } + if (left.updatedAt !== right.updatedAt) { + return right.updatedAt - left.updatedAt; + } + return left.branchName.localeCompare(right.branchName); + }); +} + +export async function listTaskSummariesForRepo(c: any, repoId: string, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId, + taskId: row.taskId, + branchName: row.branch ?? null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listAllTaskSummaries(c: any, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + taskId: row.taskId, + branchName: row.branch ?? 
null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listWorkspaceTaskSummaries(c: any): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows.map((row) => taskSummaryFromRow(row.repoId, row)); +} + +export async function getRepoOverviewFromOrg(c: any, repoId: string): Promise { + const now = Date.now(); + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + const githubBranches = await listGitHubBranches(c, repoId).catch(() => []); + const taskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).all(); + + const taskMetaByBranch = new Map< + string, + { taskId: string; title: string | null; status: TaskRecord["status"] | null; updatedAt: number; pullRequest: WorkspacePullRequestSummary | null } + >(); + for (const row of taskRows) { + if (!row.branch) { + continue; + } + taskMetaByBranch.set(row.branch, { + taskId: row.taskId, + title: row.title ?? 
null, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + }); + } + + const branchMap = new Map(); + for (const branch of githubBranches) { + branchMap.set(branch.branchName, branch); + } + for (const branchName of taskMetaByBranch.keys()) { + if (!branchMap.has(branchName)) { + branchMap.set(branchName, { branchName, commitSha: "" }); + } + } + if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { + branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); + } + + const branches = sortOverviewBranches( + [...branchMap.values()].map((branch) => { + const taskMeta = taskMetaByBranch.get(branch.branchName); + const pr = taskMeta?.pullRequest ?? null; + return { + branchName: branch.branchName, + commitSha: branch.commitSha, + taskId: taskMeta?.taskId ?? null, + taskTitle: taskMeta?.title ?? null, + taskStatus: taskMeta?.status ?? null, + pullRequest: pr, + ciStatus: null, + updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), + }; + }), + repository?.defaultBranch ?? null, + ); + + return { + organizationId: c.state.organizationId, + repoId, + remoteUrl, + baseRef: repository?.defaultBranch ?? null, + fetchedAt: now, + branches, + }; +} + +export async function getRepositoryMetadataFromOrg( + c: any, + repoId: string, +): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + return { + defaultBranch: repository?.defaultBranch ?? null, + fullName: repository?.fullName ?? 
null, + remoteUrl, + }; +} + +export async function findTaskForBranch(c: any, repoId: string, branchName: string): Promise<{ taskId: string | null }> { + const row = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, branchName), eq(taskSummaries.repoId, repoId))) + .get(); + return { taskId: row?.taskId ?? null }; +} diff --git a/foundry/packages/backend/src/actors/organization/actions/tasks.ts b/foundry/packages/backend/src/actors/organization/actions/tasks.ts new file mode 100644 index 0000000..118ff15 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/tasks.ts @@ -0,0 +1,340 @@ +// @ts-nocheck +import { desc, eq } from "drizzle-orm"; +import type { + AuditLogEvent, + CreateTaskInput, + HistoryQueryInput, + ListTasksInput, + RepoOverview, + SwitchResult, + TaskRecord, + TaskSummary, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getOrCreateAuditLog, getOrCreateTask, getTask as getTaskHandle } from "../../handles.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { + createTaskMutation, + getRepoOverviewFromOrg, + getRepositoryMetadataFromOrg, + findTaskForBranch, + listTaskSummariesForRepo, + listAllTaskSummaries, +} from "./task-mutations.js"; + +function assertOrganization(c: { state: { organizationId: string } }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor 
mismatch: actor=${c.state.organizationId} command=${organizationId}`); + } +} + +/** + * Look up the repoId for a task from the local task index. + * Used when callers (e.g. sandbox actor) only have taskId but need repoId + * to construct the task actor key. + */ +async function resolveTaskRepoId(c: any, taskId: string): Promise { + const row = await c.db.select({ repoId: taskIndex.repoId }).from(taskIndex).where(eq(taskIndex.taskId, taskId)).get(); + if (!row) { + throw new Error(`Task ${taskId} not found in task index`); + } + return row.repoId; +} + +/** + * Get or lazily create a task actor for a user-initiated action. + * Uses getOrCreate because the user may be interacting with a virtual task + * (PR-driven) that has no actor yet. The task actor self-initializes in + * getCurrentRecord() from the org's getTaskIndexEntry data. + * + * This is safe because requireWorkspaceTask is only called from user-initiated + * actions (createSession, sendMessage, etc.), never from sync loops. + * See CLAUDE.md "Lazy Task Actor Creation". + */ +async function requireWorkspaceTask(c: any, repoId: string, taskId: string) { + return getOrCreateTask(c, c.state.organizationId, repoId, taskId, { + organizationId: c.state.organizationId, + repoId, + taskId, + }); +} + +interface GetTaskInput { + organizationId: string; + repoId: string; + taskId: string; +} + +interface TaskProxyActionInput extends GetTaskInput { + reason?: string; +} + +interface RepoOverviewInput { + organizationId: string; + repoId: string; +} + +export { createTaskMutation }; + +export const organizationTaskActions = { + async createTask(c: any, input: CreateTaskInput): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + const sandboxProviderId = input.sandboxProviderId ?? 
defaultSandboxProviderId(config); + + // Self-call: call the mutation directly since we're inside the org actor + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.task, + sandboxProviderId, + explicitTitle: input.explicitTitle ?? null, + explicitBranchName: input.explicitBranchName ?? null, + onBranch: input.onBranch ?? null, + }); + }, + + async materializeTask(c: any, input: { organizationId: string; repoId: string; virtualTaskId: string }): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + // Self-call: call the mutation directly + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.virtualTaskId, + sandboxProviderId: defaultSandboxProviderId(config), + explicitTitle: null, + explicitBranchName: null, + onBranch: null, + }); + }, + + async createWorkspaceTask(c: any, input: TaskWorkspaceCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { + const created = await organizationTaskActions.createTask(c, { + organizationId: c.state.organizationId, + repoId: input.repoId, + task: input.task, + ...(input.title ? { explicitTitle: input.title } : {}), + ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? 
{ explicitBranchName: input.branch } : {}), + }); + + const task = await requireWorkspaceTask(c, input.repoId, created.taskId); + void task + .createSessionAndSend({ + model: input.model, + text: input.task, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + + return { taskId: created.taskId }; + }, + + async markWorkspaceUnread(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.markUnread({ authSessionId: input.authSessionId }); + }, + + async renameWorkspaceTask(c: any, input: TaskWorkspaceRenameInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameTask({ value: input.value }); + }, + + async createWorkspaceSession(c: any, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + return await task.createSession({ + ...(input.model ? { model: input.model } : {}), + ...(input.authSessionId ? 
{ authSessionId: input.authSessionId } : {}), + }); + }, + + async renameWorkspaceSession(c: any, input: TaskWorkspaceRenameSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameSession({ sessionId: input.sessionId, title: input.title, authSessionId: input.authSessionId }); + }, + + async selectWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.selectSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }); + }, + + async setWorkspaceSessionUnread(c: any, input: TaskWorkspaceSetSessionUnreadInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.setSessionUnread({ sessionId: input.sessionId, unread: input.unread, authSessionId: input.authSessionId }); + }, + + async updateWorkspaceDraft(c: any, input: TaskWorkspaceUpdateDraftInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .updateDraft({ + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + }, + + async changeWorkspaceModel(c: any, input: TaskWorkspaceChangeModelInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.changeModel({ sessionId: input.sessionId, model: input.model, authSessionId: input.authSessionId }); + }, + + async sendWorkspaceMessage(c: any, input: TaskWorkspaceSendMessageInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .sendMessage({ + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + }, + + async stopWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, 
input.repoId, input.taskId); + void task.stopSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {}); + }, + + async closeWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.closeSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {}); + }, + + async publishWorkspacePr(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.publishPr({}).catch(() => {}); + }, + + async revertWorkspaceFile(c: any, input: TaskWorkspaceDiffInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.revertFile(input).catch(() => {}); + }, + + async getRepoOverview(c: any, input: RepoOverviewInput): Promise { + assertOrganization(c, input.organizationId); + + return await getRepoOverviewFromOrg(c, input.repoId); + }, + + async listTasks(c: any, input: ListTasksInput): Promise { + assertOrganization(c, input.organizationId); + if (input.repoId) { + return await listTaskSummariesForRepo(c, input.repoId, true); + } + return await listAllTaskSummaries(c, true); + }, + + async switchTask(c: any, input: { repoId: string; taskId: string }): Promise { + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + const record = await h.get(); + const switched = await h.switchTask({}); + return { + organizationId: c.state.organizationId, + taskId: input.taskId, + sandboxProviderId: record.sandboxProviderId, + switchTarget: switched.switchTarget, + }; + }, + + async auditLog(c: any, input: HistoryQueryInput): Promise { + assertOrganization(c, input.organizationId); + const auditLog = await getOrCreateAuditLog(c, c.state.organizationId); + return await auditLog.list({ + repoId: input.repoId, + branch: input.branch, + taskId: input.taskId, + limit: input.limit ?? 
20, + }); + }, + + async getTask(c: any, input: GetTaskInput): Promise { + assertOrganization(c, input.organizationId); + // Resolve repoId from local task index if not provided (e.g. sandbox actor only has taskId) + const repoId = input.repoId || (await resolveTaskRepoId(c, input.taskId)); + // Use getOrCreate — the task may be virtual (PR-driven, no actor yet). + // The task actor self-initializes in getCurrentRecord(). + const handle = await getOrCreateTask(c, c.state.organizationId, repoId, input.taskId, { + organizationId: c.state.organizationId, + repoId, + taskId: input.taskId, + }); + return await handle.get(); + }, + + async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + return await h.attach({ reason: input.reason }); + }, + + async pushTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.push({ reason: input.reason }).catch(() => {}); + }, + + async syncTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.sync({ reason: input.reason }).catch(() => {}); + }, + + async mergeTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.merge({ reason: input.reason }).catch(() => {}); + }, + + async archiveTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.archive({ reason: input.reason }).catch(() => {}); + }, + + async 
killTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.kill({ reason: input.reason }).catch(() => {}); + }, + + async getRepositoryMetadata(c: any, input: { repoId: string }): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + return await getRepositoryMetadataFromOrg(c, input.repoId); + }, + + async findTaskForBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { + return await findTaskForBranch(c, input.repoId, input.branchName); + }, + + /** + * Lightweight read of task index + summary data. Used by the task actor + * to self-initialize when lazily materialized from a virtual task. + * Does NOT trigger materialization — no circular dependency. + */ + async getTaskIndexEntry(c: any, input: { taskId: string }): Promise<{ branchName: string | null; title: string | null } | null> { + const idx = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(eq(taskIndex.taskId, input.taskId)).get(); + const summary = await c.db.select({ title: taskSummaries.title }).from(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).get(); + if (!idx && !summary) return null; + return { + branchName: idx?.branchName ?? null, + title: summary?.title ?? 
null, + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/app-shell.ts b/foundry/packages/backend/src/actors/organization/app-shell.ts index 3339590..dce5855 100644 --- a/foundry/packages/backend/src/actors/organization/app-shell.ts +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -1,4 +1,4 @@ -import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm"; +import { desc, eq } from "drizzle-orm"; import { randomUUID } from "node:crypto"; import type { FoundryAppSnapshot, @@ -8,109 +8,35 @@ import type { FoundryOrganizationMember, FoundryUser, UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; import { getOrCreateGithubData, getOrCreateOrganization, selfOrganization } from "../handles.js"; import { GitHubAppError } from "../../services/app-github.js"; import { getBetterAuthService } from "../../services/better-auth.js"; import { repoIdFromRemote, repoLabelFromRemote } from "../../services/repo.js"; import { logger } from "../../logging.js"; -import { - authAccountIndex, - authEmailIndex, - authSessionIndex, - authVerification, - invoices, - organizationMembers, - organizationProfile, - repos, - seatAssignments, - stripeLookup, -} from "./db/schema.js"; - -export const APP_SHELL_ORGANIZATION_ID = "app"; - -// ── Better Auth adapter where-clause helpers ── -// These convert the adapter's `{ field, value, operator }` clause arrays into -// Drizzle predicates for organization-level auth index / verification tables. 
- -function organizationAuthColumn(table: any, field: string): any { - const column = table[field]; - if (!column) { - throw new Error(`Unknown auth table field: ${field}`); - } - return column; -} - -function normalizeAuthValue(value: unknown): unknown { - if (value instanceof Date) { - return value.getTime(); - } - if (Array.isArray(value)) { - return value.map((entry) => normalizeAuthValue(entry)); - } - return value; -} - -function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { - const column = organizationAuthColumn(table, clause.field); - const value = normalizeAuthValue(clause.value); - switch (clause.operator) { - case "ne": - return value === null ? isNotNull(column) : ne(column, value as any); - case "lt": - return lt(column, value as any); - case "lte": - return lte(column, value as any); - case "gt": - return gt(column, value as any); - case "gte": - return gte(column, value as any); - case "in": - return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "not_in": - return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "contains": - return like(column, `%${String(value ?? "")}%`); - case "starts_with": - return like(column, `${String(value ?? "")}%`); - case "ends_with": - return like(column, `%${String(value ?? "")}`); - case "eq": - default: - return value === null ? isNull(column) : eq(column, value as any); - } -} - -function organizationAuthWhere(table: any, clauses: any[] | undefined): any { - if (!clauses || clauses.length === 0) { - return undefined; - } - let expr = organizationAuthClause(table, clauses[0]); - for (const clause of clauses.slice(1)) { - const next = organizationAuthClause(table, clause); - expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); - } - return expr; -} +import { invoices, organizationMembers, organizationProfile, seatAssignments, stripeLookup } from "./db/schema.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./constants.js"; const githubWebhookLogger = logger.child({ scope: "github-webhook", }); -const PROFILE_ROW_ID = "profile"; +const PROFILE_ROW_ID = 1; function roundDurationMs(start: number): number { return Math.round((performance.now() - start) * 100) / 100; } -function assertAppOrganization(c: any): void { +export function assertAppOrganization(c: any): void { if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); } } -function assertOrganizationShell(c: any): void { +export function assertOrganizationShell(c: any): void { if (c.state.organizationId === APP_SHELL_ORGANIZATION_ID) { throw new Error("Organization action cannot run on the reserved app organization"); } @@ -132,10 +58,6 @@ function organizationOrganizationId(kind: FoundryOrganization["kind"], login: st return kind === "personal" ? 
personalOrganizationId(login) : slugify(login); } -function hasRepoScope(scopes: string[]): boolean { - return scopes.some((scope) => scope === "repo" || scope.startsWith("repo:")); -} - function parseEligibleOrganizationIds(value: string): string[] { try { const parsed = JSON.parse(value); @@ -217,7 +139,9 @@ function stripeWebhookSubscription(event: any) { }; } -async function getOrganizationState(organization: any) { +// sendOrganizationCommand removed — org actions called directly + +export async function getOrganizationState(organization: any) { return await organization.getOrganizationShellState({}); } @@ -290,7 +214,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization }; } -async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { +export async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { assertAppOrganization(c); const startedAt = performance.now(); const auth = getBetterAuthService(); @@ -359,6 +283,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa githubLogin: profile?.githubLogin ?? "", roleLabel: profile?.roleLabel ?? "GitHub user", eligibleOrganizationIds, + defaultModel: profile?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, } : null; @@ -404,7 +329,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa return snapshot; } -async function requireSignedInSession(c: any, sessionId: string) { +export async function requireSignedInSession(c: any, sessionId: string) { const auth = getBetterAuthService(); const authState = await auth.getAuthState(sessionId); const user = authState?.user ?? 
null; @@ -431,7 +356,7 @@ async function requireSignedInSession(c: any, sessionId: string) { }; } -function requireEligibleOrganization(session: any, organizationId: string): void { +export function requireEligibleOrganization(session: any, organizationId: string): void { const eligibleOrganizationIds = parseEligibleOrganizationIds(session.eligibleOrganizationIdsJson); if (!eligibleOrganizationIds.includes(organizationId)) { throw new Error(`Organization ${organizationId} is not available in this app session`); @@ -557,7 +482,7 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin const organizationId = organizationOrganizationId(account.kind, account.githubLogin); const installation = installations.find((candidate) => candidate.accountLogin === account.githubLogin) ?? null; const organization = await getOrCreateOrganization(c, organizationId); - await organization.syncOrganizationShellFromGithub({ + await organization.commandSyncOrganizationShellFromGithub({ userId: githubUserId, userName: viewer.name || viewer.login, userEmail: viewer.email ?? 
`${viewer.login}@users.noreply.github.com`, @@ -641,17 +566,22 @@ async function listOrganizationInvoices(c: any): Promise { assertOrganizationShell(c); - const rows = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); - return rows.map((row) => repoLabelFromRemote(row.remoteUrl)).sort((left, right) => left.localeCompare(right)); + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => repoLabelFromRemote(row.cloneUrl)).sort((a: string, b: string) => a.localeCompare(b)); + } catch { + return []; + } } -async function buildOrganizationState(c: any) { +export async function buildOrganizationState(c: any) { const startedAt = performance.now(); const row = await requireOrganizationProfileRow(c); return await buildOrganizationStateFromRow(c, row, startedAt); } -async function buildOrganizationStateIfInitialized(c: any) { +export async function buildOrganizationStateIfInitialized(c: any) { const startedAt = performance.now(); const row = await readOrganizationProfileRow(c); if (!row) { @@ -685,7 +615,6 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number slug: row.slug, primaryDomain: row.primaryDomain, seatAccrualMode: "first_prompt", - defaultModel: row.defaultModel, autoImportRepos: row.autoImportRepos === 1, }, github: { @@ -697,6 +626,10 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number lastSyncAt: row.githubLastSyncAt ?? null, lastWebhookAt: row.githubLastWebhookAt ?? null, lastWebhookEvent: row.githubLastWebhookEvent ?? "", + syncGeneration: row.githubSyncGeneration ?? 0, + syncPhase: row.githubSyncPhase ?? null, + processedRepositoryCount: row.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: row.githubTotalRepositoryCount ?? 
0, }, billing: { planId: row.billingPlanId, @@ -744,396 +677,13 @@ async function applySubscriptionState( }, fallbackPlanId: FoundryBillingPlanId, ): Promise { - await organization.applyOrganizationStripeSubscription({ + await organization.commandApplyStripeSubscription({ subscription, fallbackPlanId, }); } export const organizationAppActions = { - async authFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppOrganization(c); - - const clauses = [ - ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), - ...(input.sessionToken ? [{ field: "sessionToken", value: input.sessionToken }] : []), - ]; - if (clauses.length === 0) { - return null; - } - const predicate = organizationAuthWhere(authSessionIndex, clauses); - return await c.db.select().from(authSessionIndex).where(predicate!).get(); - }, - - async authUpsertSessionIndex(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authSessionIndex) - .values({ - sessionId: input.sessionId, - sessionToken: input.sessionToken, - userId: input.userId, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: authSessionIndex.sessionId, - set: { - sessionToken: input.sessionToken, - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get(); - }, - - async authDeleteSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppOrganization(c); - - const clauses = [ - ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), - ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), - ]; - if (clauses.length === 0) { - return; - } - const predicate = organizationAuthWhere(authSessionIndex, clauses); - await c.db.delete(authSessionIndex).where(predicate!).run(); - }, - - async authFindEmailIndex(c: any, input: { email: string }) { - assertAppOrganization(c); - - return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); - }, - - async authUpsertEmailIndex(c: any, input: { email: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authEmailIndex) - .values({ - email: input.email, - userId: input.userId, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: authEmailIndex.email, - set: { - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); - }, - - async authDeleteEmailIndex(c: any, input: { email: string }) { - assertAppOrganization(c); - - await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); - }, - - async authFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppOrganization(c); - - if (input.id) { - return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); - } - if (!input.providerId || !input.accountId) { - return null; - } - return await c.db - .select() - .from(authAccountIndex) - .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) - .get(); - }, - - async authUpsertAccountIndex(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authAccountIndex) - .values({ - id: input.id, - providerId: input.providerId, - accountId: input.accountId, - userId: input.userId, - updatedAt: now, - }) 
- .onConflictDoUpdate({ - target: authAccountIndex.id, - set: { - providerId: input.providerId, - accountId: input.accountId, - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); - }, - - async authDeleteAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppOrganization(c); - - if (input.id) { - await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); - return; - } - if (input.providerId && input.accountId) { - await c.db - .delete(authAccountIndex) - .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) - .run(); - } - }, - - async authCreateVerification(c: any, input: { data: Record }) { - assertAppOrganization(c); - - await c.db - .insert(authVerification) - .values(input.data as any) - .run(); - return await c.db - .select() - .from(authVerification) - .where(eq(authVerification.id, input.data.id as string)) - .get(); - }, - - async authFindOneVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; - }, - - async authFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - let query = c.db.select().from(authVerification); - if (predicate) { - query = query.where(predicate); - } - if (input.sortBy?.field) { - const column = organizationAuthColumn(authVerification, input.sortBy.field); - query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); - } - if (typeof input.limit === "number") { - query = query.limit(input.limit); - } - if (typeof input.offset === "number") { - query = query.offset(input.offset); - } - return await query.all(); - }, - - async authUpdateVerification(c: any, input: { where: any[]; update: Record }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return null; - } - await c.db - .update(authVerification) - .set(input.update as any) - .where(predicate) - .run(); - return await c.db.select().from(authVerification).where(predicate).get(); - }, - - async authUpdateManyVerification(c: any, input: { where: any[]; update: Record }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return 0; - } - await c.db - .update(authVerification) - .set(input.update as any) - .where(predicate) - .run(); - const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get(); - return row?.value ?? 0; - }, - - async authDeleteVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return; - } - await c.db.delete(authVerification).where(predicate).run(); - }, - - async authDeleteManyVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return 0; - } - const rows = await c.db.select().from(authVerification).where(predicate).all(); - await c.db.delete(authVerification).where(predicate).run(); - return rows.length; - }, - - async authCountVerification(c: any, input: { where?: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - const row = predicate - ? 
await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() - : await c.db.select({ value: sqlCount() }).from(authVerification).get(); - return row?.value ?? 0; - }, - - async getAppSnapshot(c: any, input: { sessionId: string }): Promise { - return await buildAppSnapshot(c, input.sessionId); - }, - - async resolveAppGithubToken( - c: any, - input: { organizationId: string; requireRepoScope?: boolean }, - ): Promise<{ accessToken: string; scopes: string[] } | null> { - assertAppOrganization(c); - const auth = getBetterAuthService(); - const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); - - for (const row of rows) { - const authState = await auth.getAuthState(row.sessionId); - if (authState?.sessionState?.activeOrganizationId !== input.organizationId) { - continue; - } - - const token = await auth.getAccessTokenForSession(row.sessionId); - if (!token?.accessToken) { - continue; - } - - const scopes = token.scopes; - if (input.requireRepoScope !== false && scopes.length > 0 && !hasRepoScope(scopes)) { - continue; - } - - return { - accessToken: token.accessToken, - scopes, - }; - } - - return null; - }, - - async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - await getBetterAuthService().upsertUserProfile(session.authUserId, { - starterRepoStatus: "skipped", - starterRepoSkippedAt: Date.now(), - starterRepoStarredAt: null, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.starSandboxAgentRepo({ - 
organizationId: input.organizationId, - }); - await getBetterAuthService().upsertUserProfile(session.authUserId, { - starterRepoStatus: "starred", - starterRepoStarredAt: Date.now(), - starterRepoSkippedAt: null, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); - - // Ensure the GitHub data actor exists. If it's newly created, its own - // workflow will detect the pending sync status and run the initial - // full sync automatically — no orchestration needed here. - await getOrCreateGithubData(c, input.organizationId); - - return await buildAppSnapshot(c, input.sessionId); - }, - - async updateAppOrganizationProfile( - c: any, - input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, - ): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.updateOrganizationShellProfile({ - displayName: input.displayName, - slug: input.slug, - primaryDomain: input.primaryDomain, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - - const githubData = await getOrCreateGithubData(c, input.organizationId); - const summary = await githubData.getSummary({}); - if (summary.syncStatus === "syncing") { - 
return await buildAppSnapshot(c, input.sessionId); - } - - // Mark sync started on the organization, then send directly to the - // GitHub data actor's own workflow queue. - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - await organizationHandle.markOrganizationSyncStarted({ - label: "Importing repository catalog...", - }); - - await githubData.send("githubData.command.syncRepos", { label: "Importing repository catalog..." }, { wait: false }); - - return await buildAppSnapshot(c, input.sessionId); - }, - - async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const { appShell } = getActorRuntimeContext(); - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - const organizationState = await getOrganizationState(organizationHandle); - if (organizationState.snapshot.kind !== "organization") { - return { - url: `${appShell.appUrl}/organizations/${input.organizationId}`, - }; - } - return { - url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), - }; - }, - async createAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): Promise<{ url: string }> { assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); @@ -1143,7 +693,9 @@ export const organizationAppActions = { const organizationState = await getOrganizationState(organizationHandle); if (input.planId === "free") { - await organizationHandle.applyOrganizationFreePlan({ clearSubscription: false }); + await organizationHandle.commandApplyFreePlan({ + clearSubscription: false, + }); return { url: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }; @@ -1162,7 +714,9 @@ export const 
organizationAppActions = { email: session.currentUserEmail, }) ).id; - await organizationHandle.applyOrganizationStripeCustomer({ customerId }); + await organizationHandle.commandApplyStripeCustomer({ + customerId, + }); await upsertStripeLookupEntries(c, input.organizationId, customerId, null); } @@ -1190,7 +744,9 @@ export const organizationAppActions = { const completion = await appShell.stripe.retrieveCheckoutCompletion(input.checkoutSessionId); if (completion.customerId) { - await organizationHandle.applyOrganizationStripeCustomer({ customerId: completion.customerId }); + await organizationHandle.commandApplyStripeCustomer({ + customerId: completion.customerId, + }); } await upsertStripeLookupEntries(c, input.organizationId, completion.customerId, completion.subscriptionId); @@ -1200,7 +756,7 @@ export const organizationAppActions = { } if (completion.paymentMethodLabel) { - await organizationHandle.setOrganizationBillingPaymentMethod({ + await organizationHandle.commandSetPaymentMethod({ label: completion.paymentMethodLabel, }); } @@ -1240,7 +796,9 @@ export const organizationAppActions = { await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await organizationHandle.setOrganizationBillingStatus({ status: "scheduled_cancel" }); + await organizationHandle.commandSetBillingStatus({ + status: "scheduled_cancel", + }); } return await buildAppSnapshot(c, input.sessionId); @@ -1259,7 +817,9 @@ export const organizationAppActions = { await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? 
organizationState.stripeCustomerId, subscription.id); } else { - await organizationHandle.setOrganizationBillingStatus({ status: "active" }); + await organizationHandle.commandSetBillingStatus({ + status: "active", + }); } return await buildAppSnapshot(c, input.sessionId); @@ -1270,7 +830,7 @@ export const organizationAppActions = { const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.recordOrganizationSeatUsage({ + await organization.commandRecordSeatUsage({ email: session.currentUserEmail, }); return await buildAppSnapshot(c, input.sessionId); @@ -1293,7 +853,9 @@ export const organizationAppActions = { if (organizationId) { const organization = await getOrCreateOrganization(c, organizationId); if (typeof object.customer === "string") { - await organization.applyOrganizationStripeCustomer({ customerId: object.customer }); + await organization.commandApplyStripeCustomer({ + customerId: object.customer, + }); } await upsertStripeLookupEntries( c, @@ -1326,7 +888,9 @@ export const organizationAppActions = { const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { const organization = await getOrCreateOrganization(c, organizationId); - await organization.applyOrganizationFreePlan({ clearSubscription: true }); + await organization.commandApplyFreePlan({ + clearSubscription: true, + }); } return { ok: true }; } @@ -1338,7 +902,7 @@ export const organizationAppActions = { const organization = await getOrCreateOrganization(c, organizationId); const rawAmount = typeof invoice.amount_paid === "number" ? invoice.amount_paid : invoice.amount_due; const amountUsd = Math.round((typeof rawAmount === "number" ? 
rawAmount : 0) / 100); - await organization.upsertOrganizationInvoice({ + await organization.commandUpsertInvoice({ id: String(invoice.id), label: typeof invoice.number === "string" ? `Invoice ${invoice.number}` : "Stripe invoice", issuedAt: formatUnixDate(typeof invoice.created === "number" ? invoice.created : Math.floor(Date.now() / 1000)), @@ -1374,7 +938,7 @@ export const organizationAppActions = { const organizationId = organizationOrganizationId(kind, accountLogin); const receivedAt = Date.now(); const organization = await getOrCreateOrganization(c, organizationId); - await organization.recordGithubWebhookReceipt({ + await organization.commandRecordGithubWebhookReceipt({ organizationId: organizationId, event, action: body.action ?? null, @@ -1400,14 +964,16 @@ export const organizationAppActions = { label: "GitHub App installation removed", }); } else if (body.action === "created") { - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? null, - githubLogin: accountLogin, - kind, - label: "Syncing GitHub data from installation webhook...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Syncing GitHub data from installation webhook...", + }) + .catch(() => {}); } else if (body.action === "suspend") { await githubData.clearState({ connectedAccount: accountLogin, @@ -1416,14 +982,16 @@ export const organizationAppActions = { label: "GitHub App installation suspended", }); } else if (body.action === "unsuspend") { - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? 
null, - githubLogin: accountLogin, - kind, - label: "Resyncing GitHub data after unsuspend...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after unsuspend...", + }) + .catch(() => {}); } return { ok: true }; } @@ -1440,14 +1008,16 @@ export const organizationAppActions = { }, "repository_membership_changed", ); - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? null, - githubLogin: accountLogin, - kind, - label: "Resyncing GitHub data after repository access change...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after repository access change...", + }) + .catch(() => {}); return { ok: true }; } @@ -1486,6 +1056,7 @@ export const organizationAppActions = { }, pullRequest: { number: body.pull_request.number, + status: body.pull_request.draft ? "draft" : "ready", title: body.pull_request.title ?? "", body: body.pull_request.body ?? null, state: body.pull_request.state ?? "open", @@ -1520,422 +1091,321 @@ export const organizationAppActions = { ); return { ok: true }; }, +}; - async syncOrganizationShellFromGithub( - c: any, - input: { - userId: string; - userName: string; - userEmail: string; - githubUserLogin: string; - githubAccountId: string; - githubLogin: string; - githubAccountType: string; - kind: FoundryOrganization["kind"]; - displayName: string; - installationId: number | null; - appConfigured: boolean; - }, - ): Promise<{ organizationId: string }> { - assertOrganizationShell(c); - const now = Date.now(); - const existing = await readOrganizationProfileRow(c); - const slug = existing?.slug ?? 
slugify(input.githubLogin); - const organizationId = organizationOrganizationId(input.kind, input.githubLogin); - if (organizationId !== c.state.organizationId) { - throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); - } +export async function syncOrganizationShellFromGithubMutation( + c: any, + input: { + userId: string; + userName: string; + userEmail: string; + githubUserLogin: string; + githubAccountId: string; + githubLogin: string; + githubAccountType: string; + kind: FoundryOrganization["kind"]; + displayName: string; + installationId: number | null; + appConfigured: boolean; + }, +): Promise<{ organizationId: string }> { + assertOrganizationShell(c); + const now = Date.now(); + const existing = await readOrganizationProfileRow(c); + const slug = existing?.slug ?? slugify(input.githubLogin); + const organizationId = organizationOrganizationId(input.kind, input.githubLogin); + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); + } - const installationStatus = - input.kind === "personal" ? "connected" : input.installationId ? "connected" : input.appConfigured ? "install_required" : "reconnect_required"; - const syncStatus = existing?.githubSyncStatus ?? legacyRepoImportStatusToGithubSyncStatus(existing?.repoImportStatus); - const lastSyncLabel = - syncStatus === "synced" - ? existing.githubLastSyncLabel - : installationStatus === "connected" - ? "Waiting for first import" - : installationStatus === "install_required" - ? "GitHub App installation required" - : "GitHub App configuration incomplete"; - const hasStripeBillingState = Boolean(existing?.stripeCustomerId || existing?.stripeSubscriptionId || existing?.stripePriceId); - const defaultBillingPlanId = input.kind === "personal" || !hasStripeBillingState ? "free" : (existing?.billingPlanId ?? 
"team"); - const defaultSeatsIncluded = input.kind === "personal" || !hasStripeBillingState ? 1 : (existing?.billingSeatsIncluded ?? 5); - const defaultPaymentMethodLabel = - input.kind === "personal" - ? "No card required" - : hasStripeBillingState - ? (existing?.billingPaymentMethodLabel ?? "Payment method on file") - : "No payment method on file"; + const installationStatus = + input.kind === "personal" ? "connected" : input.installationId ? "connected" : input.appConfigured ? "install_required" : "reconnect_required"; + const syncStatus = existing?.githubSyncStatus ?? legacyRepoImportStatusToGithubSyncStatus(existing?.repoImportStatus); + const lastSyncLabel = + syncStatus === "synced" + ? existing.githubLastSyncLabel + : installationStatus === "connected" + ? "Waiting for first import" + : installationStatus === "install_required" + ? "GitHub App installation required" + : "GitHub App configuration incomplete"; + const hasStripeBillingState = Boolean(existing?.stripeCustomerId || existing?.stripeSubscriptionId || existing?.stripePriceId); + const defaultBillingPlanId = input.kind === "personal" || !hasStripeBillingState ? "free" : (existing?.billingPlanId ?? "team"); + const defaultSeatsIncluded = input.kind === "personal" || !hasStripeBillingState ? 1 : (existing?.billingSeatsIncluded ?? 5); + const defaultPaymentMethodLabel = + input.kind === "personal" + ? "No card required" + : hasStripeBillingState + ? (existing?.billingPaymentMethodLabel ?? "Payment method on file") + : "No payment method on file"; - await c.db - .insert(organizationProfile) - .values({ - id: PROFILE_ROW_ID, + await c.db + .insert(organizationProfile) + .values({ + id: PROFILE_ROW_ID, + kind: input.kind, + githubAccountId: input.githubAccountId, + githubLogin: input.githubLogin, + githubAccountType: input.githubAccountType, + displayName: input.displayName, + slug, + defaultModel: existing?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + primaryDomain: existing?.primaryDomain ?? 
(input.kind === "personal" ? "personal" : `${slug}.github`), + autoImportRepos: existing?.autoImportRepos ?? 1, + repoImportStatus: existing?.repoImportStatus ?? "not_started", + githubConnectedAccount: input.githubLogin, + githubInstallationStatus: installationStatus, + githubSyncStatus: syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: lastSyncLabel, + githubLastSyncAt: existing?.githubLastSyncAt ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, + stripeCustomerId: existing?.stripeCustomerId ?? null, + stripeSubscriptionId: existing?.stripeSubscriptionId ?? null, + stripePriceId: existing?.stripePriceId ?? null, + billingPlanId: defaultBillingPlanId, + billingStatus: existing?.billingStatus ?? "active", + billingSeatsIncluded: defaultSeatsIncluded, + billingTrialEndsAt: existing?.billingTrialEndsAt ?? null, + billingRenewalAt: existing?.billingRenewalAt ?? null, + billingPaymentMethodLabel: defaultPaymentMethodLabel, + createdAt: existing?.createdAt ?? now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationProfile.id, + set: { kind: input.kind, githubAccountId: input.githubAccountId, githubLogin: input.githubLogin, githubAccountType: input.githubAccountType, displayName: input.displayName, - slug, - primaryDomain: existing?.primaryDomain ?? (input.kind === "personal" ? "personal" : `${slug}.github`), - defaultModel: existing?.defaultModel ?? "claude-sonnet-4", - autoImportRepos: existing?.autoImportRepos ?? 1, - repoImportStatus: existing?.repoImportStatus ?? 
"not_started", githubConnectedAccount: input.githubLogin, githubInstallationStatus: installationStatus, githubSyncStatus: syncStatus, githubInstallationId: input.installationId, githubLastSyncLabel: lastSyncLabel, githubLastSyncAt: existing?.githubLastSyncAt ?? null, - stripeCustomerId: existing?.stripeCustomerId ?? null, - stripeSubscriptionId: existing?.stripeSubscriptionId ?? null, - stripePriceId: existing?.stripePriceId ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, billingPlanId: defaultBillingPlanId, - billingStatus: existing?.billingStatus ?? "active", billingSeatsIncluded: defaultSeatsIncluded, - billingTrialEndsAt: existing?.billingTrialEndsAt ?? null, - billingRenewalAt: existing?.billingRenewalAt ?? null, billingPaymentMethodLabel: defaultPaymentMethodLabel, - createdAt: existing?.createdAt ?? now, updatedAt: now, - }) - .onConflictDoUpdate({ - target: organizationProfile.id, - set: { - kind: input.kind, - githubAccountId: input.githubAccountId, - githubLogin: input.githubLogin, - githubAccountType: input.githubAccountType, - displayName: input.displayName, - githubConnectedAccount: input.githubLogin, - githubInstallationStatus: installationStatus, - githubSyncStatus: syncStatus, - githubInstallationId: input.installationId, - githubLastSyncLabel: lastSyncLabel, - githubLastSyncAt: existing?.githubLastSyncAt ?? 
null, - billingPlanId: defaultBillingPlanId, - billingSeatsIncluded: defaultSeatsIncluded, - billingPaymentMethodLabel: defaultPaymentMethodLabel, - updatedAt: now, - }, - }) - .run(); + }, + }) + .run(); - await c.db - .insert(organizationMembers) - .values({ - id: input.userId, + await c.db + .insert(organizationMembers) + .values({ + id: input.userId, + name: input.userName, + email: input.userEmail, + role: input.kind === "personal" ? "owner" : "admin", + state: "active", + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationMembers.id, + set: { name: input.userName, email: input.userEmail, role: input.kind === "personal" ? "owner" : "admin", state: "active", updatedAt: now, + }, + }) + .run(); + + // Auto-trigger github-data sync when the org has a connected installation + // but hasn't synced yet. This handles the common case where a personal + // account or an org with an existing GitHub App installation signs in for + // the first time on a fresh DB — the installation webhook already fired + // before the org actor existed, so we kick off the sync here instead. + const needsInitialSync = installationStatus === "connected" && syncStatus === "pending"; + if (needsInitialSync) { + const githubData = await getOrCreateGithubData(c, organizationId); + void githubData + .syncRepos({ + connectedAccount: input.githubLogin, + installationStatus: "connected", + installationId: input.installationId, + githubLogin: input.githubLogin, + kind: input.kind, + label: "Initial repository sync...", }) - .onConflictDoUpdate({ - target: organizationMembers.id, - set: { - name: input.userName, - email: input.userEmail, - role: input.kind === "personal" ? 
"owner" : "admin", - state: "active", - updatedAt: now, - }, - }) - .run(); + .catch(() => {}); + } - return { organizationId }; - }, + return { organizationId }; +} - async getOrganizationShellState(c: any): Promise { - assertOrganizationShell(c); - return await buildOrganizationState(c); - }, - - async getOrganizationShellStateIfInitialized(c: any): Promise { - assertOrganizationShell(c); - return await buildOrganizationStateIfInitialized(c); - }, - - async updateOrganizationShellProfile(c: any, input: Pick): Promise { - assertOrganizationShell(c); - const existing = await requireOrganizationProfileRow(c); - await c.db - .update(organizationProfile) - .set({ - displayName: input.displayName.trim() || existing.displayName, - slug: input.slug.trim() || existing.slug, - primaryDomain: input.primaryDomain.trim() || existing.primaryDomain, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async markOrganizationSyncStarted(c: any, input: { label: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubSyncStatus: "syncing", - githubLastSyncLabel: input.label, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationSyncCompleted( - c: any, - input: { - repositories: Array<{ fullName: string; cloneUrl: string; private: boolean }>; - installationStatus: FoundryOrganization["github"]["installationStatus"]; - lastSyncLabel: string; - }, - ): Promise { - assertOrganizationShell(c); - const now = Date.now(); - for (const repository of input.repositories) { - const remoteUrl = repository.cloneUrl; - await c.db - .insert(repos) - .values({ - repoId: repoIdFromRemote(remoteUrl), - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - } - await c.db - .update(organizationProfile) - .set({ - 
githubInstallationStatus: input.installationStatus, - githubSyncStatus: "synced", - githubLastSyncLabel: input.lastSyncLabel, - githubLastSyncAt: now, - updatedAt: now, - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async markOrganizationSyncFailed(c: any, input: { message: string; installationStatus: FoundryOrganization["github"]["installationStatus"] }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubInstallationStatus: input.installationStatus, - githubSyncStatus: "error", - githubLastSyncLabel: input.message, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationStripeCustomer(c: any, input: { customerId: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - stripeCustomerId: input.customerId, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationStripeSubscription( - c: any, - input: { - subscription: { - id: string; - customerId: string; - priceId: string | null; - status: string; - cancelAtPeriodEnd: boolean; - currentPeriodEnd: number | null; - trialEnd: number | null; - defaultPaymentMethodLabel: string; - }; - fallbackPlanId: FoundryBillingPlanId; - }, - ): Promise { - assertOrganizationShell(c); - const { appShell } = getActorRuntimeContext(); - const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? 
input.fallbackPlanId; - await c.db - .update(organizationProfile) - .set({ - stripeCustomerId: input.subscription.customerId || null, - stripeSubscriptionId: input.subscription.id || null, - stripePriceId: input.subscription.priceId, - billingPlanId: planId, - billingStatus: stripeStatusToBillingStatus(input.subscription.status, input.subscription.cancelAtPeriodEnd), - billingSeatsIncluded: seatsIncludedForPlan(planId), - billingTrialEndsAt: input.subscription.trialEnd ? new Date(input.subscription.trialEnd * 1000).toISOString() : null, - billingRenewalAt: input.subscription.currentPeriodEnd ? new Date(input.subscription.currentPeriodEnd * 1000).toISOString() : null, - billingPaymentMethodLabel: input.subscription.defaultPaymentMethodLabel || "Payment method on file", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationFreePlan(c: any, input: { clearSubscription: boolean }): Promise { - assertOrganizationShell(c); - const patch: Record = { - billingPlanId: "free", - billingStatus: "active", - billingSeatsIncluded: 1, - billingTrialEndsAt: null, - billingRenewalAt: null, - billingPaymentMethodLabel: "No card required", +export async function updateOrganizationShellProfileMutation( + c: any, + input: Pick, +): Promise { + assertOrganizationShell(c); + const existing = await requireOrganizationProfileRow(c); + await c.db + .update(organizationProfile) + .set({ + displayName: input.displayName.trim() || existing.displayName, + slug: input.slug.trim() || existing.slug, + primaryDomain: input.primaryDomain.trim() || existing.primaryDomain, updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function markOrganizationSyncStartedMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + githubSyncStatus: "syncing", + githubLastSyncLabel: input.label, + 
githubSyncPhase: "discovering_repositories", + githubProcessedRepositoryCount: 0, + githubTotalRepositoryCount: 0, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeCustomerMutation(c: any, input: { customerId: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + stripeCustomerId: input.customerId, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeSubscriptionMutation( + c: any, + input: { + subscription: { + id: string; + customerId: string; + priceId: string | null; + status: string; + cancelAtPeriodEnd: boolean; + currentPeriodEnd: number | null; + trialEnd: number | null; + defaultPaymentMethodLabel: string; }; - if (input.clearSubscription) { - patch.stripeSubscriptionId = null; - patch.stripePriceId = null; - } - await c.db.update(organizationProfile).set(patch).where(eq(organizationProfile.id, PROFILE_ROW_ID)).run(); + fallbackPlanId: FoundryBillingPlanId; }, +): Promise { + assertOrganizationShell(c); + const { appShell } = getActorRuntimeContext(); + const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? input.fallbackPlanId; + await c.db + .update(organizationProfile) + .set({ + stripeCustomerId: input.subscription.customerId || null, + stripeSubscriptionId: input.subscription.id || null, + stripePriceId: input.subscription.priceId, + billingPlanId: planId, + billingStatus: stripeStatusToBillingStatus(input.subscription.status, input.subscription.cancelAtPeriodEnd), + billingSeatsIncluded: seatsIncludedForPlan(planId), + billingTrialEndsAt: input.subscription.trialEnd ? new Date(input.subscription.trialEnd * 1000).toISOString() : null, + billingRenewalAt: input.subscription.currentPeriodEnd ? 
new Date(input.subscription.currentPeriodEnd * 1000).toISOString() : null, + billingPaymentMethodLabel: input.subscription.defaultPaymentMethodLabel || "Payment method on file", + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} - async setOrganizationBillingPaymentMethod(c: any, input: { label: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - billingPaymentMethodLabel: input.label, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, +export async function applyOrganizationFreePlanMutation(c: any, input: { clearSubscription: boolean }): Promise { + assertOrganizationShell(c); + const patch: Record = { + billingPlanId: "free", + billingStatus: "active", + billingSeatsIncluded: 1, + billingTrialEndsAt: null, + billingRenewalAt: null, + billingPaymentMethodLabel: "No card required", + updatedAt: Date.now(), + }; + if (input.clearSubscription) { + patch.stripeSubscriptionId = null; + patch.stripePriceId = null; + } + await c.db.update(organizationProfile).set(patch).where(eq(organizationProfile.id, PROFILE_ROW_ID)).run(); +} - async setOrganizationBillingStatus(c: any, input: { status: FoundryBillingState["status"] }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - billingStatus: input.status, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, +export async function setOrganizationBillingPaymentMethodMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingPaymentMethodLabel: input.label, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} - async upsertOrganizationInvoice(c: any, input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }): Promise { - 
assertOrganizationShell(c); - await c.db - .insert(invoices) - .values({ - id: input.id, +export async function setOrganizationBillingStatusMutation(c: any, input: { status: FoundryBillingState["status"] }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingStatus: input.status, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function upsertOrganizationInvoiceMutation( + c: any, + input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }, +): Promise { + assertOrganizationShell(c); + await c.db + .insert(invoices) + .values({ + id: input.id, + label: input.label, + issuedAt: input.issuedAt, + amountUsd: input.amountUsd, + status: input.status, + createdAt: Date.now(), + }) + .onConflictDoUpdate({ + target: invoices.id, + set: { label: input.label, issuedAt: input.issuedAt, amountUsd: input.amountUsd, status: input.status, - createdAt: Date.now(), - }) - .onConflictDoUpdate({ - target: invoices.id, - set: { - label: input.label, - issuedAt: input.issuedAt, - amountUsd: input.amountUsd, - status: input.status, - }, - }) - .run(); - }, + }, + }) + .run(); +} - async recordOrganizationSeatUsage(c: any, input: { email: string }): Promise { - assertOrganizationShell(c); - await c.db - .insert(seatAssignments) - .values({ - email: input.email, - createdAt: Date.now(), - }) - .onConflictDoNothing() - .run(); - }, - - async applyGithubInstallationCreated(c: any, input: { installationId: number }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubInstallationId: input.installationId, - githubInstallationStatus: "connected", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyGithubInstallationRemoved(c: any, _input: {}): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - 
.set({ - githubInstallationId: null, - githubInstallationStatus: "install_required", - githubSyncStatus: "pending", - githubLastSyncLabel: "GitHub App installation removed", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyGithubRepositoryChanges(c: any, input: { added: Array<{ fullName: string; private: boolean }>; removed: string[] }): Promise { - assertOrganizationShell(c); - const now = Date.now(); - - for (const repo of input.added) { - const remoteUrl = `https://github.com/${repo.fullName}.git`; - const repoId = repoIdFromRemote(remoteUrl); - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - } - - for (const fullName of input.removed) { - const remoteUrl = `https://github.com/${fullName}.git`; - const repoId = repoIdFromRemote(remoteUrl); - await c.db.delete(repos).where(eq(repos.repoId, repoId)).run(); - } - - const repoCount = (await c.db.select().from(repos).all()).length; - await c.db - .update(organizationProfile) - .set({ - githubSyncStatus: "synced", - githubLastSyncLabel: `${repoCount} repositories synced`, - githubLastSyncAt: now, - updatedAt: now, - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, -}; +export async function recordOrganizationSeatUsageMutation(c: any, input: { email: string }): Promise { + assertOrganizationShell(c); + await c.db + .insert(seatAssignments) + .values({ + email: input.email, + createdAt: Date.now(), + }) + .onConflictDoNothing() + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/constants.ts b/foundry/packages/backend/src/actors/organization/constants.ts new file mode 100644 index 0000000..0b8e3c0 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/constants.ts @@ -0,0 +1 @@ +export const APP_SHELL_ORGANIZATION_ID = "app"; diff --git 
a/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql index 09b77f9..80be04f 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql @@ -56,6 +56,10 @@ CREATE TABLE `organization_profile` ( `github_last_sync_at` integer, `github_last_webhook_at` integer, `github_last_webhook_event` text, + `github_sync_generation` integer NOT NULL, + `github_sync_phase` text, + `github_processed_repository_count` integer NOT NULL, + `github_total_repository_count` integer NOT NULL, `stripe_customer_id` text, `stripe_subscription_id` text, `stripe_price_id` text, @@ -86,8 +90,3 @@ CREATE TABLE `stripe_lookup` ( `organization_id` text NOT NULL, `updated_at` integer NOT NULL ); ---> statement-breakpoint -CREATE TABLE `task_lookup` ( - `task_id` text PRIMARY KEY NOT NULL, - `repo_id` text NOT NULL -); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql new file mode 100644 index 0000000..74d63ef --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql @@ -0,0 +1,50 @@ +CREATE TABLE `auth_session_index` ( + `session_id` text PRIMARY KEY NOT NULL, + `session_token` text NOT NULL, + `user_id` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `auth_email_index` ( + `email` text PRIMARY KEY NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `auth_account_index` ( + `id` text PRIMARY KEY NOT NULL, + `provider_id` text NOT NULL, + `account_id` text NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL 
+); +--> statement-breakpoint +CREATE TABLE `auth_verification` ( + `id` text PRIMARY KEY NOT NULL, + `identifier` text NOT NULL, + `value` text NOT NULL, + `expires_at` integer NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_index` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `branch_name` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_summaries` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `title` text NOT NULL, + `status` text NOT NULL, + `repo_name` text NOT NULL, + `updated_at_ms` integer NOT NULL, + `branch` text, + `pull_request_json` text, + `sessions_summary_json` text DEFAULT '[]' NOT NULL +); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json index cdcc44c..a29c546 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json @@ -373,6 +373,34 @@ "notNull": false, "autoincrement": false }, + "github_sync_generation": { + "name": "github_sync_generation", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_sync_phase": { + "name": "github_sync_phase", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_processed_repository_count": { + "name": "github_processed_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_total_repository_count": { + "name": "github_total_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, "stripe_customer_id": { "name": "stripe_customer_id", "type": "text", 
@@ -549,30 +577,6 @@ "compositePrimaryKeys": {}, "uniqueConstraints": {}, "checkConstraints": {} - }, - "task_lookup": { - "name": "task_lookup", - "columns": { - "task_id": { - "name": "task_id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "repo_id": { - "name": "repo_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} } }, "views": {}, diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json index e3668a1..41ea23b 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json @@ -8,6 +8,13 @@ "when": 1773376221152, "tag": "0000_melted_viper", "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773840000000, + "tag": "0001_add_auth_and_task_tables", + "breakpoints": true } ] } diff --git a/foundry/packages/backend/src/actors/organization/db/migrations.ts b/foundry/packages/backend/src/actors/organization/db/migrations.ts index b3e09f1..a7e8abc 100644 --- a/foundry/packages/backend/src/actors/organization/db/migrations.ts +++ b/foundry/packages/backend/src/actors/organization/db/migrations.ts @@ -12,20 +12,8 @@ const journal = { }, { idx: 1, - when: 1773638400000, - tag: "0001_auth_index_tables", - breakpoints: true, - }, - { - idx: 2, - when: 1773720000000, - tag: "0002_task_summaries", - breakpoints: true, - }, - { - idx: 3, - when: 1773810001000, - tag: "0003_drop_provider_profiles", + when: 1773840000000, + tag: "0001_add_auth_and_task_tables", breakpoints: true, }, ], @@ -92,6 +80,10 @@ CREATE TABLE \`organization_profile\` ( \`github_last_sync_at\` integer, \`github_last_webhook_at\` integer, 
\`github_last_webhook_event\` text, + \`github_sync_generation\` integer NOT NULL, + \`github_sync_phase\` text, + \`github_processed_repository_count\` integer NOT NULL, + \`github_total_repository_count\` integer NOT NULL, \`stripe_customer_id\` text, \`stripe_subscription_id\` text, \`stripe_price_id\` text, @@ -122,13 +114,8 @@ CREATE TABLE \`stripe_lookup\` ( \`organization_id\` text NOT NULL, \`updated_at\` integer NOT NULL ); ---> statement-breakpoint -CREATE TABLE \`task_lookup\` ( - \`task_id\` text PRIMARY KEY NOT NULL, - \`repo_id\` text NOT NULL -); `, - m0001: `CREATE TABLE IF NOT EXISTS \`auth_session_index\` ( + m0001: `CREATE TABLE \`auth_session_index\` ( \`session_id\` text PRIMARY KEY NOT NULL, \`session_token\` text NOT NULL, \`user_id\` text NOT NULL, @@ -136,13 +123,13 @@ CREATE TABLE \`task_lookup\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_email_index\` ( +CREATE TABLE \`auth_email_index\` ( \`email\` text PRIMARY KEY NOT NULL, \`user_id\` text NOT NULL, \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_account_index\` ( +CREATE TABLE \`auth_account_index\` ( \`id\` text PRIMARY KEY NOT NULL, \`provider_id\` text NOT NULL, \`account_id\` text NOT NULL, @@ -150,7 +137,7 @@ CREATE TABLE IF NOT EXISTS \`auth_account_index\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_verification\` ( +CREATE TABLE \`auth_verification\` ( \`id\` text PRIMARY KEY NOT NULL, \`identifier\` text NOT NULL, \`value\` text NOT NULL, @@ -158,8 +145,16 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); -`, - m0002: `CREATE TABLE IF NOT EXISTS \`task_summaries\` ( +--> statement-breakpoint +CREATE TABLE \`task_index\` ( + \`task_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text, + \`created_at\` integer NOT NULL, + 
\`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`task_summaries\` ( \`task_id\` text PRIMARY KEY NOT NULL, \`repo_id\` text NOT NULL, \`title\` text NOT NULL, @@ -170,8 +165,6 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`pull_request_json\` text, \`sessions_summary_json\` text DEFAULT '[]' NOT NULL ); -`, - m0003: `DROP TABLE IF EXISTS \`provider_profiles\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/organization/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts index dd4fa40..5071a25 100644 --- a/foundry/packages/backend/src/actors/organization/db/schema.ts +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -1,34 +1,34 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; // SQLite is per organization actor instance, so no organizationId column needed. /** - * Coordinator index of RepositoryActor instances. - * The organization actor is the coordinator for repositories. - * Rows are created/removed when repos are added/removed from the organization. + * Coordinator index of TaskActor instances. + * The organization actor is the direct coordinator for tasks (not a per-repo + * actor) because the sidebar needs to query all tasks across all repos on + * every snapshot. With many repos, fanning out to N repo actors on the hot + * read path is too expensive — owning the index here keeps that a single + * local table scan. Each row maps a taskId to its repo and immutable branch + * name. Used for branch conflict checking (scoped by repoId) and + * task-by-branch lookups. 
*/ -export const repos = sqliteTable("repos", { - repoId: text("repo_id").notNull().primaryKey(), - remoteUrl: text("remote_url").notNull(), +export const taskIndex = sqliteTable("task_index", { + taskId: text("task_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + branchName: text("branch_name"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }); /** - * Coordinator index of TaskActor instances. - * Fast taskId → repoId lookup so the organization can route requests - * to the correct RepositoryActor without scanning all repos. - */ -export const taskLookup = sqliteTable("task_lookup", { - taskId: text("task_id").notNull().primaryKey(), - repoId: text("repo_id").notNull(), -}); - -/** - * Coordinator index of TaskActor instances — materialized sidebar projection. - * Task actors push summary updates to the organization actor via - * applyTaskSummaryUpdate(). Source of truth lives on each TaskActor; - * this table exists so organization reads stay local without fan-out. + * Organization-owned materialized task summary projection. + * Task actors push summary updates directly to the organization coordinator, + * which keeps this table local for fast list/lookups without fan-out. + * Same rationale as taskIndex: the sidebar repeatedly reads all tasks across + * all repos, so the org must own the materialized view to avoid O(repos) + * actor fan-out on the hot read path. 
*/ export const taskSummaries = sqliteTable("task_summaries", { taskId: text("task_id").notNull().primaryKey(), @@ -42,38 +42,46 @@ export const taskSummaries = sqliteTable("task_summaries", { sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"), }); -export const organizationProfile = sqliteTable("organization_profile", { - id: text("id").notNull().primaryKey(), - kind: text("kind").notNull(), - githubAccountId: text("github_account_id").notNull(), - githubLogin: text("github_login").notNull(), - githubAccountType: text("github_account_type").notNull(), - displayName: text("display_name").notNull(), - slug: text("slug").notNull(), - primaryDomain: text("primary_domain").notNull(), - defaultModel: text("default_model").notNull(), - autoImportRepos: integer("auto_import_repos").notNull(), - repoImportStatus: text("repo_import_status").notNull(), - githubConnectedAccount: text("github_connected_account").notNull(), - githubInstallationStatus: text("github_installation_status").notNull(), - githubSyncStatus: text("github_sync_status").notNull(), - githubInstallationId: integer("github_installation_id"), - githubLastSyncLabel: text("github_last_sync_label").notNull(), - githubLastSyncAt: integer("github_last_sync_at"), - githubLastWebhookAt: integer("github_last_webhook_at"), - githubLastWebhookEvent: text("github_last_webhook_event"), - stripeCustomerId: text("stripe_customer_id"), - stripeSubscriptionId: text("stripe_subscription_id"), - stripePriceId: text("stripe_price_id"), - billingPlanId: text("billing_plan_id").notNull(), - billingStatus: text("billing_status").notNull(), - billingSeatsIncluded: integer("billing_seats_included").notNull(), - billingTrialEndsAt: text("billing_trial_ends_at"), - billingRenewalAt: text("billing_renewal_at"), - billingPaymentMethodLabel: text("billing_payment_method_label").notNull(), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); +export const 
organizationProfile = sqliteTable( + "organization_profile", + { + id: integer("id").primaryKey(), + kind: text("kind").notNull(), + githubAccountId: text("github_account_id").notNull(), + githubLogin: text("github_login").notNull(), + githubAccountType: text("github_account_type").notNull(), + displayName: text("display_name").notNull(), + slug: text("slug").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + primaryDomain: text("primary_domain").notNull(), + autoImportRepos: integer("auto_import_repos").notNull(), + repoImportStatus: text("repo_import_status").notNull(), + githubConnectedAccount: text("github_connected_account").notNull(), + githubInstallationStatus: text("github_installation_status").notNull(), + githubSyncStatus: text("github_sync_status").notNull(), + githubInstallationId: integer("github_installation_id"), + githubLastSyncLabel: text("github_last_sync_label").notNull(), + githubLastSyncAt: integer("github_last_sync_at"), + githubLastWebhookAt: integer("github_last_webhook_at"), + githubLastWebhookEvent: text("github_last_webhook_event"), + githubSyncGeneration: integer("github_sync_generation").notNull(), + githubSyncPhase: text("github_sync_phase"), + githubProcessedRepositoryCount: integer("github_processed_repository_count").notNull(), + githubTotalRepositoryCount: integer("github_total_repository_count").notNull(), + stripeCustomerId: text("stripe_customer_id"), + stripeSubscriptionId: text("stripe_subscription_id"), + stripePriceId: text("stripe_price_id"), + billingPlanId: text("billing_plan_id").notNull(), + billingStatus: text("billing_status").notNull(), + billingSeatsIncluded: integer("billing_seats_included").notNull(), + billingTrialEndsAt: text("billing_trial_ends_at"), + billingRenewalAt: text("billing_renewal_at"), + billingPaymentMethodLabel: text("billing_payment_method_label").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + 
}, + (table) => [check("organization_profile_singleton_id_check", sql`${table.id} = 1`)], +); export const organizationMembers = sqliteTable("organization_members", { id: text("id").notNull().primaryKey(), @@ -133,6 +141,7 @@ export const authAccountIndex = sqliteTable("auth_account_index", { updatedAt: integer("updated_at").notNull(), }); +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ export const authVerification = sqliteTable("auth_verification", { id: text("id").notNull().primaryKey(), identifier: text("identifier").notNull(), diff --git a/foundry/packages/backend/src/actors/organization/index.ts b/foundry/packages/backend/src/actors/organization/index.ts index 1ea0196..1bd8896 100644 --- a/foundry/packages/backend/src/actors/organization/index.ts +++ b/foundry/packages/backend/src/actors/organization/index.ts @@ -1,11 +1,10 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; +import { actor } from "rivetkit"; import { organizationDb } from "./db/db.js"; -import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js"; +import { organizationActions } from "./actions.js"; +import { organizationCommandActions } from "./workflow.js"; export const organization = actor({ db: organizationDb, - queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "Organization", icon: "compass", @@ -14,6 +13,8 @@ export const organization = actor({ createState: (_c, organizationId: string) => ({ organizationId, }), - actions: organizationActions, - run: workflow(runOrganizationWorkflow), + actions: { + ...organizationActions, + ...organizationCommandActions, + }, }); diff --git a/foundry/packages/backend/src/actors/organization/queues.ts b/foundry/packages/backend/src/actors/organization/queues.ts new file mode 100644 index 0000000..f84e818 --- /dev/null +++ 
b/foundry/packages/backend/src/actors/organization/queues.ts @@ -0,0 +1,39 @@ +export const ORGANIZATION_QUEUE_NAMES = [ + "organization.command.createTask", + "organization.command.materializeTask", + "organization.command.registerTaskBranch", + "organization.command.applyTaskSummaryUpdate", + "organization.command.removeTaskSummary", + "organization.command.refreshTaskSummaryForBranch", + "organization.command.snapshot.broadcast", + "organization.command.syncGithubSession", + "organization.command.better_auth.session_index.upsert", + "organization.command.better_auth.session_index.delete", + "organization.command.better_auth.email_index.upsert", + "organization.command.better_auth.email_index.delete", + "organization.command.better_auth.account_index.upsert", + "organization.command.better_auth.account_index.delete", + "organization.command.better_auth.verification.create", + "organization.command.better_auth.verification.update", + "organization.command.better_auth.verification.update_many", + "organization.command.better_auth.verification.delete", + "organization.command.better_auth.verification.delete_many", + "organization.command.github.sync_progress.apply", + "organization.command.github.webhook_receipt.record", + "organization.command.github.organization_shell.sync_from_github", + "organization.command.shell.profile.update", + "organization.command.shell.sync_started.mark", + "organization.command.billing.stripe_customer.apply", + "organization.command.billing.stripe_subscription.apply", + "organization.command.billing.free_plan.apply", + "organization.command.billing.payment_method.set", + "organization.command.billing.status.set", + "organization.command.billing.invoice.upsert", + "organization.command.billing.seat_usage.record", +] as const; + +export type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; + +export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { + return name; +} diff --git 
a/foundry/packages/backend/src/actors/organization/workflow.ts b/foundry/packages/backend/src/actors/organization/workflow.ts new file mode 100644 index 0000000..189225b --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/workflow.ts @@ -0,0 +1,163 @@ +// @ts-nocheck +/** + * Organization command actions — converted from queue handlers to direct actions. + * Each export becomes an action on the organization actor. + */ +import { applyGithubSyncProgressMutation, recordGithubWebhookReceiptMutation, refreshOrganizationSnapshotMutation } from "./actions.js"; +import { + applyTaskSummaryUpdateMutation, + createTaskMutation, + refreshTaskSummaryForBranchMutation, + registerTaskBranchMutation, + removeTaskSummaryMutation, +} from "./actions/task-mutations.js"; +import { + betterAuthCreateVerificationMutation, + betterAuthDeleteAccountIndexMutation, + betterAuthDeleteEmailIndexMutation, + betterAuthDeleteManyVerificationMutation, + betterAuthDeleteSessionIndexMutation, + betterAuthDeleteVerificationMutation, + betterAuthUpdateManyVerificationMutation, + betterAuthUpdateVerificationMutation, + betterAuthUpsertAccountIndexMutation, + betterAuthUpsertEmailIndexMutation, + betterAuthUpsertSessionIndexMutation, +} from "./actions/better-auth.js"; +import { + applyOrganizationFreePlanMutation, + applyOrganizationStripeCustomerMutation, + applyOrganizationStripeSubscriptionMutation, + markOrganizationSyncStartedMutation, + recordOrganizationSeatUsageMutation, + setOrganizationBillingPaymentMethodMutation, + setOrganizationBillingStatusMutation, + syncOrganizationShellFromGithubMutation, + updateOrganizationShellProfileMutation, + upsertOrganizationInvoiceMutation, +} from "./app-shell.js"; + +export const organizationCommandActions = { + async commandCreateTask(c: any, body: any) { + return await createTaskMutation(c, body); + }, + async commandMaterializeTask(c: any, body: any) { + return await createTaskMutation(c, body); + }, + async 
commandRegisterTaskBranch(c: any, body: any) { + return await registerTaskBranchMutation(c, body); + }, + async commandApplyTaskSummaryUpdate(c: any, body: any) { + await applyTaskSummaryUpdateMutation(c, body); + return { ok: true }; + }, + async commandRemoveTaskSummary(c: any, body: any) { + await removeTaskSummaryMutation(c, body); + return { ok: true }; + }, + async commandRefreshTaskSummaryForBranch(c: any, body: any) { + await refreshTaskSummaryForBranchMutation(c, body); + return { ok: true }; + }, + async commandBroadcastSnapshot(c: any, _body: any) { + await refreshOrganizationSnapshotMutation(c); + return { ok: true }; + }, + async commandSyncGithubSession(c: any, body: any) { + const { syncGithubOrganizations } = await import("./app-shell.js"); + await syncGithubOrganizations(c, body); + return { ok: true }; + }, + + // Better Auth index actions + async commandBetterAuthSessionIndexUpsert(c: any, body: any) { + return await betterAuthUpsertSessionIndexMutation(c, body); + }, + async commandBetterAuthSessionIndexDelete(c: any, body: any) { + await betterAuthDeleteSessionIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthEmailIndexUpsert(c: any, body: any) { + return await betterAuthUpsertEmailIndexMutation(c, body); + }, + async commandBetterAuthEmailIndexDelete(c: any, body: any) { + await betterAuthDeleteEmailIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthAccountIndexUpsert(c: any, body: any) { + return await betterAuthUpsertAccountIndexMutation(c, body); + }, + async commandBetterAuthAccountIndexDelete(c: any, body: any) { + await betterAuthDeleteAccountIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthVerificationCreate(c: any, body: any) { + return await betterAuthCreateVerificationMutation(c, body); + }, + async commandBetterAuthVerificationUpdate(c: any, body: any) { + return await betterAuthUpdateVerificationMutation(c, body); + }, + async 
commandBetterAuthVerificationUpdateMany(c: any, body: any) { + return await betterAuthUpdateManyVerificationMutation(c, body); + }, + async commandBetterAuthVerificationDelete(c: any, body: any) { + await betterAuthDeleteVerificationMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthVerificationDeleteMany(c: any, body: any) { + return await betterAuthDeleteManyVerificationMutation(c, body); + }, + + // GitHub sync actions + async commandApplyGithubSyncProgress(c: any, body: any) { + await applyGithubSyncProgressMutation(c, body); + return { ok: true }; + }, + async commandRecordGithubWebhookReceipt(c: any, body: any) { + await recordGithubWebhookReceiptMutation(c, body); + return { ok: true }; + }, + async commandSyncOrganizationShellFromGithub(c: any, body: any) { + return await syncOrganizationShellFromGithubMutation(c, body); + }, + + // Shell/profile actions + async commandUpdateShellProfile(c: any, body: any) { + await updateOrganizationShellProfileMutation(c, body); + return { ok: true }; + }, + async commandMarkSyncStarted(c: any, body: any) { + await markOrganizationSyncStartedMutation(c, body); + return { ok: true }; + }, + + // Billing actions + async commandApplyStripeCustomer(c: any, body: any) { + await applyOrganizationStripeCustomerMutation(c, body); + return { ok: true }; + }, + async commandApplyStripeSubscription(c: any, body: any) { + await applyOrganizationStripeSubscriptionMutation(c, body); + return { ok: true }; + }, + async commandApplyFreePlan(c: any, body: any) { + await applyOrganizationFreePlanMutation(c, body); + return { ok: true }; + }, + async commandSetPaymentMethod(c: any, body: any) { + await setOrganizationBillingPaymentMethodMutation(c, body); + return { ok: true }; + }, + async commandSetBillingStatus(c: any, body: any) { + await setOrganizationBillingStatusMutation(c, body); + return { ok: true }; + }, + async commandUpsertInvoice(c: any, body: any) { + await upsertOrganizationInvoiceMutation(c, body); + 
return { ok: true }; + }, + async commandRecordSeatUsage(c: any, body: any) { + await recordOrganizationSeatUsageMutation(c, body); + return { ok: true }; + }, +}; diff --git a/foundry/packages/backend/src/actors/repository/actions.ts b/foundry/packages/backend/src/actors/repository/actions.ts deleted file mode 100644 index 9ef8e75..0000000 --- a/foundry/packages/backend/src/actors/repository/actions.ts +++ /dev/null @@ -1,557 +0,0 @@ -// @ts-nocheck -import { randomUUID } from "node:crypto"; -import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; -import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared"; -import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js"; -import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; -import { repoMeta, taskIndex } from "./db/schema.js"; - -interface CreateTaskCommand { - task: string; - sandboxProviderId: SandboxProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; - onBranch: string | null; -} - -interface RegisterTaskBranchCommand { - taskId: string; - branchName: string; - requireExistingRemote?: boolean; -} - -interface ListTaskSummariesCommand { - includeArchived?: boolean; -} - -interface GetTaskEnrichedCommand { - taskId: string; -} - -interface GetPullRequestForBranchCommand { - branchName: string; -} - -const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const; - -type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number]; - -export { REPOSITORY_QUEUE_NAMES }; - -export function repositoryWorkflowQueueName(name: 
RepositoryQueueName): RepositoryQueueName { - return name; -} - -function isStaleTaskReferenceError(error: unknown): boolean { - const message = resolveErrorMessage(error); - return isActorNotFoundError(error) || message.startsWith("Task not found:"); -} - -async function persistRemoteUrl(c: any, remoteUrl: string): Promise { - c.state.remoteUrl = remoteUrl; - await c.db - .insert(repoMeta) - .values({ - id: 1, - remoteUrl, - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: repoMeta.id, - set: { - remoteUrl, - updatedAt: Date.now(), - }, - }) - .run(); -} - -async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { - try { - await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); - } catch { - // Best effort cleanup only. - } -} - -async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName, - createdAt: updatedAt || now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); -} - -async function listKnownTaskBranches(c: any): Promise { - const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); -} - -async function resolveGitHubRepository(c: any) { - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null); -} - -async function listGitHubBranches(c: any): Promise> { - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []); -} - -async function enrichTaskRecord(c: any, record: TaskRecord): Promise { - 
const branchName = record.branchName?.trim() || null; - if (!branchName) { - return record; - } - - const pr = - branchName != null - ? await getGithubData(c, c.state.organizationId) - .listPullRequestsForRepository({ repoId: c.state.repoId }) - .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) - .catch(() => null) - : null; - - return { - ...record, - prUrl: pr?.url ?? null, - prAuthor: pr?.authorLogin ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - diffStat: record.diffStat ?? null, - hasUnpushed: record.hasUnpushed ?? null, - conflictsWithMain: record.conflictsWithMain ?? null, - parentBranch: record.parentBranch ?? null, - }; -} - -async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - const organizationId = c.state.organizationId; - const repoId = c.state.repoId; - const repoRemote = c.state.remoteUrl; - const onBranch = cmd.onBranch?.trim() || null; - const taskId = randomUUID(); - let initialBranchName: string | null = null; - let initialTitle: string | null = null; - - await persistRemoteUrl(c, repoRemote); - - if (onBranch) { - initialBranchName = onBranch; - initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); - - await registerTaskBranchMutation(c, { - taskId, - branchName: onBranch, - requireExistingRemote: true, - }); - } else { - const reservedBranches = await listKnownTaskBranches(c); - const resolved = resolveCreateFlowDecision({ - task: cmd.task, - explicitTitle: cmd.explicitTitle ?? undefined, - explicitBranchName: cmd.explicitBranchName ?? 
undefined, - localBranches: [], - taskBranches: reservedBranches, - }); - - initialBranchName = resolved.branchName; - initialTitle = resolved.title; - - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName: resolved.branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoNothing() - .run(); - } - - let taskHandle: Awaited>; - try { - taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { - organizationId, - repoId, - taskId, - repoRemote, - branchName: initialBranchName, - title: initialTitle, - task: cmd.task, - sandboxProviderId: cmd.sandboxProviderId, - agentType: cmd.agentType, - explicitTitle: null, - explicitBranchName: null, - initialPrompt: cmd.initialPrompt, - }); - } catch (error) { - if (initialBranchName) { - await deleteStaleTaskIndexRow(c, taskId); - } - throw error; - } - - const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId }); - - const history = await getOrCreateHistory(c, organizationId, repoId); - await history.append({ - kind: "task.created", - taskId, - payload: { - repoId, - sandboxProviderId: cmd.sandboxProviderId, - }, - }); - - return created; -} - -async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const branchName = cmd.branchName.trim(); - if (!branchName) { - throw new Error("branchName is required"); - } - - await persistRemoteUrl(c, c.state.remoteUrl); - - const existingOwner = await c.db - .select({ taskId: taskIndex.taskId }) - .from(taskIndex) - .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) - .get(); - - if (existingOwner) { - let ownerMissing = false; - try { - await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - ownerMissing = true; - await deleteStaleTaskIndexRow(c, existingOwner.taskId); - } else { - throw 
error; - } - } - if (!ownerMissing) { - throw new Error(`branch is already assigned to a different task: ${branchName}`); - } - } - - const branches = await listGitHubBranches(c); - const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; - if (cmd.requireExistingRemote && !branchMatch) { - throw new Error(`Remote branch not found: ${branchName}`); - } - - const repository = await resolveGitHubRepository(c); - const defaultBranch = repository?.defaultBranch ?? "main"; - const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? ""; - - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId: cmd.taskId, - branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); - - return { branchName, headSha }; -} - -async function listTaskSummaries(c: any, includeArchived = false): Promise { - const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); - const records: TaskSummary[] = []; - - for (const row of taskRows) { - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); - if (!includeArchived && record.status === "archived") { - continue; - } - records.push({ - organizationId: record.organizationId, - repoId: record.repoId, - taskId: record.taskId, - branchName: record.branchName, - title: record.title, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - continue; - } - logActorWarning("repository", "failed loading task summary row", { - organizationId: c.state.organizationId, - repoId: c.state.repoId, - taskId: row.taskId, - error: resolveErrorMessage(error), - }); - } - } - - records.sort((a, b) => b.updatedAt - 
a.updatedAt); - return records; -} - -function sortOverviewBranches( - branches: Array<{ - branchName: string; - commitSha: string; - taskId: string | null; - taskTitle: string | null; - taskStatus: TaskRecord["status"] | null; - prNumber: number | null; - prState: string | null; - prUrl: string | null; - ciStatus: string | null; - reviewStatus: string | null; - reviewer: string | null; - updatedAt: number; - }>, - defaultBranch: string | null, -) { - return [...branches].sort((left, right) => { - if (defaultBranch) { - if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; - if (right.branchName === defaultBranch && left.branchName !== defaultBranch) return 1; - } - if (Boolean(left.taskId) !== Boolean(right.taskId)) { - return left.taskId ? -1 : 1; - } - if (left.updatedAt !== right.updatedAt) { - return right.updatedAt - left.updatedAt; - } - return left.branchName.localeCompare(right.branchName); - }); -} - -export async function runRepositoryWorkflow(ctx: any): Promise { - await ctx.loop("repository-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-repository-command", { - names: [...REPOSITORY_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "repository.command.createTask") { - const result = await loopCtx.step({ - name: "repository-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "repository.command.registerTaskBranch") { - const result = await loopCtx.step({ - name: "repository-register-task-branch", - timeout: 60_000, - run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - } catch (error) { - const message = resolveErrorMessage(error); - 
logActorWarning("repository", "repository workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch(() => {}); - } - - return Loop.continue(undefined); - }); -} - -export const repositoryActions = { - async createTask(c: any, cmd: CreateTaskCommand): Promise { - const self = selfRepository(c); - return expectQueueResponse( - await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listReservedBranches(c: any): Promise { - return await listKnownTaskBranches(c); - }, - - async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const self = selfRepository(c); - return expectQueueResponse<{ branchName: string; headSha: string }>( - await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { - return await listTaskSummaries(c, cmd?.includeArchived === true); - }, - - async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { - const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); - if (!row) { - const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); - await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? 
Date.now()); - return await enrichTaskRecord(c, record); - } - - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, cmd.taskId); - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - }, - - async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { - const repository = await resolveGitHubRepository(c); - return { - defaultBranch: repository?.defaultBranch ?? null, - fullName: repository?.fullName ?? null, - remoteUrl: c.state.remoteUrl, - }; - }, - - async getRepoOverview(c: any): Promise { - await persistRemoteUrl(c, c.state.remoteUrl); - - const now = Date.now(); - const repository = await resolveGitHubRepository(c); - const githubBranches = await listGitHubBranches(c).catch(() => []); - const githubData = getGithubData(c, c.state.organizationId); - const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); - const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); - - const taskRows = await c.db - .select({ - taskId: taskIndex.taskId, - branchName: taskIndex.branchName, - updatedAt: taskIndex.updatedAt, - }) - .from(taskIndex) - .all(); - - const taskMetaByBranch = new Map(); - for (const row of taskRows) { - if (!row.branchName) { - continue; - } - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); - taskMetaByBranch.set(row.branchName, { - taskId: row.taskId, - title: record.title ?? 
null, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - continue; - } - } - } - - const branchMap = new Map(); - for (const branch of githubBranches) { - branchMap.set(branch.branchName, branch); - } - for (const branchName of taskMetaByBranch.keys()) { - if (!branchMap.has(branchName)) { - branchMap.set(branchName, { branchName, commitSha: "" }); - } - } - if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { - branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); - } - - const branches = sortOverviewBranches( - [...branchMap.values()].map((branch) => { - const taskMeta = taskMetaByBranch.get(branch.branchName); - const pr = prByBranch.get(branch.branchName); - return { - branchName: branch.branchName, - commitSha: branch.commitSha, - taskId: taskMeta?.taskId ?? null, - taskTitle: taskMeta?.title ?? null, - taskStatus: taskMeta?.status ?? null, - prNumber: pr?.number ?? null, - prState: pr?.state ?? null, - prUrl: pr?.url ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), - }; - }), - repository?.defaultBranch ?? null, - ); - - return { - organizationId: c.state.organizationId, - repoId: c.state.repoId, - remoteUrl: c.state.remoteUrl, - baseRef: repository?.defaultBranch ?? 
null, - fetchedAt: now, - branches, - }; - }, - - async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { - const branchName = cmd.branchName?.trim(); - if (!branchName) { - return null; - } - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.getPullRequestForBranch({ - repoId: c.state.repoId, - branchName, - }); - }, -}; diff --git a/foundry/packages/backend/src/actors/repository/db/db.ts b/foundry/packages/backend/src/actors/repository/db/db.ts deleted file mode 100644 index 79bed8e..0000000 --- a/foundry/packages/backend/src/actors/repository/db/db.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { db } from "rivetkit/db/drizzle"; -import * as schema from "./schema.js"; -import migrations from "./migrations.js"; - -export const repositoryDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts deleted file mode 100644 index 8b9a1b9..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/repository/db/drizzle", - schema: "./src/actors/repository/db/schema.ts", -}); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql deleted file mode 100644 index 14bc071..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE `repo_meta` ( - `id` integer PRIMARY KEY NOT NULL, - `remote_url` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `task_index` ( - `task_id` text PRIMARY KEY NOT NULL, - `branch_name` text, - `created_at` 
integer NOT NULL, - `updated_at` integer NOT NULL -); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json deleted file mode 100644 index deebd86..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": "7", - "dialect": "sqlite", - "entries": [ - { - "idx": 0, - "version": "6", - "when": 1773376221848, - "tag": "0000_useful_la_nuit", - "breakpoints": true - } - ] -} diff --git a/foundry/packages/backend/src/actors/repository/db/migrations.ts b/foundry/packages/backend/src/actors/repository/db/migrations.ts deleted file mode 100644 index ebdb167..0000000 --- a/foundry/packages/backend/src/actors/repository/db/migrations.ts +++ /dev/null @@ -1,43 +0,0 @@ -// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. -// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). -// Do not hand-edit this file. 
- -const journal = { - entries: [ - { - idx: 0, - when: 1773376221848, - tag: "0000_useful_la_nuit", - breakpoints: true, - }, - { - idx: 1, - when: 1778900000000, - tag: "0001_remove_local_git_state", - breakpoints: true, - }, - ], -} as const; - -export default { - journal, - migrations: { - m0000: `CREATE TABLE \`repo_meta\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`remote_url\` text NOT NULL, -\t\`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`task_index\` ( -\t\`task_id\` text PRIMARY KEY NOT NULL, -\t\`branch_name\` text, -\t\`created_at\` integer NOT NULL, -\t\`updated_at\` integer NOT NULL -); -`, - m0001: `DROP TABLE IF EXISTS \`branches\`; ---> statement-breakpoint -DROP TABLE IF EXISTS \`repo_action_jobs\`; -`, - } as const, -}; diff --git a/foundry/packages/backend/src/actors/repository/db/schema.ts b/foundry/packages/backend/src/actors/repository/db/schema.ts deleted file mode 100644 index 2f597e8..0000000 --- a/foundry/packages/backend/src/actors/repository/db/schema.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; - -// SQLite is per repository actor instance (organizationId+repoId). - -export const repoMeta = sqliteTable("repo_meta", { - id: integer("id").primaryKey(), - remoteUrl: text("remote_url").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -/** - * Coordinator index of TaskActor instances. - * The repository actor is the coordinator for tasks. Each row maps a - * taskId to its branch name. Used for branch conflict checking and - * task-by-branch lookups. Rows are inserted at task creation and - * updated on branch rename. 
- */ -export const taskIndex = sqliteTable("task_index", { - taskId: text("task_id").notNull().primaryKey(), - branchName: text("branch_name"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); diff --git a/foundry/packages/backend/src/actors/repository/index.ts b/foundry/packages/backend/src/actors/repository/index.ts deleted file mode 100644 index 4253a90..0000000 --- a/foundry/packages/backend/src/actors/repository/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import { repositoryDb } from "./db/db.js"; -import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js"; - -export interface RepositoryInput { - organizationId: string; - repoId: string; - remoteUrl: string; -} - -export const repository = actor({ - db: repositoryDb, - queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])), - options: { - name: "Repository", - icon: "folder", - actionTimeout: 5 * 60_000, - }, - createState: (_c, input: RepositoryInput) => ({ - organizationId: input.organizationId, - repoId: input.repoId, - remoteUrl: input.remoteUrl, - }), - actions: repositoryActions, - run: workflow(runRepositoryWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts index 2e2087b..a35a149 100644 --- a/foundry/packages/backend/src/actors/sandbox/index.ts +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -2,12 +2,14 @@ import { actor } from "rivetkit"; import { e2b, sandboxActor } from "rivetkit/sandbox"; import { existsSync } from "node:fs"; import Dockerode from "dockerode"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS, workspaceModelGroupsFromSandboxAgents, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { SandboxAgent } from "sandbox-agent"; import { getActorRuntimeContext } from "../context.js"; 
import { organizationKey } from "../keys.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo"; +const SANDBOX_REPO_CWD = "/home/user/repo"; const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full"; const DEFAULT_LOCAL_SANDBOX_PORT = 2468; const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); @@ -203,6 +205,13 @@ const baseTaskSandbox = sandboxActor({ create: () => ({ template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), + // TEMPORARY: Default E2B timeout is 5 minutes which is too short. + // Set to 1 hour as a stopgap. Remove this once the E2B provider in + // sandbox-agent uses betaCreate + autoPause (see + // .context/proposal-rivetkit-sandbox-resilience.md). At that point + // the provider handles timeout/pause lifecycle and this override is + // unnecessary. + timeoutMs: 60 * 60 * 1000, }), installAgents: ["claude", "codex"], }); @@ -219,8 +228,12 @@ async function broadcastProcesses(c: any, actions: Record { return provider; } +async function listWorkspaceModelGroupsForSandbox(c: any): Promise { + const provider = await providerForConnection(c); + if (!provider || !c.state.sandboxId || typeof provider.connectAgent !== "function") { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } + + try { + const client = await provider.connectAgent(c.state.sandboxId, { + waitForHealth: { + timeoutMs: 15_000, + }, + }); + const listed = await client.listAgents({ config: true }); + const groups = workspaceModelGroupsFromSandboxAgents(Array.isArray(listed?.agents) ? listed.agents : []); + return groups.length > 0 ? 
groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } +} + const baseActions = baseTaskSandbox.config.actions as Record Promise>; export const taskSandbox = actor({ @@ -316,6 +349,19 @@ export const taskSandbox = actor({ return sanitizeActorResult(await session.prompt([{ type: "text", text }])); }, + async listProcesses(c: any): Promise { + try { + return await baseActions.listProcesses(c); + } catch (error) { + // Sandbox may be gone (E2B timeout, destroyed, etc.) — degrade to empty + logActorWarning("taskSandbox", "listProcesses failed, sandbox may be expired", { + sandboxId: c.state.sandboxId, + error: resolveErrorMessage(error), + }); + return { processes: [] }; + } + }, + async createProcess(c: any, request: any): Promise { const created = await baseActions.createProcess(c, request); await broadcastProcesses(c, baseActions); @@ -360,6 +406,10 @@ export const taskSandbox = actor({ } }, + async listWorkspaceModelGroups(c: any): Promise { + return await listWorkspaceModelGroupsForSandbox(c); + }, + async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { const { config } = getActorRuntimeContext(); const { taskId } = parseTaskSandboxKey(c.key); diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql index b9ef95a..c6a346a 100644 --- a/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql +++ b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql @@ -3,10 +3,9 @@ CREATE TABLE `task` ( `branch_name` text, `title` text, `task` text NOT NULL, - `provider_id` text NOT NULL, + `sandbox_provider_id` text NOT NULL, `status` text NOT NULL, - `agent_type` text DEFAULT 'claude', - `pr_submitted` integer DEFAULT 0, + `pull_request_json` text, `created_at` integer NOT NULL, `updated_at` integer NOT NULL, 
CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) @@ -15,33 +14,33 @@ CREATE TABLE `task` ( CREATE TABLE `task_runtime` ( `id` integer PRIMARY KEY NOT NULL, `active_sandbox_id` text, - `active_session_id` text, `active_switch_target` text, `active_cwd` text, - `status_message` text, + `git_state_json` text, + `git_state_updated_at` integer, `updated_at` integer NOT NULL, CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) ); --> statement-breakpoint CREATE TABLE `task_sandboxes` ( `sandbox_id` text PRIMARY KEY NOT NULL, - `provider_id` text NOT NULL, + `sandbox_provider_id` text NOT NULL, `sandbox_actor_id` text, `switch_target` text NOT NULL, `cwd` text, - `status_message` text, `created_at` integer NOT NULL, `updated_at` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE `task_workbench_sessions` ( +CREATE TABLE `task_workspace_sessions` ( `session_id` text PRIMARY KEY NOT NULL, + `sandbox_session_id` text, `session_name` text NOT NULL, `model` text NOT NULL, - `unread` integer DEFAULT 0 NOT NULL, - `draft_text` text DEFAULT '' NOT NULL, - `draft_attachments_json` text DEFAULT '[]' NOT NULL, - `draft_updated_at` integer, + `status` text DEFAULT 'ready' NOT NULL, + `error_message` text, + `transcript_json` text DEFAULT '[]' NOT NULL, + `transcript_updated_at` integer, `created` integer DEFAULT 1 NOT NULL, `closed` integer DEFAULT 0 NOT NULL, `thinking_since_ms` integer, diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json index b8a5879..7397b89 100644 --- a/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json @@ -35,8 +35,8 @@ "notNull": true, "autoincrement": false }, - "provider_id": { - "name": "provider_id", + "sandbox_provider_id": { + "name": "sandbox_provider_id", "type": "text", "primaryKey": false, 
"notNull": true, @@ -49,21 +49,12 @@ "notNull": true, "autoincrement": false }, - "agent_type": { - "name": "agent_type", + "pull_request_json": { + "name": "pull_request_json", "type": "text", "primaryKey": false, "notNull": false, - "autoincrement": false, - "default": "'claude'" - }, - "pr_submitted": { - "name": "pr_submitted", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false, - "default": 0 + "autoincrement": false }, "created_at": { "name": "created_at", @@ -108,13 +99,6 @@ "notNull": false, "autoincrement": false }, - "active_session_id": { - "name": "active_session_id", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, "active_switch_target": { "name": "active_switch_target", "type": "text", @@ -129,13 +113,20 @@ "notNull": false, "autoincrement": false }, - "status_message": { - "name": "status_message", + "git_state_json": { + "name": "git_state_json", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, + "git_state_updated_at": { + "name": "git_state_updated_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "updated_at": { "name": "updated_at", "type": "integer", @@ -165,8 +156,8 @@ "notNull": true, "autoincrement": false }, - "provider_id": { - "name": "provider_id", + "sandbox_provider_id": { + "name": "sandbox_provider_id", "type": "text", "primaryKey": false, "notNull": true, @@ -193,13 +184,6 @@ "notNull": false, "autoincrement": false }, - "status_message": { - "name": "status_message", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, "created_at": { "name": "created_at", "type": "integer", @@ -221,8 +205,8 @@ "uniqueConstraints": {}, "checkConstraints": {} }, - "task_workbench_sessions": { - "name": "task_workbench_sessions", + "task_workspace_sessions": { + "name": "task_workspace_sessions", "columns": { "session_id": { "name": "session_id", @@ 
-231,6 +215,13 @@ "notNull": true, "autoincrement": false }, + "sandbox_session_id": { + "name": "sandbox_session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "session_name": { "name": "session_name", "type": "text", @@ -245,32 +236,31 @@ "notNull": true, "autoincrement": false }, - "unread": { - "name": "unread", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "draft_text": { - "name": "draft_text", + "status": { + "name": "status", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false, - "default": "''" + "default": "'ready'" }, - "draft_attachments_json": { - "name": "draft_attachments_json", + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "transcript_json": { + "name": "transcript_json", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false, "default": "'[]'" }, - "draft_updated_at": { - "name": "draft_updated_at", + "transcript_updated_at": { + "name": "transcript_updated_at", "type": "integer", "primaryKey": false, "notNull": false, diff --git a/foundry/packages/backend/src/actors/task/db/migrations.ts b/foundry/packages/backend/src/actors/task/db/migrations.ts index dc3193e..1e6ff76 100644 --- a/foundry/packages/backend/src/actors/task/db/migrations.ts +++ b/foundry/packages/backend/src/actors/task/db/migrations.ts @@ -10,12 +10,6 @@ const journal = { tag: "0000_charming_maestro", breakpoints: true, }, - { - idx: 1, - when: 1773810000000, - tag: "0001_sandbox_provider_columns", - breakpoints: true, - }, ], } as const; @@ -27,10 +21,9 @@ export default { \`branch_name\` text, \`title\` text, \`task\` text NOT NULL, - \`provider_id\` text NOT NULL, + \`sandbox_provider_id\` text NOT NULL, \`status\` text NOT NULL, - \`agent_type\` text DEFAULT 'claude', - \`pr_submitted\` integer DEFAULT 0, + 
\`pull_request_json\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL, CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) @@ -39,43 +32,39 @@ export default { CREATE TABLE \`task_runtime\` ( \`id\` integer PRIMARY KEY NOT NULL, \`active_sandbox_id\` text, - \`active_session_id\` text, \`active_switch_target\` text, \`active_cwd\` text, - \`status_message\` text, + \`git_state_json\` text, + \`git_state_updated_at\` integer, \`updated_at\` integer NOT NULL, CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) ); --> statement-breakpoint CREATE TABLE \`task_sandboxes\` ( \`sandbox_id\` text PRIMARY KEY NOT NULL, - \`provider_id\` text NOT NULL, + \`sandbox_provider_id\` text NOT NULL, \`sandbox_actor_id\` text, \`switch_target\` text NOT NULL, \`cwd\` text, - \`status_message\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE \`task_workbench_sessions\` ( +CREATE TABLE \`task_workspace_sessions\` ( \`session_id\` text PRIMARY KEY NOT NULL, + \`sandbox_session_id\` text, \`session_name\` text NOT NULL, \`model\` text NOT NULL, - \`unread\` integer DEFAULT 0 NOT NULL, - \`draft_text\` text DEFAULT '' NOT NULL, - \`draft_attachments_json\` text DEFAULT '[]' NOT NULL, - \`draft_updated_at\` integer, + \`status\` text DEFAULT 'ready' NOT NULL, + \`error_message\` text, + \`transcript_json\` text DEFAULT '[]' NOT NULL, + \`transcript_updated_at\` integer, \`created\` integer DEFAULT 1 NOT NULL, \`closed\` integer DEFAULT 0 NOT NULL, \`thinking_since_ms\` integer, -\`created_at\` integer NOT NULL, + \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); -`, - m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; ---> statement-breakpoint -ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts 
b/foundry/packages/backend/src/actors/task/db/schema.ts index 889aa31..651ff76 100644 --- a/foundry/packages/backend/src/actors/task/db/schema.ts +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -11,8 +11,7 @@ export const task = sqliteTable( task: text("task").notNull(), sandboxProviderId: text("sandbox_provider_id").notNull(), status: text("status").notNull(), - agentType: text("agent_type").default("claude"), - prSubmitted: integer("pr_submitted").default(0), + pullRequestJson: text("pull_request_json"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }, @@ -24,14 +23,10 @@ export const taskRuntime = sqliteTable( { id: integer("id").primaryKey(), activeSandboxId: text("active_sandbox_id"), - activeSessionId: text("active_session_id"), activeSwitchTarget: text("active_switch_target"), activeCwd: text("active_cwd"), - statusMessage: text("status_message"), gitStateJson: text("git_state_json"), gitStateUpdatedAt: integer("git_state_updated_at"), - provisionStage: text("provision_stage"), - provisionStageUpdatedAt: integer("provision_stage_updated_at"), updatedAt: integer("updated_at").notNull(), }, (table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)], @@ -48,18 +43,17 @@ export const taskSandboxes = sqliteTable("task_sandboxes", { sandboxActorId: text("sandbox_actor_id"), switchTarget: text("switch_target").notNull(), cwd: text("cwd"), - statusMessage: text("status_message"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }); /** - * Coordinator index of workbench sessions within this task. + * Coordinator index of workspace sessions within this task. * The task actor is the coordinator for sessions. Each row holds session * metadata, model, status, transcript, and draft state. Sessions are * sub-entities of the task — no separate session actor in the DB. 
*/ -export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", { +export const taskWorkspaceSessions = sqliteTable("task_workspace_sessions", { sessionId: text("session_id").notNull().primaryKey(), sandboxSessionId: text("sandbox_session_id"), sessionName: text("session_name").notNull(), @@ -68,11 +62,6 @@ export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", { errorMessage: text("error_message"), transcriptJson: text("transcript_json").notNull().default("[]"), transcriptUpdatedAt: integer("transcript_updated_at"), - unread: integer("unread").notNull().default(0), - draftText: text("draft_text").notNull().default(""), - // Structured by the workbench composer attachment payload format. - draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), - draftUpdatedAt: integer("draft_updated_at"), created: integer("created").notNull().default(1), closed: integer("closed").notNull().default(0), thinkingSinceMs: integer("thinking_since_ms"), diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index f2b9e51..7e1c5e2 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -1,393 +1,47 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import type { - AgentType, - TaskRecord, - TaskWorkbenchChangeModelInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchUpdateDraftInput, - SandboxProviderId, -} from "@sandbox-agent/foundry-shared"; -import { expectQueueResponse } from "../../services/queue.js"; -import { selfTask } from "../handles.js"; +import { actor } from "rivetkit"; +import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { taskDb } from "./db/db.js"; import { getCurrentRecord } from "./workflow/common.js"; -import { - 
changeWorkbenchModel, - closeWorkbenchSession, - createWorkbenchSession, - getSessionDetail, - getTaskDetail, - getTaskSummary, - markWorkbenchUnread, - publishWorkbenchPr, - renameWorkbenchBranch, - renameWorkbenchTask, - renameWorkbenchSession, - revertWorkbenchFile, - sendWorkbenchMessage, - syncWorkbenchSessionStatus, - setWorkbenchSessionUnread, - stopWorkbenchSession, - updateWorkbenchDraft, -} from "./workbench.js"; -import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js"; +import { getSessionDetail, getTaskDetail, getTaskSummary } from "./workspace.js"; +import { taskCommandActions } from "./workflow/index.js"; export interface TaskInput { organizationId: string; repoId: string; taskId: string; - repoRemote: string; - branchName: string | null; - title: string | null; - task: string; - sandboxProviderId: SandboxProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; -} - -interface InitializeCommand { - sandboxProviderId?: SandboxProviderId; -} - -interface TaskActionCommand { - reason?: string; -} - -interface TaskSessionCommand { - sessionId: string; -} - -interface TaskStatusSyncCommand { - sessionId: string; - status: "running" | "idle" | "error"; - at: number; -} - -interface TaskWorkbenchValueCommand { - value: string; -} - -interface TaskWorkbenchSessionTitleCommand { - sessionId: string; - title: string; -} - -interface TaskWorkbenchSessionUnreadCommand { - sessionId: string; - unread: boolean; -} - -interface TaskWorkbenchUpdateDraftCommand { - sessionId: string; - text: string; - attachments: Array; -} - -interface TaskWorkbenchChangeModelCommand { - sessionId: string; - model: string; -} - -interface TaskWorkbenchSendMessageCommand { - sessionId: string; - text: string; - attachments: Array; -} - -interface TaskWorkbenchCreateSessionCommand { - model?: string; -} - -interface TaskWorkbenchCreateSessionAndSendCommand { - 
model?: string; - text: string; -} - -interface TaskWorkbenchSessionCommand { - sessionId: string; } export const task = actor({ db: taskDb, - queues: Object.fromEntries(TASK_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "Task", icon: "wrench", - actionTimeout: 5 * 60_000, + actionTimeout: 10 * 60_000, }, createState: (_c, input: TaskInput) => ({ organizationId: input.organizationId, repoId: input.repoId, taskId: input.taskId, - repoRemote: input.repoRemote, - branchName: input.branchName, - title: input.title, - task: input.task, - sandboxProviderId: input.sandboxProviderId, - agentType: input.agentType, - explicitTitle: input.explicitTitle, - explicitBranchName: input.explicitBranchName, - initialPrompt: input.initialPrompt, - initialized: false, - previousStatus: null as string | null, }), actions: { - async initialize(c, cmd: InitializeCommand): Promise { - const self = selfTask(c); - const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, { - wait: true, - timeout: 10_000, - }); - return expectQueueResponse(result); - }, - - async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, { - wait: false, - }); - return { ok: true }; - }, - - async attach(c, cmd?: TaskActionCommand): Promise<{ target: string; sessionId: string | null }> { - const self = selfTask(c); - const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? 
{}, { - wait: true, - timeout: 10_000, - }); - return expectQueueResponse<{ target: string; sessionId: string | null }>(result); - }, - - async switch(c): Promise<{ switchTarget: string }> { - const self = selfTask(c); - const result = await self.send( - taskWorkflowQueueName("task.command.switch"), - {}, - { - wait: true, - timeout: 10_000, - }, - ); - return expectQueueResponse<{ switchTarget: string }>(result); - }, - - async push(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.push"), cmd ?? {}, { - wait: false, - }); - }, - - async sync(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.sync"), cmd ?? {}, { - wait: false, - }); - }, - - async merge(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.merge"), cmd ?? {}, { - wait: false, - }); - }, - - async archive(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, { - wait: false, - }); - }, - - async kill(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.kill"), cmd ?? 
{}, { - wait: false, - }); - }, - async get(c): Promise { - return await getCurrentRecord({ db: c.db, state: c.state }); + return await getCurrentRecord(c); }, async getTaskSummary(c) { return await getTaskSummary(c); }, - async getTaskDetail(c) { - return await getTaskDetail(c); + async getTaskDetail(c, input?: { authSessionId?: string }) { + return await getTaskDetail(c, input?.authSessionId); }, - async getSessionDetail(c, input: { sessionId: string }) { - return await getSessionDetail(c, input.sessionId); + async getSessionDetail(c, input: { sessionId: string; authSessionId?: string }) { + return await getSessionDetail(c, input.sessionId, input.authSessionId); }, - async markWorkbenchUnread(c): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.mark_unread"), - {}, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async renameWorkbenchTask(c, input: TaskWorkbenchRenameInput): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.rename_task"), { value: input.value } satisfies TaskWorkbenchValueCommand, { - wait: true, - timeout: 20_000, - }); - }, - - async renameWorkbenchBranch(c, input: TaskWorkbenchRenameInput): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.rename_branch"), { value: input.value } satisfies TaskWorkbenchValueCommand, { - wait: false, - }); - }, - - async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> { - const self = selfTask(c); - const result = await self.send( - taskWorkflowQueueName("task.command.workbench.create_session"), - { ...(input?.model ? { model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand, - { - wait: true, - timeout: 10_000, - }, - ); - return expectQueueResponse<{ sessionId: string }>(result); - }, - - /** - * Fire-and-forget: creates a workbench session and sends the initial message. 
- * Used by createWorkbenchTask so the caller doesn't block on session creation. - */ - async createWorkbenchSessionAndSend(c, input: { model?: string; text: string }): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.create_session_and_send"), - { model: input.model, text: input.text } satisfies TaskWorkbenchCreateSessionAndSendCommand, - { wait: false }, - ); - }, - - async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.rename_session"), - { sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async setWorkbenchSessionUnread(c, input: TaskWorkbenchSetSessionUnreadInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.set_session_unread"), - { sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async updateWorkbenchDraft(c, input: TaskWorkbenchUpdateDraftInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.update_draft"), - { - sessionId: input.sessionId, - text: input.text, - attachments: input.attachments, - } satisfies TaskWorkbenchUpdateDraftCommand, - { - wait: false, - }, - ); - }, - - async changeWorkbenchModel(c, input: TaskWorkbenchChangeModelInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.change_model"), - { sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise { - const self = selfTask(c); - await self.send( - 
taskWorkflowQueueName("task.command.workbench.send_message"), - { - sessionId: input.sessionId, - text: input.text, - attachments: input.attachments, - } satisfies TaskWorkbenchSendMessageCommand, - { - wait: false, - }, - ); - }, - - async stopWorkbenchSession(c, input: TaskSessionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { - wait: false, - }); - }, - - async syncWorkbenchSessionStatus(c, input: TaskStatusSyncCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.sync_session_status"), input, { - wait: true, - timeout: 20_000, - }); - }, - - async closeWorkbenchSession(c, input: TaskSessionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { - wait: false, - }); - }, - - async publishWorkbenchPr(c): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.publish_pr"), - {}, - { - wait: false, - }, - ); - }, - - async revertWorkbenchFile(c, input: { path: string }): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.revert_file"), input, { - wait: false, - }); - }, + ...taskCommandActions, }, - run: workflow(runTaskWorkflow), }); -export { TASK_QUEUE_NAMES }; +export { taskWorkflowQueueName } from "./workflow/index.js"; diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts index d03ade1..7ba2d2b 100644 --- a/foundry/packages/backend/src/actors/task/workflow/commands.ts +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -2,8 +2,8 @@ import { eq } from "drizzle-orm"; import { getTaskSandbox } from 
"../../handles.js"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; -import { task as taskTable, taskRuntime } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js"; +import { task as taskTable } from "../db/schema.js"; +import { TASK_ROW_ID, appendAuditLog, getCurrentRecord, setTaskState } from "./common.js"; import { pushActiveBranchActivity } from "./push.js"; async function withTimeout(promise: Promise, timeoutMs: number, label: string): Promise { @@ -25,6 +25,7 @@ async function withTimeout(promise: Promise, timeoutMs: number, label: str export async function handleAttachActivity(loopCtx: any, msg: any): Promise { const record = await getCurrentRecord(loopCtx); let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? ""; + const sessionId = msg.body?.sessionId ?? null; if (record.activeSandboxId) { try { @@ -38,14 +39,14 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise await msg.complete({ ok: true }); } -export async function handleSimpleCommandActivity(loopCtx: any, msg: any, statusMessage: string, historyKind: string): Promise { - const db = loopCtx.db; - await db.update(taskRuntime).set({ statusMessage, updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run(); - - await appendHistory(loopCtx, historyKind, { reason: msg.body?.reason ?? null }); +export async function handleSimpleCommandActivity(loopCtx: any, msg: any, historyKind: string): Promise { + await appendAuditLog(loopCtx, historyKind, { reason: msg.body?.reason ?? 
null }); await msg.complete({ ok: true }); } export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync"); + await setTaskState(loopCtx, "archive_stop_status_sync"); const record = await getCurrentRecord(loopCtx); if (record.activeSandboxId) { - await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox"); + await setTaskState(loopCtx, "archive_release_sandbox"); void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => { logActorWarning("task.commands", "failed to release sandbox during archive", { organizationId: loopCtx.state.organizationId, @@ -90,17 +88,15 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "kill_destroy_sandbox", "destroying sandbox"); + await setTaskState(loopCtx, "kill_destroy_sandbox"); const record = await getCurrentRecord(loopCtx); if (!record.activeSandboxId) { return; @@ -110,13 +106,11 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise { } export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "kill_finalize", "finalizing kill"); + await setTaskState(loopCtx, "kill_finalize"); const db = loopCtx.db; await db.update(taskTable).set({ status: "killed", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - await db.update(taskRuntime).set({ statusMessage: "killed", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run(); - - await appendHistory(loopCtx, "task.kill", { reason: msg.body?.reason ?? null }); + await appendAuditLog(loopCtx, "task.kill", { reason: msg.body?.reason ?? 
null }); await msg.complete({ ok: true }); } diff --git a/foundry/packages/backend/src/actors/task/workflow/common.ts b/foundry/packages/backend/src/actors/task/workflow/common.ts index ae1e8dd..cbe63e6 100644 --- a/foundry/packages/backend/src/actors/task/workflow/common.ts +++ b/foundry/packages/backend/src/actors/task/workflow/common.ts @@ -2,8 +2,10 @@ import { eq } from "drizzle-orm"; import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared"; import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js"; -import { historyKey } from "../../keys.js"; -import { broadcastTaskUpdate } from "../workbench.js"; +import { getOrCreateAuditLog, getOrCreateOrganization } from "../../handles.js"; +import { broadcastTaskUpdate } from "../workspace.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; export const TASK_ROW_ID = 1; @@ -56,50 +58,32 @@ export function buildAgentPrompt(task: string): string { return task.trim(); } -export async function setTaskState(ctx: any, status: TaskStatus, statusMessage?: string): Promise { +export async function setTaskState(ctx: any, status: TaskStatus): Promise { const now = Date.now(); const db = ctx.db; await db.update(taskTable).set({ status, updatedAt: now }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - if (statusMessage != null) { - await db - .insert(taskRuntime) - .values({ - id: TASK_ROW_ID, - activeSandboxId: null, - activeSessionId: null, - activeSwitchTarget: null, - activeCwd: null, - statusMessage, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskRuntime.id, - set: { - statusMessage, - updatedAt: now, - }, - }) - .run(); - } - await broadcastTaskUpdate(ctx); } +/** + * Read the task's current record from its local SQLite DB. 
+ * If the task actor was lazily created (virtual task from PR sync) and has no + * DB rows yet, auto-initializes by reading branch/title from the org actor's + * getTaskIndexEntry. This is the self-initialization path for lazy task actors. + */ export async function getCurrentRecord(ctx: any): Promise { const db = ctx.db; - const row = await db + const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); + let row = await db .select({ branchName: taskTable.branchName, title: taskTable.title, task: taskTable.task, sandboxProviderId: taskTable.sandboxProviderId, status: taskTable.status, - statusMessage: taskRuntime.statusMessage, + pullRequestJson: taskTable.pullRequestJson, activeSandboxId: taskRuntime.activeSandboxId, - activeSessionId: taskRuntime.activeSessionId, - agentType: taskTable.agentType, - prSubmitted: taskTable.prSubmitted, createdAt: taskTable.createdAt, updatedAt: taskTable.updatedAt, }) @@ -109,7 +93,58 @@ export async function getCurrentRecord(ctx: any): Promise { .get(); if (!row) { - throw new Error(`Task not found: ${ctx.state.taskId}`); + // Virtual task — auto-initialize from org actor's task index data + let branchName: string | null = null; + let title = "Untitled"; + try { + const entry = await organization.getTaskIndexEntry({ taskId: ctx.state.taskId }); + branchName = entry?.branchName ?? null; + title = entry?.title ?? 
title; + } catch {} + + const { config } = getActorRuntimeContext(); + const { initBootstrapDbActivity, initCompleteActivity } = await import("./init.js"); + await initBootstrapDbActivity(ctx, { + sandboxProviderId: defaultSandboxProviderId(config), + branchName, + title, + task: title, + }); + await initCompleteActivity(ctx, { sandboxProviderId: defaultSandboxProviderId(config) }); + + // Re-read the row after initialization + const initialized = await db + .select({ + branchName: taskTable.branchName, + title: taskTable.title, + task: taskTable.task, + sandboxProviderId: taskTable.sandboxProviderId, + status: taskTable.status, + pullRequestJson: taskTable.pullRequestJson, + activeSandboxId: taskRuntime.activeSandboxId, + createdAt: taskTable.createdAt, + updatedAt: taskTable.updatedAt, + }) + .from(taskTable) + .leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id)) + .where(eq(taskTable.id, TASK_ROW_ID)) + .get(); + + if (!initialized) { + throw new Error(`Task not found after initialization: ${ctx.state.taskId}`); + } + + row = initialized; + } + + const repositoryMetadata = await organization.getRepositoryMetadata({ repoId: ctx.state.repoId }); + let pullRequest = null; + if (row.pullRequestJson) { + try { + pullRequest = JSON.parse(row.pullRequestJson); + } catch { + pullRequest = null; + } } const sandboxes = await db @@ -128,16 +163,15 @@ export async function getCurrentRecord(ctx: any): Promise { return { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId, - repoRemote: ctx.state.repoRemote, + repoRemote: repositoryMetadata.remoteUrl, taskId: ctx.state.taskId, branchName: row.branchName, title: row.title, task: row.task, sandboxProviderId: row.sandboxProviderId, status: row.status, - statusMessage: row.statusMessage ?? null, activeSandboxId: row.activeSandboxId ?? null, - activeSessionId: row.activeSessionId ?? 
null, + pullRequest, sandboxes: sandboxes.map((sb) => ({ sandboxId: sb.sandboxId, sandboxProviderId: sb.sandboxProviderId, @@ -147,31 +181,19 @@ export async function getCurrentRecord(ctx: any): Promise { createdAt: sb.createdAt, updatedAt: sb.updatedAt, })), - agentType: row.agentType ?? null, - prSubmitted: Boolean(row.prSubmitted), - diffStat: null, - hasUnpushed: null, - conflictsWithMain: null, - parentBranch: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, createdAt: row.createdAt, updatedAt: row.updatedAt, } as TaskRecord; } -export async function appendHistory(ctx: any, kind: string, payload: Record): Promise { - const client = ctx.client(); - const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), { - createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId }, - }); - await history.append({ +export async function appendAuditLog(ctx: any, kind: string, payload: Record): Promise { + const row = await ctx.db.select({ branchName: taskTable.branchName }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get(); + const auditLog = await getOrCreateAuditLog(ctx, ctx.state.organizationId); + void auditLog.append({ kind, + repoId: ctx.state.repoId, taskId: ctx.state.taskId, - branchName: ctx.state.branchName, + branchName: row?.branchName ?? 
null, payload, }); diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index f6ffd10..69004ee 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -1,4 +1,3 @@ -import { Loop } from "rivetkit/workflow"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; import { getCurrentRecord } from "./common.js"; import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js"; @@ -12,283 +11,254 @@ import { killDestroySandboxActivity, killWriteDbActivity, } from "./commands.js"; -import { TASK_QUEUE_NAMES } from "./queue.js"; import { - changeWorkbenchModel, - closeWorkbenchSession, - createWorkbenchSession, - ensureWorkbenchSession, - refreshWorkbenchDerivedState, - refreshWorkbenchSessionTranscript, - markWorkbenchUnread, - publishWorkbenchPr, - renameWorkbenchBranch, - renameWorkbenchTask, - renameWorkbenchSession, - revertWorkbenchFile, - sendWorkbenchMessage, - setWorkbenchSessionUnread, - stopWorkbenchSession, - syncWorkbenchSessionStatus, - updateWorkbenchDraft, -} from "../workbench.js"; + changeWorkspaceModel, + closeWorkspaceSession, + createWorkspaceSession, + ensureWorkspaceSession, + refreshWorkspaceDerivedState, + refreshWorkspaceSessionTranscript, + markWorkspaceUnread, + publishWorkspacePr, + renameWorkspaceTask, + renameWorkspaceSession, + selectWorkspaceSession, + revertWorkspaceFile, + sendWorkspaceMessage, + setWorkspaceSessionUnread, + stopWorkspaceSession, + syncTaskPullRequest, + syncWorkspaceSessionStatus, + updateWorkspaceDraft, +} from "../workspace.js"; -export { TASK_QUEUE_NAMES, taskWorkflowQueueName } from "./queue.js"; +export { taskWorkflowQueueName } from "./queue.js"; -type TaskQueueName = (typeof TASK_QUEUE_NAMES)[number]; +/** + * Task command actions — converted from queue/workflow handlers to 
direct actions. + * Each export becomes an action on the task actor. + */ +export const taskCommandActions = { + async initialize(c: any, body: any) { + await initBootstrapDbActivity(c, body); + await initEnqueueProvisionActivity(c, body); + return await getCurrentRecord(c); + }, -type WorkflowHandler = (loopCtx: any, msg: { name: TaskQueueName; body: any; complete: (response: unknown) => Promise }) => Promise; - -const commandHandlers: Record = { - "task.command.initialize": async (loopCtx, msg) => { - const body = msg.body; - - await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body)); - await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body)); - await loopCtx.removed("init-dispatch-provision-v2", "step"); - const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx)); + async provision(c: any, body: any) { try { - await msg.complete(currentRecord); + await initCompleteActivity(c, body); + return { ok: true }; } catch (error) { - logActorWarning("task.workflow", "initialize completion failed", { - error: resolveErrorMessage(error), - }); + await initFailedActivity(c, error, body); + return { ok: false, error: resolveErrorMessage(error) }; } }, - "task.command.provision": async (loopCtx, msg) => { - await loopCtx.removed("init-failed", "step"); - await loopCtx.removed("init-failed-v2", "step"); + async attach(c: any, body: any) { + // handleAttachActivity expects msg with complete — adapt + const result = { value: undefined as any }; + const msg = { + name: "task.command.attach", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleAttachActivity(c, msg); + return result.value; + }, + + async switchTask(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.switch", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await 
handleSwitchActivity(c, msg); + return result.value; + }, + + async push(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.push", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handlePushActivity(c, msg); + return result.value; + }, + + async sync(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.sync", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleSimpleCommandActivity(c, msg, "task.sync"); + return result.value; + }, + + async merge(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.merge", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleSimpleCommandActivity(c, msg, "task.merge"); + return result.value; + }, + + async archive(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.archive", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleArchiveActivity(c, msg); + return result.value; + }, + + async kill(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.kill", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await killDestroySandboxActivity(c); + await killWriteDbActivity(c, msg); + return result.value; + }, + + async getRecord(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.get", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleGetActivity(c, msg); + return result.value; + }, + + async pullRequestSync(c: any, body: any) { + await syncTaskPullRequest(c, body?.pullRequest ?? 
null); + return { ok: true }; + }, + + async markUnread(c: any, body: any) { + await markWorkspaceUnread(c, body?.authSessionId); + return { ok: true }; + }, + + async renameTask(c: any, body: any) { + await renameWorkspaceTask(c, body.value); + return { ok: true }; + }, + + async createSession(c: any, body: any) { + return await createWorkspaceSession(c, body?.model, body?.authSessionId); + }, + + async createSessionAndSend(c: any, body: any) { try { - await loopCtx.removed("init-ensure-name", "step"); - await loopCtx.removed("init-assert-name", "step"); - await loopCtx.removed("init-create-sandbox", "step"); - await loopCtx.removed("init-ensure-agent", "step"); - await loopCtx.removed("init-start-sandbox-instance", "step"); - await loopCtx.removed("init-expose-sandbox", "step"); - await loopCtx.removed("init-create-session", "step"); - await loopCtx.removed("init-write-db", "step"); - await loopCtx.removed("init-start-status-sync", "step"); - await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, msg.body)); - await msg.complete({ ok: true }); - } catch (error) { - await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error)); - await msg.complete({ - ok: false, - error: resolveErrorMessage(error), - }); - } - }, - - "task.command.attach": async (loopCtx, msg) => { - await loopCtx.step("handle-attach", async () => handleAttachActivity(loopCtx, msg)); - }, - - "task.command.switch": async (loopCtx, msg) => { - await loopCtx.step("handle-switch", async () => handleSwitchActivity(loopCtx, msg)); - }, - - "task.command.push": async (loopCtx, msg) => { - await loopCtx.step("handle-push", async () => handlePushActivity(loopCtx, msg)); - }, - - "task.command.sync": async (loopCtx, msg) => { - await loopCtx.step("handle-sync", async () => handleSimpleCommandActivity(loopCtx, msg, "sync requested", "task.sync")); - }, - - "task.command.merge": async (loopCtx, msg) => { - await loopCtx.step("handle-merge", async () => 
handleSimpleCommandActivity(loopCtx, msg, "merge requested", "task.merge")); - }, - - "task.command.archive": async (loopCtx, msg) => { - await loopCtx.step("handle-archive", async () => handleArchiveActivity(loopCtx, msg)); - }, - - "task.command.kill": async (loopCtx, msg) => { - await loopCtx.step("kill-destroy-sandbox", async () => killDestroySandboxActivity(loopCtx)); - await loopCtx.step("kill-write-db", async () => killWriteDbActivity(loopCtx, msg)); - }, - - "task.command.get": async (loopCtx, msg) => { - await loopCtx.step("handle-get", async () => handleGetActivity(loopCtx, msg)); - }, - - "task.command.workbench.mark_unread": async (loopCtx, msg) => { - await loopCtx.step("workbench-mark-unread", async () => markWorkbenchUnread(loopCtx)); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.rename_task": async (loopCtx, msg) => { - await loopCtx.step("workbench-rename-task", async () => renameWorkbenchTask(loopCtx, msg.body.value)); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.rename_branch": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-rename-branch", - timeout: 5 * 60_000, - run: async () => renameWorkbenchBranch(loopCtx, msg.body.value), - }); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.create_session": async (loopCtx, msg) => { - try { - const created = await loopCtx.step({ - name: "workbench-create-session", - timeout: 5 * 60_000, - run: async () => createWorkbenchSession(loopCtx, msg.body?.model), - }); - await msg.complete(created); - } catch (error) { - await msg.complete({ error: resolveErrorMessage(error) }); - } - }, - - "task.command.workbench.create_session_and_send": async (loopCtx, msg) => { - try { - const created = await loopCtx.step({ - name: "workbench-create-session-for-send", - timeout: 5 * 60_000, - run: async () => createWorkbenchSession(loopCtx, msg.body?.model), - }); - await loopCtx.step({ - name: "workbench-send-initial-message", - timeout: 
5 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []), - }); + const created = await createWorkspaceSession(c, body?.model, body?.authSessionId); + await sendWorkspaceMessage(c, created.sessionId, body.text, [], body?.authSessionId); } catch (error) { logActorWarning("task.workflow", "create_session_and_send failed", { error: resolveErrorMessage(error), }); } - await msg.complete({ ok: true }); + return { ok: true }; }, - "task.command.workbench.ensure_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-ensure-session", - timeout: 5 * 60_000, - run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model), - }); - await msg.complete({ ok: true }); + async ensureSession(c: any, body: any) { + await ensureWorkspaceSession(c, body.sessionId, body?.model, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.rename_session": async (loopCtx, msg) => { - await loopCtx.step("workbench-rename-session", async () => renameWorkbenchSession(loopCtx, msg.body.sessionId, msg.body.title)); - await msg.complete({ ok: true }); + async renameSession(c: any, body: any) { + await renameWorkspaceSession(c, body.sessionId, body.title); + return { ok: true }; }, - "task.command.workbench.set_session_unread": async (loopCtx, msg) => { - await loopCtx.step("workbench-set-session-unread", async () => setWorkbenchSessionUnread(loopCtx, msg.body.sessionId, msg.body.unread)); - await msg.complete({ ok: true }); + async selectSession(c: any, body: any) { + await selectWorkspaceSession(c, body.sessionId, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.update_draft": async (loopCtx, msg) => { - await loopCtx.step("workbench-update-draft", async () => updateWorkbenchDraft(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments)); - await msg.complete({ ok: true }); + async setSessionUnread(c: any, body: any) { + await setWorkspaceSessionUnread(c, 
body.sessionId, body.unread, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.change_model": async (loopCtx, msg) => { - await loopCtx.step("workbench-change-model", async () => changeWorkbenchModel(loopCtx, msg.body.sessionId, msg.body.model)); - await msg.complete({ ok: true }); + async updateDraft(c: any, body: any) { + await updateWorkspaceDraft(c, body.sessionId, body.text, body.attachments, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.send_message": async (loopCtx, msg) => { - try { - await loopCtx.step({ - name: "workbench-send-message", - timeout: 10 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments), - }); - await msg.complete({ ok: true }); - } catch (error) { - await msg.complete({ error: resolveErrorMessage(error) }); - } + async changeModel(c: any, body: any) { + await changeWorkspaceModel(c, body.sessionId, body.model, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.stop_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-stop-session", - timeout: 5 * 60_000, - run: async () => stopWorkbenchSession(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async sendMessage(c: any, body: any) { + await sendWorkspaceMessage(c, body.sessionId, body.text, body.attachments, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.sync_session_status": async (loopCtx, msg) => { - await loopCtx.step("workbench-sync-session-status", async () => syncWorkbenchSessionStatus(loopCtx, msg.body.sessionId, msg.body.status, msg.body.at)); - await msg.complete({ ok: true }); + async stopSession(c: any, body: any) { + await stopWorkspaceSession(c, body.sessionId); + return { ok: true }; }, - "task.command.workbench.refresh_derived": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-refresh-derived", - timeout: 5 * 60_000, - run: async () => 
refreshWorkbenchDerivedState(loopCtx), - }); - await msg.complete({ ok: true }); + async syncSessionStatus(c: any, body: any) { + await syncWorkspaceSessionStatus(c, body.sessionId, body.status, body.at); + return { ok: true }; }, - "task.command.workbench.refresh_session_transcript": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-refresh-session-transcript", - timeout: 60_000, - run: async () => refreshWorkbenchSessionTranscript(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async refreshDerived(c: any, _body: any) { + await refreshWorkspaceDerivedState(c); + return { ok: true }; }, - "task.command.workbench.close_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-close-session", - timeout: 5 * 60_000, - run: async () => closeWorkbenchSession(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async refreshSessionTranscript(c: any, body: any) { + await refreshWorkspaceSessionTranscript(c, body.sessionId); + return { ok: true }; }, - "task.command.workbench.publish_pr": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-publish-pr", - timeout: 10 * 60_000, - run: async () => publishWorkbenchPr(loopCtx), - }); - await msg.complete({ ok: true }); + async closeSession(c: any, body: any) { + await closeWorkspaceSession(c, body.sessionId, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.revert_file": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-revert-file", - timeout: 5 * 60_000, - run: async () => revertWorkbenchFile(loopCtx, msg.body.path), - }); - await msg.complete({ ok: true }); + async publishPr(c: any, _body: any) { + await publishWorkspacePr(c); + return { ok: true }; + }, + + async revertFile(c: any, body: any) { + await revertWorkspaceFile(c, body.path); + return { ok: true }; }, }; - -export async function runTaskWorkflow(ctx: any): Promise { - await ctx.loop("task-command-loop", async (loopCtx: 
any) => { - const msg = await loopCtx.queue.next("next-command", { - names: [...TASK_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - const handler = commandHandlers[msg.name as TaskQueueName]; - if (handler) { - try { - await handler(loopCtx, msg); - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("task.workflow", "task workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch(() => {}); - } - } - return Loop.continue(undefined); - }); -} diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts b/foundry/packages/backend/src/actors/task/workflow/init.ts index 8a9962d..08085e8 100644 --- a/foundry/packages/backend/src/actors/task/workflow/init.ts +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -1,49 +1,44 @@ // @ts-nocheck import { eq } from "drizzle-orm"; import { getActorRuntimeContext } from "../../context.js"; -import { getOrCreateHistory, selfTask } from "../../handles.js"; +import { selfTask } from "../../handles.js"; import { resolveErrorMessage } from "../../logging.js"; import { defaultSandboxProviderId } from "../../../sandbox-config.js"; import { task as taskTable, taskRuntime } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; -import { taskWorkflowQueueName } from "./queue.js"; - -async function ensureTaskRuntimeCacheColumns(db: any): Promise { - await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_updated_at integer`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage text`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {}); -} +import { TASK_ROW_ID, appendAuditLog, 
collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; +// task actions called directly (no queue) export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { const { config } = getActorRuntimeContext(); - const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? defaultSandboxProviderId(config); + const task = body?.task; + if (typeof task !== "string" || task.trim().length === 0) { + throw new Error("task initialize requires the task prompt"); + } const now = Date.now(); - await ensureTaskRuntimeCacheColumns(loopCtx.db); - await loopCtx.db .insert(taskTable) .values({ id: TASK_ROW_ID, - branchName: loopCtx.state.branchName, - title: loopCtx.state.title, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task, sandboxProviderId, status: "init_bootstrap_db", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, createdAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskTable.id, set: { - branchName: loopCtx.state.branchName, - title: loopCtx.state.title, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task, sandboxProviderId, status: "init_bootstrap_db", - agentType: loopCtx.state.agentType ?? 
config.default_agent, + pullRequestJson: null, updatedAt: now, }, }) @@ -54,26 +49,18 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< .values({ id: TASK_ROW_ID, activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: "provisioning", gitStateJson: null, gitStateUpdatedAt: null, - provisionStage: "queued", - provisionStageUpdatedAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskRuntime.id, set: { activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: "provisioning", - provisionStage: "queued", - provisionStageUpdatedAt: now, updatedAt: now, }, }) @@ -81,22 +68,11 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< } export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise { - await setTaskState(loopCtx, "init_enqueue_provision", "provision queued"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "queued", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); + await setTaskState(loopCtx, "init_enqueue_provision"); const self = selfTask(loopCtx); try { - await self.send(taskWorkflowQueueName("task.command.provision"), body, { - wait: false, - }); + void self.provision(body).catch(() => {}); } catch (error) { logActorWarning("task.init", "background provision command failed", { organizationId: loopCtx.state.organizationId, @@ -111,60 +87,52 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro export async function initCompleteActivity(loopCtx: any, body: any): Promise { const now = Date.now(); const { config } = getActorRuntimeContext(); - const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? 
defaultSandboxProviderId(config); - await setTaskState(loopCtx, "init_complete", "task initialized"); + await setTaskState(loopCtx, "init_complete"); await loopCtx.db .update(taskRuntime) .set({ - statusMessage: "ready", - provisionStage: "ready", - provisionStageUpdatedAt: now, updatedAt: now, }) .where(eq(taskRuntime.id, TASK_ROW_ID)) .run(); - const history = await getOrCreateHistory(loopCtx, loopCtx.state.organizationId, loopCtx.state.repoId); - await history.append({ - kind: "task.initialized", - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, + await appendAuditLog(loopCtx, "task.initialized", { payload: { sandboxProviderId }, }); - - loopCtx.state.initialized = true; } -export async function initFailedActivity(loopCtx: any, error: unknown): Promise { +export async function initFailedActivity(loopCtx: any, error: unknown, body?: any): Promise { const now = Date.now(); const detail = resolveErrorDetail(error); const messages = collectErrorMessages(error); const { config } = getActorRuntimeContext(); - const sandboxProviderId = loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = defaultSandboxProviderId(config); + const task = typeof body?.task === "string" ? body.task : null; await loopCtx.db .insert(taskTable) .values({ id: TASK_ROW_ID, - branchName: loopCtx.state.branchName ?? null, - title: loopCtx.state.title ?? null, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? detail, sandboxProviderId, status: "error", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, createdAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskTable.id, set: { - branchName: loopCtx.state.branchName ?? null, - title: loopCtx.state.title ?? null, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? 
detail, sandboxProviderId, status: "error", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, updatedAt: now, }, }) @@ -175,30 +143,22 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise< .values({ id: TASK_ROW_ID, activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: detail, - provisionStage: "error", - provisionStageUpdatedAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskRuntime.id, set: { activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: detail, - provisionStage: "error", - provisionStageUpdatedAt: now, updatedAt: now, }, }) .run(); - await appendHistory(loopCtx, "task.error", { + await appendAuditLog(loopCtx, "task.error", { detail, messages, }); diff --git a/foundry/packages/backend/src/actors/task/workflow/push.ts b/foundry/packages/backend/src/actors/task/workflow/push.ts index c525ebe..f15ab0b 100644 --- a/foundry/packages/backend/src/actors/task/workflow/push.ts +++ b/foundry/packages/backend/src/actors/task/workflow/push.ts @@ -1,9 +1,7 @@ // @ts-nocheck -import { eq } from "drizzle-orm"; import { getTaskSandbox } from "../../handles.js"; import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; -import { taskRuntime, taskSandboxes } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js"; +import { appendAuditLog, getCurrentRecord } from "./common.js"; export interface PushActiveBranchOptions { reason?: string | null; @@ -13,7 +11,7 @@ export interface PushActiveBranchOptions { export async function pushActiveBranchActivity(loopCtx: any, options: PushActiveBranchOptions = {}): Promise { const record = await getCurrentRecord(loopCtx); const activeSandboxId = record.activeSandboxId; - const branchName = loopCtx.state.branchName ?? 
record.branchName; + const branchName = record.branchName; if (!activeSandboxId) { throw new Error("cannot push: no active sandbox"); @@ -28,19 +26,6 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive throw new Error("cannot push: active sandbox cwd is not set"); } - const now = Date.now(); - await loopCtx.db - .update(taskRuntime) - .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await loopCtx.db - .update(taskSandboxes) - .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now }) - .where(eq(taskSandboxes.sandboxId, activeSandboxId)) - .run(); - const script = [ "set -euo pipefail", `cd ${JSON.stringify(cwd)}`, @@ -68,20 +53,7 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive throw new Error(`git push failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); } - const updatedAt = Date.now(); - await loopCtx.db - .update(taskRuntime) - .set({ statusMessage: `push complete for ${branchName}`, updatedAt }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await loopCtx.db - .update(taskSandboxes) - .set({ statusMessage: `push complete for ${branchName}`, updatedAt }) - .where(eq(taskSandboxes.sandboxId, activeSandboxId)) - .run(); - - await appendHistory(loopCtx, options.historyKind ?? "task.push", { + await appendAuditLog(loopCtx, options.historyKind ?? "task.push", { reason: options.reason ?? 
null, branchName, sandboxId: activeSandboxId, diff --git a/foundry/packages/backend/src/actors/task/workflow/queue.ts b/foundry/packages/backend/src/actors/task/workflow/queue.ts index 3e613e2..133a657 100644 --- a/foundry/packages/backend/src/actors/task/workflow/queue.ts +++ b/foundry/packages/backend/src/actors/task/workflow/queue.ts @@ -9,24 +9,25 @@ export const TASK_QUEUE_NAMES = [ "task.command.archive", "task.command.kill", "task.command.get", - "task.command.workbench.mark_unread", - "task.command.workbench.rename_task", - "task.command.workbench.rename_branch", - "task.command.workbench.create_session", - "task.command.workbench.create_session_and_send", - "task.command.workbench.ensure_session", - "task.command.workbench.rename_session", - "task.command.workbench.set_session_unread", - "task.command.workbench.update_draft", - "task.command.workbench.change_model", - "task.command.workbench.send_message", - "task.command.workbench.stop_session", - "task.command.workbench.sync_session_status", - "task.command.workbench.refresh_derived", - "task.command.workbench.refresh_session_transcript", - "task.command.workbench.close_session", - "task.command.workbench.publish_pr", - "task.command.workbench.revert_file", + "task.command.pull_request.sync", + "task.command.workspace.mark_unread", + "task.command.workspace.rename_task", + "task.command.workspace.create_session", + "task.command.workspace.create_session_and_send", + "task.command.workspace.ensure_session", + "task.command.workspace.rename_session", + "task.command.workspace.select_session", + "task.command.workspace.set_session_unread", + "task.command.workspace.update_draft", + "task.command.workspace.change_model", + "task.command.workspace.send_message", + "task.command.workspace.stop_session", + "task.command.workspace.sync_session_status", + "task.command.workspace.refresh_derived", + "task.command.workspace.refresh_session_transcript", + "task.command.workspace.close_session", + 
"task.command.workspace.publish_pr", + "task.command.workspace.revert_file", ] as const; export function taskWorkflowQueueName(name: string): string { diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workspace.ts similarity index 66% rename from foundry/packages/backend/src/actors/task/workbench.ts rename to foundry/packages/backend/src/actors/task/workspace.ts index d6698ca..7505d01 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workspace.ts @@ -2,13 +2,24 @@ import { randomUUID } from "node:crypto"; import { basename, dirname } from "node:path"; import { asc, eq } from "drizzle-orm"; +import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, + workspaceAgentForModel, + workspaceSandboxAgentIdForModel, +} from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateRepository, getOrCreateTaskSandbox, getOrCreateOrganization, getTaskSandbox, selfTask } from "../handles.js"; +import { getOrCreateOrganization, getOrCreateTaskSandbox, getOrCreateUser, getTaskSandbox, selfTask } from "../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; import { SANDBOX_REPO_CWD } from "../sandbox/index.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; +import { getBetterAuthService } from "../../services/better-auth.js"; +// expectQueueResponse removed — actions return values directly import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; import { githubRepoFullNameFromRemote } from "../../services/repo.js"; -import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js"; +// organization actions called directly (no queue) + +import { task as taskTable, taskRuntime, taskSandboxes, taskWorkspaceSessions } from "./db/schema.js"; import { getCurrentRecord } from 
"./workflow/common.js"; function emptyGitState() { @@ -20,62 +31,29 @@ function emptyGitState() { }; } -async function ensureWorkbenchSessionTable(c: any): Promise { - await c.db.execute(` - CREATE TABLE IF NOT EXISTS task_workbench_sessions ( - session_id text PRIMARY KEY NOT NULL, - sandbox_session_id text, - session_name text NOT NULL, - model text NOT NULL, - status text DEFAULT 'ready' NOT NULL, - error_message text, - transcript_json text DEFAULT '[]' NOT NULL, - transcript_updated_at integer, - unread integer DEFAULT 0 NOT NULL, - draft_text text DEFAULT '' NOT NULL, - draft_attachments_json text DEFAULT '[]' NOT NULL, - draft_updated_at integer, - created integer DEFAULT 1 NOT NULL, - closed integer DEFAULT 0 NOT NULL, - thinking_since_ms integer, - created_at integer NOT NULL, - updated_at integer NOT NULL - ) - `); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN sandbox_session_id text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN status text DEFAULT 'ready' NOT NULL`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN error_message text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_json text DEFAULT '[]' NOT NULL`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_updated_at integer`).catch(() => {}); -} - -async function ensureTaskRuntimeCacheColumns(c: any): Promise { - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_updated_at integer`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {}); -} - -function defaultModelForAgent(agentType: string | null | undefined) { - return 
agentType === "codex" ? "gpt-5.3-codex" : "claude-sonnet-4"; -} - -function isCodexModel(model: string) { - return model.startsWith("gpt-") || model.startsWith("o"); -} +const FALLBACK_MODEL = DEFAULT_WORKSPACE_MODEL_ID; function agentKindForModel(model: string) { - if (isCodexModel(model)) { - return "Codex"; - } - return "Claude"; + return workspaceAgentForModel(model); } -export function agentTypeForModel(model: string) { - if (isCodexModel(model)) { - return "codex"; +export function sandboxAgentIdForModel(model: string) { + return workspaceSandboxAgentIdForModel(model); +} + +async function resolveWorkspaceModelGroups(c: any): Promise { + try { + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + const groups = await sandbox.listWorkspaceModelGroups(); + return Array.isArray(groups) && groups.length > 0 ? groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; } - return "claude"; +} + +async function resolveSandboxAgentForModel(c: any, model: string): Promise { + const groups = await resolveWorkspaceModelGroups(c); + return workspaceSandboxAgentIdForModel(model, groups); } function repoLabelFromRemote(remoteUrl: string): string { @@ -93,6 +71,11 @@ function repoLabelFromRemote(remoteUrl: string): string { return basename(trimmed.replace(/\.git$/, "")); } +async function getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const organization = await getOrCreateOrganization(c, c.state.organizationId); + return await organization.getRepositoryMetadata({ repoId: c.state.repoId }); +} + function parseDraftAttachments(value: string | null | undefined): Array { if (!value) { return []; @@ -168,8 +151,7 @@ export function shouldRecreateSessionForModelChange(meta: { } async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise> { - await ensureWorkbenchSessionTable(c); - const rows = await 
c.db.select().from(taskWorkbenchSessions).orderBy(asc(taskWorkbenchSessions.createdAt)).all(); + const rows = await c.db.select().from(taskWorkspaceSessions).orderBy(asc(taskWorkspaceSessions.createdAt)).all(); const mapped = rows.map((row: any) => ({ ...row, id: row.sessionId, @@ -179,9 +161,6 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean } errorMessage: row.errorMessage ?? null, transcript: parseTranscript(row.transcriptJson), transcriptUpdatedAt: row.transcriptUpdatedAt ?? null, - draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), - draftUpdatedAtMs: row.draftUpdatedAt ?? null, - unread: row.unread === 1, created: row.created === 1, closed: row.closed === 1, })); @@ -199,8 +178,7 @@ async function nextSessionName(c: any): Promise { } async function readSessionMeta(c: any, sessionId: string): Promise { - await ensureWorkbenchSessionTable(c); - const row = await c.db.select().from(taskWorkbenchSessions).where(eq(taskWorkbenchSessions.sessionId, sessionId)).get(); + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sessionId, sessionId)).get(); if (!row) { return null; @@ -215,28 +193,107 @@ async function readSessionMeta(c: any, sessionId: string): Promise { errorMessage: row.errorMessage ?? null, transcript: parseTranscript(row.transcriptJson), transcriptUpdatedAt: row.transcriptUpdatedAt ?? null, - draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), - draftUpdatedAtMs: row.draftUpdatedAt ?? 
null, - unread: row.unread === 1, created: row.created === 1, closed: row.closed === 1, }; } +async function getUserTaskState(c: any, authSessionId?: string | null): Promise<{ activeSessionId: string | null; bySessionId: Map }> { + if (!authSessionId) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const user = await getOrCreateUser(c, userId); + const state = await user.getTaskState({ taskId: c.state.taskId }); + const bySessionId = new Map( + (state?.sessions ?? []).map((row: any) => [ + row.sessionId, + { + unread: Boolean(row.unread), + draftText: row.draftText ?? "", + draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), + draftUpdatedAtMs: row.draftUpdatedAt ?? null, + }, + ]), + ); + return { + activeSessionId: state?.activeSessionId ?? 
null, + bySessionId, + }; +} + +async function upsertUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string, patch: Record): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.taskStateUpsert({ + taskId: c.state.taskId, + sessionId, + patch, + }); +} + +async function deleteUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.taskStateDelete({ + taskId: c.state.taskId, + sessionId, + }); +} + +async function resolveDefaultModel(c: any, authSessionId?: string | null): Promise { + if (!authSessionId) { + return FALLBACK_MODEL; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return FALLBACK_MODEL; + } + + const user = await getOrCreateUser(c, userId); + const userState = await user.getAppAuthState({ sessionId: authSessionId }); + return userState?.profile?.defaultModel ?? 
FALLBACK_MODEL; +} + async function ensureSessionMeta( c: any, params: { sessionId: string; sandboxSessionId?: string | null; model?: string; + authSessionId?: string | null; sessionName?: string; - unread?: boolean; created?: boolean; status?: "pending_provision" | "pending_session_create" | "ready" | "error"; errorMessage?: string | null; }, ): Promise { - await ensureWorkbenchSessionTable(c); const existing = await readSessionMeta(c, params.sessionId); if (existing) { return existing; @@ -244,11 +301,10 @@ async function ensureSessionMeta( const now = Date.now(); const sessionName = params.sessionName ?? (await nextSessionName(c)); - const model = params.model ?? defaultModelForAgent(c.state.agentType); - const unread = params.unread ?? false; + const model = params.model ?? (await resolveDefaultModel(c, params.authSessionId)); await c.db - .insert(taskWorkbenchSessions) + .insert(taskWorkspaceSessions) .values({ sessionId: params.sessionId, sandboxSessionId: params.sandboxSessionId ?? null, @@ -258,10 +314,6 @@ async function ensureSessionMeta( errorMessage: params.errorMessage ?? null, transcriptJson: "[]", transcriptUpdatedAt: null, - unread: unread ? 1 : 0, - draftText: "", - draftAttachmentsJson: "[]", - draftUpdatedAt: null, created: params.created === false ? 
0 : 1, closed: 0, thinkingSinceMs: null, @@ -276,19 +328,18 @@ async function ensureSessionMeta( async function updateSessionMeta(c: any, sessionId: string, values: Record): Promise { await ensureSessionMeta(c, { sessionId }); await c.db - .update(taskWorkbenchSessions) + .update(taskWorkspaceSessions) .set({ ...values, updatedAt: Date.now(), }) - .where(eq(taskWorkbenchSessions.sessionId, sessionId)) + .where(eq(taskWorkspaceSessions.sessionId, sessionId)) .run(); return await readSessionMeta(c, sessionId); } async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise { - await ensureWorkbenchSessionTable(c); - const row = await c.db.select().from(taskWorkbenchSessions).where(eq(taskWorkbenchSessions.sandboxSessionId, sandboxSessionId)).get(); + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sandboxSessionId, sandboxSessionId)).get(); if (!row) { return null; } @@ -298,17 +349,17 @@ async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: strin async function requireReadySessionMeta(c: any, sessionId: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { - throw new Error(meta.errorMessage ?? "This workbench session is still preparing"); + throw new Error(meta.errorMessage ?? "This workspace session is still preparing"); } return meta; } export function requireSendableSessionMeta(meta: any, sessionId: string): any { if (!meta) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { throw new Error(`Session is not ready (status: ${meta.status}). 
Wait for session provisioning to complete.`); @@ -336,7 +387,7 @@ async function getTaskSandboxRuntime( }> { const { config } = getActorRuntimeContext(); const sandboxId = stableSandboxId(c); - const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? c.state.sandboxProviderId ?? null); + const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? null); const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {}); const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; @@ -350,7 +401,6 @@ async function getTaskSandboxRuntime( sandboxActorId: typeof actorId === "string" ? actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, - statusMessage: "sandbox ready", createdAt: now, updatedAt: now, }) @@ -389,7 +439,7 @@ async function getTaskSandboxRuntime( /** * Track whether the sandbox repo has been fully prepared (cloned + fetched + checked out) * for the current actor lifecycle. Subsequent calls can skip the expensive `git fetch` - * when `skipFetch` is true (used by sendWorkbenchMessage to avoid blocking on every prompt). + * when `skipFetch` is true (used by sendWorkspaceMessage to avoid blocking on every prompt). */ let sandboxRepoPrepared = false; @@ -405,8 +455,7 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { ski } const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - const metadata = await repository.getRepositoryMetadata({}); + const metadata = await getRepositoryMetadata(c); const baseRef = metadata.defaultBranch ?? 
"main"; const sandboxRepoRoot = dirname(SANDBOX_REPO_CWD); const script = [ @@ -414,7 +463,7 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { ski `mkdir -p ${JSON.stringify(sandboxRepoRoot)}`, "git config --global credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'", `if [ ! -d ${JSON.stringify(`${SANDBOX_REPO_CWD}/.git`)} ]; then rm -rf ${JSON.stringify(SANDBOX_REPO_CWD)} && git clone ${JSON.stringify( - c.state.repoRemote, + metadata.remoteUrl, )} ${JSON.stringify(SANDBOX_REPO_CWD)}; fi`, `cd ${JSON.stringify(SANDBOX_REPO_CWD)}`, "git fetch origin --prune", @@ -452,7 +501,7 @@ async function executeInSandbox( label: string; }, ): Promise<{ exitCode: number; result: string }> { - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); const response = await runtime.sandbox.runProcess({ @@ -555,7 +604,7 @@ function buildFileTree(paths: string[]): Array { return sortNodes(root.children.values()); } -async function collectWorkbenchGitState(c: any, record: any) { +async function collectWorkspaceGitState(c: any, record: any) { const activeSandboxId = record.activeSandboxId; const activeSandbox = activeSandboxId != null ? ((record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === activeSandboxId) ?? null) : null; const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? 
null; @@ -628,7 +677,6 @@ async function collectWorkbenchGitState(c: any, record: any) { } async function readCachedGitState(c: any): Promise<{ fileChanges: Array; diffs: Record; fileTree: Array; updatedAt: number | null }> { - await ensureTaskRuntimeCacheColumns(c); const row = await c.db .select({ gitStateJson: taskRuntime.gitStateJson, @@ -645,7 +693,6 @@ async function readCachedGitState(c: any): Promise<{ fileChanges: Array; di } async function writeCachedGitState(c: any, gitState: { fileChanges: Array; diffs: Record; fileTree: Array }): Promise { - await ensureTaskRuntimeCacheColumns(c); const now = Date.now(); await c.db .update(taskRuntime) @@ -687,102 +734,78 @@ async function writeSessionTranscript(c: any, sessionId: string, transcript: Arr }); } -async function enqueueWorkbenchRefresh( +async function enqueueWorkspaceRefresh( c: any, - command: "task.command.workbench.refresh_derived" | "task.command.workbench.refresh_session_transcript", + command: "task.command.workspace.refresh_derived" | "task.command.workspace.refresh_session_transcript", body: Record, ): Promise { - const self = selfTask(c); - await self.send(command, body, { wait: false }); + // Call directly since we're inside the task actor (no queue needed) + if (command === "task.command.workspace.refresh_derived") { + void refreshWorkspaceDerivedState(c).catch(() => {}); + } else { + void refreshWorkspaceSessionTranscript(c, body.sessionId as string).catch(() => {}); + } } -async function enqueueWorkbenchEnsureSession(c: any, sessionId: string): Promise { - const self = selfTask(c); - await self.send( - "task.command.workbench.ensure_session", - { - sessionId, - }, - { - wait: false, - }, - ); +async function enqueueWorkspaceEnsureSession(c: any, sessionId: string): Promise { + // Call directly since we're inside the task actor + void ensureWorkspaceSession(c, sessionId).catch(() => {}); } -function pendingWorkbenchSessionStatus(record: any): "pending_provision" | "pending_session_create" { 
+function pendingWorkspaceSessionStatus(record: any): "pending_provision" | "pending_session_create" { return record.activeSandboxId ? "pending_session_create" : "pending_provision"; } -async function maybeScheduleWorkbenchRefreshes(c: any, record: any, sessions: Array): Promise { +async function maybeScheduleWorkspaceRefreshes(c: any, record: any, sessions: Array): Promise { const gitState = await readCachedGitState(c); if (record.activeSandboxId && !gitState.updatedAt) { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); } for (const session of sessions) { if (session.closed || session.status !== "ready" || !session.sandboxSessionId || session.transcriptUpdatedAt) { continue; } - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: session.sandboxSessionId, }); } } -function activeSessionStatus(record: any, sessionId: string) { - if (record.activeSessionId !== sessionId) { - return "idle"; +function computeWorkspaceTaskStatus(record: any, sessions: Array) { + if (record.status && String(record.status).startsWith("init_")) { + return record.status; } - - if (record.status === "running") { + if (record.status === "archived" || record.status === "killed") { + return record.status; + } + if (sessions.some((session) => session.closed !== true && session.thinkingSinceMs)) { return "running"; } - if (record.status === "error") { + if (sessions.some((session) => session.closed !== true && session.status === "error")) { return "error"; } return "idle"; } -async function readPullRequestSummary(c: any, branchName: string | null) { - if (!branchName) { - return null; - } - - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - return await 
repository.getPullRequestForBranch({ branchName }); - } catch { - return null; - } +export async function ensureWorkspaceSeeded(c: any): Promise { + return await getCurrentRecord(c); } -export async function ensureWorkbenchSeeded(c: any): Promise { - await ensureTaskRuntimeCacheColumns(c); - const record = await getCurrentRecord({ db: c.db, state: c.state }); - if (record.activeSessionId) { - await ensureSessionMeta(c, { - sessionId: record.activeSessionId, - sandboxSessionId: record.activeSessionId, - model: defaultModelForAgent(record.agentType), - sessionName: "Session 1", - status: "ready", - }); - } - return record; -} - -function buildSessionSummary(record: any, meta: any): any { +function buildSessionSummary(meta: any, userState?: any): any { const derivedSandboxSessionId = meta.status === "ready" ? (meta.sandboxSessionId ?? null) : null; const sessionStatus = meta.status === "pending_provision" || meta.status === "pending_session_create" ? meta.status - : meta.status === "ready" && derivedSandboxSessionId - ? activeSessionStatus(record, derivedSandboxSessionId) + : meta.thinkingSinceMs + ? "running" : meta.status === "error" ? "error" - : "ready"; + : meta.status === "ready" && derivedSandboxSessionId + ? "idle" + : "ready"; let thinkingSinceMs = meta.thinkingSinceMs ?? null; - let unread = Boolean(meta.unread); + let unread = Boolean(userState?.unread); if (thinkingSinceMs && sessionStatus !== "running") { thinkingSinceMs = null; unread = true; @@ -803,8 +826,8 @@ function buildSessionSummary(record: any, meta: any): any { }; } -function buildSessionDetailFromMeta(record: any, meta: any): any { - const summary = buildSessionSummary(record, meta); +function buildSessionDetailFromMeta(meta: any, userState?: any): any { + const summary = buildSessionSummary(meta, userState); return { sessionId: meta.sessionId, sandboxSessionId: summary.sandboxSessionId ?? 
null, @@ -817,57 +840,56 @@ function buildSessionDetailFromMeta(record: any, meta: any): any { created: summary.created, errorMessage: summary.errorMessage, draft: { - text: meta.draftText ?? "", - attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [], - updatedAtMs: meta.draftUpdatedAtMs ?? null, + text: userState?.draftText ?? "", + attachments: Array.isArray(userState?.draftAttachments) ? userState.draftAttachments : [], + updatedAtMs: userState?.draftUpdatedAtMs ?? null, }, transcript: meta.transcript ?? [], }; } /** - * Builds a WorkbenchTaskSummary from local task actor state. Task actors push + * Builds a WorkspaceTaskSummary from local task actor state. Task actors push * this to the parent organization actor so organization sidebar reads stay local. */ -export async function buildTaskSummary(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildTaskSummary(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); + const repositoryMetadata = await getRepositoryMetadata(c); const sessions = await listSessionMetaRows(c); - await maybeScheduleWorkbenchRefreshes(c, record, sessions); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const userTaskState = await getUserTaskState(c, authSessionId); + const taskStatus = computeWorkspaceTaskStatus(record, sessions); + const activeSessionId = + userTaskState.activeSessionId && sessions.some((meta) => meta.sessionId === userTaskState.activeSessionId) ? userTaskState.activeSessionId : null; return { id: c.state.taskId, repoId: c.state.repoId, title: record.title ?? "New Task", - status: record.status ?? 
"new", - repoName: repoLabelFromRemote(c.state.repoRemote), + status: taskStatus, + repoName: repoLabelFromRemote(repositoryMetadata.remoteUrl), updatedAtMs: record.updatedAt, branch: record.branchName, - pullRequest: await readPullRequestSummary(c, record.branchName), - sessionsSummary: sessions.map((meta) => buildSessionSummary(record, meta)), + pullRequest: record.pullRequest ?? null, + activeSessionId, + sessionsSummary: sessions.map((meta) => buildSessionSummary(meta, userTaskState.bySessionId.get(meta.sessionId))), }; } /** - * Builds a WorkbenchTaskDetail from local task actor state for direct task + * Builds a WorkspaceTaskDetail from local task actor state for direct task * subscribers. This is a full replacement payload, not a patch. */ -export async function buildTaskDetail(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildTaskDetail(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); const gitState = await readCachedGitState(c); const sessions = await listSessionMetaRows(c); - await maybeScheduleWorkbenchRefreshes(c, record, sessions); - const summary = await buildTaskSummary(c); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const summary = await buildTaskSummary(c, authSessionId); return { ...summary, task: record.task, - agentType: record.agentType === "claude" || record.agentType === "codex" ? record.agentType : null, - runtimeStatus: record.status, - statusMessage: record.statusMessage ?? null, - activeSessionId: record.activeSessionId ?? null, - diffStat: record.diffStat ?? null, - prUrl: record.prUrl ?? null, - reviewStatus: record.reviewStatus ?? null, fileChanges: gitState.fileChanges, diffs: gitState.diffs, fileTree: gitState.fileTree, @@ -882,50 +904,63 @@ export async function buildTaskDetail(c: any): Promise { } /** - * Builds a WorkbenchSessionDetail for a specific session. 
+ * Builds a WorkspaceSessionDetail for a specific session. */ -export async function buildSessionDetail(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildSessionDetail(c: any, sessionId: string, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } + const userTaskState = await getUserTaskState(c, authSessionId); + const userSessionState = userTaskState.bySessionId.get(sessionId); - if (!meta.sandboxSessionId) { - return buildSessionDetailFromMeta(record, meta); + // Skip live transcript fetch if the sandbox session doesn't exist yet or + // the session is still provisioning — the sandbox API will block/timeout. + const isPending = meta.status === "pending_provision" || meta.status === "pending_session_create"; + if (!meta.sandboxSessionId || isPending) { + return buildSessionDetailFromMeta(meta, userSessionState); } try { const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { await writeSessionTranscript(c, meta.sessionId, transcript); - return buildSessionDetailFromMeta(record, { - ...meta, - transcript, - transcriptUpdatedAt: Date.now(), - }); + return buildSessionDetailFromMeta( + { + ...meta, + transcript, + transcriptUpdatedAt: Date.now(), + }, + userSessionState, + ); } - } catch { - // Session detail reads should degrade to cached transcript data if the live sandbox is unavailable. + } catch (error) { + // Session detail reads degrade to cached transcript when sandbox is unavailable. 
+ logActorWarning("task", "readSessionTranscript failed, using cached transcript", { + taskId: c.state.taskId, + sessionId, + error: resolveErrorMessage(error), + }); } - return buildSessionDetailFromMeta(record, meta); + return buildSessionDetailFromMeta(meta, userSessionState); } export async function getTaskSummary(c: any): Promise { return await buildTaskSummary(c); } -export async function getTaskDetail(c: any): Promise { - return await buildTaskDetail(c); +export async function getTaskDetail(c: any, authSessionId?: string): Promise { + return await buildTaskDetail(c, authSessionId); } -export async function getSessionDetail(c: any, sessionId: string): Promise { - return await buildSessionDetail(c, sessionId); +export async function getSessionDetail(c: any, sessionId: string, authSessionId?: string): Promise { + return await buildSessionDetail(c, sessionId, authSessionId); } /** - * Replaces the old notifyWorkbenchUpdated pattern. + * Replaces the old notifyWorkspaceUpdated pattern. 
* * The task actor emits two kinds of updates: * - Push summary state up to the parent organization actor so the sidebar @@ -934,9 +969,9 @@ export async function getSessionDetail(c: any, sessionId: string): Promise */ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise { const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); + await organization.commandApplyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); c.broadcast("taskUpdated", { - type: "taskDetailUpdated", + type: "taskUpdated", detail: await buildTaskDetail(c), }); @@ -948,15 +983,15 @@ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string } } -export async function refreshWorkbenchDerivedState(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); - const gitState = await collectWorkbenchGitState(c, record); +export async function refreshWorkspaceDerivedState(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); + const gitState = await collectWorkspaceGitState(c, record); await writeCachedGitState(c, gitState); await broadcastTaskUpdate(c); } -export async function refreshWorkbenchSessionTranscript(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function refreshWorkspaceSessionTranscript(c: any, sessionId: string): Promise { + const record = await ensureWorkspaceSeeded(c); const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? 
(await readSessionMeta(c, sessionId)); if (!meta?.sandboxSessionId) { return; @@ -967,7 +1002,7 @@ export async function refreshWorkbenchSessionTranscript(c: any, sessionId: strin await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } -export async function renameWorkbenchTask(c: any, value: string): Promise { +export async function renameWorkspaceTask(c: any, value: string): Promise { const nextTitle = value.trim(); if (!nextTitle) { throw new Error("task title is required"); @@ -981,85 +1016,51 @@ export async function renameWorkbenchTask(c: any, value: string): Promise }) .where(eq(taskTable.id, 1)) .run(); - c.state.title = nextTitle; await broadcastTaskUpdate(c); } -export async function renameWorkbenchBranch(c: any, value: string): Promise { - const nextBranch = value.trim(); - if (!nextBranch) { - throw new Error("branch name is required"); - } - - const record = await ensureWorkbenchSeeded(c); - if (!record.branchName) { - throw new Error("cannot rename branch before task branch exists"); - } - if (!record.activeSandboxId) { - throw new Error("cannot rename branch without an active sandbox"); - } - const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? 
null; - if (!activeSandbox?.cwd) { - throw new Error("cannot rename branch without a sandbox cwd"); - } - - const renameResult = await executeInSandbox(c, { - sandboxId: record.activeSandboxId, - cwd: activeSandbox.cwd, - command: [ - `git branch -m ${JSON.stringify(record.branchName)} ${JSON.stringify(nextBranch)}`, - `if git ls-remote --exit-code --heads origin ${JSON.stringify(record.branchName)} >/dev/null 2>&1; then git push origin :${JSON.stringify(record.branchName)}; fi`, - `git push origin ${JSON.stringify(nextBranch)}`, - `git branch --set-upstream-to=${JSON.stringify(`origin/${nextBranch}`)} ${JSON.stringify(nextBranch)} || git push --set-upstream origin ${JSON.stringify(nextBranch)}`, - ].join(" && "), - label: `git branch -m ${record.branchName} ${nextBranch}`, - }); - if (renameResult.exitCode !== 0) { - throw new Error(`branch rename failed (${renameResult.exitCode}): ${renameResult.result}`); - } - +export async function syncTaskPullRequest(c: any, pullRequest: any): Promise { + const now = pullRequest?.updatedAtMs ?? Date.now(); await c.db .update(taskTable) .set({ - branchName: nextBranch, - updatedAt: Date.now(), + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAt: now, }) .where(eq(taskTable.id, 1)) .run(); - c.state.branchName = nextBranch; - - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - await repository.registerTaskBranch({ - taskId: c.state.taskId, - branchName: nextBranch, - }); await broadcastTaskUpdate(c); } -export async function createWorkbenchSession(c: any, model?: string): Promise<{ sessionId: string }> { +export async function createWorkspaceSession(c: any, model?: string, authSessionId?: string): Promise<{ sessionId: string }> { const sessionId = `session-${randomUUID()}`; - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); await ensureSessionMeta(c, { sessionId, - model: model ?? 
defaultModelForAgent(record.agentType), + model: model ?? (await resolveDefaultModel(c, authSessionId)), + authSessionId, sandboxSessionId: null, - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), created: false, }); + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, + unread: false, + }); await broadcastTaskUpdate(c, { sessionId: sessionId }); - await enqueueWorkbenchEnsureSession(c, sessionId); + await enqueueWorkspaceEnsureSession(c, sessionId); return { sessionId }; } -export async function ensureWorkbenchSession(c: any, sessionId: string, model?: string): Promise { +export async function ensureWorkspaceSession(c: any, sessionId: string, model?: string, authSessionId?: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; } - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); if (meta.sandboxSessionId && meta.status === "ready") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: meta.sandboxSessionId, }); await broadcastTaskUpdate(c, { sessionId: sessionId }); @@ -1075,10 +1076,12 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: try { const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); + const resolvedModel = model ?? meta.model ?? (await resolveDefaultModel(c, authSessionId)); + const resolvedAgent = await resolveSandboxAgentForModel(c, resolvedModel); await runtime.sandbox.createSession({ id: meta.sandboxSessionId ?? sessionId, - agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)), - model: model ?? meta.model ?? 
defaultModelForAgent(record.agentType), + agent: resolvedAgent, + model: resolvedModel, sessionInit: { cwd: runtime.cwd, }, @@ -1089,7 +1092,7 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: status: "ready", errorMessage: null, }); - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: meta.sandboxSessionId ?? sessionId, }); } catch (error) { @@ -1102,27 +1105,17 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: await broadcastTaskUpdate(c, { sessionId: sessionId }); } -export async function enqueuePendingWorkbenchSessions(c: any): Promise { - const self = selfTask(c); +export async function enqueuePendingWorkspaceSessions(c: any): Promise { const pending = (await listSessionMetaRows(c, { includeClosed: true })).filter( (row) => row.closed !== true && row.status !== "ready" && row.status !== "error", ); for (const row of pending) { - await self.send( - "task.command.workbench.ensure_session", - { - sessionId: row.sessionId, - model: row.model, - }, - { - wait: false, - }, - ); + void ensureWorkspaceSession(c, row.sessionId, row.model).catch(() => {}); } } -export async function renameWorkbenchSession(c: any, sessionId: string, title: string): Promise { +export async function renameWorkspaceSession(c: any, sessionId: string, title: string): Promise { const trimmed = title.trim(); if (!trimmed) { throw new Error("session title is required"); @@ -1133,15 +1126,26 @@ export async function renameWorkbenchSession(c: any, sessionId: string, title: s await broadcastTaskUpdate(c, { sessionId }); } -export async function setWorkbenchSessionUnread(c: any, sessionId: string, unread: boolean): Promise { - await updateSessionMeta(c, sessionId, { - unread: unread ? 
1 : 0, +export async function selectWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, }); await broadcastTaskUpdate(c, { sessionId }); } -export async function updateWorkbenchDraft(c: any, sessionId: string, text: string, attachments: Array): Promise { - await updateSessionMeta(c, sessionId, { +export async function setWorkspaceSessionUnread(c: any, sessionId: string, unread: boolean, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { + unread, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function updateWorkspaceDraft(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { draftText: text, draftAttachmentsJson: JSON.stringify(attachments), draftUpdatedAt: Date.now(), @@ -1149,7 +1153,7 @@ export async function updateWorkbenchDraft(c: any, sessionId: string, text: stri await broadcastTaskUpdate(c, { sessionId }); } -export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise { +export async function changeWorkspaceModel(c: any, sessionId: string, model: string, _authSessionId?: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; @@ -1159,7 +1163,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str return; } - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); let nextMeta = await updateSessionMeta(c, sessionId, { model, }); @@ -1170,7 +1174,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str await sandbox.destroySession(nextMeta.sandboxSessionId); nextMeta = await updateSessionMeta(c, sessionId, 
{ sandboxSessionId: null, - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), errorMessage: null, transcriptJson: "[]", transcriptUpdatedAt: null, @@ -1191,20 +1195,20 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str } } else if (nextMeta.status !== "ready") { nextMeta = await updateSessionMeta(c, sessionId, { - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), errorMessage: null, }); } if (shouldEnsure) { - await enqueueWorkbenchEnsureSession(c, sessionId); + await enqueueWorkspaceEnsureSession(c, sessionId); } await broadcastTaskUpdate(c, { sessionId }); } -export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array): Promise { +export async function sendWorkspaceMessage(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { const meta = requireSendableSessionMeta(await readSessionMeta(c, sessionId), sessionId); - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); // Skip git fetch on subsequent messages — the repo was already prepared during session // creation. This avoids a 5-30s network round-trip to GitHub on every prompt. 
@@ -1217,42 +1221,36 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri } await updateSessionMeta(c, sessionId, { - unread: 0, created: 1, + thinkingSinceMs: Date.now(), + }); + await upsertUserTaskState(c, authSessionId, sessionId, { + unread: false, draftText: "", draftAttachmentsJson: "[]", draftUpdatedAt: Date.now(), - thinkingSinceMs: Date.now(), + activeSessionId: sessionId, }); - await c.db - .update(taskRuntime) - .set({ - activeSessionId: meta.sandboxSessionId, - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, 1)) - .run(); - - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); try { await runtime.sandbox.sendPrompt({ sessionId: meta.sandboxSessionId, prompt: prompt.join("\n\n"), }); - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); } catch (error) { await updateSessionMeta(c, sessionId, { status: "error", errorMessage: error instanceof Error ? 
error.message : String(error), }); - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); throw error; } } -export async function stopWorkbenchSession(c: any, sessionId: string): Promise { +export async function stopWorkspaceSession(c: any, sessionId: string): Promise { const meta = await requireReadySessionMeta(c, sessionId); const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); @@ -1262,39 +1260,10 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function syncWorkspaceSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise { const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId })); let changed = false; - if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { - const mappedStatus = status === "running" ? "running" : status === "error" ? 
"error" : "idle"; - if (record.status !== mappedStatus) { - await c.db - .update(taskTable) - .set({ - status: mappedStatus, - updatedAt: at, - }) - .where(eq(taskTable.id, 1)) - .run(); - changed = true; - } - - const statusMessage = `session:${status}`; - if (record.statusMessage !== statusMessage) { - await c.db - .update(taskRuntime) - .set({ - statusMessage, - updatedAt: at, - }) - .where(eq(taskRuntime.id, 1)) - .run(); - changed = true; - } - } - if (status === "running") { if (!meta.thinkingSinceMs) { await updateSessionMeta(c, sessionId, { @@ -1309,27 +1278,30 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat }); changed = true; } - if (!meta.unread && shouldMarkSessionUnreadForStatus(meta, status)) { - await updateSessionMeta(c, sessionId, { - unread: 1, - }); - changed = true; - } } if (changed) { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + const sessions = await listSessionMetaRows(c, { includeClosed: true }); + const nextStatus = computeWorkspaceTaskStatus(await ensureWorkspaceSeeded(c), sessions); + await c.db + .update(taskTable) + .set({ + status: nextStatus, + updatedAt: at, + }) + .where(eq(taskTable.id, 1)) + .run(); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId, }); if (status !== "running") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); } await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } } -export async function closeWorkbenchSession(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function closeWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { const sessions = await listSessionMetaRows(c); if (sessions.filter((candidate) => candidate.closed !== true).length <= 1) { return; @@ -1347,61 +1319,63 @@ 
export async function closeWorkbenchSession(c: any, sessionId: string): Promise< closed: 1, thinkingSinceMs: null, }); - if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { - await c.db - .update(taskRuntime) - .set({ - activeSessionId: null, - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, 1)) - .run(); + const remainingSessions = sessions.filter((candidate) => candidate.sessionId !== sessionId && candidate.closed !== true); + const userTaskState = await getUserTaskState(c, authSessionId); + if (userTaskState.activeSessionId === sessionId && remainingSessions[0]) { + await upsertUserTaskState(c, authSessionId, remainingSessions[0].sessionId, { + activeSessionId: remainingSessions[0].sessionId, + }); } + await deleteUserTaskState(c, authSessionId, sessionId); await broadcastTaskUpdate(c); } -export async function markWorkbenchUnread(c: any): Promise { +export async function markWorkspaceUnread(c: any, authSessionId?: string): Promise { const sessions = await listSessionMetaRows(c); const latest = sessions[sessions.length - 1]; if (!latest) { return; } - await updateSessionMeta(c, latest.sessionId, { - unread: 1, + await upsertUserTaskState(c, authSessionId, latest.sessionId, { + unread: true, }); await broadcastTaskUpdate(c, { sessionId: latest.sessionId }); } -export async function publishWorkbenchPr(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function publishWorkspacePr(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); if (!record.branchName) { throw new Error("cannot publish PR without a branch"); } - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - const metadata = await repository.getRepositoryMetadata({}); - const repoFullName = metadata.fullName ?? 
githubRepoFullNameFromRemote(c.state.repoRemote); + const metadata = await getRepositoryMetadata(c); + const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(metadata.remoteUrl); if (!repoFullName) { - throw new Error(`Unable to resolve GitHub repository for ${c.state.repoRemote}`); + throw new Error(`Unable to resolve GitHub repository for ${metadata.remoteUrl}`); } const { driver } = getActorRuntimeContext(); const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - await driver.github.createPr(repoFullName, record.branchName, record.title ?? c.state.task, undefined, { + const created = await driver.github.createPr(repoFullName, record.branchName, record.title ?? record.task, undefined, { githubToken: auth?.githubToken ?? null, baseBranch: metadata.defaultBranch ?? undefined, }); - await c.db - .update(taskTable) - .set({ - prSubmitted: 1, - updatedAt: Date.now(), - }) - .where(eq(taskTable.id, 1)) - .run(); - await broadcastTaskUpdate(c); + await syncTaskPullRequest(c, { + number: created.number, + status: "ready", + title: record.title ?? record.task, + body: null, + state: "open", + url: created.url, + headRefName: record.branchName, + baseRefName: metadata.defaultBranch ?? 
"main", + authorLogin: null, + isDraft: false, + merged: false, + updatedAtMs: Date.now(), + }); } -export async function revertWorkbenchFile(c: any, path: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function revertWorkspaceFile(c: any, path: string): Promise { + const record = await ensureWorkspaceSeeded(c); if (!record.activeSandboxId) { throw new Error("cannot revert file without an active sandbox"); } @@ -1419,6 +1393,6 @@ export async function revertWorkbenchFile(c: any, path: string): Promise { if (result.exitCode !== 0) { throw new Error(`file revert failed (${result.exitCode}): ${result.result}`); } - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); await broadcastTaskUpdate(c); } diff --git a/foundry/packages/backend/src/actors/user/actions/better-auth.ts b/foundry/packages/backend/src/actors/user/actions/better-auth.ts new file mode 100644 index 0000000..0fd950e --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/better-auth.ts @@ -0,0 +1,47 @@ +import { asc, count as sqlCount, desc } from "drizzle-orm"; +import { applyJoinToRow, applyJoinToRows, buildWhere, columnFor, tableFor } from "../query-helpers.js"; + +export const betterAuthActions = { + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. + // Schema and behavior are constrained by Better Auth. + async betterAuthFindOneRecord(c, input: { model: string; where: any[]; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get(); + return await applyJoinToRow(c, input.model, row ?? null, input.join); + }, + + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. 
+ // Schema and behavior are constrained by Better Auth. + async betterAuthFindManyRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + let query: any = c.db.select().from(table); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = columnFor(input.model, table, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + const rows = await query.all(); + return await applyJoinToRows(c, input.model, rows, input.join); + }, + + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. + // Schema and behavior are constrained by Better Auth. + async betterAuthCountRecords(c, input: { model: string; where?: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(table).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/user/actions/user.ts b/foundry/packages/backend/src/actors/user/actions/user.ts new file mode 100644 index 0000000..714b2b6 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/user.ts @@ -0,0 +1,44 @@ +import { eq } from "drizzle-orm"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "../db/schema.js"; +import { materializeRow } from "../query-helpers.js"; + +export const userActions = { + // Custom Foundry action — not part of Better Auth. 
+ async getAppAuthState(c, input: { sessionId: string }) { + const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get(); + if (!session) { + return null; + } + const [user, profile, currentSessionState, accounts] = await Promise.all([ + c.db.select().from(authUsers).where(eq(authUsers.authUserId, session.userId)).get(), + c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(), + c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(), + c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(), + ]); + return { + session, + user: materializeRow("user", user), + profile: profile ?? null, + sessionState: currentSessionState ?? null, + accounts, + }; + }, + + // Custom Foundry action — not part of Better Auth. + async getTaskState(c, input: { taskId: string }) { + const rows = await c.db.select().from(userTaskState).where(eq(userTaskState.taskId, input.taskId)).all(); + const activeSessionId = rows.find((row) => typeof row.activeSessionId === "string" && row.activeSessionId.length > 0)?.activeSessionId ?? null; + return { + taskId: input.taskId, + activeSessionId, + sessions: rows.map((row) => ({ + sessionId: row.sessionId, + unread: row.unread === 1, + draftText: row.draftText, + draftAttachmentsJson: row.draftAttachmentsJson, + draftUpdatedAt: row.draftUpdatedAt ?? 
null, + updatedAt: row.updatedAt, + })), + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/history/db/db.ts b/foundry/packages/backend/src/actors/user/db/db.ts similarity index 70% rename from foundry/packages/backend/src/actors/history/db/db.ts rename to foundry/packages/backend/src/actors/user/db/db.ts index ef76e36..a864893 100644 --- a/foundry/packages/backend/src/actors/history/db/db.ts +++ b/foundry/packages/backend/src/actors/user/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const historyDb = db({ schema, migrations }); +export const userDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/auth-user/db/migrations.ts b/foundry/packages/backend/src/actors/user/db/migrations.ts similarity index 65% rename from foundry/packages/backend/src/actors/auth-user/db/migrations.ts rename to foundry/packages/backend/src/actors/user/db/migrations.ts index be7cb17..da92bdc 100644 --- a/foundry/packages/backend/src/actors/auth-user/db/migrations.ts +++ b/foundry/packages/backend/src/actors/user/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_auth_user", breakpoints: true, }, + { + idx: 1, + when: 1773532800000, + tag: "0001_user_task_state", + breakpoints: true, + }, ], } as const; @@ -17,15 +23,19 @@ export default { journal, migrations: { m0000: `CREATE TABLE \`user\` ( - \`id\` text PRIMARY KEY NOT NULL, + \`id\` integer PRIMARY KEY NOT NULL, + \`auth_user_id\` text NOT NULL, \`name\` text NOT NULL, \`email\` text NOT NULL, \`email_verified\` integer NOT NULL, \`image\` text, \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint +CREATE UNIQUE INDEX \`user_auth_user_id_idx\` ON \`user\` (\`auth_user_id\`); +--> statement-breakpoint CREATE TABLE \`session\` ( \`id\` text 
PRIMARY KEY NOT NULL, \`token\` text NOT NULL, @@ -58,23 +68,39 @@ CREATE TABLE \`account\` ( CREATE UNIQUE INDEX \`account_provider_account_idx\` ON \`account\` (\`provider_id\`, \`account_id\`); --> statement-breakpoint CREATE TABLE \`user_profiles\` ( - \`user_id\` text PRIMARY KEY NOT NULL, + \`id\` integer PRIMARY KEY NOT NULL, + \`user_id\` text NOT NULL, \`github_account_id\` text, \`github_login\` text, \`role_label\` text NOT NULL, + \`default_model\` text DEFAULT 'gpt-5.3-codex' NOT NULL, \`eligible_organization_ids_json\` text NOT NULL, \`starter_repo_status\` text NOT NULL, \`starter_repo_starred_at\` integer, \`starter_repo_skipped_at\` integer, \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_profiles_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint +CREATE UNIQUE INDEX \`user_profiles_user_id_idx\` ON \`user_profiles\` (\`user_id\`); +--> statement-breakpoint CREATE TABLE \`session_state\` ( \`session_id\` text PRIMARY KEY NOT NULL, \`active_organization_id\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL +);`, + m0001: `CREATE TABLE \`user_task_state\` ( + \`task_id\` text NOT NULL, + \`session_id\` text NOT NULL, + \`active_session_id\` text, + \`unread\` integer DEFAULT 0 NOT NULL, + \`draft_text\` text DEFAULT '' NOT NULL, + \`draft_attachments_json\` text DEFAULT '[]' NOT NULL, + \`draft_updated_at\` integer, + \`updated_at\` integer NOT NULL, + PRIMARY KEY(\`task_id\`, \`session_id\`) );`, } as const, }; diff --git a/foundry/packages/backend/src/actors/user/db/schema.ts b/foundry/packages/backend/src/actors/user/db/schema.ts new file mode 100644 index 0000000..6a87a11 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/db/schema.ts @@ -0,0 +1,112 @@ +import { check, integer, primaryKey, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID 
} from "@sandbox-agent/foundry-shared"; + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authUsers = sqliteTable( + "user", + { + id: integer("id").primaryKey(), + authUserId: text("auth_user_id").notNull(), + name: text("name").notNull(), + email: text("email").notNull(), + emailVerified: integer("email_verified").notNull(), + image: text("image"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + authUserIdIdx: uniqueIndex("user_auth_user_id_idx").on(table.authUserId), + singletonCheck: check("user_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authSessions = sqliteTable( + "session", + { + id: text("id").notNull().primaryKey(), + token: text("token").notNull(), + userId: text("user_id").notNull(), + expiresAt: integer("expires_at").notNull(), + ipAddress: text("ip_address"), + userAgent: text("user_agent"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + tokenIdx: uniqueIndex("session_token_idx").on(table.token), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authAccounts = sqliteTable( + "account", + { + id: text("id").notNull().primaryKey(), + accountId: text("account_id").notNull(), + providerId: text("provider_id").notNull(), + userId: text("user_id").notNull(), + accessToken: text("access_token"), + refreshToken: text("refresh_token"), + idToken: text("id_token"), + accessTokenExpiresAt: integer("access_token_expires_at"), + refreshTokenExpiresAt: integer("refresh_token_expires_at"), + scope: text("scope"), + password: text("password"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + providerAccountIdx: 
uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId), + }), +); + +/** Custom Foundry table — not part of Better Auth. */ +export const userProfiles = sqliteTable( + "user_profiles", + { + id: integer("id").primaryKey(), + userId: text("user_id").notNull(), + githubAccountId: text("github_account_id"), + githubLogin: text("github_login"), + roleLabel: text("role_label").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(), + starterRepoStatus: text("starter_repo_status").notNull(), + starterRepoStarredAt: integer("starter_repo_starred_at"), + starterRepoSkippedAt: integer("starter_repo_skipped_at"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + userIdIdx: uniqueIndex("user_profiles_user_id_idx").on(table.userId), + singletonCheck: check("user_profiles_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Custom Foundry table — not part of Better Auth. */ +export const sessionState = sqliteTable("session_state", { + sessionId: text("session_id").notNull().primaryKey(), + activeOrganizationId: text("active_organization_id"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** Custom Foundry table — not part of Better Auth. Stores per-user task/session UI state. 
*/ +export const userTaskState = sqliteTable( + "user_task_state", + { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + pk: primaryKey({ columns: [table.taskId, table.sessionId] }), + }), +); diff --git a/foundry/packages/backend/src/actors/user/index.ts b/foundry/packages/backend/src/actors/user/index.ts new file mode 100644 index 0000000..8a15b58 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/index.ts @@ -0,0 +1,60 @@ +import { actor } from "rivetkit"; +import { userDb } from "./db/db.js"; +import { betterAuthActions } from "./actions/better-auth.js"; +import { userActions } from "./actions/user.js"; +import { + createAuthRecordMutation, + updateAuthRecordMutation, + updateManyAuthRecordsMutation, + deleteAuthRecordMutation, + deleteManyAuthRecordsMutation, + upsertUserProfileMutation, + upsertSessionStateMutation, + upsertTaskStateMutation, + deleteTaskStateMutation, +} from "./workflow.js"; + +export const user = actor({ + db: userDb, + options: { + name: "User", + icon: "shield", + actionTimeout: 60_000, + }, + createState: (_c, input: { userId: string }) => ({ + userId: input.userId, + }), + actions: { + ...betterAuthActions, + ...userActions, + async authCreate(c, body) { + return await createAuthRecordMutation(c, body); + }, + async authUpdate(c, body) { + return await updateAuthRecordMutation(c, body); + }, + async authUpdateMany(c, body) { + return await updateManyAuthRecordsMutation(c, body); + }, + async authDelete(c, body) { + await deleteAuthRecordMutation(c, body); + return { ok: true }; + }, + async authDeleteMany(c, body) { + return await 
deleteManyAuthRecordsMutation(c, body); + }, + async profileUpsert(c, body) { + return await upsertUserProfileMutation(c, body); + }, + async sessionStateUpsert(c, body) { + return await upsertSessionStateMutation(c, body); + }, + async taskStateUpsert(c, body) { + return await upsertTaskStateMutation(c, body); + }, + async taskStateDelete(c, body) { + await deleteTaskStateMutation(c, body); + return { ok: true }; + }, + }, +}); diff --git a/foundry/packages/backend/src/actors/user/query-helpers.ts b/foundry/packages/backend/src/actors/user/query-helpers.ts new file mode 100644 index 0000000..5bdee10 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/query-helpers.ts @@ -0,0 +1,197 @@ +import { and, eq, inArray, isNotNull, isNull, like, lt, lte, gt, gte, ne, notInArray, or } from "drizzle-orm"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js"; + +export const userTables = { + user: authUsers, + session: authSessions, + account: authAccounts, + userProfiles, + sessionState, + userTaskState, +} as const; + +export function tableFor(model: string) { + const table = userTables[model as keyof typeof userTables]; + if (!table) { + throw new Error(`Unsupported user model: ${model}`); + } + return table as any; +} + +function dbFieldFor(model: string, field: string): string { + if (model === "user" && field === "id") { + return "authUserId"; + } + return field; +} + +export function materializeRow(model: string, row: any) { + if (!row || model !== "user") { + return row; + } + + const { id: _singletonId, authUserId, ...rest } = row; + return { + id: authUserId, + ...rest, + }; +} + +export function persistInput(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, ...rest } = data; + return { + id: 1, + authUserId: id, + ...rest, + }; +} + +export function persistPatch(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, 
...rest } = data; + return { + ...(id !== undefined ? { authUserId: id } : {}), + ...rest, + }; +} + +export function columnFor(model: string, table: any, field: string) { + const column = table[dbFieldFor(model, field)]; + if (!column) { + throw new Error(`Unsupported user field: ${model}.${field}`); + } + return column; +} + +export function normalizeValue(value: unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeValue(entry)); + } + return value; +} + +export function clauseToExpr(table: any, clause: any) { + const model = table === authUsers ? "user" : table === authSessions ? "session" : table === authAccounts ? "account" : ""; + const column = columnFor(model, table, clause.field); + const value = normalizeValue(clause.value); + + switch (clause.operator) { + case "ne": + return value === null ? isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? isNull(column) : eq(column, value as any); + } +} + +export function buildWhere(table: any, where: any[] | undefined) { + if (!where || where.length === 0) { + return undefined; + } + + let expr = clauseToExpr(table, where[0]); + for (const clause of where.slice(1)) { + const next = clauseToExpr(table, clause); + expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); + } + return expr; +} + +export function applyJoinToRow(c: any, model: string, row: any, join: any) { + const materialized = materializeRow(model, row); + if (!materialized || !join) { + return materialized; + } + + if (model === "session" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null })); + } + + if (model === "account" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null })); + } + + if (model === "user" && join.account) { + return c.db + .select() + .from(authAccounts) + .where(eq(authAccounts.userId, materialized.id)) + .all() + .then((accounts: any[]) => ({ ...materialized, account: accounts })); + } + + return Promise.resolve(materialized); +} + +export async function applyJoinToRows(c: any, model: string, rows: any[], join: any) { + if (!join || rows.length === 0) { + return rows.map((row) => materializeRow(model, row)); + } + + if (model === "session" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "account" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? 
await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "user" && join.account) { + const materializedRows = rows.map((row) => materializeRow("user", row)); + const userIds = materializedRows.map((row) => row.id); + const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : []; + const accountsByUserId = new Map(); + for (const account of accounts) { + const entries = accountsByUserId.get(account.userId) ?? []; + entries.push(account); + accountsByUserId.set(account.userId, entries); + } + return materializedRows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] })); + } + + return rows.map((row) => materializeRow(model, row)); +} diff --git a/foundry/packages/backend/src/actors/user/workflow.ts b/foundry/packages/backend/src/actors/user/workflow.ts new file mode 100644 index 0000000..9bf2675 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/workflow.ts @@ -0,0 +1,197 @@ +import { eq, count as sqlCount, and } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; +import { authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js"; +import { buildWhere, columnFor, materializeRow, persistInput, persistPatch, tableFor } from "./query-helpers.js"; + +export async function createAuthRecordMutation(c: any, input: { model: string; data: Record }) { + const table = tableFor(input.model); + const persisted = persistInput(input.model, input.data); + await c.db + .insert(table) + .values(persisted as any) + .run(); + const row = await c.db + .select() + .from(table) + .where(eq(columnFor(input.model, table, "id"), input.data.id as any)) + .get(); + return 
materializeRow(input.model, row); +} + +export async function updateAuthRecordMutation(c: any, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("updateAuthRecord requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + return materializeRow(input.model, await c.db.select().from(table).where(predicate).get()); +} + +export async function updateManyAuthRecordsMutation(c: any, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("updateManyAuthRecords requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get(); + return row?.value ?? 
0; +} + +export async function deleteAuthRecordMutation(c: any, input: { model: string; where: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("deleteAuthRecord requires a where clause"); + await c.db.delete(table).where(predicate).run(); +} + +export async function deleteManyAuthRecordsMutation(c: any, input: { model: string; where: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("deleteManyAuthRecords requires a where clause"); + const rows = await c.db.select().from(table).where(predicate).all(); + await c.db.delete(table).where(predicate).run(); + return rows.length; +} + +export async function upsertUserProfileMutation( + c: any, + input: { + userId: string; + patch: { + githubAccountId?: string | null; + githubLogin?: string | null; + roleLabel?: string; + defaultModel?: string; + eligibleOrganizationIdsJson?: string; + starterRepoStatus?: string; + starterRepoStarredAt?: number | null; + starterRepoSkippedAt?: number | null; + }; + }, +) { + const now = Date.now(); + await c.db + .insert(userProfiles) + .values({ + id: 1, + userId: input.userId, + githubAccountId: input.patch.githubAccountId ?? null, + githubLogin: input.patch.githubLogin ?? null, + roleLabel: input.patch.roleLabel ?? "GitHub user", + defaultModel: input.patch.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]", + starterRepoStatus: input.patch.starterRepoStatus ?? "pending", + starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null, + starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: userProfiles.userId, + set: { + ...(input.patch.githubAccountId !== undefined ? 
{ githubAccountId: input.patch.githubAccountId } : {}), + ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}), + ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}), + ...(input.patch.defaultModel !== undefined ? { defaultModel: input.patch.defaultModel } : {}), + ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}), + ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}), + ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}), + ...(input.patch.starterRepoSkippedAt !== undefined ? { starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}), + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get(); +} + +export async function upsertSessionStateMutation(c: any, input: { sessionId: string; activeOrganizationId: string | null }) { + const now = Date.now(); + await c.db + .insert(sessionState) + .values({ + sessionId: input.sessionId, + activeOrganizationId: input.activeOrganizationId, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: sessionState.sessionId, + set: { activeOrganizationId: input.activeOrganizationId, updatedAt: now }, + }) + .run(); + return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(); +} + +export async function upsertTaskStateMutation( + c: any, + input: { + taskId: string; + sessionId: string; + patch: { + activeSessionId?: string | null; + unread?: boolean; + draftText?: string; + draftAttachmentsJson?: string; + draftUpdatedAt?: number | null; + }; + }, +) { + const now = Date.now(); + const existing = await c.db + .select() + .from(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), 
eq(userTaskState.sessionId, input.sessionId))) + .get(); + + if (input.patch.activeSessionId !== undefined) { + await c.db.update(userTaskState).set({ activeSessionId: input.patch.activeSessionId, updatedAt: now }).where(eq(userTaskState.taskId, input.taskId)).run(); + } + + await c.db + .insert(userTaskState) + .values({ + taskId: input.taskId, + sessionId: input.sessionId, + activeSessionId: input.patch.activeSessionId ?? existing?.activeSessionId ?? null, + unread: input.patch.unread !== undefined ? (input.patch.unread ? 1 : 0) : (existing?.unread ?? 0), + draftText: input.patch.draftText ?? existing?.draftText ?? "", + draftAttachmentsJson: input.patch.draftAttachmentsJson ?? existing?.draftAttachmentsJson ?? "[]", + draftUpdatedAt: input.patch.draftUpdatedAt === undefined ? (existing?.draftUpdatedAt ?? null) : input.patch.draftUpdatedAt, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: [userTaskState.taskId, userTaskState.sessionId], + set: { + ...(input.patch.activeSessionId !== undefined ? { activeSessionId: input.patch.activeSessionId } : {}), + ...(input.patch.unread !== undefined ? { unread: input.patch.unread ? 1 : 0 } : {}), + ...(input.patch.draftText !== undefined ? { draftText: input.patch.draftText } : {}), + ...(input.patch.draftAttachmentsJson !== undefined ? { draftAttachmentsJson: input.patch.draftAttachmentsJson } : {}), + ...(input.patch.draftUpdatedAt !== undefined ? 
{ draftUpdatedAt: input.patch.draftUpdatedAt } : {}), + updatedAt: now, + }, + }) + .run(); + + return await c.db + .select() + .from(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .get(); +} + +export async function deleteTaskStateMutation(c: any, input: { taskId: string; sessionId?: string }) { + if (input.sessionId) { + await c.db + .delete(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .run(); + return; + } + await c.db.delete(userTaskState).where(eq(userTaskState.taskId, input.taskId)).run(); +} diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts index 3af36c3..8f82d8b 100644 --- a/foundry/packages/backend/src/index.ts +++ b/foundry/packages/backend/src/index.ts @@ -10,7 +10,7 @@ import { createDefaultDriver } from "./driver.js"; import { createClient } from "rivetkit/client"; import { initBetterAuthService } from "./services/better-auth.js"; import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; -import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/constants.js"; import { logger } from "./logging.js"; export interface BackendStartOptions { @@ -48,6 +48,19 @@ function isRivetRequest(request: Request): boolean { } export async function startBackend(options: BackendStartOptions = {}): Promise { + // Prevent the sandbox-agent SDK's unhandled SQLite constraint errors from + // crashing the entire process. The SDK has a bug where duplicate event + // inserts (sandbox_agent_events UNIQUE constraint) throw from an internal + // async path with no catch. Log and continue. + process.on("uncaughtException", (error) => { + logger.error({ error: error?.message ?? 
String(error), stack: error?.stack }, "uncaughtException (kept alive)"); + }); + process.on("unhandledRejection", (reason) => { + const msg = reason instanceof Error ? reason.message : String(reason); + const stack = reason instanceof Error ? reason.stack : undefined; + logger.error({ error: msg, stack }, "unhandledRejection (kept alive)"); + }); + // sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth. // Normalize to keep local dev + docker-compose simple. if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) { diff --git a/foundry/packages/backend/src/services/better-auth.ts b/foundry/packages/backend/src/services/better-auth.ts index 4509402..c36b900 100644 --- a/foundry/packages/backend/src/services/better-auth.ts +++ b/foundry/packages/backend/src/services/better-auth.ts @@ -1,8 +1,11 @@ import { betterAuth } from "better-auth"; import { createAdapterFactory } from "better-auth/adapters"; -import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; -import { authUserKey, organizationKey } from "../actors/keys.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; +// organization actions are called directly (no queue) +// user actor actions are called directly (no queue) +import { organizationKey, userKey } from "../actors/keys.js"; import { logger } from "../logging.js"; +// expectQueueResponse removed — actions return values directly const AUTH_BASE_PATH = "/v1/auth"; const SESSION_COOKIE = "better-auth.session_token"; @@ -59,6 +62,8 @@ function resolveRouteUserId(organization: any, resolved: any): string | null { return null; } +// sendOrganizationCommand removed — org actions are called directly + export interface BetterAuthService { auth: any; resolveSession(headers: Headers): Promise<{ session: any; user: any } | null>; @@ -75,7 +80,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } // getOrCreate is intentional here: the 
adapter runs during Better Auth callbacks - // which can fire before any explicit create path. The app organization and auth user + // which can fire before any explicit create path. The app organization and user // actors must exist by the time the adapter needs them. const appOrganization = () => actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { @@ -83,9 +88,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin }); // getOrCreate is intentional: Better Auth creates user records during OAuth - // callbacks, so the auth-user actor must be lazily provisioned on first access. - const getAuthUser = async (userId: string) => - await actorClient.authUser.getOrCreate(authUserKey(userId), { + // callbacks, so the user actor must be lazily provisioned on first access. + const getUser = async (userId: string) => + await actorClient.user.getOrCreate(userKey(userId), { createWithInput: { userId }, }); @@ -110,7 +115,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const email = direct("email"); if (typeof email === "string" && email.length > 0) { const organization = await appOrganization(); - const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() }); + const resolved = await organization.betterAuthFindEmailIndex({ email: email.toLowerCase() }); return resolveRouteUserId(organization, resolved); } return null; @@ -125,7 +130,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const sessionToken = direct("token") ?? data?.token; if (typeof sessionId === "string" || typeof sessionToken === "string") { const organization = await appOrganization(); - const resolved = await organization.authFindSessionIndex({ + const resolved = await organization.betterAuthFindSessionIndex({ ...(typeof sessionId === "string" ? { sessionId } : {}), ...(typeof sessionToken === "string" ? 
{ sessionToken } : {}), }); @@ -144,11 +149,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const accountId = direct("accountId") ?? data?.accountId; const organization = await appOrganization(); if (typeof accountRecordId === "string" && accountRecordId.length > 0) { - const resolved = await organization.authFindAccountIndex({ id: accountRecordId }); + const resolved = await organization.betterAuthFindAccountIndex({ id: accountRecordId }); return resolveRouteUserId(organization, resolved); } if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) { - const resolved = await organization.authFindAccountIndex({ providerId, accountId }); + const resolved = await organization.betterAuthFindAccountIndex({ providerId, accountId }); return resolveRouteUserId(organization, resolved); } return null; @@ -157,9 +162,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; }; - const ensureOrganizationVerification = async (method: string, payload: Record) => { + const ensureOrganizationVerification = async (actionName: string, payload: Record) => { const organization = await appOrganization(); - return await organization[method](payload); + return await (organization as any)[actionName](payload); }; return { @@ -170,7 +175,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin create: async ({ model, data }) => { const transformed = await transformInput(data, model, "create", true); if (model === "verification") { - return await ensureOrganizationVerification("authCreateVerification", { data: transformed }); + return await ensureOrganizationVerification("commandBetterAuthVerificationCreate", { data: transformed }); } const userId = await resolveUserIdForQuery(model, undefined, transformed); @@ -178,19 +183,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin throw new 
Error(`Unable to resolve auth actor for create(${model})`); } - const userActor = await getAuthUser(userId); - const created = await userActor.createAuthRecord({ model, data: transformed }); + const userActor = await getUser(userId); + const created = await userActor.authCreate({ model, data: transformed }); const organization = await appOrganization(); if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) { - await organization.authUpsertEmailIndex({ + await organization.commandBetterAuthEmailIndexUpsert({ email: transformed.email.toLowerCase(), userId, }); } if (model === "session") { - await organization.authUpsertSessionIndex({ + await organization.commandBetterAuthSessionIndexUpsert({ sessionId: String(created.id), sessionToken: String(created.token), userId, @@ -198,7 +203,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account") { - await organization.authUpsertAccountIndex({ + await organization.commandBetterAuthAccountIndexUpsert({ id: String(created.id), providerId: String(created.providerId), accountId: String(created.accountId), @@ -212,7 +217,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findOne: async ({ model, where, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findOne" }); if (model === "verification") { - return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join }); + const organization = await appOrganization(); + return await organization.betterAuthFindOneVerification({ where: transformedWhere, join }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -220,15 +226,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; } - const userActor = await getAuthUser(userId); - const found = await userActor.findOneAuthRecord({ model, where: transformedWhere, join }); + 
const userActor = await getUser(userId); + const found = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere, join }); return found ? ((await transformOutput(found, model, undefined, join)) as any) : null; }, findMany: async ({ model, where, limit, sortBy, offset, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findMany" }); if (model === "verification") { - return await ensureOrganizationVerification("authFindManyVerification", { + const organization = await appOrganization(); + return await organization.betterAuthFindManyVerification({ where: transformedWhere, limit, sortBy, @@ -244,7 +251,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const resolved = await Promise.all( (tokenClause.value as string[]).map(async (sessionToken: string) => ({ sessionToken, - route: await organization.authFindSessionIndex({ sessionToken }), + route: await organization.betterAuthFindSessionIndex({ sessionToken }), })), ); const byUser = new Map(); @@ -259,11 +266,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const rows = []; for (const [userId, tokens] of byUser) { - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const scopedWhere = transformedWhere.map((entry: any) => entry.field === "token" && entry.operator === "in" ? 
{ ...entry, value: tokens } : entry, ); - const found = await userActor.findManyAuthRecords({ model, where: scopedWhere, limit, sortBy, offset, join }); + const found = await userActor.betterAuthFindManyRecords({ model, where: scopedWhere, limit, sortBy, offset, join }); rows.push(...found); } return await Promise.all(rows.map(async (row: any) => await transformOutput(row, model, undefined, join))); @@ -275,8 +282,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return []; } - const userActor = await getAuthUser(userId); - const found = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit, sortBy, offset, join }); + const userActor = await getUser(userId); + const found = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit, sortBy, offset, join }); return await Promise.all(found.map(async (row: any) => await transformOutput(row, model, undefined, join))); }, @@ -284,7 +291,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "update" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("commandBetterAuthVerificationUpdate", { + where: transformedWhere, + update: transformedUpdate, + }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -292,29 +302,34 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const before = model === "user" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? 
await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : model === "account" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : model === "session" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : null; - const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate }); + const updated = await userActor.authUpdate({ model, where: transformedWhere, update: transformedUpdate }); const organization = await appOrganization(); if (model === "user" && updated) { if (before?.email && before.email !== updated.email) { - await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.commandBetterAuthEmailIndexDelete({ + email: before.email.toLowerCase(), + }); } if (updated.email) { - await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); + await organization.commandBetterAuthEmailIndexUpsert({ + email: updated.email.toLowerCase(), + userId, + }); } } if (model === "session" && updated) { - await organization.authUpsertSessionIndex({ + await organization.commandBetterAuthSessionIndexUpsert({ sessionId: String(updated.id), sessionToken: String(updated.token), userId, @@ -322,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account" && updated) { - await organization.authUpsertAccountIndex({ + await organization.commandBetterAuthAccountIndexUpsert({ id: String(updated.id), providerId: String(updated.providerId), accountId: String(updated.accountId), @@ -337,7 +352,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "updateMany" }); const transformedUpdate = (await 
transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureOrganizationVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("commandBetterAuthVerificationUpdateMany", { + where: transformedWhere, + update: transformedUpdate, + }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -345,14 +363,15 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - return await userActor.updateManyAuthRecords({ model, where: transformedWhere, update: transformedUpdate }); + const userActor = await getUser(userId); + return await userActor.authUpdateMany({ model, where: transformedWhere, update: transformedUpdate }); }, delete: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "delete" }); if (model === "verification") { - await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere }); + const organization = await appOrganization(); + await organization.commandBetterAuthVerificationDelete({ where: transformedWhere }); return; } @@ -361,20 +380,20 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const organization = await appOrganization(); - const before = await userActor.findOneAuthRecord({ model, where: transformedWhere }); - await userActor.deleteAuthRecord({ model, where: transformedWhere }); + const before = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }); + await userActor.authDelete({ model, where: transformedWhere }); if (model === "session" && before) { - await organization.authDeleteSessionIndex({ + await 
organization.commandBetterAuthSessionIndexDelete({ sessionId: before.id, sessionToken: before.token, }); } if (model === "account" && before) { - await organization.authDeleteAccountIndex({ + await organization.commandBetterAuthAccountIndexDelete({ id: before.id, providerId: before.providerId, accountId: before.accountId, @@ -382,14 +401,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "user" && before?.email) { - await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.commandBetterAuthEmailIndexDelete({ + email: before.email.toLowerCase(), + }); } }, deleteMany: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" }); if (model === "verification") { - return await ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere }); + return await ensureOrganizationVerification("commandBetterAuthVerificationDeleteMany", { where: transformedWhere }); } if (model === "session") { @@ -397,12 +418,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (!userId) { return 0; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const organization = await appOrganization(); - const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 }); - const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); + const sessions = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit: 5000 }); + const deleted = await userActor.authDeleteMany({ model, where: transformedWhere }); for (const session of sessions) { - await organization.authDeleteSessionIndex({ + await organization.commandBetterAuthSessionIndexDelete({ sessionId: session.id, sessionToken: session.token, }); @@ -415,15 +436,16 @@ export function 
initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); + const userActor = await getUser(userId); + const deleted = await userActor.authDeleteMany({ model, where: transformedWhere }); return deleted; }, count: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "count" }); if (model === "verification") { - return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere }); + const organization = await appOrganization(); + return await organization.betterAuthCountVerification({ where: transformedWhere }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -431,8 +453,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - return await userActor.countAuthRecords({ model, where: transformedWhere }); + const userActor = await getUser(userId); + return await userActor.betterAuthCountRecords({ model, where: transformedWhere }); }, }; }, @@ -477,17 +499,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin async getAuthState(sessionId: string) { const organization = await appOrganization(); - const route = await organization.authFindSessionIndex({ sessionId }); + const route = await organization.betterAuthFindSessionIndex({ sessionId }); if (!route?.userId) { return null; } - const userActor = await getAuthUser(route.userId); + const userActor = await getUser(route.userId); return await userActor.getAppAuthState({ sessionId }); }, async upsertUserProfile(userId: string, patch: Record) { - const userActor = await getAuthUser(userId); - return await userActor.upsertUserProfile({ userId, patch }); + const userActor = await getUser(userId); + return await userActor.profileUpsert({ userId, patch 
}); }, async setActiveOrganization(sessionId: string, activeOrganizationId: string | null) { @@ -495,8 +517,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (!authState?.user?.id) { throw new Error(`Unknown auth session ${sessionId}`); } - const userActor = await getAuthUser(authState.user.id); - return await userActor.upsertSessionState({ sessionId, activeOrganizationId }); + const userActor = await getUser(authState.user.id); + return await userActor.sessionStateUpsert({ sessionId, activeOrganizationId }); }, async getAccessTokenForSession(sessionId: string) { diff --git a/foundry/packages/backend/src/services/branch-name-prefixes.ts b/foundry/packages/backend/src/services/branch-name-prefixes.ts new file mode 100644 index 0000000..aaccaee --- /dev/null +++ b/foundry/packages/backend/src/services/branch-name-prefixes.ts @@ -0,0 +1,584 @@ +// Auto-generated list of branch name prefixes. +// Source: McMaster-Carr product catalog. +export const BRANCH_NAME_PREFIXES: readonly string[] = [ + "abrasive-blasters", + "ac-motors", + "access-doors", + "adjustable-handles", + "aerosol-paint", + "air-cleaners", + "air-cylinders", + "air-filters", + "air-hose", + "air-knives", + "air-nozzles", + "air-regulators", + "air-ride-wheels", + "air-slides", + "alligator-clips", + "alloy-steel", + "aluminum-honeycomb", + "angle-indicators", + "antiseize-lubricants", + "antislip-fluid", + "backlight-panel-kits", + "ball-bearings", + "ball-end-mills", + "ball-joint-linkages", + "ball-transfers", + "band-clamps", + "band-saw-blades", + "bar-clamps", + "bar-grating", + "barbed-hose-fittings", + "barbed-tube-fittings", + "basket-strainers", + "batch-cans", + "battery-chargers", + "battery-holders", + "bead-chain", + "beam-clamps", + "belt-conveyors", + "bench-scales", + "bench-vises", + "bin-boxes", + "bin-storage", + "binding-posts", + "blank-tags", + "blasting-cabinets", + "blind-rivets", + "bluetooth-padlocks", + "boring-lathe-tools", + 
"box-reducers", + "box-wrenches", + "braided-hose", + "brass-pipe-fittings", + "breather-vents", + "butt-splices", + "c-clamps", + "cable-cutters", + "cable-holders", + "cable-tie-mounts", + "cable-ties", + "cam-handles", + "cam-latches", + "cam-locks", + "cap-nuts", + "captive-panel-screws", + "carbide-burs", + "carbide-inserts", + "carbon-fiber", + "carbon-steel", + "cardstock-tags", + "carriage-bolts", + "cast-acrylic", + "cast-iron", + "cast-nylon", + "casting-compounds", + "ceiling-lights", + "ceramic-adhesives", + "chain-slings", + "check-valves", + "chemical-hose", + "chemistry-meters", + "chemistry-testing", + "chip-clearing-tools", + "chucking-reamers", + "cinching-straps", + "circuit-breakers", + "circular-saw-blades", + "circular-saws", + "clamping-hangers", + "clevis-pins", + "clevis-rod-ends", + "clip-on-nuts", + "coaxial-connectors", + "coaxial-cords", + "coiled-spring-pins", + "compact-connectors", + "computer-adapters", + "concrete-adhesives", + "concrete-repair", + "contour-transfers", + "conveyor-belt-lacing", + "conveyor-belting", + "conveyor-brushes", + "conveyor-rollers", + "coolant-hose", + "copper-tube-fittings", + "copper-tubing", + "cord-grips", + "cord-reels", + "cotter-pins", + "coupling-nuts", + "cpvc-pipe-fittings", + "cup-brushes", + "cutoff-wheels", + "cylinder-hones", + "cylinder-racks", + "cylinder-trucks", + "data-cable", + "data-connectors", + "dc-motors", + "dead-blow-hammers", + "delrin-acetal-resin", + "desiccant-air-dryers", + "desktop-cranes", + "dial-calipers", + "dial-indicators", + "die-springs", + "direct-heaters", + "disconnect-switches", + "dispensing-needles", + "dispensing-pumps", + "disposable-clothing", + "disposable-gloves", + "document-protectors", + "door-closers", + "door-handles", + "door-holders", + "dowel-pins", + "drafting-equipment", + "drain-cleaners", + "drainage-mats", + "draw-latches", + "drawer-cabinets", + "drawer-slides", + "drill-bit-sets", + "drill-bits", + "drill-bushings", + "drill-chucks", + 
"drill-presses", + "drilling-screws", + "drinking-fountains", + "drive-anchors", + "drive-rollers", + "drive-shafts", + "drum-faucets", + "drum-pumps", + "drum-top-vacuums", + "drum-trucks", + "dry-box-gloves", + "dry-erase-boards", + "dry-film-lubricants", + "duct-fans", + "duct-hose", + "duct-tape", + "dust-collectors", + "dustless-chalk", + "edge-trim", + "electric-actuators", + "electric-drills", + "electric-drum-pumps", + "electric-mixers", + "electrical-switches", + "electrical-tape", + "electronic-calipers", + "enclosure-heaters", + "enclosure-panels", + "ethernet-cords", + "exhaust-fans", + "exit-lights", + "expansion-joints", + "expansion-plugs", + "extension-cords", + "extension-springs", + "fabric-snaps", + "fan-blades", + "fep-tubing", + "fiberglass-grating", + "file-holders", + "filter-bag-housings", + "filter-bags", + "filter-cartridges", + "fire-fighting-hose", + "first-aid-supplies", + "fixture-clamps", + "flange-locknuts", + "flange-mount-seals", + "flap-sanding-discs", + "flap-sanding-wheels", + "flared-tube-fittings", + "flashing-lights", + "flat-washers", + "flexible-shafts", + "flexible-shank-burs", + "flexible-trays", + "float-valves", + "floor-locks", + "floor-marking-tape", + "floor-scales", + "floor-squeegees", + "flow-sights", + "flow-switches", + "flowmeter-totalizers", + "foot-switches", + "force-gauges", + "fume-exhausters", + "garbage-bags", + "garden-hose", + "gas-hose", + "gas-regulators", + "gas-springs", + "gauge-blocks", + "glass-sights", + "gold-wire", + "grab-latches", + "grease-fittings", + "grinding-bits", + "grinding-wheels", + "hand-brushes", + "hand-chain-hoists", + "hand-reamers", + "hand-trucks", + "hand-wheels", + "hand-winches", + "hanging-scales", + "hard-hats", + "hardened-shafts", + "hardness-testers", + "heat-exchangers", + "heat-guns", + "heat-lamps", + "heat-sealable-bags", + "heat-set-inserts", + "heat-shrink-tubing", + "heat-sinks", + "heated-scrapers", + "helical-inserts", + "hex-bit-sockets", + 
"hex-head-screws", + "hex-nuts", + "high-accuracy-rulers", + "high-amp-relays", + "high-vacuum-filters", + "high-vacuum-sights", + "hinge-adjusters", + "hoist-rings", + "hole-saws", + "hose-couplings", + "hose-reels", + "hot-melt-glue", + "hydraulic-cylinders", + "hydraulic-hose", + "hydraulic-jacks", + "iec-connectors", + "immersion-heaters", + "impression-foam", + "indicating-lights", + "inflatable-wedges", + "ink-markers", + "insertion-heaters", + "inspection-mirrors", + "instrument-carts", + "insulation-jacketing", + "jam-removers", + "jigsaw-blades", + "key-cabinets", + "key-locking-inserts", + "key-stock", + "keyed-drive-shafts", + "keyseat-end-mills", + "l-key-sets", + "l-keys", + "label-holders", + "latching-connectors", + "lathe-tools", + "lavatory-partitions", + "lead-screws", + "leveling-lasers", + "leveling-mounts", + "lid-supports", + "lift-off-hinges", + "lift-trucks", + "light-bulbs", + "limit-switches", + "linear-ball-bearings", + "liquid-level-gauges", + "lock-washers", + "lockout-devices", + "loop-clamps", + "loop-hangers", + "machine-brackets", + "machine-handles", + "machine-keys", + "magnetic-base-drills", + "magnetic-bumpers", + "masking-tape", + "masonry-drill-bits", + "medium-amp-relays", + "metal-cable-ties", + "metal-panels", + "metal-plates", + "metal-tags", + "metering-pumps", + "metric-o-rings", + "mil-spec-connectors", + "mobile-lift-tables", + "motor-controls", + "motor-starters", + "mountable-cable-ties", + "mounting-tape", + "neoprene-foam", + "nickel-titanium", + "nonmarring-hammers", + "nonslip-bumpers", + "nylon-rivets", + "nylon-tubing", + "o-rings", + "oil-level-indicators", + "oil-reservoirs", + "oil-skimmers", + "on-off-valves", + "open-end-wrenches", + "outlet-boxes", + "outlet-strips", + "packaging-tape", + "paint-brushes", + "paint-markers", + "paint-sprayers", + "pallet-racks", + "pallet-trucks", + "panel-air-filters", + "parts-baskets", + "pendant-switches", + "perforated-sheets", + "pest-control", + "petroleum-hose", + 
"piano-hinges", + "pipe-couplings", + "pipe-gaskets", + "pipe-markers", + "pipe-wrenches", + "plank-grating", + "plastic-clamps", + "plastic-mesh", + "plate-lifting-clamps", + "platinum-wire", + "plier-clamps", + "plug-gauges", + "portable-lights", + "power-cords", + "power-supplied", + "power-supplies", + "precision-knives", + "press-fit-nuts", + "press-in-nuts", + "protecting-tape", + "protective-coatings", + "protective-curtains", + "protective-panels", + "protective-wrap", + "proximity-switches", + "pull-handles", + "push-brooms", + "push-nuts", + "push-on-seals", + "pvc-pipe-fittings", + "pvc-tubing", + "quick-release-pins", + "ratchet-pullers", + "recycled-plastics", + "repair-adhesives", + "repair-clamps", + "reusable-cable-ties", + "ring-terminals", + "rivet-nuts", + "robot-base-mounts", + "robot-bases", + "rocker-switches", + "rod-wipers", + "roller-bearings", + "roller-chain", + "roller-conveyors", + "roof-exhaust-fans", + "roof-repair", + "rotary-broaches", + "rotary-hammers", + "rotary-shaft-seals", + "rotating-cranes", + "rotating-joints", + "router-bits", + "rtd-probes", + "rubber-edge-seals", + "rubber-tread-wheels", + "rubber-tubing", + "safety-cabinets", + "safety-glasses", + "safety-mirrors", + "sanding-belts", + "sanding-discs", + "sanding-guides", + "sanding-rolls", + "sanding-sheets", + "screw-extractors", + "screw-jacks", + "scrub-brushes", + "sealing-washers", + "security-lights", + "sensor-connectors", + "set-screws", + "setup-clamps", + "shaft-collars", + "shaft-couplings", + "shaft-repair-sleeves", + "shaft-supports", + "sharpening-stones", + "sheet-metal-cutters", + "shelf-cabinets", + "shim-stock", + "shim-tape", + "shipping-pails", + "shock-absorbers", + "shoulder-screws", + "shower-stations", + "silicone-foam", + "sleeve-bearings", + "slide-bolts", + "slitting-saws", + "slotted-spring-pins", + "sludge-samplers", + "small-parts-storage", + "snap-acting-switches", + "soap-dispensers", + "socket-head-screws", + "socket-organizers", + 
"socket-wrenches", + "soldering-irons", + "solid-rivets", + "solid-rod-ends", + "sound-insulation", + "space-heaters", + "spacing-beads", + "spanner-wrenches", + "specialty-pliers", + "specialty-vises", + "specialty-washers", + "speed-reducers", + "splicing-connectors", + "spray-bottles", + "spray-nozzles", + "spring-clamps", + "spring-plungers", + "spring-steel", + "square-drive-sockets", + "square-end-mills", + "square-nuts", + "squeeze-bottles", + "stack-lights", + "stainless-steel", + "stair-treads", + "static-control-mats", + "steel-carts", + "steel-pipe-fittings", + "steel-pipe-flanges", + "steel-stamps", + "steel-tubing", + "step-ladders", + "stepper-motors", + "storage-bags", + "storage-boxes", + "storage-chests", + "straight-ladders", + "strap-hinges", + "stretch-wrap", + "strip-doors", + "strip-springs", + "strobe-lights", + "structural-adhesives", + "strut-channel", + "strut-channel-nuts", + "strut-mount-clamps", + "suction-cup-lifters", + "suction-strainers", + "super-absorbent-foam", + "super-flexible-glass", + "surface-fillers", + "surface-mount-hinges", + "t-handle-keys", + "t-slotted-framing", + "tamper-seals", + "tank-level-measurers", + "tape-dispensers", + "tape-measures", + "taper-pins", + "tapping-screws", + "teflon-ptfe", + "terminal-blocks", + "test-indicators", + "test-leads", + "test-weights", + "tethered-knobs", + "thermal-insulation", + "thread-adapters", + "thread-sealant-tape", + "thread-sealants", + "threaded-inserts", + "threaded-standoffs", + "threaded-studs", + "thrust-ball-bearings", + "thrust-bearings", + "thumb-nuts", + "thumb-screws", + "tie-down-rings", + "time-clocks", + "timer-relays", + "timer-switches", + "toggle-clamps", + "toggle-switches", + "tool-holders", + "tool-sets", + "tool-steel", + "torque-wrenches", + "torsion-springs", + "tote-boxes", + "touch-bars", + "track-casters", + "track-rollers", + "track-wheels", + "traction-mats", + "trolley-systems", + "tube-brushes", + "tube-fittings", + "tubular-light-bulbs", + 
"turn-lock-connectors", + "twist-ties", + "u-bolts", + "u-joints", + "ul-class-fuses", + "unthreaded-spacers", + "usb-adapters", + "usb-cords", + "utility-knives", + "v-belts", + "vacuum-cups", + "vacuum-pumps", + "wall-louvers", + "wash-fountains", + "wash-guns", + "waste-containers", + "water-deionizers", + "water-filters", + "water-hose", + "water-removal-pumps", + "weather-stations", + "web-slings", + "weld-nuts", + "welding-clothing", + "welding-helmets", + "wet-dry-vacuums", + "wet-mops", + "wheel-brushes", + "wing-nuts", + "wire-cloth", + "wire-connectors", + "wire-cutting-pliers", + "wire-partitions", + "wire-rope", + "wire-rope-clamps", + "wire-wrap", + "wool-felt", + "work-platforms", + "workbench-legs", + "woven-wire-cloth", +] as const; diff --git a/foundry/packages/backend/src/services/create-flow.ts b/foundry/packages/backend/src/services/create-flow.ts index 8341399..eb9e53f 100644 --- a/foundry/packages/backend/src/services/create-flow.ts +++ b/foundry/packages/backend/src/services/create-flow.ts @@ -1,3 +1,5 @@ +import { BRANCH_NAME_PREFIXES } from "./branch-name-prefixes.js"; + export interface ResolveCreateFlowDecisionInput { task: string; explicitTitle?: string; @@ -89,30 +91,42 @@ export function sanitizeBranchName(input: string): string { return trimmed.slice(0, 50).replace(/-+$/g, ""); } +function generateRandomSuffix(length: number): string { + const chars = "abcdefghijklmnopqrstuvwxyz0123456789"; + let result = ""; + for (let i = 0; i < length; i++) { + result += chars[Math.floor(Math.random() * chars.length)]; + } + return result; +} + +function generateBranchName(): string { + const prefix = BRANCH_NAME_PREFIXES[Math.floor(Math.random() * BRANCH_NAME_PREFIXES.length)]!; + const suffix = generateRandomSuffix(4); + return `${prefix}-${suffix}`; +} + export function resolveCreateFlowDecision(input: ResolveCreateFlowDecisionInput): ResolveCreateFlowDecisionResult { const explicitBranch = input.explicitBranchName?.trim(); const title = 
deriveFallbackTitle(input.task, input.explicitTitle); - const generatedBase = sanitizeBranchName(title) || "task"; - - const branchBase = explicitBranch && explicitBranch.length > 0 ? explicitBranch : generatedBase; const existingBranches = new Set(input.localBranches.map((value) => value.trim()).filter((value) => value.length > 0)); const existingTaskBranches = new Set(input.taskBranches.map((value) => value.trim()).filter((value) => value.length > 0)); const conflicts = (name: string): boolean => existingBranches.has(name) || existingTaskBranches.has(name); - if (explicitBranch && conflicts(branchBase)) { - throw new Error(`Branch '${branchBase}' already exists. Choose a different --name/--branch value.`); + if (explicitBranch && explicitBranch.length > 0) { + if (conflicts(explicitBranch)) { + throw new Error(`Branch '${explicitBranch}' already exists. Choose a different --name/--branch value.`); + } + return { title, branchName: explicitBranch }; } - if (explicitBranch) { - return { title, branchName: branchBase }; - } - - let candidate = branchBase; - let index = 2; - while (conflicts(candidate)) { - candidate = `${branchBase}-${index}`; - index += 1; + // Generate a random McMaster-Carr-style branch name, retrying on conflicts + let candidate = generateBranchName(); + let attempts = 0; + while (conflicts(candidate) && attempts < 100) { + candidate = generateBranchName(); + attempts += 1; } return { diff --git a/foundry/packages/backend/src/services/github-auth.ts b/foundry/packages/backend/src/services/github-auth.ts index ebbbce9..aa475b0 100644 --- a/foundry/packages/backend/src/services/github-auth.ts +++ b/foundry/packages/backend/src/services/github-auth.ts @@ -1,5 +1,5 @@ import { getOrCreateOrganization } from "../actors/handles.js"; -import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; export interface ResolvedGithubAuth { githubToken: string; 
diff --git a/foundry/packages/backend/test/create-flow.test.ts b/foundry/packages/backend/test/create-flow.test.ts index 498c4dc..8c66cb4 100644 --- a/foundry/packages/backend/test/create-flow.test.ts +++ b/foundry/packages/backend/test/create-flow.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import { deriveFallbackTitle, resolveCreateFlowDecision, sanitizeBranchName } from "../src/services/create-flow.js"; +import { BRANCH_NAME_PREFIXES } from "../src/services/branch-name-prefixes.js"; describe("create flow decision", () => { it("derives a conventional-style fallback title from task text", () => { @@ -17,15 +18,49 @@ describe("create flow decision", () => { expect(sanitizeBranchName(" spaces everywhere ")).toBe("spaces-everywhere"); }); - it("auto-increments generated branch names for conflicts", () => { + it("generates a McMaster-Carr-style branch name with random suffix", () => { const resolved = resolveCreateFlowDecision({ task: "Add auth", - localBranches: ["feat-add-auth"], - taskBranches: ["feat-add-auth-2"], + localBranches: [], + taskBranches: [], }); expect(resolved.title).toBe("feat: Add auth"); - expect(resolved.branchName).toBe("feat-add-auth-3"); + // Branch name should be "-<4-char-suffix>" where prefix is from BRANCH_NAME_PREFIXES + const lastDash = resolved.branchName.lastIndexOf("-"); + const prefix = resolved.branchName.slice(0, lastDash); + const suffix = resolved.branchName.slice(lastDash + 1); + expect(BRANCH_NAME_PREFIXES).toContain(prefix); + expect(suffix).toMatch(/^[a-z0-9]{4}$/); + }); + + it("avoids conflicts by generating a different random name", () => { + // Even with a conflicting branch, it should produce something different + const resolved = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: [], + taskBranches: [], + }); + + // Running again with the first result as a conflict should produce a different name + const resolved2 = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: 
[resolved.branchName], + taskBranches: [], + }); + + expect(resolved2.branchName).not.toBe(resolved.branchName); + }); + + it("uses explicit branch name when provided", () => { + const resolved = resolveCreateFlowDecision({ + task: "new task", + explicitBranchName: "my-branch", + localBranches: [], + taskBranches: [], + }); + + expect(resolved.branchName).toBe("my-branch"); }); it("fails when explicit branch already exists", () => { diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index ac5f3c8..c3b2a10 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,14 +1,13 @@ import { describe, expect, it } from "vitest"; -import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; describe("actor keys", () => { it("prefixes every key with organization namespace", () => { const keys = [ organizationKey("default"), - repositoryKey("default", "repo"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), - historyKey("default", "repo"), + auditLogKey("default"), githubDataKey("default"), ]; diff --git a/foundry/packages/backend/test/organization-isolation.test.ts b/foundry/packages/backend/test/organization-isolation.test.ts index fcd1950..f5d58f2 100644 --- a/foundry/packages/backend/test/organization-isolation.test.ts +++ b/foundry/packages/backend/test/organization-isolation.test.ts @@ -8,6 +8,7 @@ import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; +import { organizationWorkflowQueueName } from "../src/actors/organization/queues.js"; import { repoIdFromRemote } from "../src/services/repo.js"; import { createTestDriver } from 
"./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; @@ -51,8 +52,8 @@ describe("organization isolation", () => { const { repoPath } = createRepo(); const repoId = repoIdFromRemote(repoPath); - await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); - await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); + await wsA.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); + await wsB.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); await wsA.createTask({ organizationId: "alpha", diff --git a/foundry/packages/backend/test/workbench-unread.test.ts b/foundry/packages/backend/test/workspace-unread.test.ts similarity index 92% rename from foundry/packages/backend/test/workbench-unread.test.ts rename to foundry/packages/backend/test/workspace-unread.test.ts index fc94e97..5f7221a 100644 --- a/foundry/packages/backend/test/workbench-unread.test.ts +++ b/foundry/packages/backend/test/workspace-unread.test.ts @@ -1,7 +1,7 @@ import { describe, expect, it } from "vitest"; -import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js"; +import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workspace.js"; -describe("workbench unread status transitions", () => { +describe("workspace unread status transitions", () => { it("marks unread when a running session first becomes idle", () => { expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "idle")).toBe(true); }); @@ -15,7 +15,7 @@ describe("workbench unread status transitions", () => { }); }); -describe("workbench model changes", () => { +describe("workspace model 
changes", () => { it("recreates an unused ready session so the selected model takes effect", () => { expect( shouldRecreateSessionForModelChange({ @@ -58,9 +58,9 @@ describe("workbench model changes", () => { }); }); -describe("workbench send readiness", () => { +describe("workspace send readiness", () => { it("rejects unknown sessions", () => { - expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1"); + expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workspace session: session-1"); }); it("rejects pending sessions", () => { diff --git a/foundry/packages/cli/src/tui.ts b/foundry/packages/cli/src/tui.ts index c3aba9e..062bb95 100644 --- a/foundry/packages/cli/src/tui.ts +++ b/foundry/packages/cli/src/tui.ts @@ -1,4 +1,4 @@ -import type { AppConfig, TaskRecord } from "@sandbox-agent/foundry-shared"; +import type { AppConfig, TaskRecord, WorkspaceTaskDetail } from "@sandbox-agent/foundry-shared"; import { spawnSync } from "node:child_process"; import { createBackendClientFromConfig, filterTasks, formatRelativeAge, groupTaskStatus } from "@sandbox-agent/foundry-client"; import { CLI_BUILD_ID } from "./build-id.js"; @@ -51,14 +51,28 @@ interface DisplayRow { age: string; } +type TuiTaskRow = TaskRecord & Pick & { activeSessionId?: string | null }; + interface RenderOptions { width?: number; height?: number; } -async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { const rows = await client.listTasks(organizationId); - return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); + return await Promise.all( + rows.map(async (row) => { + const [task, detail] = await Promise.all([ + client.getTask(organizationId, row.repoId, row.taskId), + client.getTaskDetail(organizationId, row.repoId, row.taskId).catch(() => null), + ]); + return { 
+ ...task, + pullRequest: detail?.pullRequest ?? null, + activeSessionId: detail?.activeSessionId ?? null, + }; + }), + ); } function pad(input: string, width: number): string { @@ -143,29 +157,17 @@ function agentSymbol(status: TaskRecord["status"]): string { return "-"; } -function toDisplayRow(row: TaskRecord): DisplayRow { - const conflictPrefix = row.conflictsWithMain === "true" ? "\u26A0 " : ""; - - const prLabel = row.prUrl ? `#${row.prUrl.match(/\/pull\/(\d+)/)?.[1] ?? "?"}` : row.prSubmitted ? "sub" : "-"; - - const ciLabel = row.ciStatus ?? "-"; - const reviewLabel = row.reviewStatus - ? row.reviewStatus === "approved" - ? "ok" - : row.reviewStatus === "changes_requested" - ? "chg" - : row.reviewStatus === "pending" - ? "..." - : row.reviewStatus - : "-"; +function toDisplayRow(row: TuiTaskRow): DisplayRow { + const prLabel = row.pullRequest ? `#${row.pullRequest.number}` : "-"; + const reviewLabel = row.pullRequest ? (row.pullRequest.isDraft ? "draft" : row.pullRequest.state.toLowerCase()) : "-"; return { - name: `${conflictPrefix}${row.title || row.branchName}`, - diff: row.diffStat ?? "-", + name: row.title || row.branchName || row.taskId, + diff: "-", agent: agentSymbol(row.status), pr: prLabel, - author: row.prAuthor ?? "-", - ci: ciLabel, + author: row.pullRequest?.authorLogin ?? 
"-", + ci: "-", review: reviewLabel, age: formatRelativeAge(row.updatedAt), }; @@ -186,7 +188,7 @@ function helpLines(width: number): string[] { } export function formatRows( - rows: TaskRecord[], + rows: TuiTaskRow[], selected: number, organizationId: string, status: string, @@ -336,8 +338,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise renderer.root.add(text); renderer.start(); - let allRows: TaskRecord[] = []; - let filteredRows: TaskRecord[] = []; + let allRows: TuiTaskRow[] = []; + let filteredRows: TuiTaskRow[] = []; let selected = 0; let searchQuery = ""; let showHelp = false; @@ -393,7 +395,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); }; - const selectedRow = (): TaskRecord | null => { + const selectedRow = (): TuiTaskRow | null => { if (filteredRows.length === 0) { return null; } @@ -522,7 +524,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); void (async () => { try { - const result = await client.switchTask(organizationId, row.taskId); + const result = await client.switchTask(organizationId, row.repoId, row.taskId); close(`cd ${result.switchTarget}`); } catch (err) { busy = false; @@ -543,7 +545,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); void (async () => { try { - const result = await client.attachTask(organizationId, row.taskId); + const result = await client.attachTask(organizationId, row.repoId, row.taskId); close(`target=${result.target} session=${result.sessionId ?? 
"none"}`); } catch (err) { busy = false; @@ -559,7 +561,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (!row) { return; } - void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`); + void runActionWithRefresh( + `archiving ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "archive"), + `archived ${row.taskId}`, + ); return; } @@ -568,7 +574,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (!row) { return; } - void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`); + void runActionWithRefresh( + `syncing ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "sync"), + `synced ${row.taskId}`, + ); return; } @@ -580,8 +590,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise void runActionWithRefresh( `merging ${row.taskId}`, async () => { - await client.runAction(organizationId, row.taskId, "merge"); - await client.runAction(organizationId, row.taskId, "archive"); + await client.runAction(organizationId, row.repoId, row.taskId, "merge"); + await client.runAction(organizationId, row.repoId, row.taskId, "archive"); }, `merged+archived ${row.taskId}`, ); @@ -590,14 +600,15 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (ctrl && name === "o") { const row = selectedRow(); - if (!row?.prUrl) { + const prUrl = row?.pullRequest?.url ?? null; + if (!prUrl) { status = "no PR URL available for this task"; render(); return; } const openCmd = process.platform === "darwin" ? 
"open" : "xdg-open"; - spawnSync(openCmd, [row.prUrl], { stdio: "ignore" }); - status = `opened ${row.prUrl}`; + spawnSync(openCmd, [prUrl], { stdio: "ignore" }); + status = `opened ${prUrl}`; render(); return; } diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts index 9ba0feb..15d3fe8 100644 --- a/foundry/packages/cli/test/tui-format.test.ts +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -3,7 +3,7 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client"; import { formatRows } from "../src/tui.js"; -const sample: TaskRecord = { +const sample = { organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", @@ -13,33 +13,22 @@ const sample: TaskRecord = { task: "Do test", sandboxProviderId: "local", status: "running", - statusMessage: null, activeSandboxId: "sandbox-1", - activeSessionId: "session-1", + pullRequest: null, sandboxes: [ { sandboxId: "sandbox-1", sandboxProviderId: "local", + sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, updatedAt: 1, }, ], - agentType: null, - prSubmitted: false, - diffStat: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: null, - hasUnpushed: null, - parentBranch: null, createdAt: 1, updatedAt: 1, -}; +} satisfies TaskRecord & { pullRequest: null; activeSessionId?: null }; describe("formatRows", () => { it("renders rust-style table header and empty state", () => { diff --git a/foundry/packages/client/package.json b/foundry/packages/client/package.json index 98079d5..9790474 100644 --- a/foundry/packages/client/package.json +++ b/foundry/packages/client/package.json @@ -10,8 +10,8 @@ "typecheck": "tsc --noEmit", "test": "vitest run", "test:e2e:full": "HF_ENABLE_DAEMON_FULL_E2E=1 vitest run test/e2e/full-integration-e2e.test.ts", - "test:e2e:workbench": 
"HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workbench-e2e.test.ts", - "test:e2e:workbench-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workbench-load-e2e.test.ts" + "test:e2e:workspace": "HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workspace-e2e.test.ts", + "test:e2e:workspace-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workspace-load-e2e.test.ts" }, "dependencies": { "@sandbox-agent/foundry-shared": "workspace:*", diff --git a/foundry/packages/client/src/app-client.ts b/foundry/packages/client/src/app-client.ts index 16968cf..0bf5526 100644 --- a/foundry/packages/client/src/app-client.ts +++ b/foundry/packages/client/src/app-client.ts @@ -4,6 +4,7 @@ import type { FoundryOrganization, FoundryUser, UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "./backend-client.js"; import { getMockFoundryAppClient } from "./mock-app.js"; @@ -17,6 +18,7 @@ export interface FoundryAppClient { skipStarterRepo(): Promise; starStarterRepo(organizationId: string): Promise; selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; triggerGithubSync(organizationId: string): Promise; completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise; diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index 14e5661..0903aa8 100644 --- a/foundry/packages/client/src/backend-client.ts +++ b/foundry/packages/client/src/backend-client.ts @@ -7,28 +7,29 @@ import type { CreateTaskInput, AppEvent, SessionEvent, + SandboxProcessSnapshot, SandboxProcessesEvent, TaskRecord, TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - 
TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, TaskEvent, - WorkbenchTaskDetail, - WorkbenchTaskSummary, - WorkbenchSessionDetail, + WorkspaceTaskDetail, + WorkspaceTaskSummary, + WorkspaceSessionDetail, OrganizationEvent, OrganizationSummarySnapshot, - HistoryEvent, + AuditLogEvent as HistoryEvent, HistoryQueryInput, SandboxProviderId, RepoOverview, @@ -37,8 +38,10 @@ import type { StarSandboxAgentRepoResult, SwitchResult, UpdateFoundryOrganizationProfileInput, + WorkspaceModelGroup, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; -import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; +import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import { createMockBackendClient } from "./mock/backend-client.js"; import { taskKey, taskSandboxKey, organizationKey } from "./keys.js"; @@ -64,7 +67,7 @@ export interface SandboxSessionEventRecord { payload: unknown; } -export type SandboxProcessRecord = ProcessInfo; +export type SandboxProcessRecord = SandboxProcessSnapshot; export interface ActorConn { on(event: string, listener: (payload: any) => void): () => void; @@ -72,45 +75,44 @@ export interface ActorConn { dispose(): Promise; } +interface AuthSessionScopedInput { + authSessionId?: string; +} + interface OrganizationHandle { connect(): ActorConn; listRepos(input: { 
organizationId: string }): Promise; createTask(input: CreateTaskInput): Promise; listTasks(input: { organizationId: string; repoId?: string }): Promise; getRepoOverview(input: { organizationId: string; repoId: string }): Promise; - history(input: HistoryQueryInput): Promise; - switchTask(taskId: string): Promise; - getTask(input: { organizationId: string; taskId: string }): Promise; - attachTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; - pushTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - syncTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - mergeTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - archiveTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - killTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + auditLog(input: HistoryQueryInput): Promise; + switchTask(input: { repoId: string; taskId: string }): Promise; + getTask(input: { organizationId: string; repoId: string; taskId: string }): Promise; + attachTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; + pushTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + syncTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + mergeTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + archiveTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + killTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>; starSandboxAgentRepo(input: StarSandboxAgentRepoInput): 
Promise; getOrganizationSummary(input: { organizationId: string }): Promise; - applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise; - removeTaskSummary(input: { taskId: string }): Promise; - reconcileWorkbenchState(input: { organizationId: string }): Promise; - createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; - renameWorkbenchSession(input: TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; - closeWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; - publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(): Promise; - reloadGithubPullRequests(): Promise; - reloadGithubRepository(input: { repoId: string }): Promise; - reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise; + createWorkspaceTask(input: TaskWorkspaceCreateTaskInput & AuthSessionScopedInput): Promise; + markWorkspaceUnread(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + renameWorkspaceTask(input: TaskWorkspaceRenameInput & AuthSessionScopedInput): Promise; + createWorkspaceSession(input: TaskWorkspaceSelectInput & { model?: string } & AuthSessionScopedInput): Promise<{ sessionId: string }>; + renameWorkspaceSession(input: TaskWorkspaceRenameSessionInput & 
AuthSessionScopedInput): Promise; + selectWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + setWorkspaceSessionUnread(input: TaskWorkspaceSetSessionUnreadInput & AuthSessionScopedInput): Promise; + updateWorkspaceDraft(input: TaskWorkspaceUpdateDraftInput & AuthSessionScopedInput): Promise; + changeWorkspaceModel(input: TaskWorkspaceChangeModelInput & AuthSessionScopedInput): Promise; + sendWorkspaceMessage(input: TaskWorkspaceSendMessageInput & AuthSessionScopedInput): Promise; + stopWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + closeWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + publishWorkspacePr(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + revertWorkspaceFile(input: TaskWorkspaceDiffInput & AuthSessionScopedInput): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(input: { repoId: string }): Promise; } interface AppOrganizationHandle { @@ -119,6 +121,7 @@ interface AppOrganizationHandle { skipAppStarterRepo(input: { sessionId: string }): Promise; starAppStarterRepo(input: { sessionId: string; organizationId: string }): Promise; selectAppOrganization(input: { sessionId: string; organizationId: string }): Promise; + setAppDefaultModel(input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise; updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput & { sessionId: string }): Promise; triggerAppRepoImport(input: { sessionId: string; organizationId: string }): Promise; beginAppGithubInstall(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; @@ -130,9 +133,9 @@ interface AppOrganizationHandle { } interface TaskHandle { - getTaskSummary(): Promise; - getTaskDetail(): Promise; - getSessionDetail(input: { sessionId: string }): Promise; + getTaskSummary(): Promise; + getTaskDetail(input?: AuthSessionScopedInput): Promise; + 
getSessionDetail(input: { sessionId: string } & AuthSessionScopedInput): Promise; connect(): ActorConn; } @@ -157,6 +160,7 @@ interface TaskSandboxHandle { rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; destroySession(sessionId: string): Promise; sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; + listWorkspaceModelGroups(): Promise; providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; } @@ -179,6 +183,7 @@ export interface BackendClientOptions { endpoint: string; defaultOrganizationId?: string; mode?: "remote" | "mock"; + encoding?: "json" | "cbor" | "bare"; } export interface BackendClient { @@ -192,6 +197,7 @@ export interface BackendClient { skipAppStarterRepo(): Promise; starAppStarterRepo(organizationId: string): Promise; selectAppOrganization(organizationId: string): Promise; + setAppDefaultModel(defaultModel: WorkspaceModelId): Promise; updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; triggerAppRepoImport(organizationId: string): Promise; reconnectAppGithub(organizationId: string): Promise; @@ -204,11 +210,11 @@ export interface BackendClient { createTask(input: CreateTaskInput): Promise; listTasks(organizationId: string, repoId?: string): Promise; getRepoOverview(organizationId: string, repoId: string): Promise; - getTask(organizationId: string, taskId: string): Promise; + getTask(organizationId: string, repoId: string, taskId: string): Promise; listHistory(input: HistoryQueryInput): Promise; - switchTask(organizationId: string, taskId: string): Promise; - attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; - runAction(organizationId: string, taskId: string, action: TaskAction): Promise; + switchTask(organizationId: string, repoId: string, taskId: string): Promise; + attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ 
target: string; sessionId: string | null }>; + runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise; createSandboxSession(input: { organizationId: string; sandboxProviderId: SandboxProviderId; @@ -279,29 +285,28 @@ export interface BackendClient { sandboxId: string, ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; + getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; getOrganizationSummary(organizationId: string): Promise; - getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; - getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; - getWorkbench(organizationId: string): Promise; - subscribeWorkbench(organizationId: string, listener: () => void): () => void; - createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; - renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(organizationId: string, 
input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; - closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; - publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(organizationId: string): Promise; - reloadGithubPullRequests(organizationId: string): Promise; - reloadGithubRepository(organizationId: string, repoId: string): Promise; - reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise; + getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; + getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; + getWorkspace(organizationId: string): Promise; + subscribeWorkspace(organizationId: string, listener: () => void): () => void; + createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise; + markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise; + createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }>; + renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise; + selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise; + updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise; + changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise; + sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): 
Promise; + stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise; + adminReloadGithubOrganization(organizationId: string): Promise; + adminReloadGithubRepository(organizationId: string, repoId: string): Promise; health(): Promise<{ ok: true }>; useOrganization(organizationId: string): Promise<{ organizationId: string }>; starSandboxAgentRepo(organizationId: string): Promise; @@ -409,8 +414,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const endpoints = deriveBackendEndpoints(options.endpoint); const rivetApiEndpoint = endpoints.rivetEndpoint; const appApiEndpoint = endpoints.appEndpoint; - const client = createClient({ endpoint: rivetApiEndpoint }) as unknown as RivetClient; - const workbenchSubscriptions = new Map< + const client = createClient({ endpoint: rivetApiEndpoint, encoding: options.encoding }) as unknown as RivetClient; + const workspaceSubscriptions = new Map< string, { listeners: Set<() => void>; @@ -461,6 +466,16 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return typeof sessionId === "string" && sessionId.length > 0 ? sessionId : null; }; + const getAuthSessionInput = async (): Promise => { + const authSessionId = await getSessionId(); + return authSessionId ? { authSessionId } : undefined; + }; + + const withAuthSessionInput = async (input: TInput): Promise => { + const authSessionInput = await getAuthSessionInput(); + return authSessionInput ? 
{ ...input, ...authSessionInput } : input; + }; + const organization = async (organizationId: string): Promise => client.organization.getOrCreate(organizationKey(organizationId), { createWithInput: organizationId, @@ -471,7 +486,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien createWithInput: "app", }) as unknown as AppOrganizationHandle; - const task = async (organizationId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(organizationId, repoId, taskId)); + // getOrCreate is intentional here — this is the ONLY lazy creation point for + // virtual tasks (PR-driven entries that exist in the org's local tables but + // have no task actor yet). The task actor self-initializes from org data in + // getCurrentRecord(). Backend code must NEVER use getOrCreateTask except in + // createTaskMutation. See backend/CLAUDE.md "Lazy Task Actor Creation". + const task = async (organizationId: string, repoId: string, taskId: string): Promise => + client.task.getOrCreate(taskKey(organizationId, repoId, taskId), { + createWithInput: { organizationId, repoId, taskId }, + }); const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise => { return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); @@ -493,17 +516,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien for (const row of candidates) { try { - const detail = await ws.getTask({ organizationId, taskId: row.taskId }); + const detail = await ws.getTask({ organizationId, repoId: row.repoId, taskId: row.taskId }); if (detail.sandboxProviderId !== sandboxProviderId) { continue; } - const sandbox = detail.sandboxes.find( + const sandboxes = detail.sandboxes as Array<(typeof detail.sandboxes)[number] & { sandboxActorId?: string }>; + const sandbox = sandboxes.find( (sb) => - sb.sandboxId === sandboxId && - sb.sandboxProviderId === sandboxProviderId && - 
typeof (sb as any).sandboxActorId === "string" && - (sb as any).sandboxActorId.length > 0, - ) as { sandboxActorId?: string } | undefined; + sb.sandboxId === sandboxId && sb.sandboxProviderId === sandboxProviderId && typeof sb.sandboxActorId === "string" && sb.sandboxActorId.length > 0, + ); if (sandbox?.sandboxActorId) { return (client as any).taskSandbox.getForId(sandbox.sandboxActorId); } @@ -563,67 +584,81 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const getWorkbenchCompat = async (organizationId: string): Promise => { + const getTaskDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(await getAuthSessionInput()); + }; + + const getSessionDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getSessionDetail(await withAuthSessionInput({ sessionId })); + }; + + const getWorkspaceCompat = async (organizationId: string): Promise => { + const authSessionInput = await getAuthSessionInput(); const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId }); - const tasks = ( - await Promise.all( - summary.taskSummaries.map(async (taskSummary) => { - let detail; - try { - detail = await (await task(organizationId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); - } catch (error) { - if (isActorNotFoundError(error)) { - return null; - } - throw error; + const resolvedTasks = await Promise.all( + summary.taskSummaries.map(async (taskSummary) => { + let detail; + try { + const taskHandle = await task(organizationId, taskSummary.repoId, taskSummary.id); + detail = await taskHandle.getTaskDetail(authSessionInput); + } catch (error) { + if (isActorNotFoundError(error)) { + return null; } - const sessionDetails = await Promise.all( - 
detail.sessionsSummary.map(async (session) => { - try { - const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); - return [session.id, full] as const; - } catch (error) { - if (isActorNotFoundError(error)) { - return null; - } - throw error; + throw error; + } + const sessionDetails = await Promise.all( + detail.sessionsSummary.map(async (session) => { + try { + const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ + sessionId: session.id, + ...(authSessionInput ?? {}), + }); + return [session.id, full] as const; + } catch (error) { + if (isActorNotFoundError(error)) { + return null; } - }), - ); - const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkbenchSessionDetail] => entry !== null)); - return { - id: detail.id, - repoId: detail.repoId, - title: detail.title, - status: detail.status, - repoName: detail.repoName, - updatedAtMs: detail.updatedAtMs, - branch: detail.branch, - pullRequest: detail.pullRequest, - sessions: detail.sessionsSummary.map((session) => { - const full = sessionDetailsById.get(session.id); - return { - id: session.id, - sessionId: session.sessionId, - sessionName: session.sessionName, - agent: session.agent, - model: session.model, - status: session.status, - thinkingSinceMs: session.thinkingSinceMs, - unread: session.unread, - created: session.created, - draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, - transcript: full?.transcript ?? 
[], - }; - }), - fileChanges: detail.fileChanges, - diffs: detail.diffs, - fileTree: detail.fileTree, - minutesUsed: detail.minutesUsed, - }; - }), - ) - ).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null); + throw error; + } + }), + ); + const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkspaceSessionDetail] => entry !== null)); + return { + id: detail.id, + repoId: detail.repoId, + title: detail.title, + status: detail.status, + repoName: detail.repoName, + updatedAtMs: detail.updatedAtMs, + branch: detail.branch, + pullRequest: detail.pullRequest, + activeSessionId: detail.activeSessionId ?? null, + sessions: detail.sessionsSummary.map((session) => { + const full = sessionDetailsById.get(session.id); + return { + id: session.id, + sessionId: session.sessionId, + sessionName: session.sessionName, + agent: session.agent, + model: session.model, + status: session.status, + thinkingSinceMs: session.thinkingSinceMs, + unread: session.unread, + created: session.created, + draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, + transcript: full?.transcript ?? [], + }; + }), + fileChanges: detail.fileChanges, + diffs: detail.diffs, + fileTree: detail.fileTree, + minutesUsed: detail.minutesUsed, + activeSandboxId: detail.activeSandboxId ?? 
null, + }; + }), + ); + const tasks = resolvedTasks.filter((task): task is Exclude<(typeof resolvedTasks)[number], null> => task !== null); const repositories = summary.repos .map((repo) => ({ @@ -642,14 +677,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }; }; - const subscribeWorkbench = (organizationId: string, listener: () => void): (() => void) => { - let entry = workbenchSubscriptions.get(organizationId); + const subscribeWorkspace = (organizationId: string, listener: () => void): (() => void) => { + let entry = workspaceSubscriptions.get(organizationId); if (!entry) { entry = { listeners: new Set(), disposeConnPromise: null, }; - workbenchSubscriptions.set(organizationId, entry); + workspaceSubscriptions.set(organizationId, entry); } entry.listeners.add(listener); @@ -658,8 +693,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien entry.disposeConnPromise = (async () => { const handle = await organization(organizationId); const conn = (handle as any).connect(); - const unsubscribeEvent = conn.on("workbenchUpdated", () => { - const current = workbenchSubscriptions.get(organizationId); + const unsubscribeEvent = conn.on("organizationUpdated", () => { + const current = workspaceSubscriptions.get(organizationId); if (!current) { return; } @@ -677,7 +712,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } return () => { - const current = workbenchSubscriptions.get(organizationId); + const current = workspaceSubscriptions.get(organizationId); if (!current) { return; } @@ -686,7 +721,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return; } - workbenchSubscriptions.delete(organizationId); + workspaceSubscriptions.delete(organizationId); void current.disposeConnPromise?.then(async (disposeConn) => { await disposeConn?.(); }); @@ -849,6 +884,14 @@ export function createBackendClient(options: BackendClientOptions): 
BackendClien return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId }); }, + async setAppDefaultModel(defaultModel: WorkspaceModelId): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).setAppDefaultModel({ sessionId, defaultModel }); + }, + async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { const sessionId = await getSessionId(); if (!sessionId) { @@ -948,33 +991,36 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return (await organization(organizationId)).getRepoOverview({ organizationId, repoId }); }, - async getTask(organizationId: string, taskId: string): Promise { + async getTask(organizationId: string, repoId: string, taskId: string): Promise { return (await organization(organizationId)).getTask({ organizationId, + repoId, taskId, }); }, async listHistory(input: HistoryQueryInput): Promise { - return (await organization(input.organizationId)).history(input); + return (await organization(input.organizationId)).auditLog(input); }, - async switchTask(organizationId: string, taskId: string): Promise { - return (await organization(organizationId)).switchTask(taskId); + async switchTask(organizationId: string, repoId: string, taskId: string): Promise { + return (await organization(organizationId)).switchTask({ repoId, taskId }); }, - async attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return (await organization(organizationId)).attachTask({ organizationId, + repoId, taskId, reason: "cli.attach", }); }, - async runAction(organizationId: string, taskId: string, action: TaskAction): Promise { + async runAction(organizationId: string, repoId: string, taskId: 
string, action: TaskAction): Promise { if (action === "push") { await (await organization(organizationId)).pushTask({ organizationId, + repoId, taskId, reason: "cli.push", }); @@ -983,6 +1029,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "sync") { await (await organization(organizationId)).syncTask({ organizationId, + repoId, taskId, reason: "cli.sync", }); @@ -991,6 +1038,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "merge") { await (await organization(organizationId)).mergeTask({ organizationId, + repoId, taskId, reason: "cli.merge", }); @@ -999,6 +1047,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "archive") { await (await organization(organizationId)).archiveTask({ organizationId, + repoId, taskId, reason: "cli.archive", }); @@ -1006,6 +1055,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } await (await organization(organizationId)).killTask({ organizationId, + repoId, taskId, reason: "cli.kill", }); @@ -1156,96 +1206,92 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection()); }, + async getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listWorkspaceModelGroups()); + }, + async getOrganizationSummary(organizationId: string): Promise { return (await organization(organizationId)).getOrganizationSummary({ organizationId }); }, - async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise { - return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(); + async getTaskDetail(organizationId: 
string, repoId: string, taskIdValue: string): Promise { + return await getTaskDetailWithAuth(organizationId, repoId, taskIdValue); }, - async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { - return (await task(organizationId, repoId, taskIdValue)).getSessionDetail({ sessionId }); + async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { + return await getSessionDetailWithAuth(organizationId, repoId, taskIdValue, sessionId); }, - async getWorkbench(organizationId: string): Promise { - return await getWorkbenchCompat(organizationId); + async getWorkspace(organizationId: string): Promise { + return await getWorkspaceCompat(organizationId); }, - subscribeWorkbench(organizationId: string, listener: () => void): () => void { - return subscribeWorkbench(organizationId, listener); + subscribeWorkspace(organizationId: string, listener: () => void): () => void { + return subscribeWorkspace(organizationId, listener); }, - async createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { - return (await organization(organizationId)).createWorkbenchTask(input); + async createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + return (await organization(organizationId)).createWorkspaceTask(await withAuthSessionInput(input)); }, - async markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await (await organization(organizationId)).markWorkbenchUnread(input); + async markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).markWorkspaceUnread(await withAuthSessionInput(input)); }, - async renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await (await organization(organizationId)).renameWorkbenchTask(input); + async 
renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await (await organization(organizationId)).renameWorkspaceTask(await withAuthSessionInput(input)); }, - async renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await (await organization(organizationId)).renameWorkbenchBranch(input); + async createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + return await (await organization(organizationId)).createWorkspaceSession(await withAuthSessionInput(input)); }, - async createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - return await (await organization(organizationId)).createWorkbenchSession(input); + async renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { + await (await organization(organizationId)).renameWorkspaceSession(await withAuthSessionInput(input)); }, - async renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await (await organization(organizationId)).renameWorkbenchSession(input); + async selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).selectWorkspaceSession(await withAuthSessionInput(input)); }, - async setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - await (await organization(organizationId)).setWorkbenchSessionUnread(input); + async setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise { + await (await organization(organizationId)).setWorkspaceSessionUnread(await withAuthSessionInput(input)); }, - async updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await (await 
organization(organizationId)).updateWorkbenchDraft(input); + async updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await (await organization(organizationId)).updateWorkspaceDraft(await withAuthSessionInput(input)); }, - async changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { - await (await organization(organizationId)).changeWorkbenchModel(input); + async changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await (await organization(organizationId)).changeWorkspaceModel(await withAuthSessionInput(input)); }, - async sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { - await (await organization(organizationId)).sendWorkbenchMessage(input); + async sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await (await organization(organizationId)).sendWorkspaceMessage(await withAuthSessionInput(input)); }, - async stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await (await organization(organizationId)).stopWorkbenchSession(input); + async stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).stopWorkspaceSession(await withAuthSessionInput(input)); }, - async closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await (await organization(organizationId)).closeWorkbenchSession(input); + async closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).closeWorkspaceSession(await withAuthSessionInput(input)); }, - async publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await (await organization(organizationId)).publishWorkbenchPr(input); + async publishWorkspacePr(organizationId: 
string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).publishWorkspacePr(await withAuthSessionInput(input)); }, - async revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise { - await (await organization(organizationId)).revertWorkbenchFile(input); + async revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise { + await (await organization(organizationId)).revertWorkspaceFile(await withAuthSessionInput(input)); }, - async reloadGithubOrganization(organizationId: string): Promise { - await (await organization(organizationId)).reloadGithubOrganization(); + async adminReloadGithubOrganization(organizationId: string): Promise { + await (await organization(organizationId)).adminReloadGithubOrganization(); }, - async reloadGithubPullRequests(organizationId: string): Promise { - await (await organization(organizationId)).reloadGithubPullRequests(); - }, - - async reloadGithubRepository(organizationId: string, repoId: string): Promise { - await (await organization(organizationId)).reloadGithubRepository({ repoId }); - }, - - async reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise { - await (await organization(organizationId)).reloadGithubPullRequest({ repoId, prNumber }); + async adminReloadGithubRepository(organizationId: string, repoId: string): Promise { + await (await organization(organizationId)).adminReloadGithubRepository({ repoId }); }, async health(): Promise<{ ok: true }> { diff --git a/foundry/packages/client/src/index.ts b/foundry/packages/client/src/index.ts index 87909a9..e28745f 100644 --- a/foundry/packages/client/src/index.ts +++ b/foundry/packages/client/src/index.ts @@ -8,4 +8,4 @@ export * from "./subscription/use-subscription.js"; export * from "./keys.js"; export * from "./mock-app.js"; export * from "./view-model.js"; -export * from "./workbench-client.js"; +export * from "./workspace-client.js"; diff --git 
a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts index 314f16a..7242aae 100644 --- a/foundry/packages/client/src/keys.ts +++ b/foundry/packages/client/src/keys.ts @@ -4,18 +4,14 @@ export function organizationKey(organizationId: string): ActorKey { return ["org", organizationId]; } -export function repositoryKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId]; -} - export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "task", taskId]; + return ["org", organizationId, "task", repoId, taskId]; } export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "history"]; +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; } diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 0fa6fc7..00fd9ca 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -1,4 +1,8 @@ -import type { WorkbenchModelId } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS, DEFAULT_WORKSPACE_MODEL_ID, type WorkspaceModelId } from "@sandbox-agent/foundry-shared"; + +const claudeModels = DEFAULT_WORKSPACE_MODEL_GROUPS.find((group) => group.agentKind === "Claude")?.models ?? []; +const CLAUDE_SECONDARY_MODEL_ID = claudeModels[1]?.id ?? claudeModels[0]?.id ?? DEFAULT_WORKSPACE_MODEL_ID; +const CLAUDE_TERTIARY_MODEL_ID = claudeModels[2]?.id ?? 
CLAUDE_SECONDARY_MODEL_ID; import { injectMockLatency } from "./mock/latency.js"; import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; @@ -16,6 +20,7 @@ export interface MockFoundryUser { githubLogin: string; roleLabel: string; eligibleOrganizationIds: string[]; + defaultModel: WorkspaceModelId; } export interface MockFoundryOrganizationMember { @@ -61,7 +66,6 @@ export interface MockFoundryOrganizationSettings { slug: string; primaryDomain: string; seatAccrualMode: "first_prompt"; - defaultModel: WorkbenchModelId; autoImportRepos: boolean; } @@ -111,6 +115,7 @@ export interface MockFoundryAppClient { skipStarterRepo(): Promise; starStarterRepo(organizationId: string): Promise; selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise; triggerGithubSync(organizationId: string): Promise; completeHostedCheckout(organizationId: string, planId: MockBillingPlanId): Promise; @@ -180,7 +185,6 @@ function buildRivetOrganization(): MockFoundryOrganization { slug: "rivet", primaryDomain: "rivet.dev", seatAccrualMode: "first_prompt", - defaultModel: "gpt-5.3-codex", autoImportRepos: true, }, github: { @@ -233,6 +237,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "nathan", roleLabel: "Founder", eligibleOrganizationIds: ["personal-nathan", "acme", "rivet"], + defaultModel: DEFAULT_WORKSPACE_MODEL_ID, }, { id: "user-maya", @@ -241,6 +246,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "maya", roleLabel: "Staff Engineer", eligibleOrganizationIds: ["acme"], + defaultModel: CLAUDE_SECONDARY_MODEL_ID, }, { id: "user-jamie", @@ -249,6 +255,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "jamie", roleLabel: "Platform Lead", eligibleOrganizationIds: ["personal-jamie", "rivet"], + defaultModel: CLAUDE_TERTIARY_MODEL_ID, }, ], organizations: [ 
@@ -261,7 +268,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "nathan", primaryDomain: "personal", seatAccrualMode: "first_prompt", - defaultModel: "claude-sonnet-4", autoImportRepos: true, }, github: { @@ -297,7 +303,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "acme", primaryDomain: "acme.dev", seatAccrualMode: "first_prompt", - defaultModel: "claude-sonnet-4", autoImportRepos: true, }, github: { @@ -342,7 +347,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "jamie", primaryDomain: "personal", seatAccrualMode: "first_prompt", - defaultModel: "claude-opus-4", autoImportRepos: true, }, github: { @@ -538,6 +542,18 @@ class MockFoundryAppStore implements MockFoundryAppClient { } } + async setDefaultModel(model: WorkspaceModelId): Promise { + await this.injectAsyncLatency(); + const currentUserId = this.snapshot.auth.currentUserId; + if (!currentUserId) { + throw new Error("No signed-in mock user"); + } + this.updateSnapshot((current) => ({ + ...current, + users: current.users.map((user) => (user.id === currentUserId ? 
{ ...user, defaultModel: model } : user)), + })); + } + async updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise { await this.injectAsyncLatency(); this.requireOrganization(input.organizationId); diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts index 011192d..fc6470c 100644 --- a/foundry/packages/client/src/mock/backend-client.ts +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -6,25 +6,26 @@ import type { SessionEvent, TaskRecord, TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, TaskEvent, - WorkbenchSessionDetail, - WorkbenchTaskDetail, - WorkbenchTaskSummary, + WorkspaceSessionDetail, + WorkspaceModelGroup, + WorkspaceTaskDetail, + WorkspaceTaskSummary, OrganizationEvent, OrganizationSummarySnapshot, - HistoryEvent, + AuditLogEvent as HistoryEvent, HistoryQueryInput, SandboxProviderId, RepoOverview, @@ -32,9 +33,10 @@ import type { StarSandboxAgentRepoResult, SwitchResult, } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS } from "@sandbox-agent/foundry-shared"; import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import type { ActorConn, BackendClient, 
SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js"; -import { getSharedMockWorkbenchClient } from "./workbench-client.js"; +import { getSharedMockWorkspaceClient } from "./workspace-client.js"; interface MockProcessRecord extends SandboxProcessRecord { logText: string; @@ -89,7 +91,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco } export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient { - const workbench = getSharedMockWorkbenchClient(); + const workspace = getSharedMockWorkspaceClient(); const listenersBySandboxId = new Map void>>(); const processesBySandboxId = new Map(); const connectionListeners = new Map void>>(); @@ -97,7 +99,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back let nextProcessId = 1; const requireTask = (taskId: string) => { - const task = workbench.getSnapshot().tasks.find((candidate) => candidate.id === taskId); + const task = workspace.getSnapshot().tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`Unknown mock task ${taskId}`); } @@ -164,7 +166,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back async dispose(): Promise {}, }); - const buildTaskSummary = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskSummary => ({ + const buildTaskSummary = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskSummary => ({ id: task.id, repoId: task.repoId, title: task.title, @@ -173,6 +175,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back updatedAtMs: task.updatedAtMs, branch: task.branch, pullRequest: task.pullRequest, + activeSessionId: task.activeSessionId ?? task.sessions[0]?.id ?? 
null, sessionsSummary: task.sessions.map((tab) => ({ id: tab.id, sessionId: tab.sessionId, @@ -187,16 +190,9 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back })), }); - const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({ + const buildTaskDetail = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskDetail => ({ ...buildTaskSummary(task), task: task.title, - agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", - runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"), - statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready", - activeSessionId: task.sessions[0]?.sessionId ?? null, - diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", - prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, - reviewStatus: null, fileChanges: task.fileChanges, diffs: task.diffs, fileTree: task.fileTree, @@ -211,7 +207,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back activeSandboxId: task.id, }); - const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => { + const buildSessionDetail = (task: TaskWorkspaceSnapshot["tasks"][number], sessionId: string): WorkspaceSessionDetail => { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`); @@ -232,10 +228,24 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; const buildOrganizationSummary = (): OrganizationSummarySnapshot => { - const snapshot = workbench.getSnapshot(); + const snapshot = workspace.getSnapshot(); const taskSummaries = snapshot.tasks.map(buildTaskSummary); return { organizationId: defaultOrganizationId, + github: { + 
connectedAccount: "mock", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: snapshot.repos.length, + lastSyncLabel: "Synced just now", + lastSyncAt: nowMs(), + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: snapshot.repos.length, + totalRepositoryCount: snapshot.repos.length, + }, repos: snapshot.repos.map((repo) => { const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id); return { @@ -246,7 +256,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; }), taskSummaries, - openPullRequests: [], }; }; @@ -256,20 +265,16 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back `sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`; const emitOrganizationSnapshot = (): void => { - const summary = buildOrganizationSummary(); - const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? null; - if (latestTask) { - emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { - type: "taskSummaryUpdated", - taskSummary: latestTask, - } satisfies OrganizationEvent); - } + emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { + type: "organizationUpdated", + snapshot: buildOrganizationSummary(), + } satisfies OrganizationEvent); }; const emitTaskUpdate = (taskId: string): void => { const task = requireTask(taskId); emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", { - type: "taskDetailUpdated", + type: "taskUpdated", detail: buildTaskDetail(task), } satisfies TaskEvent); }; @@ -303,9 +308,8 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back task: task.title, sandboxProviderId: "local", status: toTaskStatus(archived ? "archived" : "running", archived), - statusMessage: archived ? 
"archived" : "mock sandbox ready", + pullRequest: null, activeSandboxId: task.id, - activeSessionId: task.sessions[0]?.sessionId ?? null, sandboxes: [ { sandboxId: task.id, @@ -317,17 +321,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back updatedAt: task.updatedAtMs, }, ], - agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", - prSubmitted: Boolean(task.pullRequest), - diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", - prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, - prAuthor: task.pullRequest ? "mock" : null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: "0", - hasUnpushed: task.fileChanges.length > 0 ? "1" : "0", - parentBranch: null, createdAt: task.updatedAtMs, updatedAt: task.updatedAtMs, }; @@ -400,6 +393,10 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return unsupportedAppSnapshot(); }, + async setAppDefaultModel(): Promise { + return unsupportedAppSnapshot(); + }, + async updateAppOrganizationProfile(): Promise { return unsupportedAppSnapshot(); }, @@ -433,7 +430,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }, async listRepos(_organizationId: string): Promise { - return workbench.getSnapshot().repos.map((repo) => ({ + return workspace.getSnapshot().repos.map((repo) => ({ organizationId: defaultOrganizationId, repoId: repo.id, remoteUrl: mockRepoRemote(repo.label), @@ -447,7 +444,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }, async listTasks(_organizationId: string, repoId?: string): Promise { - return workbench + return workspace .getSnapshot() .tasks.filter((task) => !repoId || task.repoId === repoId) .map((task) => ({ @@ -457,6 +454,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back branchName: task.branch, title: 
task.title, status: task.status === "archived" ? "archived" : "running", + pullRequest: null, updatedAt: task.updatedAtMs, })); }, @@ -464,7 +462,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back async getRepoOverview(_organizationId: string, _repoId: string): Promise { notSupported("getRepoOverview"); }, - async getTask(_organizationId: string, taskId: string): Promise { + async getTask(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskRecord(taskId); }, @@ -472,7 +470,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return []; }, - async switchTask(_organizationId: string, taskId: string): Promise { + async switchTask(_organizationId: string, _repoId: string, taskId: string): Promise { return { organizationId: defaultOrganizationId, taskId, @@ -481,14 +479,14 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; }, - async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(_organizationId: string, _repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return { target: `mock://${taskId}`, sessionId: requireTask(taskId).sessions[0]?.sessionId ?? 
null, }; }, - async runAction(_organizationId: string, _taskId: string): Promise { + async runAction(_organizationId: string, _repoId: string, _taskId: string): Promise { notSupported("runAction"); }, @@ -637,28 +635,32 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return { endpoint: "mock://terminal-unavailable" }; }, + async getSandboxWorkspaceModelGroups(_organizationId: string, _sandboxProviderId: SandboxProviderId, _sandboxId: string): Promise { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + }, + async getOrganizationSummary(): Promise { return buildOrganizationSummary(); }, - async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { + async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskDetail(requireTask(taskId)); }, - async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { + async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { return buildSessionDetail(requireTask(taskId), sessionId); }, - async getWorkbench(): Promise { - return workbench.getSnapshot(); + async getWorkspace(): Promise { + return workspace.getSnapshot(); }, - subscribeWorkbench(_organizationId: string, listener: () => void): () => void { - return workbench.subscribe(listener); + subscribeWorkspace(_organizationId: string, listener: () => void): () => void { + return workspace.subscribe(listener); }, - async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { - const created = await workbench.createTask(input); + async createWorkspaceTask(_organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + const created = await workspace.createTask(input); emitOrganizationSnapshot(); emitTaskUpdate(created.taskId); if (created.sessionId) { @@ -667,99 +669,95 @@ export function 
createMockBackendClient(defaultOrganizationId = "default"): Back return created; }, - async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await workbench.markTaskUnread(input); + async markWorkspaceUnread(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.markTaskUnread(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await workbench.renameTask(input); + async renameWorkspaceTask(_organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await workspace.renameTask(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await workbench.renameBranch(input); - emitOrganizationSnapshot(); - emitTaskUpdate(input.taskId); - }, - - async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - const created = await workbench.addSession(input); + async createWorkspaceSession(_organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const created = await workspace.addSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, created.sessionId); return created; }, - async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await workbench.renameSession(input); + async renameWorkspaceSession(_organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { + await workspace.renameSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - 
await workbench.setSessionUnread(input); + async selectWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.selectSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await workbench.updateDraft(input); + async setWorkspaceSessionUnread(_organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise { + await workspace.setSessionUnread(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { - await workbench.changeModel(input); + async updateWorkspaceDraft(_organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await workspace.updateDraft(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { - await workbench.sendMessage(input); + async changeWorkspaceModel(_organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await workspace.changeModel(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await workbench.stopAgent(input); + async sendWorkspaceMessage(_organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await workspace.sendMessage(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await 
workbench.closeSession(input); + async stopWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.stopAgent(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async closeWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.closeSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await workbench.publishPr(input); + async publishWorkspacePr(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.publishPr(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise { - await workbench.revertFile(input); + async revertWorkspaceFile(_organizationId: string, input: TaskWorkspaceDiffInput): Promise { + await workspace.revertFile(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async reloadGithubOrganization(): Promise {}, - - async reloadGithubPullRequests(): Promise {}, - - async reloadGithubRepository(): Promise {}, - - async reloadGithubPullRequest(): Promise {}, + async adminReloadGithubOrganization(): Promise {}, + async adminReloadGithubRepository(): Promise {}, async health(): Promise<{ ok: true }> { return { ok: true }; diff --git a/foundry/packages/client/src/mock/workbench-client.ts b/foundry/packages/client/src/mock/workspace-client.ts similarity index 76% rename from foundry/packages/client/src/mock/workbench-client.ts rename to foundry/packages/client/src/mock/workspace-client.ts index fbed2d0..c51b2e8 100644 --- a/foundry/packages/client/src/mock/workbench-client.ts +++ b/foundry/packages/client/src/mock/workspace-client.ts @@ -1,33 +1,34 @@ import { MODEL_GROUPS, buildInitialMockLayoutViewModel, - 
groupWorkbenchRepositories, + groupWorkspaceRepositories, nowMs, providerAgent, randomReply, removeFileTreePath, slugify, uid, -} from "../workbench-model.js"; +} from "../workspace-model.js"; +import { DEFAULT_WORKSPACE_MODEL_ID, workspaceAgentForModel } from "@sandbox-agent/foundry-shared"; import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, - WorkbenchSession as AgentSession, - WorkbenchTask as Task, - WorkbenchTranscriptEvent as TranscriptEvent, + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, + WorkspaceSession as AgentSession, + WorkspaceTask as Task, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; -import type { TaskWorkbenchClient } from "../workbench-client.js"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; function buildTranscriptEvent(params: { sessionId: string; @@ -47,12 +48,12 @@ function buildTranscriptEvent(params: { }; } -class MockWorkbenchStore implements TaskWorkbenchClient { +class MockWorkspaceStore implements TaskWorkspaceClient { private snapshot = buildInitialMockLayoutViewModel(); private listeners = new Set<() => void>(); private pendingTimers = new Map>(); - getSnapshot(): TaskWorkbenchSnapshot { + getSnapshot(): TaskWorkspaceSnapshot { return this.snapshot; 
} @@ -63,7 +64,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }; } - async createTask(input: TaskWorkbenchCreateTaskInput): Promise { + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { const id = uid(); const sessionId = `session-${id}`; const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId); @@ -74,20 +75,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient { id, repoId: repo.id, title: input.title?.trim() || "New Task", - status: "new", + status: "init_enqueue_provision", repoName: repo.label, updatedAtMs: nowMs(), branch: input.branch?.trim() || null, pullRequest: null, + activeSessionId: sessionId, sessions: [ { id: sessionId, sessionId: sessionId, sessionName: "Session 1", - agent: providerAgent( - MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude", - ), - model: input.model ?? "claude-sonnet-4", + agent: workspaceAgentForModel(input.model ?? DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: input.model ?? DEFAULT_WORKSPACE_MODEL_ID, status: "idle", thinkingSinceMs: null, unread: false, @@ -109,7 +109,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { taskId: id, sessionId }; } - async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { this.updateTask(input.taskId, (task) => { const targetSession = task.sessions[task.sessions.length - 1] ?? 
null; if (!targetSession) { @@ -123,7 +123,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async renameTask(input: TaskWorkbenchRenameInput): Promise { + async renameTask(input: TaskWorkspaceRenameInput): Promise { const value = input.value.trim(); if (!value) { throw new Error(`Cannot rename task ${input.taskId} to an empty title`); @@ -131,28 +131,32 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (task) => ({ ...task, title: value, updatedAtMs: nowMs() })); } - async renameBranch(input: TaskWorkbenchRenameInput): Promise { - const value = input.value.trim(); - if (!value) { - throw new Error(`Cannot rename branch for task ${input.taskId} to an empty value`); - } - this.updateTask(input.taskId, (task) => ({ ...task, branch: value, updatedAtMs: nowMs() })); - } - - async archiveTask(input: TaskWorkbenchSelectInput): Promise { + async archiveTask(input: TaskWorkspaceSelectInput): Promise { this.updateTask(input.taskId, (task) => ({ ...task, status: "archived", updatedAtMs: nowMs() })); } - async publishPr(input: TaskWorkbenchSelectInput): Promise { + async publishPr(input: TaskWorkspaceSelectInput): Promise { const nextPrNumber = Math.max(0, ...this.snapshot.tasks.map((task) => task.pullRequest?.number ?? 0)) + 1; this.updateTask(input.taskId, (task) => ({ ...task, updatedAtMs: nowMs(), - pullRequest: { number: nextPrNumber, status: "ready" }, + pullRequest: { + number: nextPrNumber, + status: "ready", + title: task.title, + state: "open", + url: `https://example.test/pr/${nextPrNumber}`, + headRefName: task.branch ?? 
`task/${task.id}`, + baseRefName: "main", + repoFullName: task.repoName, + authorLogin: "mock", + isDraft: false, + updatedAtMs: nowMs(), + }, })); } - async revertFile(input: TaskWorkbenchDiffInput): Promise { + async revertFile(input: TaskWorkspaceDiffInput): Promise { this.updateTask(input.taskId, (task) => { const file = task.fileChanges.find((entry) => entry.path === input.path); const nextDiffs = { ...task.diffs }; @@ -167,7 +171,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise { this.assertSession(input.taskId, input.sessionId); this.updateTask(input.taskId, (task) => ({ ...task, @@ -187,7 +191,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { })); } - async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { const text = input.text.trim(); if (!text) { throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`); @@ -197,7 +201,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const startedAtMs = nowMs(); this.updateTask(input.taskId, (currentTask) => { - const isFirstOnTask = currentTask.status === "new"; + const isFirstOnTask = String(currentTask.status).startsWith("init_"); const newTitle = isFirstOnTask ? (text.length > 50 ? `${text.slice(0, 47)}...` : text) : currentTask.title; const newBranch = isFirstOnTask ? 
`feat/${slugify(newTitle)}` : currentTask.branch; const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)]; @@ -288,7 +292,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.pendingTimers.set(input.sessionId, timer); } - async stopAgent(input: TaskWorkbenchSessionInput): Promise { + async stopAgent(input: TaskWorkspaceSessionInput): Promise { this.assertSession(input.taskId, input.sessionId); const existing = this.pendingTimers.get(input.sessionId); if (existing) { @@ -311,14 +315,22 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { + async selectSession(input: TaskWorkspaceSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + activeSessionId: input.sessionId, + })); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? 
{ ...candidate, unread: input.unread } : candidate)), })); } - async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { const title = input.title.trim(); if (!title) { throw new Error(`Cannot rename session ${input.sessionId} to an empty title`); @@ -329,7 +341,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { })); } - async closeSession(input: TaskWorkbenchSessionInput): Promise { + async closeSession(input: TaskWorkspaceSessionInput): Promise { this.updateTask(input.taskId, (currentTask) => { if (currentTask.sessions.length <= 1) { return currentTask; @@ -337,12 +349,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { ...currentTask, + activeSessionId: currentTask.activeSessionId === input.sessionId ? (currentTask.sessions.find((candidate) => candidate.id !== input.sessionId)?.id ?? null) : currentTask.activeSessionId, sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId), }; }); } - async addSession(input: TaskWorkbenchSelectInput): Promise { + async addSession(input: TaskWorkspaceSelectInput): Promise { this.assertTask(input.taskId); const nextSessionId = uid(); const nextSession: AgentSession = { @@ -350,8 +363,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { sessionId: nextSessionId, sandboxSessionId: null, sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`, - agent: "Claude", - model: "claude-sonnet-4", + agent: workspaceAgentForModel(DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: DEFAULT_WORKSPACE_MODEL_ID, status: "idle", thinkingSinceMs: null, unread: false, @@ -363,12 +376,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, updatedAtMs: nowMs(), + activeSessionId: nextSession.id, sessions: [...currentTask.sessions, nextSession], })); return { sessionId: nextSession.id }; } - async 
changeModel(input: TaskWorkbenchChangeModelInput): Promise { + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { const group = MODEL_GROUPS.find((candidate) => candidate.models.some((entry) => entry.id === input.model)); if (!group) { throw new Error(`Unable to resolve model provider for ${input.model}`); @@ -377,16 +391,16 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, sessions: currentTask.sessions.map((candidate) => - candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, + candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: workspaceAgentForModel(input.model, MODEL_GROUPS) } : candidate, ), })); } - private updateState(updater: (current: TaskWorkbenchSnapshot) => TaskWorkbenchSnapshot): void { + private updateState(updater: (current: TaskWorkspaceSnapshot) => TaskWorkspaceSnapshot): void { const nextSnapshot = updater(this.snapshot); this.snapshot = { ...nextSnapshot, - repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), + repositories: groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; this.notify(); } @@ -436,11 +450,11 @@ function candidateEventIndex(task: Task, sessionId: string): number { return (session?.transcript.length ?? 
0) + 1; } -let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null; +let sharedMockWorkspaceClient: TaskWorkspaceClient | null = null; -export function getSharedMockWorkbenchClient(): TaskWorkbenchClient { - if (!sharedMockWorkbenchClient) { - sharedMockWorkbenchClient = new MockWorkbenchStore(); +export function getSharedMockWorkspaceClient(): TaskWorkspaceClient { + if (!sharedMockWorkspaceClient) { + sharedMockWorkspaceClient = new MockWorkspaceStore(); } - return sharedMockWorkbenchClient; + return sharedMockWorkspaceClient; } diff --git a/foundry/packages/client/src/remote/app-client.ts b/foundry/packages/client/src/remote/app-client.ts index 6daa2c5..f1cb908 100644 --- a/foundry/packages/client/src/remote/app-client.ts +++ b/foundry/packages/client/src/remote/app-client.ts @@ -1,4 +1,4 @@ -import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared"; +import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "../backend-client.js"; import type { FoundryAppClient } from "../app-client.js"; @@ -72,6 +72,11 @@ class RemoteFoundryAppStore implements FoundryAppClient { this.notify(); } + async setDefaultModel(model: WorkspaceModelId): Promise { + this.snapshot = await this.backend.setAppDefaultModel(model); + this.notify(); + } + async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { this.snapshot = await this.backend.updateAppOrganizationProfile(input); this.notify(); diff --git a/foundry/packages/client/src/remote/workbench-client.ts b/foundry/packages/client/src/remote/workbench-client.ts deleted file mode 100644 index 0dcbecb..0000000 --- a/foundry/packages/client/src/remote/workbench-client.ts +++ /dev/null @@ -1,198 +0,0 @@ -import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - 
TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, -} from "@sandbox-agent/foundry-shared"; -import type { BackendClient } from "../backend-client.js"; -import { groupWorkbenchRepositories } from "../workbench-model.js"; -import type { TaskWorkbenchClient } from "../workbench-client.js"; - -export interface RemoteWorkbenchClientOptions { - backend: BackendClient; - organizationId: string; -} - -class RemoteWorkbenchStore implements TaskWorkbenchClient { - private readonly backend: BackendClient; - private readonly organizationId: string; - private snapshot: TaskWorkbenchSnapshot; - private readonly listeners = new Set<() => void>(); - private unsubscribeWorkbench: (() => void) | null = null; - private refreshPromise: Promise | null = null; - private refreshRetryTimeout: ReturnType | null = null; - - constructor(options: RemoteWorkbenchClientOptions) { - this.backend = options.backend; - this.organizationId = options.organizationId; - this.snapshot = { - organizationId: options.organizationId, - repos: [], - repositories: [], - tasks: [], - }; - } - - getSnapshot(): TaskWorkbenchSnapshot { - return this.snapshot; - } - - subscribe(listener: () => void): () => void { - this.listeners.add(listener); - this.ensureStarted(); - return () => { - this.listeners.delete(listener); - if (this.listeners.size === 0 && this.refreshRetryTimeout) { - clearTimeout(this.refreshRetryTimeout); - this.refreshRetryTimeout = null; - } - if (this.listeners.size === 0 && this.unsubscribeWorkbench) { - this.unsubscribeWorkbench(); - this.unsubscribeWorkbench = null; - } - }; - } - - async createTask(input: TaskWorkbenchCreateTaskInput): Promise { - const created = await 
this.backend.createWorkbenchTask(this.organizationId, input); - await this.refresh(); - return created; - } - - async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { - await this.backend.markWorkbenchUnread(this.organizationId, input); - await this.refresh(); - } - - async renameTask(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchTask(this.organizationId, input); - await this.refresh(); - } - - async renameBranch(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchBranch(this.organizationId, input); - await this.refresh(); - } - - async archiveTask(input: TaskWorkbenchSelectInput): Promise { - await this.backend.runAction(this.organizationId, input.taskId, "archive"); - await this.refresh(); - } - - async publishPr(input: TaskWorkbenchSelectInput): Promise { - await this.backend.publishWorkbenchPr(this.organizationId, input); - await this.refresh(); - } - - async revertFile(input: TaskWorkbenchDiffInput): Promise { - await this.backend.revertWorkbenchFile(this.organizationId, input); - await this.refresh(); - } - - async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - await this.backend.updateWorkbenchDraft(this.organizationId, input); - // Skip refresh — the server broadcast will trigger it, and the frontend - // holds local draft state to avoid the round-trip overwriting user input. 
- } - - async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { - await this.backend.sendWorkbenchMessage(this.organizationId, input); - await this.refresh(); - } - - async stopAgent(input: TaskWorkbenchSessionInput): Promise { - await this.backend.stopWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { - await this.backend.setWorkbenchSessionUnread(this.organizationId, input); - await this.refresh(); - } - - async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { - await this.backend.renameWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async closeSession(input: TaskWorkbenchSessionInput): Promise { - await this.backend.closeWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async addSession(input: TaskWorkbenchSelectInput): Promise { - const created = await this.backend.createWorkbenchSession(this.organizationId, input); - await this.refresh(); - return created; - } - - async changeModel(input: TaskWorkbenchChangeModelInput): Promise { - await this.backend.changeWorkbenchModel(this.organizationId, input); - await this.refresh(); - } - - private ensureStarted(): void { - if (!this.unsubscribeWorkbench) { - this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.organizationId, () => { - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - }); - } - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - } - - private scheduleRefreshRetry(): void { - if (this.refreshRetryTimeout || this.listeners.size === 0) { - return; - } - - this.refreshRetryTimeout = setTimeout(() => { - this.refreshRetryTimeout = null; - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - }, 1_000); - } - - private async refresh(): Promise { - if (this.refreshPromise) { - await this.refreshPromise; - return; - } - - this.refreshPromise = (async 
() => { - const nextSnapshot = await this.backend.getWorkbench(this.organizationId); - if (this.refreshRetryTimeout) { - clearTimeout(this.refreshRetryTimeout); - this.refreshRetryTimeout = null; - } - this.snapshot = { - ...nextSnapshot, - repositories: nextSnapshot.repositories ?? groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), - }; - for (const listener of [...this.listeners]) { - listener(); - } - })().finally(() => { - this.refreshPromise = null; - }); - - await this.refreshPromise; - } -} - -export function createRemoteWorkbenchClient(options: RemoteWorkbenchClientOptions): TaskWorkbenchClient { - return new RemoteWorkbenchStore(options); -} diff --git a/foundry/packages/client/src/remote/workspace-client.ts b/foundry/packages/client/src/remote/workspace-client.ts new file mode 100644 index 0000000..1b6bc8e --- /dev/null +++ b/foundry/packages/client/src/remote/workspace-client.ts @@ -0,0 +1,198 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../backend-client.js"; +import { groupWorkspaceRepositories } from "../workspace-model.js"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; + +export interface RemoteWorkspaceClientOptions { + backend: BackendClient; + organizationId: string; +} + +class RemoteWorkspaceStore implements TaskWorkspaceClient { + private readonly backend: BackendClient; + private readonly organizationId: string; + private snapshot: TaskWorkspaceSnapshot; + private readonly listeners = new Set<() => void>(); + private unsubscribeWorkspace: (() => void) | 
null = null; + private refreshPromise: Promise | null = null; + private refreshRetryTimeout: ReturnType | null = null; + + constructor(options: RemoteWorkspaceClientOptions) { + this.backend = options.backend; + this.organizationId = options.organizationId; + this.snapshot = { + organizationId: options.organizationId, + repos: [], + repositories: [], + tasks: [], + }; + } + + getSnapshot(): TaskWorkspaceSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + this.ensureStarted(); + return () => { + this.listeners.delete(listener); + if (this.listeners.size === 0 && this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + if (this.listeners.size === 0 && this.unsubscribeWorkspace) { + this.unsubscribeWorkspace(); + this.unsubscribeWorkspace = null; + } + }; + } + + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { + const created = await this.backend.createWorkspaceTask(this.organizationId, input); + await this.refresh(); + return created; + } + + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { + await this.backend.markWorkspaceUnread(this.organizationId, input); + await this.refresh(); + } + + async renameTask(input: TaskWorkspaceRenameInput): Promise { + await this.backend.renameWorkspaceTask(this.organizationId, input); + await this.refresh(); + } + + async archiveTask(input: TaskWorkspaceSelectInput): Promise { + await this.backend.runAction(this.organizationId, input.repoId, input.taskId, "archive"); + await this.refresh(); + } + + async publishPr(input: TaskWorkspaceSelectInput): Promise { + await this.backend.publishWorkspacePr(this.organizationId, input); + await this.refresh(); + } + + async revertFile(input: TaskWorkspaceDiffInput): Promise { + await this.backend.revertWorkspaceFile(this.organizationId, input); + await this.refresh(); + } + + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise 
{ + await this.backend.updateWorkspaceDraft(this.organizationId, input); + // Skip refresh — the server broadcast will trigger it, and the frontend + // holds local draft state to avoid the round-trip overwriting user input. + } + + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { + await this.backend.sendWorkspaceMessage(this.organizationId, input); + await this.refresh(); + } + + async stopAgent(input: TaskWorkspaceSessionInput): Promise { + await this.backend.stopWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async selectSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.selectWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { + await this.backend.setWorkspaceSessionUnread(this.organizationId, input); + await this.refresh(); + } + + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { + await this.backend.renameWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async closeSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.closeWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async addSession(input: TaskWorkspaceSelectInput): Promise { + const created = await this.backend.createWorkspaceSession(this.organizationId, input); + await this.refresh(); + return created; + } + + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { + await this.backend.changeWorkspaceModel(this.organizationId, input); + await this.refresh(); + } + + private ensureStarted(): void { + if (!this.unsubscribeWorkspace) { + this.unsubscribeWorkspace = this.backend.subscribeWorkspace(this.organizationId, () => { + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }); + } + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + } + + private scheduleRefreshRetry(): void { + 
if (this.refreshRetryTimeout || this.listeners.size === 0) { + return; + } + + this.refreshRetryTimeout = setTimeout(() => { + this.refreshRetryTimeout = null; + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }, 1_000); + } + + private async refresh(): Promise { + if (this.refreshPromise) { + await this.refreshPromise; + return; + } + + this.refreshPromise = (async () => { + const nextSnapshot = await this.backend.getWorkspace(this.organizationId); + if (this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + this.snapshot = { + ...nextSnapshot, + repositories: nextSnapshot.repositories ?? groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), + }; + for (const listener of [...this.listeners]) { + listener(); + } + })().finally(() => { + this.refreshPromise = null; + }); + + await this.refreshPromise; + } +} + +export function createRemoteWorkspaceClient(options: RemoteWorkspaceClientOptions): TaskWorkspaceClient { + return new RemoteWorkspaceStore(options); +} diff --git a/foundry/packages/client/src/subscription/remote-manager.ts b/foundry/packages/client/src/subscription/remote-manager.ts index 8cb2864..778241f 100644 --- a/foundry/packages/client/src/subscription/remote-manager.ts +++ b/foundry/packages/client/src/subscription/remote-manager.ts @@ -81,6 +81,7 @@ class TopicEntry { private unsubscribeError: (() => void) | null = null; private teardownTimer: ReturnType | null = null; private startPromise: Promise | null = null; + private eventPromise: Promise = Promise.resolve(); private started = false; constructor( @@ -157,12 +158,7 @@ class TopicEntry { try { this.conn = await this.definition.connect(this.backend, this.params); this.unsubscribeEvent = this.conn.on(this.definition.event, (event: TEvent) => { - if (this.data === undefined) { - return; - } - this.data = this.definition.applyEvent(this.data, event); - this.lastRefreshAt = Date.now(); - this.notify(); + 
void this.applyEvent(event); }); this.unsubscribeError = this.conn.onError((error: unknown) => { this.status = "error"; @@ -182,6 +178,33 @@ class TopicEntry { } } + private applyEvent(event: TEvent): Promise { + this.eventPromise = this.eventPromise + .then(async () => { + if (!this.started || this.data === undefined) { + return; + } + + const nextData = await this.definition.applyEvent(this.backend, this.params, this.data, event); + if (!this.started) { + return; + } + + this.data = nextData; + this.status = "connected"; + this.error = null; + this.lastRefreshAt = Date.now(); + this.notify(); + }) + .catch((error) => { + this.status = "error"; + this.error = error instanceof Error ? error : new Error(String(error)); + this.notify(); + }); + + return this.eventPromise; + } + private notify(): void { for (const listener of [...this.listeners]) { listener(); diff --git a/foundry/packages/client/src/subscription/topics.ts b/foundry/packages/client/src/subscription/topics.ts index f6a0acc..bbda118 100644 --- a/foundry/packages/client/src/subscription/topics.ts +++ b/foundry/packages/client/src/subscription/topics.ts @@ -5,8 +5,8 @@ import type { SandboxProcessesEvent, SessionEvent, TaskEvent, - WorkbenchSessionDetail, - WorkbenchTaskDetail, + WorkspaceSessionDetail, + WorkspaceTaskDetail, OrganizationEvent, OrganizationSummarySnapshot, } from "@sandbox-agent/foundry-shared"; @@ -16,15 +16,15 @@ import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend- * Topic definitions for the subscription manager. * * Each topic describes one actor connection plus one materialized read model. - * Events always carry full replacement payloads for the changed entity so the - * client can replace cached state directly instead of reconstructing patches. + * Some topics can apply broadcast payloads directly, while others refetch + * through BackendClient so auth-scoped state stays user-specific. 
*/ export interface TopicDefinition { key: (params: TParams) => string; event: string; connect: (backend: BackendClient, params: TParams) => Promise; fetchInitial: (backend: BackendClient, params: TParams) => Promise; - applyEvent: (current: TData, event: TEvent) => TData; + applyEvent: (backend: BackendClient, params: TParams, current: TData, event: TEvent) => Promise | TData; } export interface AppTopicParams {} @@ -48,23 +48,13 @@ export interface SandboxProcessesTopicParams { sandboxId: string; } -function upsertById(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] { - const filtered = items.filter((item) => item.id !== nextItem.id); - return [...filtered, nextItem].sort(sort); -} - -function upsertByPrId(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] { - const filtered = items.filter((item) => item.prId !== nextItem.prId); - return [...filtered, nextItem].sort(sort); -} - export const topicDefinitions = { app: { key: () => "app", event: "appUpdated", connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"), fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(), - applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, + applyEvent: (_backend: BackendClient, _params: AppTopicParams, _current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, organization: { @@ -72,41 +62,8 @@ export const topicDefinitions = { event: "organizationUpdated", connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId), fetchInitial: (backend: BackendClient, params: OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId), - applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => { - switch (event.type) { - case "taskSummaryUpdated": - return { - ...current, - taskSummaries: 
upsertById(current.taskSummaries, event.taskSummary, (left, right) => right.updatedAtMs - left.updatedAtMs), - }; - case "taskRemoved": - return { - ...current, - taskSummaries: current.taskSummaries.filter((task) => task.id !== event.taskId), - }; - case "repoAdded": - case "repoUpdated": - return { - ...current, - repos: upsertById(current.repos, event.repo, (left, right) => right.latestActivityMs - left.latestActivityMs), - }; - case "repoRemoved": - return { - ...current, - repos: current.repos.filter((repo) => repo.id !== event.repoId), - }; - case "pullRequestUpdated": - return { - ...current, - openPullRequests: upsertByPrId(current.openPullRequests, event.pullRequest, (left, right) => right.updatedAtMs - left.updatedAtMs), - }; - case "pullRequestRemoved": - return { - ...current, - openPullRequests: current.openPullRequests.filter((pullRequest) => pullRequest.prId !== event.prId), - }; - } - }, + applyEvent: (_backend: BackendClient, _params: OrganizationTopicParams, _current: OrganizationSummarySnapshot, event: OrganizationEvent) => + event.snapshot, } satisfies TopicDefinition, task: { @@ -114,8 +71,9 @@ export const topicDefinitions = { event: "taskUpdated", connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), - applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail, - } satisfies TopicDefinition, + applyEvent: (backend: BackendClient, params: TaskTopicParams, _current: WorkspaceTaskDetail, _event: TaskEvent) => + backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), + } satisfies TopicDefinition, session: { key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`, @@ -123,13 +81,13 @@ export const topicDefinitions = { connect: 
(backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: SessionTopicParams) => backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId), - applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => { - if (event.session.sessionId !== current.sessionId) { + applyEvent: async (backend: BackendClient, params: SessionTopicParams, current: WorkspaceSessionDetail, event: SessionEvent) => { + if (event.session.sessionId !== params.sessionId) { return current; } - return event.session; + return await backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId); }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, sandboxProcesses: { key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`, @@ -138,7 +96,8 @@ export const topicDefinitions = { backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId), fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) => (await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes, - applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes, + applyEvent: (_backend: BackendClient, _params: SandboxProcessesTopicParams, _current: SandboxProcessRecord[], event: SandboxProcessesEvent) => + event.processes, } satisfies TopicDefinition, } as const; diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts index c30ff2a..bd7a98c 100644 --- a/foundry/packages/client/src/view-model.ts +++ b/foundry/packages/client/src/view-model.ts @@ -65,7 +65,7 @@ export function filterTasks(rows: TaskRecord[], query: string): TaskRecord[] { } return rows.filter((row) => { - const fields = 
[row.branchName ?? "", row.title ?? "", row.taskId, row.task, row.prAuthor ?? "", row.reviewer ?? ""]; + const fields = [row.branchName ?? "", row.title ?? "", row.taskId, row.task]; return fields.some((field) => fuzzyMatch(field, q)); }); } diff --git a/foundry/packages/client/src/workbench-client.ts b/foundry/packages/client/src/workbench-client.ts deleted file mode 100644 index c317649..0000000 --- a/foundry/packages/client/src/workbench-client.ts +++ /dev/null @@ -1,64 +0,0 @@ -import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, -} from "@sandbox-agent/foundry-shared"; -import type { BackendClient } from "./backend-client.js"; -import { getSharedMockWorkbenchClient } from "./mock/workbench-client.js"; -import { createRemoteWorkbenchClient } from "./remote/workbench-client.js"; - -export type TaskWorkbenchClientMode = "mock" | "remote"; - -export interface CreateTaskWorkbenchClientOptions { - mode: TaskWorkbenchClientMode; - backend?: BackendClient; - organizationId?: string; -} - -export interface TaskWorkbenchClient { - getSnapshot(): TaskWorkbenchSnapshot; - subscribe(listener: () => void): () => void; - createTask(input: TaskWorkbenchCreateTaskInput): Promise; - markTaskUnread(input: TaskWorkbenchSelectInput): Promise; - renameTask(input: TaskWorkbenchRenameInput): Promise; - renameBranch(input: TaskWorkbenchRenameInput): Promise; - archiveTask(input: TaskWorkbenchSelectInput): Promise; - publishPr(input: TaskWorkbenchSelectInput): Promise; - revertFile(input: TaskWorkbenchDiffInput): Promise; - updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise; - sendMessage(input: 
TaskWorkbenchSendMessageInput): Promise; - stopAgent(input: TaskWorkbenchSessionInput): Promise; - setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; - renameSession(input: TaskWorkbenchRenameSessionInput): Promise; - closeSession(input: TaskWorkbenchSessionInput): Promise; - addSession(input: TaskWorkbenchSelectInput): Promise; - changeModel(input: TaskWorkbenchChangeModelInput): Promise; -} - -export function createTaskWorkbenchClient(options: CreateTaskWorkbenchClientOptions): TaskWorkbenchClient { - if (options.mode === "mock") { - return getSharedMockWorkbenchClient(); - } - - if (!options.backend) { - throw new Error("Remote task workbench client requires a backend client"); - } - if (!options.organizationId) { - throw new Error("Remote task workbench client requires a organization id"); - } - - return createRemoteWorkbenchClient({ - backend: options.backend, - organizationId: options.organizationId, - }); -} diff --git a/foundry/packages/client/src/workspace-client.ts b/foundry/packages/client/src/workspace-client.ts new file mode 100644 index 0000000..c3293a0 --- /dev/null +++ b/foundry/packages/client/src/workspace-client.ts @@ -0,0 +1,64 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "./backend-client.js"; +import { getSharedMockWorkspaceClient } from "./mock/workspace-client.js"; +import { createRemoteWorkspaceClient } from "./remote/workspace-client.js"; + +export type TaskWorkspaceClientMode = "mock" | "remote"; + +export interface CreateTaskWorkspaceClientOptions { + mode: 
TaskWorkspaceClientMode; + backend?: BackendClient; + organizationId?: string; +} + +export interface TaskWorkspaceClient { + getSnapshot(): TaskWorkspaceSnapshot; + subscribe(listener: () => void): () => void; + createTask(input: TaskWorkspaceCreateTaskInput): Promise; + markTaskUnread(input: TaskWorkspaceSelectInput): Promise; + renameTask(input: TaskWorkspaceRenameInput): Promise; + archiveTask(input: TaskWorkspaceSelectInput): Promise; + publishPr(input: TaskWorkspaceSelectInput): Promise; + revertFile(input: TaskWorkspaceDiffInput): Promise; + updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise; + sendMessage(input: TaskWorkspaceSendMessageInput): Promise; + stopAgent(input: TaskWorkspaceSessionInput): Promise; + selectSession(input: TaskWorkspaceSessionInput): Promise; + setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise; + renameSession(input: TaskWorkspaceRenameSessionInput): Promise; + closeSession(input: TaskWorkspaceSessionInput): Promise; + addSession(input: TaskWorkspaceSelectInput): Promise; + changeModel(input: TaskWorkspaceChangeModelInput): Promise; +} + +export function createTaskWorkspaceClient(options: CreateTaskWorkspaceClientOptions): TaskWorkspaceClient { + if (options.mode === "mock") { + return getSharedMockWorkspaceClient(); + } + + if (!options.backend) { + throw new Error("Remote task workspace client requires a backend client"); + } + if (!options.organizationId) { + throw new Error("Remote task workspace client requires an organization id"); + } + + return createRemoteWorkspaceClient({ + backend: options.backend, + organizationId: options.organizationId, + }); +} diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workspace-model.ts similarity index 90% rename from foundry/packages/client/src/workbench-model.ts rename to foundry/packages/client/src/workspace-model.ts index afe9e8b..290794b 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ 
b/foundry/packages/client/src/workspace-model.ts @@ -1,40 +1,28 @@ +import { + DEFAULT_WORKSPACE_MODEL_ID, + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; import type { - WorkbenchAgentKind as AgentKind, - WorkbenchSession as AgentSession, - WorkbenchDiffLineKind as DiffLineKind, - WorkbenchFileTreeNode as FileTreeNode, - WorkbenchTask as Task, - TaskWorkbenchSnapshot, - WorkbenchHistoryEvent as HistoryEvent, - WorkbenchModelGroup as ModelGroup, - WorkbenchModelId as ModelId, - WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchRepositorySection, - WorkbenchRepo, - WorkbenchTranscriptEvent as TranscriptEvent, + WorkspaceAgentKind as AgentKind, + WorkspaceSession as AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + TaskWorkspaceSnapshot, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection, + WorkspaceRepo, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; -export const MODEL_GROUPS: ModelGroup[] = [ - { - provider: "Claude", - models: [ - { id: "claude-sonnet-4", label: "Sonnet 4" }, - { id: "claude-opus-4", label: "Opus 4" }, - ], - }, - { - provider: "OpenAI", - models: [ - { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, - { id: "gpt-5.4", label: "GPT-5.4" }, - { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, - { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, - { id: "gpt-5.2", label: "GPT-5.2" }, - { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, - ], - }, -]; +export const MODEL_GROUPS: ModelGroup[] = SharedModelGroups; +export const DEFAULT_MODEL_ID: ModelId = 
DEFAULT_WORKSPACE_MODEL_ID; const MOCK_REPLIES = [ "Got it. I'll work on that now. Let me start by examining the relevant files...", @@ -73,15 +61,11 @@ export function formatMessageDuration(durationMs: number): string { } export function modelLabel(id: ModelId): string { - const group = MODEL_GROUPS.find((candidate) => candidate.models.some((model) => model.id === id)); - const model = group?.models.find((candidate) => candidate.id === id); - return model && group ? `${group.provider} ${model.label}` : id; + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); } export function providerAgent(provider: string): AgentKind { - if (provider === "Claude") return "Claude"; - if (provider === "OpenAI") return "Codex"; - return "Cursor"; + return sharedWorkspaceProviderAgent(provider); } export function slugify(text: string): string { @@ -204,6 +188,29 @@ export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { .sort((left, right) => messageOrder(left.messageId) - messageOrder(right.messageId)); } +function buildPullRequestSummary(params: { + number: number; + title: string; + branch: string; + repoName: string; + updatedAtMs: number; + status: "ready" | "draft"; +}) { + return { + number: params.number, + status: params.status, + title: params.title, + state: "open", + url: `https://github.com/${params.repoName}/pull/${params.number}`, + headRefName: params.branch, + baseRefName: "main", + repoFullName: params.repoName, + authorLogin: "mock", + isDraft: params.status === "draft", + updatedAtMs: params.updatedAtMs, + }; +} + function transcriptFromLegacyMessages(sessionId: string, messages: LegacyMessage[]): TranscriptEvent[] { return messages.map((message, index) => ({ id: message.id, @@ -315,14 +322,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(8), branch: "NathanFlurry/pi-bootstrap-fix", - pullRequest: { number: 227, status: "ready" }, + pullRequest: buildPullRequestSummary({ + 
number: 227, + title: "Normalize Pi ACP bootstrap payloads", + branch: "NathanFlurry/pi-bootstrap-fix", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(8), + status: "ready", + }), sessions: [ { id: "t1", sessionId: "t1", sessionName: "Pi payload fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -484,14 +498,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(3), branch: "feat/builtin-agent-skills", - pullRequest: { number: 223, status: "draft" }, + pullRequest: buildPullRequestSummary({ + number: 223, + title: "Auto-inject builtin agent skills at startup", + branch: "feat/builtin-agent-skills", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(3), + status: "draft", + }), sessions: [ { id: "t3", sessionId: "t3", sessionName: "Skills injection", agent: "Claude", - model: "claude-opus-4", + model: "opus", status: "running", thinkingSinceMs: NOW_MS - 45_000, unread: false, @@ -584,14 +605,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(45), branch: "hooks-example", - pullRequest: { number: 225, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 225, + title: "Add hooks example for Claude, Codex, and OpenCode", + branch: "hooks-example", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(45), + status: "ready", + }), sessions: [ { id: "t4", sessionId: "t4", sessionName: "Example docs", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -659,14 +687,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/rivet", updatedAtMs: minutesAgo(15), branch: "actor-reschedule-endpoint", - pullRequest: { number: 4400, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 4400, + title: "Add actor reschedule 
endpoint", + branch: "actor-reschedule-endpoint", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(15), + status: "ready", + }), sessions: [ { id: "t5", sessionId: "t5", sessionName: "Reschedule API", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -793,14 +828,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/rivet", updatedAtMs: minutesAgo(35), branch: "feat/dynamic-actors", - pullRequest: { number: 4395, status: "draft" }, + pullRequest: buildPullRequestSummary({ + number: 4395, + title: "Dynamic actors", + branch: "feat/dynamic-actors", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(35), + status: "draft", + }), sessions: [ { id: "t6", sessionId: "t6", sessionName: "Dynamic actors impl", agent: "Claude", - model: "claude-opus-4", + model: "opus", status: "idle", thinkingSinceMs: null, unread: true, @@ -850,14 +892,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/vbare", updatedAtMs: minutesAgo(25), branch: "fix-use-full-cloud-run-pool-name", - pullRequest: { number: 235, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 235, + title: "Use full cloud run pool name for routing", + branch: "fix-use-full-cloud-run-pool-name", + repoName: "rivet-dev/vbare", + updatedAtMs: minutesAgo(25), + status: "ready", + }), sessions: [ { id: "t7", sessionId: "t7", sessionName: "Pool routing fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -959,14 +1008,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/skills", updatedAtMs: minutesAgo(50), branch: "fix-guard-support-https-targets", - pullRequest: { number: 125, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 125, + title: "Route compute gateway path correctly", + branch: "fix-guard-support-https-targets", + repoName: "rivet-dev/skills", + updatedAtMs: 
minutesAgo(50), + status: "ready", + }), sessions: [ { id: "t8", sessionId: "t8", sessionName: "Guard routing", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -1073,14 +1129,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/skills", updatedAtMs: minutesAgo(2 * 24 * 60), branch: "chore-move-compute-gateway-to", - pullRequest: { number: 123, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 123, + title: "Move compute gateway to guard", + branch: "chore-move-compute-gateway-to", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(2 * 24 * 60), + status: "ready", + }), sessions: [ { id: "t9", sessionId: "t9", sessionName: "Gateway migration", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -1166,8 +1229,6 @@ export function buildInitialTasks(): Task[] { repoId: "sandbox-agent", title: "Fix broken auth middleware (error demo)", status: "error", - runtimeStatus: "error", - statusMessage: "session:error", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(2), branch: "fix/auth-middleware", @@ -1178,7 +1239,7 @@ export function buildInitialTasks(): Task[] { sessionId: "status-error-session", sessionName: "Auth fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "error", thinkingSinceMs: null, unread: false, @@ -1197,9 +1258,7 @@ export function buildInitialTasks(): Task[] { id: "status-provisioning", repoId: "sandbox-agent", title: "Add rate limiting to API gateway (provisioning demo)", - status: "new", - runtimeStatus: "init_enqueue_provision", - statusMessage: "Queueing sandbox provisioning.", + status: "init_enqueue_provision", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(0), branch: null, @@ -1211,7 +1270,7 @@ export function buildInitialTasks(): Task[] { sandboxSessionId: null, sessionName: "Session 1", agent: "Claude", - 
model: "claude-sonnet-4", + model: "sonnet", status: "pending_provision", thinkingSinceMs: null, unread: false, @@ -1259,7 +1318,6 @@ export function buildInitialTasks(): Task[] { repoId: "sandbox-agent", title: "Refactor WebSocket handler (running demo)", status: "running", - runtimeStatus: "running", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(1), branch: "refactor/ws-handler", @@ -1300,7 +1358,7 @@ export function buildInitialTasks(): Task[] { * Uses real public repos so the mock sidebar matches what an actual rivet-dev * organization would show after a GitHub sync. */ -function buildMockRepos(): WorkbenchRepo[] { +function buildMockRepos(): WorkspaceRepo[] { return rivetDevFixture.repos.map((r) => ({ id: repoIdFromFullName(r.fullName), label: r.fullName, @@ -1313,55 +1371,19 @@ function repoIdFromFullName(fullName: string): string { return parts[parts.length - 1] ?? fullName; } -/** - * Build task entries from open PR fixture data. - * Maps to the backend's PR sync behavior (RepositoryPrSyncActor) where PRs - * appear as first-class sidebar items even without an associated task. - * Each open PR gets a lightweight task entry so it shows in the sidebar. - */ -function buildPrTasks(): Task[] { - // Collect branch names already claimed by hand-written tasks so we don't duplicate - const existingBranches = new Set( - buildInitialTasks() - .map((t) => t.branch) - .filter(Boolean), - ); - - return rivetDevFixture.openPullRequests - .filter((pr) => !existingBranches.has(pr.headRefName)) - .map((pr) => { - const repoId = repoIdFromFullName(pr.repoFullName); - return { - id: `pr-${repoId}-${pr.number}`, - repoId, - title: pr.title, - status: "idle" as const, - repoName: pr.repoFullName, - updatedAtMs: new Date(pr.updatedAt).getTime(), - branch: pr.headRefName, - pullRequest: { number: pr.number, status: pr.draft ? 
("draft" as const) : ("ready" as const) }, - sessions: [], - fileChanges: [], - diffs: {}, - fileTree: [], - minutesUsed: 0, - }; - }); -} - -export function buildInitialMockLayoutViewModel(): TaskWorkbenchSnapshot { +export function buildInitialMockLayoutViewModel(): TaskWorkspaceSnapshot { const repos = buildMockRepos(); - const tasks = [...buildInitialTasks(), ...buildPrTasks()]; + const tasks = buildInitialTasks(); return { organizationId: "default", repos, - repositories: groupWorkbenchRepositories(repos, tasks), + repositories: groupWorkspaceRepositories(repos, tasks), tasks, }; } -export function groupWorkbenchRepositories(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchRepositorySection[] { - const grouped = new Map(); +export function groupWorkspaceRepositories(repos: WorkspaceRepo[], tasks: Task[]): WorkspaceRepositorySection[] { + const grouped = new Map(); for (const repo of repos) { grouped.set(repo.id, { diff --git a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts index 8446892..21eaf6b 100644 --- a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts +++ b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts @@ -1,6 +1,6 @@ import { randomUUID } from "node:crypto"; import { describe, expect, it } from "vitest"; -import type { HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; +import type { AuditLogEvent as HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -132,11 +132,11 @@ describe("e2e(client): full integration stack workflow", () => { 90_000, 1_000, async () => client.getRepoOverview(organizationId, repo.repoId), - (value) => value.branches.some((row) => row.branchName === seededBranch), + (value) => value.branches.some((row: RepoOverview["branches"][number]) => row.branchName === seededBranch), ); 
const postActionOverview = await client.getRepoOverview(organizationId, repo.repoId); - const seededRow = postActionOverview.branches.find((row) => row.branchName === seededBranch); + const seededRow = postActionOverview.branches.find((row: RepoOverview["branches"][number]) => row.branchName === seededBranch); expect(Boolean(seededRow)).toBe(true); expect(postActionOverview.fetchedAt).toBeGreaterThanOrEqual(overview.fetchedAt); } finally { diff --git a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts index 83101fb..89dd638 100644 --- a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import type { TaskRecord, HistoryEvent } from "@sandbox-agent/foundry-shared"; +import type { AuditLogEvent as HistoryEvent, TaskRecord } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -80,9 +80,10 @@ function parseHistoryPayload(event: HistoryEvent): Record { } } -async function debugDump(client: ReturnType, organizationId: string, taskId: string): Promise { +async function debugDump(client: ReturnType, organizationId: string, repoId: string, taskId: string): Promise { try { - const task = await client.getTask(organizationId, taskId); + const task = await client.getTask(organizationId, repoId, taskId); + const detail = await client.getTaskDetail(organizationId, repoId, taskId).catch(() => null); const history = await client.listHistory({ organizationId, taskId, limit: 80 }).catch(() => []); const historySummary = history .slice(0, 20) @@ -90,10 +91,11 @@ async function debugDump(client: ReturnType, organiz .join("\n"); let sessionEventsSummary = ""; - if (task.activeSandboxId && task.activeSessionId) { + const activeSessionId = detail?.activeSessionId ?? 
null; + if (task.activeSandboxId && activeSessionId) { const events = await client .listSandboxSessionEvents(organizationId, task.sandboxProviderId, task.activeSandboxId, { - sessionId: task.activeSessionId, + sessionId: activeSessionId, limit: 50, }) .then((r) => r.items) @@ -109,13 +111,11 @@ async function debugDump(client: ReturnType, organiz JSON.stringify( { status: task.status, - statusMessage: task.statusMessage, title: task.title, branchName: task.branchName, activeSandboxId: task.activeSandboxId, - activeSessionId: task.activeSessionId, - prUrl: task.prUrl, - prSubmitted: task.prSubmitted, + activeSessionId, + pullRequestUrl: detail?.pullRequest?.url ?? null, }, null, 2, @@ -189,7 +189,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { // Cold local sandbox startup can exceed a few minutes on first run. 8 * 60_000, 1_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTask(organizationId, repo.repoId, created.taskId), (h) => Boolean(h.title && h.branchName && h.activeSandboxId), (h) => { if (h.status !== lastStatus) { @@ -200,18 +200,18 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); branchName = namedAndProvisioned.branchName!; sandboxId = namedAndProvisioned.activeSandboxId!; - const withSession = await poll( + const withSession = await poll>>( "task to create active session", 3 * 60_000, 1_500, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), (h) => Boolean(h.activeSessionId), (h) => { if (h.status === "error") { @@ -219,7 +219,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -231,14 +231,14 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { 2_000, async () => ( - await client.listSandboxSessionEvents(organizationId, withSession.sandboxProviderId, sandboxId!, { + await client.listSandboxSessionEvents(organizationId, namedAndProvisioned.sandboxProviderId, sandboxId!, { sessionId: sessionId!, limit: 40, }) ).items, (events) => events.length > 0, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -246,7 +246,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to reach idle state", 8 * 60_000, 2_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTask(organizationId, repo.repoId, created.taskId), (h) => h.status === "idle", (h) => { if (h.status === "error") { @@ -254,7 +254,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -266,7 +266,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { (events) => events.some((e) => e.kind === "task.pr_created"), ) .catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }) .then((events) => events.find((e) => e.kind === "task.pr_created")!); @@ -287,16 +287,16 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { expect(prFiles.some((f) => f.filename === expectedFile)).toBe(true); // Close the task and assert the sandbox is released (stopped). 
- await client.runAction(organizationId, created.taskId, "archive"); + await client.runAction(organizationId, repo.repoId, created.taskId, "archive"); - await poll( + await poll>>( "task to become archived (session released)", 60_000, 1_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), (h) => h.status === "archived" && h.activeSessionId === null, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -311,7 +311,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); const state = await client.sandboxProviderState(organizationId, "local", sandboxId!).catch(() => null); throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); }); diff --git a/foundry/packages/client/test/e2e/workbench-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-e2e.test.ts similarity index 78% rename from foundry/packages/client/test/e2e/workbench-e2e.test.ts rename to foundry/packages/client/test/e2e/workspace-e2e.test.ts index 5442795..1de2065 100644 --- a/foundry/packages/client/test/e2e/workbench-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workspace-e2e.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import type { TaskWorkbenchSnapshot, WorkbenchSession, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; +import type { TaskWorkspaceSnapshot, WorkspaceSession, WorkspaceTask, WorkspaceModelId, WorkspaceTranscriptEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -13,21 +13,9 @@ function requiredEnv(name: string): string { return value; } -function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchModelId { +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { const value = process.env[name]?.trim(); - switch (value) { - case "claude-sonnet-4": - case "claude-opus-4": - case "gpt-5.3-codex": - case "gpt-5.4": - case "gpt-5.2-codex": - case "gpt-5.1-codex-max": - case "gpt-5.2": - case "gpt-5.1-codex-mini": - return value; - default: - return fallback; - } + return value && value.length > 0 ? 
value : fallback; } async function sleep(ms: number): Promise { @@ -50,7 +38,7 @@ async function poll(label: string, timeoutMs: number, intervalMs: number, fn: } } -function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTask { +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { const task = snapshot.tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`task ${taskId} missing from snapshot`); @@ -58,7 +46,7 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`tab ${sessionId} missing from task ${task.id}`); @@ -66,7 +54,7 @@ function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { return tab; } -function extractEventText(event: WorkbenchTranscriptEvent): string { +function extractEventText(event: WorkspaceTranscriptEvent): string { const payload = event.payload; if (!payload || typeof payload !== "object") { return String(payload ?? 
""); @@ -127,7 +115,7 @@ function extractEventText(event: WorkbenchTranscriptEvent): string { return JSON.stringify(payload); } -function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], expectedText: string): boolean { +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { return transcript .filter((event) => event.sender === "agent") .map((event) => extractEventText(event)) @@ -135,15 +123,15 @@ function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], exp .includes(expectedText); } -describe("e2e(client): workbench flows", () => { +describe("e2e(client): workspace flows", () => { it.skipIf(!RUN_WORKBENCH_E2E)( - "creates a task from an imported repo, adds sessions, exchanges messages, and manages workbench state", + "creates a task from an imported repo, adds sessions, exchanges messages, and manages workspace state", { timeout: 20 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const runId = `wb-${Date.now().toString(36)}`; const expectedFile = `${runId}.txt`; const expectedInitialReply = `WORKBENCH_READY_${runId}`; @@ -155,9 +143,9 @@ describe("e2e(client): workbench flows", () => { }); const repo = await requireImportedRepo(client, organizationId, repoRemote); - const created = await client.createWorkbenchTask(organizationId, { + const created = await client.createWorkspaceTask(organizationId, { repoId: repo.repoId, - title: `Workbench E2E ${runId}`, + title: `Workspace E2E ${runId}`, branch: `e2e/${runId}`, model, task: `Reply with exactly: ${expectedInitialReply}`, @@ -167,7 +155,7 @@ describe("e2e(client): workbench flows", () => 
{ "task provisioning", 12 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => task.branch === `e2e/${runId}` && task.sessions.length > 0, ); @@ -177,7 +165,7 @@ describe("e2e(client): workbench flows", () => { "initial agent response", 12 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, primaryTab.id); return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); @@ -187,28 +175,33 @@ describe("e2e(client): workbench flows", () => { expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - await client.renameWorkbenchTask(organizationId, { + await client.renameWorkspaceTask(organizationId, { + repoId: repo.repoId, taskId: created.taskId, - value: `Workbench E2E ${runId} Renamed`, + value: `Workspace E2E ${runId} Renamed`, }); - await client.renameWorkbenchSession(organizationId, { + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: primaryTab.id, title: "Primary Session", }); - const secondTab = await client.createWorkbenchSession(organizationId, { + const secondTab = await client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, model, }); - await client.renameWorkbenchSession(organizationId, { + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, title: "Follow-up Session", }); - await client.updateWorkbenchDraft(organizationId, { + await 
client.updateWorkspaceDraft(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, text: [ @@ -226,11 +219,12 @@ describe("e2e(client): workbench flows", () => { ], }); - const drafted = findTask(await client.getWorkbench(organizationId), created.taskId); + const drafted = findTask(await client.getWorkspace(organizationId), created.taskId); expect(findTab(drafted, secondTab.sessionId).draft.text).toContain(expectedReply); expect(findTab(drafted, secondTab.sessionId).draft.attachments).toHaveLength(1); - await client.sendWorkbenchMessage(organizationId, { + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, text: [ @@ -252,7 +246,7 @@ describe("e2e(client): workbench flows", () => { "follow-up session response", 10 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, secondTab.sessionId); return ( @@ -265,17 +259,19 @@ describe("e2e(client): workbench flows", () => { expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); - await client.setWorkbenchSessionUnread(organizationId, { + await client.setWorkspaceSessionUnread(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, unread: false, }); - await client.markWorkbenchUnread(organizationId, { taskId: created.taskId }); + await client.markWorkspaceUnread(organizationId, { repoId: repo.repoId, taskId: created.taskId }); - const unreadSnapshot = findTask(await client.getWorkbench(organizationId), created.taskId); + const unreadSnapshot = findTask(await client.getWorkspace(organizationId), created.taskId); expect(unreadSnapshot.sessions.some((tab) => tab.unread)).toBe(true); - 
await client.closeWorkbenchSession(organizationId, { + await client.closeWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, }); @@ -284,26 +280,27 @@ describe("e2e(client): workbench flows", () => { "secondary session closed", 30_000, 1_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => !task.sessions.some((tab) => tab.id === secondTab.sessionId), ); expect(closedSnapshot.sessions).toHaveLength(1); - await client.revertWorkbenchFile(organizationId, { + await client.revertWorkspaceFile(organizationId, { + repoId: repo.repoId, taskId: created.taskId, path: expectedFile, }); const revertedSnapshot = await poll( - "file revert reflected in workbench", + "file revert reflected in workspace", 30_000, 1_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => !task.fileChanges.some((file) => file.path === expectedFile), ); expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); - expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); + expect(revertedSnapshot.title).toBe(`Workspace E2E ${runId} Renamed`); expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); }, ); diff --git a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts similarity index 85% rename from foundry/packages/client/test/e2e/workbench-load-e2e.test.ts rename to foundry/packages/client/test/e2e/workspace-load-e2e.test.ts index b358b80..f9fc244 100644 --- a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts @@ -1,11 +1,11 @@ import { describe, expect, it } from 
"vitest"; import { createFoundryLogger, - type TaskWorkbenchSnapshot, - type WorkbenchSession, - type WorkbenchTask, - type WorkbenchModelId, - type WorkbenchTranscriptEvent, + type TaskWorkspaceSnapshot, + type WorkspaceSession, + type WorkspaceTask, + type WorkspaceModelId, + type WorkspaceTranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -14,7 +14,7 @@ const RUN_WORKBENCH_LOAD_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E = const logger = createFoundryLogger({ service: "foundry-client-e2e", bindings: { - suite: "workbench-load", + suite: "workspace-load", }, }); @@ -26,21 +26,9 @@ function requiredEnv(name: string): string { return value; } -function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchModelId { +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { const value = process.env[name]?.trim(); - switch (value) { - case "claude-sonnet-4": - case "claude-opus-4": - case "gpt-5.3-codex": - case "gpt-5.4": - case "gpt-5.2-codex": - case "gpt-5.1-codex-max": - case "gpt-5.2": - case "gpt-5.1-codex-mini": - return value; - default: - return fallback; - } + return value && value.length > 0 ? 
value : fallback; } function intEnv(name: string, fallback: number): number { @@ -72,7 +60,7 @@ async function poll(label: string, timeoutMs: number, intervalMs: number, fn: } } -function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTask { +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { const task = snapshot.tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`task ${taskId} missing from snapshot`); @@ -80,7 +68,7 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`tab ${sessionId} missing from task ${task.id}`); @@ -88,7 +76,7 @@ function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { return tab; } -function extractEventText(event: WorkbenchTranscriptEvent): string { +function extractEventText(event: WorkspaceTranscriptEvent): string { const payload = event.payload; if (!payload || typeof payload !== "object") { return String(payload ?? ""); @@ -138,7 +126,7 @@ function extractEventText(event: WorkbenchTranscriptEvent): string { return typeof envelope.method === "string" ? 
envelope.method : JSON.stringify(payload); } -function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], expectedText: string): boolean { +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { return transcript .filter((event) => event.sender === "agent") .map((event) => extractEventText(event)) @@ -150,7 +138,7 @@ function average(values: number[]): number { return values.reduce((sum, value) => sum + value, 0) / Math.max(values.length, 1); } -async function measureWorkbenchSnapshot( +async function measureWorkspaceSnapshot( client: ReturnType, organizationId: string, iterations: number, @@ -163,11 +151,11 @@ async function measureWorkbenchSnapshot( transcriptEventCount: number; }> { const durations: number[] = []; - let snapshot: TaskWorkbenchSnapshot | null = null; + let snapshot: TaskWorkspaceSnapshot | null = null; for (let index = 0; index < iterations; index += 1) { const startedAt = performance.now(); - snapshot = await client.getWorkbench(organizationId); + snapshot = await client.getWorkspace(organizationId); durations.push(performance.now() - startedAt); } @@ -191,12 +179,12 @@ async function measureWorkbenchSnapshot( }; } -describe("e2e(client): workbench load", () => { +describe("e2e(client): workspace load", () => { it.skipIf(!RUN_WORKBENCH_LOAD_E2E)("runs a simple sequential load profile against the real backend", { timeout: 30 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2); const pollIntervalMs = 
intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000); @@ -220,16 +208,16 @@ describe("e2e(client): workbench load", () => { transcriptEventCount: number; }> = []; - snapshotSeries.push(await measureWorkbenchSnapshot(client, organizationId, 2)); + snapshotSeries.push(await measureWorkspaceSnapshot(client, organizationId, 2)); for (let taskIndex = 0; taskIndex < taskCount; taskIndex += 1) { const runId = `load-${taskIndex}-${Date.now().toString(36)}`; const initialReply = `LOAD_INIT_${runId}`; const createStartedAt = performance.now(); - const created = await client.createWorkbenchTask(organizationId, { + const created = await client.createWorkspaceTask(organizationId, { repoId: repo.repoId, - title: `Workbench Load ${runId}`, + title: `Workspace Load ${runId}`, branch: `load/${runId}`, model, task: `Reply with exactly: ${initialReply}`, @@ -241,7 +229,7 @@ describe("e2e(client): workbench load", () => { `task ${runId} provisioning`, 12 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = task.sessions[0]; return Boolean(tab && task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, initialReply)); @@ -256,13 +244,15 @@ describe("e2e(client): workbench load", () => { for (let sessionIndex = 0; sessionIndex < extraSessionCount; sessionIndex += 1) { const expectedReply = `LOAD_REPLY_${runId}_${sessionIndex}`; const createSessionStartedAt = performance.now(); - const createdSession = await client.createWorkbenchSession(organizationId, { + const createdSession = await client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, model, }); createSessionLatencies.push(performance.now() - createSessionStartedAt); - await client.sendWorkbenchMessage(organizationId, { + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, taskId: 
created.taskId, sessionId: createdSession.sessionId, text: `Run pwd in the repo, then reply with exactly: ${expectedReply}`, @@ -274,7 +264,7 @@ describe("e2e(client): workbench load", () => { `task ${runId} session ${sessionIndex} reply`, 10 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, createdSession.sessionId); return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); @@ -285,14 +275,14 @@ describe("e2e(client): workbench load", () => { expect(transcriptIncludesAgentText(findTab(withReply, createdSession.sessionId).transcript, expectedReply)).toBe(true); } - const snapshotMetrics = await measureWorkbenchSnapshot(client, organizationId, 3); + const snapshotMetrics = await measureWorkspaceSnapshot(client, organizationId, 3); snapshotSeries.push(snapshotMetrics); logger.info( { taskIndex: taskIndex + 1, ...snapshotMetrics, }, - "workbench_load_snapshot", + "workspace_load_snapshot", ); } @@ -314,7 +304,7 @@ describe("e2e(client): workbench load", () => { snapshotTranscriptFinalCount: lastSnapshot.transcriptEventCount, }; - logger.info(summary, "workbench_load_summary"); + logger.info(summary, "workspace_load_summary"); expect(createTaskLatencies.length).toBe(taskCount); expect(provisionLatencies.length).toBe(taskCount); diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts index 9bd6477..6b93ec1 100644 --- a/foundry/packages/client/test/keys.test.ts +++ b/foundry/packages/client/test/keys.test.ts @@ -1,15 +1,9 @@ import { describe, expect, it } from "vitest"; -import { historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/keys.js"; +import { auditLogKey, organizationKey, taskKey, taskSandboxKey } from "../src/keys.js"; describe("actor keys", () => { it("prefixes every key with 
organization namespace", () => { - const keys = [ - organizationKey("default"), - repositoryKey("default", "repo"), - taskKey("default", "repo", "task"), - taskSandboxKey("default", "sbx"), - historyKey("default", "repo"), - ]; + const keys = [organizationKey("default"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), auditLogKey("default")]; for (const key of keys) { expect(key[0]).toBe("org"); diff --git a/foundry/packages/client/test/subscription-manager.test.ts b/foundry/packages/client/test/subscription-manager.test.ts index 9908113..c064606 100644 --- a/foundry/packages/client/test/subscription-manager.test.ts +++ b/foundry/packages/client/test/subscription-manager.test.ts @@ -50,6 +50,20 @@ class FakeActorConn implements ActorConn { function organizationSnapshot(): OrganizationSummarySnapshot { return { organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: 1, + lastSyncLabel: "Synced just now", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: 1, + totalRepositoryCount: 1, + }, repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }], taskSummaries: [ { @@ -61,10 +75,10 @@ function organizationSnapshot(): OrganizationSummarySnapshot { updatedAtMs: 10, branch: "main", pullRequest: null, + activeSessionId: null, sessionsSummary: [], }, ], - openPullRequests: [], }; } @@ -115,20 +129,44 @@ describe("RemoteSubscriptionManager", () => { ]); conn.emit("organizationUpdated", { - type: "taskSummaryUpdated", - taskSummary: { - id: "task-1", - repoId: "repo-1", - title: "Updated task", - status: "running", - repoName: "repo-1", - updatedAtMs: 20, - branch: "feature/live", - pullRequest: null, - sessionsSummary: [], + type: "organizationUpdated", + snapshot: { + organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: 
"connected", + syncStatus: "syncing", + importedRepoCount: 1, + lastSyncLabel: "Syncing repositories...", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 2, + syncPhase: "syncing_branches", + processedRepositoryCount: 1, + totalRepositoryCount: 3, + }, + repos: [], + taskSummaries: [ + { + id: "task-1", + repoId: "repo-1", + title: "Updated task", + status: "running", + repoName: "repo-1", + updatedAtMs: 20, + branch: "feature/live", + pullRequest: null, + activeSessionId: null, + sessionsSummary: [], + }, + ], }, } satisfies OrganizationEvent); + // applyEvent chains onto an internal promise — flush the microtask queue + await flushAsyncWork(); + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Updated task"); expect(listenerA).toHaveBeenCalled(); expect(listenerB).toHaveBeenCalled(); diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts index b494135..d418c2f 100644 --- a/foundry/packages/client/test/view-model.test.ts +++ b/foundry/packages/client/test/view-model.test.ts @@ -12,9 +12,8 @@ const sample: TaskRecord = { task: "Do test", sandboxProviderId: "local", status: "running", - statusMessage: null, activeSandboxId: "sandbox-1", - activeSessionId: "session-1", + pullRequest: null, sandboxes: [ { sandboxId: "sandbox-1", @@ -26,17 +25,6 @@ const sample: TaskRecord = { updatedAt: 1, }, ], - agentType: null, - prSubmitted: false, - diffStat: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: null, - hasUnpushed: null, - parentBranch: null, createdAt: 1, updatedAt: 1, }; diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index 56907ff..947331e 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -6,11 +6,10 @@ import { 
subscriptionManager } from "../lib/subscription"; import type { FoundryAppSnapshot, FoundryOrganization, - TaskStatus, - TaskWorkbenchSnapshot, - WorkbenchSandboxSummary, - WorkbenchSessionSummary, - WorkbenchTaskStatus, + TaskWorkspaceSnapshot, + WorkspaceSandboxSummary, + WorkspaceSessionSummary, + WorkspaceTaskStatus, } from "@sandbox-agent/foundry-shared"; import { useSubscription } from "@sandbox-agent/foundry-client"; import type { DebugSubscriptionTopic } from "@sandbox-agent/foundry-client"; @@ -18,7 +17,7 @@ import { describeTaskState } from "../features/tasks/status"; interface DevPanelProps { organizationId: string; - snapshot: TaskWorkbenchSnapshot; + snapshot: TaskWorkspaceSnapshot; organization?: FoundryOrganization | null; focusedTask?: DevPanelFocusedTask | null; } @@ -27,14 +26,12 @@ export interface DevPanelFocusedTask { id: string; repoId: string; title: string | null; - status: WorkbenchTaskStatus; - runtimeStatus?: TaskStatus | null; - statusMessage?: string | null; + status: WorkspaceTaskStatus; branch?: string | null; activeSandboxId?: string | null; activeSessionId?: string | null; - sandboxes?: WorkbenchSandboxSummary[]; - sessions?: WorkbenchSessionSummary[]; + sandboxes?: WorkspaceSandboxSummary[]; + sessions?: WorkspaceSessionSummary[]; } interface TopicInfo { @@ -80,7 +77,7 @@ function timeAgo(ts: number | null): string { } function statusColor(status: string, t: ReturnType): string { - if (status === "new" || status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { + if (status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { return t.statusWarning; } switch (status) { @@ -159,14 +156,16 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ }, [now]); const appState = useSubscription(subscriptionManager, "app", {}); + const organizationState = 
useSubscription(subscriptionManager, "organization", { organizationId }); const appSnapshot: FoundryAppSnapshot | null = appState.data ?? null; + const liveGithub = organizationState.data?.github ?? organization?.github ?? null; const repos = snapshot.repos ?? []; const tasks = snapshot.tasks ?? []; const prCount = tasks.filter((task) => task.pullRequest != null).length; - const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; - const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); - const lastWebhookAt = organization?.github.lastWebhookAt ?? null; + const focusedTaskStatus = focusedTask?.status ?? null; + const focusedTaskState = describeTaskState(focusedTaskStatus); + const lastWebhookAt = liveGithub?.lastWebhookAt ?? null; const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; const totalOrgs = appSnapshot?.organizations.length ?? 0; const authStatus = appSnapshot?.auth.status ?? "unknown"; @@ -442,7 +441,7 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ {/* GitHub */}
- {organization ? ( + {liveGithub ? (
App Install - - {organization.github.installationStatus.replace(/_/g, " ")} + + {liveGithub.installationStatus.replace(/_/g, " ")}
@@ -465,15 +464,13 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ width: "5px", height: "5px", borderRadius: "50%", - backgroundColor: syncStatusColor(organization.github.syncStatus, t), + backgroundColor: syncStatusColor(liveGithub.syncStatus, t), flexShrink: 0, })} /> Sync - {organization.github.syncStatus} - {organization.github.lastSyncAt != null && ( - {timeAgo(organization.github.lastSyncAt)} - )} + {liveGithub.syncStatus} + {liveGithub.lastSyncAt != null && {timeAgo(liveGithub.lastSyncAt)}}
Webhook {lastWebhookAt != null ? ( - {organization.github.lastWebhookEvent} · {timeAgo(lastWebhookAt)} + {liveGithub.lastWebhookEvent} · {timeAgo(lastWebhookAt)} ) : ( never received )}
- - + + +
- {organization.github.connectedAccount && ( -
@{organization.github.connectedAccount}
- )} - {organization.github.lastSyncLabel && ( -
last sync: {organization.github.lastSyncLabel}
+ {liveGithub.connectedAccount &&
@{liveGithub.connectedAccount}
} + {liveGithub.lastSyncLabel &&
last sync: {liveGithub.lastSyncLabel}
} + {liveGithub.syncPhase && ( +
+ phase: {liveGithub.syncPhase.replace(/^syncing_/, "").replace(/_/g, " ")} ({liveGithub.processedRepositoryCount}/ + {liveGithub.totalRepositoryCount}) +
)}
) : ( diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index 1ff4d35..042b5a4 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -1,14 +1,17 @@ import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState, type PointerEvent as ReactPointerEvent } from "react"; +import { useQuery } from "@tanstack/react-query"; import { useNavigate } from "@tanstack/react-router"; import { useStyletron } from "baseui"; import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, createErrorContext, type FoundryOrganization, - type TaskWorkbenchSnapshot, - type WorkbenchOpenPrSummary, - type WorkbenchSessionSummary, - type WorkbenchTaskDetail, - type WorkbenchTaskSummary, + type TaskWorkspaceSnapshot, + type WorkspaceModelGroup, + type WorkspaceSessionSummary, + type WorkspaceTaskDetail, + type WorkspaceTaskSummary, } from "@sandbox-agent/foundry-shared"; import { useSubscription } from "@sandbox-agent/foundry-client"; @@ -39,7 +42,7 @@ import { type Message, type ModelId, } from "./mock-layout/view-model"; -import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app"; +import { activeMockOrganization, activeMockUser, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; import { backendClient } from "../lib/backend"; import { subscriptionManager } from "../lib/subscription"; import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; @@ -77,29 +80,38 @@ function sanitizeActiveSessionId(task: Task, sessionId: string | null | undefine return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentSessionId; } -function githubInstallationWarningTitle(organization: FoundryOrganization): string { - return organization.github.installationStatus === "install_required" ? 
"GitHub App not installed" : "GitHub App needs reconnection"; +type GithubStatusView = Pick< + FoundryOrganization["github"], + "connectedAccount" | "installationStatus" | "syncStatus" | "importedRepoCount" | "lastSyncLabel" +> & { + syncPhase?: string | null; + processedRepositoryCount?: number; + totalRepositoryCount?: number; +}; + +function githubInstallationWarningTitle(github: GithubStatusView): string { + return github.installationStatus === "install_required" ? "GitHub App not installed" : "GitHub App needs reconnection"; } -function githubInstallationWarningDetail(organization: FoundryOrganization): string { - const statusDetail = organization.github.lastSyncLabel.trim(); +function githubInstallationWarningDetail(github: GithubStatusView): string { + const statusDetail = github.lastSyncLabel.trim(); const requirementDetail = - organization.github.installationStatus === "install_required" + github.installationStatus === "install_required" ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this organization." : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; return statusDetail ? `${requirementDetail} ${statusDetail}.` : requirementDetail; } function GithubInstallationWarning({ - organization, + github, css, t, }: { - organization: FoundryOrganization; + github: GithubStatusView; css: ReturnType[0]; t: ReturnType; }) { - if (organization.github.installationStatus === "connected") { + if (github.installationStatus === "connected") { return null; } @@ -123,15 +135,15 @@ function GithubInstallationWarning({ >
-
{githubInstallationWarningTitle(organization)}
-
{githubInstallationWarningDetail(organization)}
+
{githubInstallationWarningTitle(github)}
+
{githubInstallationWarningDetail(github)}
); } function toSessionModel( - summary: WorkbenchSessionSummary, + summary: WorkspaceSessionSummary, sessionDetail?: { draft: Task["sessions"][number]["draft"]; transcript: Task["sessions"][number]["transcript"] }, ): Task["sessions"][number] { return { @@ -155,8 +167,8 @@ function toSessionModel( } function toTaskModel( - summary: WorkbenchTaskSummary, - detail?: WorkbenchTaskDetail, + summary: WorkspaceTaskSummary, + detail?: WorkspaceTaskDetail, sessionCache?: Map, ): Task { const sessions = detail?.sessionsSummary ?? summary.sessionsSummary; @@ -164,13 +176,12 @@ function toTaskModel( id: summary.id, repoId: summary.repoId, title: detail?.title ?? summary.title, - status: detail?.runtimeStatus ?? detail?.status ?? summary.status, - runtimeStatus: detail?.runtimeStatus, - statusMessage: detail?.statusMessage ?? null, + status: detail?.status ?? summary.status, repoName: detail?.repoName ?? summary.repoName, updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, branch: detail?.branch ?? summary.branch, pullRequest: detail?.pullRequest ?? summary.pullRequest, + activeSessionId: detail?.activeSessionId ?? summary.activeSessionId ?? null, sessions: sessions.map((session) => toSessionModel(session, sessionCache?.get(session.id))), fileChanges: detail?.fileChanges ?? [], diffs: detail?.diffs ?? {}, @@ -180,40 +191,6 @@ function toTaskModel( }; } -const OPEN_PR_TASK_PREFIX = "pr:"; - -function openPrTaskId(prId: string): string { - return `${OPEN_PR_TASK_PREFIX}${prId}`; -} - -function isOpenPrTaskId(taskId: string): boolean { - return taskId.startsWith(OPEN_PR_TASK_PREFIX); -} - -function toOpenPrTaskModel(pullRequest: WorkbenchOpenPrSummary): Task { - return { - id: openPrTaskId(pullRequest.prId), - repoId: pullRequest.repoId, - title: pullRequest.title, - status: "new", - runtimeStatus: undefined, - statusMessage: pullRequest.authorLogin ? 
`@${pullRequest.authorLogin}` : null, - repoName: pullRequest.repoFullName, - updatedAtMs: pullRequest.updatedAtMs, - branch: pullRequest.headRefName, - pullRequest: { - number: pullRequest.number, - status: pullRequest.isDraft ? "draft" : "ready", - }, - sessions: [], - fileChanges: [], - diffs: {}, - fileTree: [], - minutesUsed: 0, - activeSandboxId: null, - }; -} - function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): string | null { if (!tab) { return null; @@ -230,18 +207,41 @@ function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): return null; } -function groupRepositories(repos: Array<{ id: string; label: string }>, tasks: Task[]) { +function groupRepositories( + repos: Array<{ id: string; label: string }>, + tasks: Task[], + openPullRequests?: Array<{ + repoId: string; + repoFullName: string; + number: number; + title: string; + state: string; + url: string; + headRefName: string; + authorLogin: string | null; + isDraft: boolean; + }>, +) { return repos .map((repo) => ({ id: repo.id, label: repo.label, updatedAtMs: tasks.filter((task) => task.repoId === repo.id).reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0), tasks: tasks.filter((task) => task.repoId === repo.id).sort((left, right) => right.updatedAtMs - left.updatedAtMs), + pullRequests: (openPullRequests ?? 
[]).filter((pr) => pr.repoId === repo.id), })) - .filter((repo) => repo.tasks.length > 0); + .sort((a, b) => { + // Repos with tasks first, then repos with PRs, then alphabetical + const aHasActivity = a.tasks.length > 0 || a.pullRequests.length > 0; + const bHasActivity = b.tasks.length > 0 || b.pullRequests.length > 0; + if (aHasActivity && !bHasActivity) return -1; + if (!aHasActivity && bHasActivity) return 1; + if (a.updatedAtMs !== b.updatedAtMs) return b.updatedAtMs - a.updatedAtMs; + return a.label.localeCompare(b.label); + }); } -interface WorkbenchActions { +interface WorkspaceActions { createTask(input: { repoId: string; task: string; @@ -250,28 +250,26 @@ interface WorkbenchActions { onBranch?: string; model?: ModelId; }): Promise<{ taskId: string; sessionId?: string }>; - markTaskUnread(input: { taskId: string }): Promise; - renameTask(input: { taskId: string; value: string }): Promise; - renameBranch(input: { taskId: string; value: string }): Promise; - archiveTask(input: { taskId: string }): Promise; - publishPr(input: { taskId: string }): Promise; - revertFile(input: { taskId: string; path: string }): Promise; - updateDraft(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; - sendMessage(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; - stopAgent(input: { taskId: string; sessionId: string }): Promise; - setSessionUnread(input: { taskId: string; sessionId: string; unread: boolean }): Promise; - renameSession(input: { taskId: string; sessionId: string; title: string }): Promise; - closeSession(input: { taskId: string; sessionId: string }): Promise; - addSession(input: { taskId: string; model?: string }): Promise<{ sessionId: string }>; - changeModel(input: { taskId: string; sessionId: string; model: ModelId }): Promise; - reloadGithubOrganization(): Promise; - reloadGithubPullRequests(): Promise; - reloadGithubRepository(repoId: string): Promise; - 
reloadGithubPullRequest(repoId: string, prNumber: number): Promise; + markTaskUnread(input: { repoId: string; taskId: string }): Promise; + renameTask(input: { repoId: string; taskId: string; value: string }): Promise; + archiveTask(input: { repoId: string; taskId: string }): Promise; + publishPr(input: { repoId: string; taskId: string }): Promise; + revertFile(input: { repoId: string; taskId: string; path: string }): Promise; + updateDraft(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + sendMessage(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + stopAgent(input: { repoId: string; taskId: string; sessionId: string }): Promise; + selectSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + setSessionUnread(input: { repoId: string; taskId: string; sessionId: string; unread: boolean }): Promise; + renameSession(input: { repoId: string; taskId: string; sessionId: string; title: string }): Promise; + closeSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + addSession(input: { repoId: string; taskId: string; model?: string }): Promise<{ sessionId: string }>; + changeModel(input: { repoId: string; taskId: string; sessionId: string; model: ModelId }): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(repoId: string): Promise; } const TranscriptPanel = memo(function TranscriptPanel({ - taskWorkbenchClient, + taskWorkspaceClient, task, hasSandbox, activeSessionId, @@ -288,9 +286,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ rightSidebarCollapsed, onToggleRightSidebar, selectedSessionHydrating = false, + modelGroups, onNavigateToUsage, }: { - taskWorkbenchClient: WorkbenchActions; + taskWorkspaceClient: WorkspaceActions; task: Task; hasSandbox: boolean; activeSessionId: string | null; @@ -307,11 +306,15 @@ const TranscriptPanel = 
memo(function TranscriptPanel({ rightSidebarCollapsed?: boolean; onToggleRightSidebar?: () => void; selectedSessionHydrating?: boolean; + modelGroups: WorkspaceModelGroup[]; onNavigateToUsage?: () => void; }) { const t = useFoundryTokens(); - const [defaultModel, setDefaultModel] = useState("claude-sonnet-4"); - const [editingField, setEditingField] = useState<"title" | "branch" | null>(null); + const appSnapshot = useMockAppSnapshot(); + const appClient = useMockAppClient(); + const currentUser = activeMockUser(appSnapshot); + const defaultModel = currentUser?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID; + const [editingField, setEditingField] = useState<"title" | null>(null); const [editValue, setEditValue] = useState(""); const [editingSessionId, setEditingSessionId] = useState(null); const [editingSessionName, setEditingSessionName] = useState(""); @@ -333,9 +336,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ const isTerminal = task.status === "archived"; const historyEvents = useMemo(() => buildHistoryEvents(task.sessions), [task.sessions]); const activeMessages = useMemo(() => buildDisplayMessages(activeAgentSession), [activeAgentSession]); - const taskRuntimeStatus = task.runtimeStatus ?? task.status; - const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? null); - const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus); + const taskState = describeTaskState(task.status); + const taskProvisioning = isProvisioningTaskStatus(task.status); const taskProvisioningMessage = taskState.detail; const activeSessionMessage = sessionStateMessage(activeAgentSession); const showPendingSessionState = @@ -344,16 +346,17 @@ const TranscriptPanel = memo(function TranscriptPanel({ (activeAgentSession.status === "pending_provision" || activeAgentSession.status === "pending_session_create" || activeAgentSession.status === "error") && activeMessages.length === 0; const serverDraft = promptSession?.draft.text ?? 
""; - const serverAttachments = promptSession?.draft.attachments ?? []; + const serverAttachments = promptSession?.draft.attachments; + const serverAttachmentsJson = JSON.stringify(serverAttachments ?? []); // Sync server → local only when user hasn't typed recently (3s cooldown) const DRAFT_SYNC_COOLDOWN_MS = 3_000; useEffect(() => { if (Date.now() - lastEditTimeRef.current > DRAFT_SYNC_COOLDOWN_MS) { setLocalDraft(serverDraft); - setLocalAttachments(serverAttachments); + setLocalAttachments(serverAttachments ?? []); } - }, [serverDraft, serverAttachments]); + }, [serverDraft, serverAttachmentsJson]); // Reset local draft immediately on session/task switch useEffect(() => { @@ -436,14 +439,15 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void taskWorkbenchClient.setSessionUnread({ + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, taskId: task.id, sessionId: activeAgentSession.id, unread: false, }); }, [activeAgentSession?.id, activeAgentSession?.unread, task.id]); - const startEditingField = useCallback((field: "title" | "branch", value: string) => { + const startEditingField = useCallback((field: "title", value: string) => { setEditingField(field); setEditValue(value); }, []); @@ -453,18 +457,14 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, []); const commitEditingField = useCallback( - (field: "title" | "branch") => { + (field: "title") => { const value = editValue.trim(); if (!value) { setEditingField(null); return; } - if (field === "title") { - void taskWorkbenchClient.renameTask({ taskId: task.id, value }); - } else { - void taskWorkbenchClient.renameBranch({ taskId: task.id, value }); - } + void taskWorkspaceClient.renameTask({ repoId: task.repoId, taskId: task.id, value }); setEditingField(null); }, [editValue, task.id], @@ -474,7 +474,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ const flushDraft = useCallback( (text: string, nextAttachments: LineAttachment[], sessionId: string) => 
{ - void taskWorkbenchClient.updateDraft({ + void taskWorkspaceClient.updateDraft({ + repoId: task.repoId, taskId: task.id, sessionId, text, @@ -535,7 +536,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSetActiveSessionId(promptSession.id); onSetLastAgentSessionId(promptSession.id); - void taskWorkbenchClient.sendMessage({ + void taskWorkspaceClient.sendMessage({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, text, @@ -548,7 +550,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void taskWorkbenchClient.stopAgent({ + void taskWorkspaceClient.stopAgent({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, }); @@ -560,9 +563,15 @@ const TranscriptPanel = memo(function TranscriptPanel({ if (!isDiffTab(sessionId)) { onSetLastAgentSessionId(sessionId); + void taskWorkspaceClient.selectSession({ + repoId: task.repoId, + taskId: task.id, + sessionId, + }); const session = task.sessions.find((candidate) => candidate.id === sessionId); if (session?.unread) { - void taskWorkbenchClient.setSessionUnread({ + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, taskId: task.id, sessionId, unread: false, @@ -571,14 +580,14 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSyncRouteSession(task.id, sessionId); } }, - [task.id, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + [task.id, task.repoId, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const setSessionUnread = useCallback( (sessionId: string, unread: boolean) => { - void taskWorkbenchClient.setSessionUnread({ taskId: task.id, sessionId, unread }); + void taskWorkspaceClient.setSessionUnread({ repoId: task.repoId, taskId: task.id, sessionId, unread }); }, - [task.id], + [task.id, task.repoId], ); const startRenamingSession = useCallback( @@ -610,7 +619,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void 
taskWorkbenchClient.renameSession({ + void taskWorkspaceClient.renameSession({ + repoId: task.repoId, taskId: task.id, sessionId: editingSessionId, title: trimmedName, @@ -631,9 +641,9 @@ const TranscriptPanel = memo(function TranscriptPanel({ } onSyncRouteSession(task.id, nextSessionId); - void taskWorkbenchClient.closeSession({ taskId: task.id, sessionId }); + void taskWorkspaceClient.closeSession({ repoId: task.repoId, taskId: task.id, sessionId }); }, - [activeSessionId, task.id, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + [activeSessionId, task.id, task.repoId, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const closeDiffTab = useCallback( @@ -651,12 +661,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ const addSession = useCallback(() => { void (async () => { - const { sessionId } = await taskWorkbenchClient.addSession({ taskId: task.id }); + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: task.repoId, taskId: task.id }); onSetLastAgentSessionId(sessionId); onSetActiveSessionId(sessionId); onSyncRouteSession(task.id, sessionId); })(); - }, [task.id, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); + }, [task.id, task.repoId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); const changeModel = useCallback( (model: ModelId) => { @@ -664,7 +674,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ throw new Error(`Unable to change model for task ${task.id} without an active prompt session`); } - void taskWorkbenchClient.changeModel({ + void taskWorkspaceClient.changeModel({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, model, @@ -939,7 +950,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ messageRefs={messageRefs} historyEvents={historyEvents} onSelectHistoryEvent={jumpToHistoryEvent} - targetMessageId={pendingHistoryTarget && 
activeSessionId === pendingHistoryTarget.sessionId ? pendingHistoryTarget.messageId : null} + targetMessageId={pendingHistoryTarget && activeAgentSession?.id === pendingHistoryTarget.sessionId ? pendingHistoryTarget.messageId : null} onTargetMessageResolved={() => setPendingHistoryTarget(null)} copiedMessageId={copiedMessageId} onCopyMessage={(message) => { @@ -958,6 +969,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ textareaRef={textareaRef} placeholder={!promptSession.created ? "Describe your task..." : "Send a message..."} attachments={attachments} + modelGroups={modelGroups} defaultModel={defaultModel} model={promptSession.model} isRunning={promptSession.status === "running"} @@ -966,7 +978,9 @@ const TranscriptPanel = memo(function TranscriptPanel({ onStop={stopAgent} onRemoveAttachment={removeAttachment} onChangeModel={changeModel} - onSetDefaultModel={setDefaultModel} + onSetDefaultModel={(model) => { + void appClient.setDefaultModel(model); + }} /> ) : null} @@ -1280,45 +1294,37 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const [css] = useStyletron(); const t = useFoundryTokens(); const navigate = useNavigate(); - const taskWorkbenchClient = useMemo( + const taskWorkspaceClient = useMemo( () => ({ - createTask: (input) => backendClient.createWorkbenchTask(organizationId, input), - markTaskUnread: (input) => backendClient.markWorkbenchUnread(organizationId, input), - renameTask: (input) => backendClient.renameWorkbenchTask(organizationId, input), - renameBranch: (input) => backendClient.renameWorkbenchBranch(organizationId, input), - archiveTask: async (input) => backendClient.runAction(organizationId, input.taskId, "archive"), - publishPr: (input) => backendClient.publishWorkbenchPr(organizationId, input), - revertFile: (input) => backendClient.revertWorkbenchFile(organizationId, input), - updateDraft: (input) => backendClient.updateWorkbenchDraft(organizationId, input), - sendMessage: (input) => 
backendClient.sendWorkbenchMessage(organizationId, input), - stopAgent: (input) => backendClient.stopWorkbenchSession(organizationId, input), - setSessionUnread: (input) => backendClient.setWorkbenchSessionUnread(organizationId, input), - renameSession: (input) => backendClient.renameWorkbenchSession(organizationId, input), - closeSession: (input) => backendClient.closeWorkbenchSession(organizationId, input), - addSession: (input) => backendClient.createWorkbenchSession(organizationId, input), - changeModel: (input) => backendClient.changeWorkbenchModel(organizationId, input), - reloadGithubOrganization: () => backendClient.reloadGithubOrganization(organizationId), - reloadGithubPullRequests: () => backendClient.reloadGithubPullRequests(organizationId), - reloadGithubRepository: (repoId) => backendClient.reloadGithubRepository(organizationId, repoId), - reloadGithubPullRequest: (repoId, prNumber) => backendClient.reloadGithubPullRequest(organizationId, repoId, prNumber), + createTask: (input) => backendClient.createWorkspaceTask(organizationId, input), + markTaskUnread: (input) => backendClient.markWorkspaceUnread(organizationId, input), + renameTask: (input) => backendClient.renameWorkspaceTask(organizationId, input), + archiveTask: async (input) => backendClient.runAction(organizationId, input.repoId, input.taskId, "archive"), + publishPr: (input) => backendClient.publishWorkspacePr(organizationId, input), + revertFile: (input) => backendClient.revertWorkspaceFile(organizationId, input), + updateDraft: (input) => backendClient.updateWorkspaceDraft(organizationId, input), + sendMessage: (input) => backendClient.sendWorkspaceMessage(organizationId, input), + stopAgent: (input) => backendClient.stopWorkspaceSession(organizationId, input), + selectSession: (input) => backendClient.selectWorkspaceSession(organizationId, input), + setSessionUnread: (input) => backendClient.setWorkspaceSessionUnread(organizationId, input), + renameSession: (input) => 
backendClient.renameWorkspaceSession(organizationId, input), + closeSession: (input) => backendClient.closeWorkspaceSession(organizationId, input), + addSession: (input) => backendClient.createWorkspaceSession(organizationId, input), + changeModel: (input) => backendClient.changeWorkspaceModel(organizationId, input), + adminReloadGithubOrganization: () => backendClient.adminReloadGithubOrganization(organizationId), + adminReloadGithubRepository: (repoId) => backendClient.adminReloadGithubRepository(organizationId, repoId), }), [organizationId], ); const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); - const organizationRepos = organizationState.data?.repos ?? []; - const taskSummaries = organizationState.data?.taskSummaries ?? []; - const openPullRequests = organizationState.data?.openPullRequests ?? []; - const openPullRequestsByTaskId = useMemo( - () => new Map(openPullRequests.map((pullRequest) => [openPrTaskId(pullRequest.prId), pullRequest])), - [openPullRequests], - ); - const selectedOpenPullRequest = useMemo( - () => (selectedTaskId ? (openPullRequestsByTaskId.get(selectedTaskId) ?? null) : null), - [openPullRequestsByTaskId, selectedTaskId], - ); + const organizationReposData = organizationState.data?.repos; + const taskSummariesData = organizationState.data?.taskSummaries; + const openPullRequestsData = organizationState.data?.openPullRequests; + const organizationRepos = organizationReposData ?? []; + const taskSummaries = taskSummariesData ?? []; const selectedTaskSummary = useMemo( () => taskSummaries.find((task) => task.id === selectedTaskId) ?? taskSummaries[0] ?? 
null, - [selectedTaskId, taskSummaries], + [selectedTaskId, taskSummariesData], ); const taskState = useSubscription( subscriptionManager, @@ -1359,6 +1365,20 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } : null, ); const hasSandbox = Boolean(activeSandbox) && sandboxState.status !== "error"; + const modelGroupsQuery = useQuery({ + queryKey: ["mock-layout", "workspace-model-groups", organizationId, activeSandbox?.sandboxProviderId ?? "", activeSandbox?.sandboxId ?? ""], + enabled: Boolean(activeSandbox?.sandboxId), + staleTime: 30_000, + refetchOnWindowFocus: false, + queryFn: async () => { + if (!activeSandbox) { + throw new Error("Cannot load workspace model groups without an active sandbox."); + } + + return await backendClient.getSandboxWorkspaceModelGroups(organizationId, activeSandbox.sandboxProviderId, activeSandbox.sandboxId); + }, + }); + const modelGroups = modelGroupsQuery.data && modelGroupsQuery.data.length > 0 ? modelGroupsQuery.data : DEFAULT_WORKSPACE_MODEL_GROUPS; const tasks = useMemo(() => { const sessionCache = new Map(); if (selectedTaskSummary && taskState.data) { @@ -1383,12 +1403,14 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const hydratedTasks = taskSummaries.map((summary) => summary.id === selectedTaskSummary?.id ? 
toTaskModel(summary, taskState.data, sessionCache) : toTaskModel(summary), ); - const openPrTasks = openPullRequests.map((pullRequest) => toOpenPrTaskModel(pullRequest)); - return [...hydratedTasks, ...openPrTasks].sort((left, right) => right.updatedAtMs - left.updatedAtMs); - }, [openPullRequests, selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummaries, organizationId]); - const rawRepositories = useMemo(() => groupRepositories(organizationRepos, tasks), [tasks, organizationRepos]); + return hydratedTasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs); + }, [selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummariesData, organizationId]); + const openPullRequests = openPullRequestsData ?? []; + const rawRepositories = useMemo(() => groupRepositories(organizationRepos, tasks, openPullRequests), [tasks, organizationReposData, openPullRequestsData]); const appSnapshot = useMockAppSnapshot(); + const currentUser = activeMockUser(appSnapshot); const activeOrg = activeMockOrganization(appSnapshot); + const liveGithub = organizationState.data?.github ?? activeOrg?.github ?? 
null; const navigateToUsage = useCallback(() => { if (activeOrg) { void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } as never }); @@ -1413,11 +1435,9 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const leftWidthRef = useRef(leftWidth); const rightWidthRef = useRef(rightWidth); const autoCreatingSessionForTaskRef = useRef>(new Set()); - const resolvingOpenPullRequestsRef = useRef>(new Set()); const [leftSidebarOpen, setLeftSidebarOpen] = useState(true); const [rightSidebarOpen, setRightSidebarOpen] = useState(true); const [leftSidebarPeeking, setLeftSidebarPeeking] = useState(false); - const [materializingOpenPrId, setMaterializingOpenPrId] = useState(null); const showDevPanel = useDevPanel(); const peekTimeoutRef = useRef | null>(null); @@ -1484,80 +1504,17 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }, []); const activeTask = useMemo(() => { - const realTasks = tasks.filter((task) => !isOpenPrTaskId(task.id)); - if (selectedOpenPullRequest) { - return null; - } if (selectedTaskId) { - return realTasks.find((task) => task.id === selectedTaskId) ?? realTasks[0] ?? null; + return tasks.find((task) => task.id === selectedTaskId) ?? tasks[0] ?? null; } - return realTasks[0] ?? 
null; - }, [selectedOpenPullRequest, selectedTaskId, tasks]); - - const materializeOpenPullRequest = useCallback( - async (pullRequest: WorkbenchOpenPrSummary) => { - if (resolvingOpenPullRequestsRef.current.has(pullRequest.prId)) { - return; - } - - resolvingOpenPullRequestsRef.current.add(pullRequest.prId); - setMaterializingOpenPrId(pullRequest.prId); - - try { - const { taskId, sessionId } = await taskWorkbenchClient.createTask({ - repoId: pullRequest.repoId, - task: `Continue work on GitHub PR #${pullRequest.number}: ${pullRequest.title}`, - model: "gpt-5.3-codex", - title: pullRequest.title, - onBranch: pullRequest.headRefName, - }); - await navigate({ - to: "/organizations/$organizationId/tasks/$taskId", - params: { - organizationId, - taskId, - }, - search: { sessionId: sessionId ?? undefined }, - replace: true, - }); - } catch (error) { - setMaterializingOpenPrId((current) => (current === pullRequest.prId ? null : current)); - resolvingOpenPullRequestsRef.current.delete(pullRequest.prId); - logger.error( - { - prId: pullRequest.prId, - repoId: pullRequest.repoId, - branchName: pullRequest.headRefName, - ...createErrorContext(error), - }, - "failed_to_materialize_open_pull_request_task", - ); - } - }, - [navigate, taskWorkbenchClient, organizationId], - ); - - useEffect(() => { - if (!selectedOpenPullRequest) { - if (materializingOpenPrId) { - resolvingOpenPullRequestsRef.current.delete(materializingOpenPrId); - } - setMaterializingOpenPrId(null); - return; - } - - void materializeOpenPullRequest(selectedOpenPullRequest); - }, [materializeOpenPullRequest, materializingOpenPrId, selectedOpenPullRequest]); + return tasks[0] ?? 
null; + }, [selectedTaskId, tasks]); useEffect(() => { if (activeTask) { return; } - if (selectedOpenPullRequest || materializingOpenPrId) { - return; - } - const fallbackTaskId = tasks[0]?.id; if (!fallbackTaskId) { return; @@ -1574,11 +1531,13 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } search: { sessionId: fallbackTask?.sessions[0]?.id ?? undefined }, replace: true, }); - }, [activeTask, materializingOpenPrId, navigate, selectedOpenPullRequest, tasks, organizationId]); + }, [activeTask, navigate, tasks, organizationId]); const openDiffs = activeTask ? sanitizeOpenDiffs(activeTask, openDiffsByTask[activeTask.id]) : []; const lastAgentSessionId = activeTask ? sanitizeLastAgentSessionId(activeTask, lastAgentSessionIdByTask[activeTask.id]) : null; - const activeSessionId = activeTask ? sanitizeActiveSessionId(activeTask, activeSessionIdByTask[activeTask.id], openDiffs, lastAgentSessionId) : null; + const activeSessionId = activeTask + ? sanitizeActiveSessionId(activeTask, activeSessionIdByTask[activeTask.id] ?? activeTask.activeSessionId ?? null, openDiffs, lastAgentSessionId) + : null; const selectedSessionHydrating = Boolean( selectedSessionId && activeSessionId === selectedSessionId && sessionState.status === "loading" && !sessionState.data, ); @@ -1635,6 +1594,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }, [activeTask, lastAgentSessionIdByTask, selectedSessionId, syncRouteSession]); useEffect(() => { + const organizationRepos = organizationReposData ?? 
[]; if (selectedNewTaskRepoId && organizationRepos.some((repo) => repo.id === selectedNewTaskRepoId)) { return; } @@ -1644,7 +1604,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } if (fallbackRepoId !== selectedNewTaskRepoId) { setSelectedNewTaskRepoId(fallbackRepoId); } - }, [activeTask?.repoId, selectedNewTaskRepoId, organizationRepos]); + }, [activeTask?.repoId, selectedNewTaskRepoId, organizationReposData]); useEffect(() => { if (!activeTask) { @@ -1664,7 +1624,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } autoCreatingSessionForTaskRef.current.add(activeTask.id); void (async () => { try { - const { sessionId } = await taskWorkbenchClient.addSession({ taskId: activeTask.id }); + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: activeTask.repoId, taskId: activeTask.id }); syncRouteSession(activeTask.id, sessionId, true); } catch (error) { logger.error( @@ -1672,13 +1632,13 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } taskId: activeTask.id, ...createErrorContext(error), }, - "failed_to_auto_create_workbench_session", + "failed_to_auto_create_workspace_session", ); // Keep the guard in the set on error to prevent retry storms. // The guard is cleared when sessions appear (line above) or the task changes. 
} })(); - }, [activeTask, selectedSessionId, syncRouteSession, taskWorkbenchClient]); + }, [activeTask, selectedSessionId, syncRouteSession, taskWorkspaceClient]); const createTask = useCallback( (overrideRepoId?: string, options?: { title?: string; task?: string; branch?: string; onBranch?: string }) => { @@ -1688,10 +1648,10 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } throw new Error("Cannot create a task without an available repo"); } - const { taskId, sessionId } = await taskWorkbenchClient.createTask({ + const { taskId, sessionId } = await taskWorkspaceClient.createTask({ repoId, task: options?.task ?? "New task", - model: "gpt-5.3-codex", + model: currentUser?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, title: options?.title ?? "New task", ...(options?.branch ? { branch: options.branch } : {}), ...(options?.onBranch ? { onBranch: options.onBranch } : {}), @@ -1706,7 +1666,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }); })(); }, - [navigate, selectedNewTaskRepoId, taskWorkbenchClient, organizationId], + [currentUser?.defaultModel, navigate, selectedNewTaskRepoId, taskWorkspaceClient, organizationId], ); const openDiffTab = useCallback( @@ -1735,14 +1695,6 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const selectTask = useCallback( (id: string) => { - if (isOpenPrTaskId(id)) { - const pullRequest = openPullRequestsByTaskId.get(id); - if (!pullRequest) { - return; - } - void materializeOpenPullRequest(pullRequest); - return; - } const task = tasks.find((candidate) => candidate.id === id) ?? null; void navigate({ to: "/organizations/$organizationId/tasks/$taskId", @@ -1753,12 +1705,19 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } search: { sessionId: task?.sessions[0]?.id ?? 
undefined }, }); }, - [materializeOpenPullRequest, navigate, openPullRequestsByTaskId, tasks, organizationId], + [navigate, tasks, organizationId], ); - const markTaskUnread = useCallback((id: string) => { - void taskWorkbenchClient.markTaskUnread({ taskId: id }); - }, []); + const markTaskUnread = useCallback( + (id: string) => { + const task = tasks.find((candidate) => candidate.id === id); + if (!task) { + return; + } + void taskWorkspaceClient.markTaskUnread({ repoId: task.repoId, taskId: id }); + }, + [tasks], + ); const renameTask = useCallback( (id: string) => { @@ -1777,29 +1736,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } return; } - void taskWorkbenchClient.renameTask({ taskId: id, value: trimmedTitle }); - }, - [tasks], - ); - - const renameBranch = useCallback( - (id: string) => { - const currentTask = tasks.find((task) => task.id === id); - if (!currentTask) { - throw new Error(`Unable to rename missing task ${id}`); - } - - const nextBranch = window.prompt("Rename branch", currentTask.branch ?? 
""); - if (nextBranch === null) { - return; - } - - const trimmedBranch = nextBranch.trim(); - if (!trimmedBranch) { - return; - } - - void taskWorkbenchClient.renameBranch({ taskId: id, value: trimmedBranch }); + void taskWorkspaceClient.renameTask({ repoId: currentTask.repoId, taskId: id, value: trimmedTitle }); }, [tasks], ); @@ -1808,14 +1745,14 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } if (!activeTask) { throw new Error("Cannot archive without an active task"); } - void taskWorkbenchClient.archiveTask({ taskId: activeTask.id }); + void taskWorkspaceClient.archiveTask({ repoId: activeTask.repoId, taskId: activeTask.id }); }, [activeTask]); const publishPr = useCallback(() => { if (!activeTask) { throw new Error("Cannot publish PR without an active task"); } - void taskWorkbenchClient.publishPr({ taskId: activeTask.id }); + void taskWorkspaceClient.publishPr({ repoId: activeTask.repoId, taskId: activeTask.id }); }, [activeTask]); const revertFile = useCallback( @@ -1835,7 +1772,8 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } : (current[activeTask.id] ?? 
null), })); - void taskWorkbenchClient.revertFile({ + void taskWorkspaceClient.revertFile({ + repoId: activeTask.repoId, taskId: activeTask.id, path, }); @@ -1912,7 +1850,6 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }; if (!activeTask) { - const isMaterializingSelectedOpenPr = Boolean(selectedOpenPullRequest) || materializingOpenPrId != null; return ( <> {dragRegion} @@ -1939,14 +1876,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => setLeftSidebarOpen(false)} /> @@ -1988,7 +1922,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } gap: "12px", }} > - {activeOrg?.github.syncStatus === "syncing" || activeOrg?.github.syncStatus === "pending" ? ( + {liveGithub?.syncStatus === "syncing" || liveGithub?.syncStatus === "pending" ? ( <>

Syncing with GitHub

- Importing repos from @{activeOrg.github.connectedAccount || "GitHub"}... - {activeOrg.github.importedRepoCount > 0 && <> {activeOrg.github.importedRepoCount} repos imported so far.} + {liveGithub.lastSyncLabel || `Importing repos from @${liveGithub.connectedAccount || "GitHub"}...`} + {(liveGithub.totalRepositoryCount ?? 0) > 0 && ( + <> + {" "} + {liveGithub.syncPhase === "syncing_repositories" + ? `${liveGithub.importedRepoCount} of ${liveGithub.totalRepositoryCount} repos imported so far.` + : `${liveGithub.processedRepositoryCount} of ${liveGithub.totalRepositoryCount} repos processed in ${liveGithub.syncPhase?.replace(/^syncing_/, "").replace(/_/g, " ") ?? "sync"}.`} + + )}

- ) : isMaterializingSelectedOpenPr && selectedOpenPullRequest ? ( - <> - -

Creating task from pull request

-

- Preparing a task for {selectedOpenPullRequest.title} on {selectedOpenPullRequest.headRefName}. -

- - ) : activeOrg?.github.syncStatus === "error" ? ( + ) : liveGithub?.syncStatus === "error" ? ( <>

GitHub sync failed

There was a problem syncing repos from GitHub. Check the dev panel for details.

@@ -2075,11 +2008,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId }
- {activeOrg && } + {liveGithub && } {showDevPanel && ( @@ -2114,14 +2047,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => setLeftSidebarOpen(false)} /> @@ -2169,14 +2099,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => { setLeftSidebarPeeking(false); 
setLeftSidebarOpen(true); @@ -2189,9 +2116,10 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } {leftSidebarOpen ? : null}
- {activeOrg && } + {liveGithub && } {showDevPanel && ( ({ diff --git a/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx index ba3f0f3..6ec6ea6 100644 --- a/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx @@ -2,18 +2,21 @@ import { memo, useState } from "react"; import { useStyletron } from "baseui"; import { StatefulPopover, PLACEMENT } from "baseui/popover"; import { ChevronUp, Star } from "lucide-react"; +import { workspaceModelLabel, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { useFoundryTokens } from "../../app/theme"; import { AgentIcon } from "./ui"; -import { MODEL_GROUPS, modelLabel, providerAgent, type ModelId } from "./view-model"; +import { type ModelId } from "./view-model"; const ModelPickerContent = memo(function ModelPickerContent({ + groups, value, defaultModel, onChange, onSetDefault, close, }: { + groups: WorkspaceModelGroup[]; value: ModelId; defaultModel: ModelId; onChange: (id: ModelId) => void; @@ -26,7 +29,7 @@ const ModelPickerContent = memo(function ModelPickerContent({ return (
- {MODEL_GROUPS.map((group) => ( + {groups.map((group) => (
void; @@ -137,7 +142,9 @@ export const ModelPicker = memo(function ModelPicker({ }, }, }} - content={({ close }) => } + content={({ close }) => ( + + )} >
diff --git a/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx index 08d72ae..b7e27be 100644 --- a/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx @@ -2,6 +2,7 @@ import { memo, type Ref } from "react"; import { useStyletron } from "baseui"; import { ChatComposer, type ChatComposerClassNames } from "@sandbox-agent/react"; import { FileCode, SendHorizonal, Square, X } from "lucide-react"; +import { type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { useFoundryTokens } from "../../app/theme"; import { ModelPicker } from "./model-picker"; @@ -13,6 +14,7 @@ export const PromptComposer = memo(function PromptComposer({ textareaRef, placeholder, attachments, + modelGroups, defaultModel, model, isRunning, @@ -27,6 +29,7 @@ export const PromptComposer = memo(function PromptComposer({ textareaRef: Ref; placeholder: string; attachments: LineAttachment[]; + modelGroups: WorkspaceModelGroup[]; defaultModel: ModelId; model: ModelId; isRunning: boolean; @@ -172,7 +175,7 @@ export const PromptComposer = memo(function PromptComposer({ renderSubmitContent={() => (isRunning ? : )} renderFooter={() => (
- +
)} /> diff --git a/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx index 529da47..cd4c33a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx @@ -125,7 +125,7 @@ export const RightSidebar = memo(function RightSidebar({ }); observer.observe(node); }, []); - const pullRequestUrl = task.pullRequest != null ? `https://github.com/${task.repoName}/pull/${task.pullRequest.number}` : null; + const pullRequestUrl = task.pullRequest?.url ?? null; const copyFilePath = useCallback(async (path: string) => { try { diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx index 7ccb18c..4e8b7ce 100644 --- a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -54,10 +54,6 @@ function repositoryIconColor(label: string): string { return REPOSITORY_COLORS[Math.abs(hash) % REPOSITORY_COLORS.length]!; } -function isPullRequestSidebarItem(task: Task): boolean { - return task.id.startsWith("pr:"); -} - export const Sidebar = memo(function Sidebar({ repositories, newTaskRepos, @@ -68,14 +64,11 @@ export const Sidebar = memo(function Sidebar({ onSelectNewTaskRepo, onMarkUnread, onRenameTask, - onRenameBranch, onReorderRepositories, taskOrderByRepository, onReorderTasks, onReloadOrganization, - onReloadPullRequests, onReloadRepository, - onReloadPullRequest, onToggleSidebar, }: { repositories: RepositorySection[]; @@ -87,14 +80,11 @@ export const Sidebar = memo(function Sidebar({ onSelectNewTaskRepo: (repoId: string) => void; onMarkUnread: (id: string) => void; onRenameTask: (id: string) => void; - onRenameBranch: (id: string) => void; onReorderRepositories: (fromIndex: number, toIndex: number) => void; 
taskOrderByRepository: Record; onReorderTasks: (repositoryId: string, fromIndex: number, toIndex: number) => void; onReloadOrganization: () => void; - onReloadPullRequests: () => void; onReloadRepository: (repoId: string) => void; - onReloadPullRequest: (repoId: string, prNumber: number) => void; onToggleSidebar?: () => void; }) { const [css] = useStyletron(); @@ -446,16 +436,6 @@ export const Sidebar = memo(function Sidebar({ > Reload organization -
) : null}
{ if (node) { @@ -667,15 +648,12 @@ export const Sidebar = memo(function Sidebar({ if (item.type === "task") { const { repository, task, taskIndex } = item; const isActive = task.id === activeId; - const isPullRequestItem = isPullRequestSidebarItem(task); const isRunning = task.sessions.some((s) => s.status === "running"); const isProvisioning = - !isPullRequestItem && - ((String(task.status).startsWith("init_") && task.status !== "init_complete") || - task.status === "new" || - task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create")); + (String(task.status).startsWith("init_") && task.status !== "init_complete") || + task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create"); const hasUnread = task.sessions.some((s) => s.unread); - const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; + const isDraft = task.pullRequest?.isDraft ?? true; const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); const hasDiffs = totalAdded > 0 || totalRemoved > 0; @@ -686,6 +664,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ @@ -720,18 +699,11 @@ export const Sidebar = memo(function Sidebar({
onSelect(task.id)} onContextMenu={(event) => { - if (isPullRequestItem && task.pullRequest) { - contextMenu.open(event, [ - { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, - { label: "Create task", onClick: () => onSelect(task.id) }, - ]); - return; - } - contextMenu.open(event, [ + const items = [ { label: "Rename task", onClick: () => onRenameTask(task.id) }, - { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]); + ]; + contextMenu.open(event, items); }} className={css({ padding: "8px 12px", @@ -756,11 +728,7 @@ export const Sidebar = memo(function Sidebar({ flexShrink: 0, })} > - {isPullRequestItem ? ( - - ) : ( - - )} +
{task.title} - {isPullRequestItem && task.statusMessage ? ( - - {task.statusMessage} - - ) : null}
{task.pullRequest != null ? ( #{task.pullRequest.number} - {task.pullRequest.status === "draft" ? : null} + {task.pullRequest.isDraft ? : null} ) : ( @@ -814,6 +777,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ @@ -851,6 +815,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ if (node) { diff --git a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx index 95e6876..10d74d7 100644 --- a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx @@ -305,7 +305,8 @@ export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onC setProcessTabs([]); }, [taskId]); - const processes = processesState.data ?? []; + const processesData = processesState.data; + const processes = processesData ?? []; const openTerminalTab = useCallback((process: SandboxProcessRecord) => { setProcessTabs((current) => { @@ -361,7 +362,7 @@ export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onC const activeProcessTab = activeSessionId ? (processTabsById.get(activeSessionId) ?? null) : null; const activeTerminalProcess = useMemo( () => (activeProcessTab ? (processes.find((process) => process.id === activeProcessTab.processId) ?? 
null) : null), - [activeProcessTab, processes], + [activeProcessTab, processesData], ); const emptyBodyClassName = css({ diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx index a024871..16f87e6 100644 --- a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -30,11 +30,11 @@ export const TranscriptHeader = memo(function TranscriptHeader({ task: Task; hasSandbox: boolean; activeSession: AgentSession | null | undefined; - editingField: "title" | "branch" | null; + editingField: "title" | null; editValue: string; onEditValueChange: (value: string) => void; - onStartEditingField: (field: "title" | "branch", value: string) => void; - onCommitEditingField: (field: "title" | "branch") => void; + onStartEditingField: (field: "title", value: string) => void; + onCommitEditingField: (field: "title") => void; onCancelEditingField: () => void; onSetActiveSessionUnread: (unread: boolean) => void; sidebarCollapsed?: boolean; @@ -49,10 +49,9 @@ export const TranscriptHeader = memo(function TranscriptHeader({ const t = useFoundryTokens(); const isDesktop = !!import.meta.env.VITE_DESKTOP; const needsTrafficLightInset = isDesktop && sidebarCollapsed; - const taskStatus = task.runtimeStatus ?? task.status; const headerStatus = useMemo( - () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeSession?.status ?? null, activeSession?.errorMessage ?? null, hasSandbox), - [taskStatus, task.statusMessage, activeSession?.status, activeSession?.errorMessage, hasSandbox], + () => deriveHeaderStatus(task.status, activeSession?.status ?? null, activeSession?.errorMessage ?? 
null, hasSandbox), + [task.status, activeSession?.status, activeSession?.errorMessage, hasSandbox], ); return ( @@ -118,55 +117,20 @@ export const TranscriptHeader = memo(function TranscriptHeader({ )} {task.branch ? ( - editingField === "branch" ? ( - onEditValueChange(event.target.value)} - onBlur={() => onCommitEditingField("branch")} - onKeyDown={(event) => { - if (event.key === "Enter") { - onCommitEditingField("branch"); - } else if (event.key === "Escape") { - onCancelEditingField(); - } - }} - className={css({ - appearance: "none", - WebkitAppearance: "none", - margin: "0", - outline: "none", - padding: "2px 8px", - borderRadius: "999px", - border: `1px solid ${t.borderFocus}`, - backgroundColor: t.interactiveSubtle, - color: t.textPrimary, - fontSize: "11px", - whiteSpace: "nowrap", - fontFamily: '"IBM Plex Mono", monospace', - minWidth: "60px", - })} - /> - ) : ( - onStartEditingField("branch", task.branch ?? "")} - className={css({ - padding: "2px 8px", - borderRadius: "999px", - border: `1px solid ${t.borderMedium}`, - backgroundColor: t.interactiveSubtle, - color: t.textPrimary, - fontSize: "11px", - whiteSpace: "nowrap", - fontFamily: '"IBM Plex Mono", monospace', - cursor: "pointer", - ":hover": { borderColor: t.borderFocus }, - })} - > - {task.branch} - - ) + + {task.branch} + ) : null}
diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx index d39a408..b86ca18 100644 --- a/foundry/packages/frontend/src/components/mock-layout/ui.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -181,6 +181,8 @@ export const AgentIcon = memo(function AgentIcon({ agent, size = 14 }: { agent: return ; case "Cursor": return ; + default: + return ; } }); diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts index 21228fc..bc6ab87 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -1,8 +1,8 @@ import { describe, expect, it } from "vitest"; -import type { WorkbenchSession } from "@sandbox-agent/foundry-shared"; +import type { WorkspaceSession } from "@sandbox-agent/foundry-shared"; import { buildDisplayMessages } from "./view-model"; -function makeSession(transcript: WorkbenchSession["transcript"]): WorkbenchSession { +function makeSession(transcript: WorkspaceSession["transcript"]): WorkspaceSession { return { id: "session-1", sessionId: "session-1", diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts index 83f5c7a..9232293 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -1,42 +1,28 @@ +import { + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; import type { - WorkbenchAgentKind as AgentKind, - WorkbenchSession as AgentSession, - WorkbenchDiffLineKind as DiffLineKind, - WorkbenchFileChange as 
FileChange, - WorkbenchFileTreeNode as FileTreeNode, - WorkbenchTask as Task, - WorkbenchHistoryEvent as HistoryEvent, - WorkbenchLineAttachment as LineAttachment, - WorkbenchModelGroup as ModelGroup, - WorkbenchModelId as ModelId, - WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchRepositorySection as RepositorySection, - WorkbenchTranscriptEvent as TranscriptEvent, + WorkspaceAgentKind as AgentKind, + WorkspaceSession as AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileChange as FileChange, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceLineAttachment as LineAttachment, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection as RepositorySection, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { extractEventText } from "../../features/sessions/model"; export type { RepositorySection }; -export const MODEL_GROUPS: ModelGroup[] = [ - { - provider: "Claude", - models: [ - { id: "claude-sonnet-4", label: "Sonnet 4" }, - { id: "claude-opus-4", label: "Opus 4" }, - ], - }, - { - provider: "OpenAI", - models: [ - { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, - { id: "gpt-5.4", label: "GPT-5.4" }, - { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, - { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, - { id: "gpt-5.2", label: "GPT-5.2" }, - { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, - ], - }, -]; +export const MODEL_GROUPS: ModelGroup[] = SharedModelGroups; export function formatRelativeAge(updatedAtMs: number, nowMs = Date.now()): string { const deltaSeconds = Math.max(0, Math.floor((nowMs - updatedAtMs) / 1000)); @@ -94,15 +80,11 @@ export function formatMessageDuration(durationMs: number): string { } export function modelLabel(id: ModelId): string { - const group = MODEL_GROUPS.find((candidate) => 
candidate.models.some((model) => model.id === id)); - const model = group?.models.find((candidate) => candidate.id === id); - return model && group ? `${group.provider} ${model.label}` : id; + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); } export function providerAgent(provider: string): AgentKind { - if (provider === "Claude") return "Claude"; - if (provider === "OpenAI") return "Codex"; - return "Cursor"; + return sharedWorkspaceProviderAgent(provider); } const DIFF_PREFIX = "diff:"; diff --git a/foundry/packages/frontend/src/components/organization-dashboard.tsx b/foundry/packages/frontend/src/components/organization-dashboard.tsx index 461ee90..4f54ac3 100644 --- a/foundry/packages/frontend/src/components/organization-dashboard.tsx +++ b/foundry/packages/frontend/src/components/organization-dashboard.tsx @@ -1,5 +1,5 @@ import { useEffect, useMemo, useState, type ReactNode } from "react"; -import type { AgentType, RepoBranchRecord, RepoOverview, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; +import type { RepoBranchRecord, RepoOverview, TaskWorkspaceSnapshot, WorkspaceTaskStatus } from "@sandbox-agent/foundry-shared"; import { currentFoundryOrganization, useSubscription } from "@sandbox-agent/foundry-client"; import { useMutation, useQuery } from "@tanstack/react-query"; import { Link, useNavigate } from "@tanstack/react-router"; @@ -14,7 +14,6 @@ import { StyledDivider } from "baseui/divider"; import { styled, useStyletron } from "baseui"; import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal } from "lucide-react"; -import { formatDiffStat } from "../features/tasks/model"; import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; import { HeaderStatusPill } from "./mock-layout/ui"; import { buildTranscript, resolveSessionSelection } from 
"../features/sessions/model"; @@ -95,25 +94,13 @@ const FILTER_OPTIONS: SelectItem[] = [ { id: "all", label: "All Branches" }, ]; -const AGENT_OPTIONS: SelectItem[] = [ - { id: "codex", label: "codex" }, - { id: "claude", label: "claude" }, -]; - -function statusKind(status: WorkbenchTaskStatus): StatusTagKind { +function statusKind(status: WorkspaceTaskStatus): StatusTagKind { if (status === "running") return "positive"; if (status === "error") return "negative"; - if (status === "new" || String(status).startsWith("init_")) return "warning"; + if (String(status).startsWith("init_")) return "warning"; return "neutral"; } -function normalizeAgent(agent: string | null): AgentType | undefined { - if (agent === "claude" || agent === "codex") { - return agent; - } - return undefined; -} - function formatTime(value: number): string { return new Date(value).toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" }); } @@ -160,7 +147,7 @@ function repoSummary(overview: RepoOverview | undefined): { if (row.taskId) { mapped += 1; } - if (row.prNumber && row.prState !== "MERGED" && row.prState !== "CLOSED") { + if (row.pullRequest && row.pullRequest.state !== "MERGED" && row.pullRequest.state !== "CLOSED") { openPrs += 1; } } @@ -174,15 +161,25 @@ function repoSummary(overview: RepoOverview | undefined): { } function branchKind(row: RepoBranchRecord): StatusTagKind { - if (row.prState === "OPEN" || row.prState === "DRAFT") { + if (row.pullRequest?.isDraft || row.pullRequest?.state === "OPEN") { return "warning"; } - if (row.prState === "MERGED") { + if (row.pullRequest?.state === "MERGED") { return "positive"; } return "neutral"; } +function branchPullRequestLabel(branch: RepoBranchRecord): string { + if (!branch.pullRequest) { + return "no pr"; + } + if (branch.pullRequest.isDraft) { + return "draft"; + } + return branch.pullRequest.state.toLowerCase(); +} + function matchesOverviewFilter(branch: RepoBranchRecord, filter: RepoOverviewFilter): boolean { if (filter === 
"archived") { return branch.taskStatus === "archived"; @@ -332,23 +329,17 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected const [createTaskOpen, setCreateTaskOpen] = useState(false); const [selectedOverviewBranch, setSelectedOverviewBranch] = useState(null); const [overviewFilter, setOverviewFilter] = useState("active"); - const [newAgentType, setNewAgentType] = useState(() => { - try { - const raw = globalThis.localStorage?.getItem("hf.settings.agentType"); - return raw === "claude" || raw === "codex" ? raw : "codex"; - } catch { - return "codex"; - } - }); const [createError, setCreateError] = useState(null); const appState = useSubscription(subscriptionManager, "app", {}); const activeOrg = appState.data ? currentFoundryOrganization(appState.data) : null; const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); - const repos = organizationState.data?.repos ?? []; - const rows = organizationState.data?.taskSummaries ?? []; - const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rows, selectedTaskId]); + const reposData = organizationState.data?.repos; + const rowsData = organizationState.data?.taskSummaries; + const repos = reposData ?? []; + const rows = rowsData ?? []; + const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rowsData, selectedTaskId]); const taskState = useSubscription( subscriptionManager, "task", @@ -374,6 +365,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }); useEffect(() => { + const repos = reposData ?? 
[]; if (repoOverviewMode && selectedRepoId) { setCreateRepoId(selectedRepoId); return; @@ -381,17 +373,11 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected if (!createRepoId && repos.length > 0) { setCreateRepoId(repos[0]!.id); } - }, [createRepoId, repoOverviewMode, repos, selectedRepoId]); - - useEffect(() => { - try { - globalThis.localStorage?.setItem("hf.settings.agentType", newAgentType); - } catch { - // ignore storage failures - } - }, [newAgentType]); + }, [createRepoId, repoOverviewMode, reposData, selectedRepoId]); const repoGroups = useMemo(() => { + const repos = reposData ?? []; + const rows = rowsData ?? []; const byRepo = new Map(); for (const row of rows) { const bucket = byRepo.get(row.repoId); @@ -419,7 +405,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected } return a.repoLabel.localeCompare(b.repoLabel); }); - }, [repos, rows]); + }, [reposData, rowsData]); const selectedForSession = repoOverviewMode ? null : (taskState.data ?? null); @@ -432,6 +418,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }, [selectedForSession]); useEffect(() => { + const rows = rowsData ?? []; if (!repoOverviewMode && !selectedTaskId && rows.length > 0) { void navigate({ to: "/organizations/$organizationId/tasks/$taskId", @@ -443,18 +430,19 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected replace: true, }); } - }, [navigate, repoOverviewMode, rows, selectedTaskId, organizationId]); + }, [navigate, repoOverviewMode, rowsData, selectedTaskId, organizationId]); useEffect(() => { setActiveSessionId(null); setDraft(""); }, [selectedForSession?.id]); - const sessionRows = selectedForSession?.sessionsSummary ?? []; - const taskRuntimeStatus = selectedForSession?.runtimeStatus ?? selectedForSession?.status ?? null; - const taskStatusState = describeTaskState(taskRuntimeStatus, selectedForSession?.statusMessage ?? 
null); + const sessionRowsData = selectedForSession?.sessionsSummary; + const sessionRows = sessionRowsData ?? []; + const taskStatus = selectedForSession?.status ?? null; + const taskStatusState = describeTaskState(taskStatus); const taskStateSummary = `${taskStatusState.title}. ${taskStatusState.detail}`; - const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskRuntimeStatus && taskRuntimeStatus !== "running" && taskRuntimeStatus !== "idle"); + const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskStatus && taskStatus !== "running" && taskStatus !== "idle"); const sessionSelection = useMemo( () => resolveSessionSelection({ @@ -469,7 +457,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected status: session.status, })), }), - [activeSessionId, selectedForSession?.activeSessionId, sessionRows], + [activeSessionId, selectedForSession?.activeSessionId, sessionRowsData], ); const resolvedSessionId = sessionSelection.sessionId; const staleSessionId = sessionSelection.staleSessionId; @@ -485,7 +473,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected } : null, ); - const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? null, [resolvedSessionId, sessionRows]); + const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? null, [resolvedSessionId, sessionRowsData]); const isPendingProvision = selectedSessionSummary?.status === "pending_provision"; const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create"; const isSessionError = selectedSessionSummary?.status === "error"; @@ -505,8 +493,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected repoId: task.repoId, title: task.title, status: task.status, - runtimeStatus: selectedForSession?.runtimeStatus ?? 
null, - statusMessage: selectedForSession?.statusMessage ?? null, branch: task.branch ?? null, activeSandboxId: selectedForSession?.activeSandboxId ?? null, activeSessionId: selectedForSession?.activeSessionId ?? null, @@ -515,7 +501,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }; }, [repoOverviewMode, selectedForSession, selectedSummary]); const devPanelSnapshot = useMemo( - (): TaskWorkbenchSnapshot => ({ + (): TaskWorkspaceSnapshot => ({ organizationId, repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), repositories: [], @@ -524,8 +510,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected repoId: task.repoId, title: task.title, status: task.status, - runtimeStatus: selectedForSession?.id === task.id ? selectedForSession.runtimeStatus : undefined, - statusMessage: selectedForSession?.id === task.id ? selectedForSession.statusMessage : null, repoName: task.repoName, updatedAtMs: task.updatedAtMs, branch: task.branch ?? null, @@ -546,20 +530,21 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null, })), }), - [repos, rows, selectedForSession, organizationId], + [reposData, rowsData, selectedForSession, organizationId], ); const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { if (!selectedForSession || !activeSandbox?.sandboxId) { throw new Error("No sandbox is available for this task"); } + const preferredAgent = selectedSessionSummary?.agent === "Claude" ? "claude" : selectedSessionSummary?.agent === "Codex" ? "codex" : undefined; return backendClient.createSandboxSession({ organizationId, sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, prompt: selectedForSession.task, cwd: activeSandbox.cwd ?? 
undefined, - agent: normalizeAgent(selectedForSession.agentType), + agent: preferredAgent, }); }; @@ -616,7 +601,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected organizationId, repoId, task, - agentType: newAgentType, explicitTitle: draftTitle || undefined, explicitBranchName: createOnBranch ? undefined : draftBranchName || undefined, onBranch: createOnBranch ?? undefined, @@ -654,16 +638,15 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected setCreateTaskOpen(true); }; - const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [repos]); + const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [reposData]); const selectedRepoOption = repoOptions.find((option) => option.id === createRepoId) ?? null; - const selectedAgentOption = useMemo(() => createOption(AGENT_OPTIONS.find((option) => option.id === newAgentType) ?? AGENT_OPTIONS[0]!), [newAgentType]); const selectedFilterOption = useMemo( () => createOption(FILTER_OPTIONS.find((option) => option.id === overviewFilter) ?? FILTER_OPTIONS[0]!), [overviewFilter], ); const sessionOptions = useMemo( () => sessionRows.map((session) => createOption({ id: session.id, label: `${session.sessionName} (${session.status})` })), - [sessionRows], + [sessionRowsData], ); const selectedSessionOption = sessionOptions.find((option) => option.id === resolvedSessionId) ?? null; @@ -1057,23 +1040,23 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected
{branch.taskTitle ?? branch.taskId ?? "-"}
- {branch.ciStatus ?? "-"} / {branch.reviewStatus ?? "-"} + {branch.ciStatus ?? "-"} / {branch.pullRequest ? (branch.pullRequest.isDraft ? "draft" : "ready") : "-"}
{formatRelativeAge(branch.updatedAt)}
@@ -1098,7 +1081,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected ) : null} - {branch.prState?.toLowerCase() ?? "no pr"} + {branchPullRequestLabel(branch)}
@@ -1137,8 +1120,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected {selectedForSession ? ( {shouldUseTaskStateEmptyState ? taskStateSummary - : (selectedForSession?.statusMessage ?? - (isPendingProvision ? "The task is still provisioning." : "The session is being created."))} + : isPendingProvision + ? "The task is still provisioning." + : "The session is being created."}
) : null} @@ -1277,15 +1260,13 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected {shouldUseTaskStateEmptyState ? taskStateSummary : isPendingProvision - ? (selectedForSession.statusMessage ?? "Provisioning sandbox...") + ? "Provisioning sandbox..." : isPendingSessionCreate ? "Creating session..." : isSessionError ? (selectedSessionSummary?.errorMessage ?? "Session failed to start.") : !activeSandbox?.sandboxId - ? selectedForSession.statusMessage - ? `Sandbox unavailable: ${selectedForSession.statusMessage}` - : "This task is still provisioning its sandbox." + ? "This task is still provisioning its sandbox." : staleSessionId ? `Session ${staleSessionId} is unavailable. Start a new session to continue.` : resolvedSessionId @@ -1458,7 +1439,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected - +
)} @@ -1483,7 +1464,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected gap: theme.sizing.scale300, })} > - + @@ -1504,9 +1485,8 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected })} > - - - + +
@@ -1529,7 +1509,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected - {taskRuntimeStatus === "error" ? ( + {taskStatus === "error" ? (
-
- - Agent - -