Fix Foundry UI bugs: org names, sessions, and repo selection (#250)

* Fix Foundry auth: migrate to Better Auth adapter, fix access token retrieval

- Remove @ts-nocheck from better-auth.ts, auth-user/index.ts, app-shell.ts
  and fix all type errors
- Fix getAccessTokenForSession: read GitHub token directly from account
  record instead of calling Better Auth's internal /get-access-token
  endpoint which returns 403 on server-side calls
- Re-implement workspaceAuth helper functions (workspaceAuthColumn,
  normalizeAuthValue, workspaceAuthClause, workspaceAuthWhere) that were
  accidentally deleted
- Remove all retry logic (withRetries, isRetryableAppActorError)
- Implement CORS origin allowlist from configured environment
- Document cachedAppWorkspace singleton pattern
- Add inline org sync fallback in buildAppSnapshot for post-OAuth flow
- Add no-retry rule to CLAUDE.md

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Add Foundry dev panel from fix-git-data branch

Port the dev panel component that was left out when PR #243 was replaced
by PR #247. It has been adapted to remove runtime/mock-debug references
that don't exist on the current branch.

- Toggle with Shift+D, persists visibility to localStorage
- Shows context, session, GitHub sync status sections
- Dev-only (import.meta.env.DEV)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Add full Docker image defaults, fix actor deadlocks, and improve dev experience

- Add Dockerfile.full and --all flag to install-agent CLI for pre-built images
- Centralize Docker image constant (FULL_IMAGE) pinned to 0.3.1-full
- Remove examples/shared/Dockerfile{,.dev} and daytona snapshot example
- Expand Docker docs with full runnable Dockerfile
- Fix self-deadlock in createWorkbenchSession (fire-and-forget provisioning)
- Audit and convert 12 task actions from wait:true to wait:false
- Add bun --hot for dev backend hot reload
- Remove --force from pnpm install in dev Dockerfile for faster startup
- Add env_file support to compose.dev.yaml for automatic credential loading
- Add mock frontend compose config and dev panel
- Update CLAUDE.md with wait:true policy and dev environment setup

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* WIP: async action fixes and interest manager

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Fix Foundry UI bugs: org names, hanging sessions, and wrong repo creation

- Fix org display name using GitHub description instead of name field
- Fix createWorkbenchSession hanging when sandbox is provisioning
- Fix auto-session creation retry storm on errors
- Fix task creation using wrong repo due to React state race conditions
- Remove Bun hot-reload from backend Dockerfile (causes port drift)
- Add GitHub sync/install status to dev panel

Co-Authored-By: Claude Haiku 4.5 <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Nathan Flurry 2026-03-13 20:48:22 -07:00 committed by GitHub
parent 58c54156f1
commit d8b8b49f37
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
88 changed files with 9252 additions and 1933 deletions

View file

@ -1,10 +1,14 @@
# Load this file only when NODE_ENV=development.
# The backend does not load dotenv files in production.
# Foundry local development environment.
# Copy ~/misc/the-foundry.env to .env in the repo root to populate secrets.
# .env is gitignored — never commit it. The source of truth is ~/misc/the-foundry.env.
#
# Docker Compose (just foundry-dev) and the justfile (set dotenv-load := true)
# both read .env automatically.
APP_URL=http://localhost:4173
BETTER_AUTH_URL=http://localhost:4173
BETTER_AUTH_SECRET=sandbox-agent-foundry-development-only-change-me
GITHUB_REDIRECT_URI=http://localhost:4173/api/rivet/app/auth/github/callback
GITHUB_REDIRECT_URI=http://localhost:4173/v1/auth/callback/github
# Fill these in when enabling live GitHub OAuth.
GITHUB_CLIENT_ID=

View file

@ -180,10 +180,20 @@ jobs:
include:
- platform: linux/arm64
runner: depot-ubuntu-24.04-arm-8
arch_suffix: -arm64
tag_suffix: -arm64
dockerfile: docker/runtime/Dockerfile
- platform: linux/amd64
runner: depot-ubuntu-24.04-8
arch_suffix: -amd64
tag_suffix: -amd64
dockerfile: docker/runtime/Dockerfile
- platform: linux/arm64
runner: depot-ubuntu-24.04-arm-8
tag_suffix: -full-arm64
dockerfile: docker/runtime/Dockerfile.full
- platform: linux/amd64
runner: depot-ubuntu-24.04-8
tag_suffix: -full-amd64
dockerfile: docker/runtime/Dockerfile.full
runs-on: ${{ matrix.runner }}
steps:
- uses: actions/checkout@v4
@ -205,8 +215,8 @@ jobs:
with:
context: .
push: true
tags: rivetdev/sandbox-agent:${{ steps.vars.outputs.sha_short }}${{ matrix.arch_suffix }}
file: docker/runtime/Dockerfile
tags: rivetdev/sandbox-agent:${{ steps.vars.outputs.sha_short }}${{ matrix.tag_suffix }}
file: ${{ matrix.dockerfile }}
platforms: ${{ matrix.platform }}
build-args: |
TARGETARCH=${{ contains(matrix.platform, 'arm64') && 'arm64' || 'amd64' }}

View file

@ -125,7 +125,7 @@
## Docker Examples (Dev Testing)
- When manually testing bleeding-edge (unreleased) versions of sandbox-agent in `examples/`, use `SANDBOX_AGENT_DEV=1` with the Docker-based examples.
- This triggers `examples/shared/Dockerfile.dev` which builds the server binary from local source and packages it into the Docker image.
- This triggers a local build of `docker/runtime/Dockerfile.full` which builds the server binary from local source and packages it into the Docker image.
- Example: `SANDBOX_AGENT_DEV=1 pnpm --filter @sandbox-agent/example-mcp start`
## Install Version References
@ -152,7 +152,7 @@
- `.claude/commands/post-release-testing.md`
- `examples/cloudflare/Dockerfile`
- `examples/daytona/src/index.ts`
- `examples/daytona/src/daytona-with-snapshot.ts`
- `examples/shared/src/docker.ts`
- `examples/docker/src/index.ts`
- `examples/e2b/src/index.ts`
- `examples/vercel/src/index.ts`

View file

@ -143,10 +143,7 @@ sandbox-agent server --token "$SANDBOX_TOKEN" --host 127.0.0.1 --port 2468
Optional: preinstall agent binaries (no server required; they will be installed lazily on first use if you skip this):
```bash
sandbox-agent install-agent claude
sandbox-agent install-agent codex
sandbox-agent install-agent opencode
sandbox-agent install-agent amp
sandbox-agent install-agent --all
```
To disable auth locally:

View file

@ -167,4 +167,4 @@ WORKDIR /home/sandbox
EXPOSE 2468
ENTRYPOINT ["sandbox-agent"]
CMD ["--host", "0.0.0.0", "--port", "2468"]
CMD ["server", "--host", "0.0.0.0", "--port", "2468"]

View file

@ -0,0 +1,162 @@
# syntax=docker/dockerfile:1.10.0
# Full runtime image: statically linked sandbox-agent binary plus the bundled
# inspector frontend, with every supported agent preinstalled via
# `install-agent --all` in the runtime stage.
# ============================================================================
# Build inspector frontend
# ============================================================================
FROM node:22-alpine AS inspector-build
WORKDIR /app
RUN npm install -g pnpm
# Copy only package manifests first so the pnpm install layer is cached
# independently of source-file edits.
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/
COPY sdks/cli-shared/package.json ./sdks/cli-shared/
COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/
COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/
COPY sdks/react/package.json ./sdks/react/
COPY sdks/typescript/package.json ./sdks/typescript/
RUN pnpm install --filter @sandbox-agent/inspector...
# Sources for the SDK packages the inspector build consumes.
COPY docs/openapi.json ./docs/
COPY sdks/cli-shared ./sdks/cli-shared
COPY sdks/acp-http-client ./sdks/acp-http-client
COPY sdks/persist-indexeddb ./sdks/persist-indexeddb
COPY sdks/react ./sdks/react
COPY sdks/typescript ./sdks/typescript
# Build each SDK package before the inspector itself. SKIP_OPENAPI_GEN=1
# presumably makes the typescript SDK use the pre-copied docs/openapi.json
# instead of regenerating it — TODO confirm against the tsup config.
RUN cd sdks/cli-shared && pnpm exec tsup
RUN cd sdks/acp-http-client && pnpm exec tsup
RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup
RUN cd sdks/persist-indexeddb && pnpm exec tsup
RUN cd sdks/react && pnpm exec tsup
COPY frontend/packages/inspector ./frontend/packages/inspector
RUN cd frontend/packages/inspector && pnpm exec vite build
# ============================================================================
# AMD64 Builder - Uses cross-tools musl toolchain
# ============================================================================
FROM --platform=linux/amd64 rust:1.88.0 AS builder-amd64
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
musl-tools \
musl-dev \
llvm-14-dev \
libclang-14-dev \
clang-14 \
libssl-dev \
pkg-config \
ca-certificates \
g++ \
g++-multilib \
git \
curl \
wget && \
rm -rf /var/lib/apt/lists/*
# Prebuilt musl cross toolchain used for fully static linking of the binary.
RUN wget -q https://github.com/cross-tools/musl-cross/releases/latest/download/x86_64-unknown-linux-musl.tar.xz && \
tar -xf x86_64-unknown-linux-musl.tar.xz -C /opt/ && \
rm x86_64-unknown-linux-musl.tar.xz && \
rustup target add x86_64-unknown-linux-musl
ENV PATH="/opt/x86_64-unknown-linux-musl/bin:$PATH" \
LIBCLANG_PATH=/usr/lib/llvm-14/lib \
CLANG_PATH=/usr/bin/clang-14 \
CC_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-gcc \
CXX_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-g++ \
AR_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-ar \
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=x86_64-unknown-linux-musl-gcc \
CARGO_INCREMENTAL=0 \
CARGO_NET_GIT_FETCH_WITH_CLI=true
ENV SSL_VER=1.1.1w
# Build a static (no-shared) OpenSSL against the musl toolchain so the final
# binary carries no shared-library dependencies.
RUN wget https://www.openssl.org/source/openssl-$SSL_VER.tar.gz && \
tar -xzf openssl-$SSL_VER.tar.gz && \
cd openssl-$SSL_VER && \
./Configure no-shared no-async --prefix=/musl --openssldir=/musl/ssl linux-x86_64 && \
make -j$(nproc) && \
make install_sw && \
cd .. && \
rm -rf openssl-$SSL_VER*
ENV OPENSSL_DIR=/musl \
OPENSSL_INCLUDE_DIR=/musl/include \
OPENSSL_LIB_DIR=/musl/lib \
PKG_CONFIG_ALLOW_CROSS=1 \
RUSTFLAGS="-C target-feature=+crt-static -C link-arg=-static-libgcc"
WORKDIR /build
COPY . .
# Overlay the built inspector assets into the source tree before compiling.
COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-agent --release --target x86_64-unknown-linux-musl && \
cp target/x86_64-unknown-linux-musl/release/sandbox-agent /sandbox-agent
# ============================================================================
# ARM64 Builder - Uses Alpine with native musl
# ============================================================================
FROM --platform=linux/arm64 rust:1.88-alpine AS builder-arm64
RUN apk add --no-cache \
musl-dev \
clang \
llvm-dev \
openssl-dev \
openssl-libs-static \
pkgconfig \
git \
curl \
build-base
RUN rustup target add aarch64-unknown-linux-musl
ENV CARGO_INCREMENTAL=0 \
CARGO_NET_GIT_FETCH_WITH_CLI=true \
RUSTFLAGS="-C target-feature=+crt-static"
WORKDIR /build
COPY . .
COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-agent --release --target aarch64-unknown-linux-musl && \
cp target/aarch64-unknown-linux-musl/release/sandbox-agent /sandbox-agent
# ============================================================================
# Select the appropriate builder based on target architecture
# ============================================================================
# BuildKit substitutes TARGETARCH per target platform, so "builder" aliases
# whichever builder-amd64/builder-arm64 stage matches the build.
ARG TARGETARCH
FROM builder-${TARGETARCH} AS builder
# Runtime stage - full image with all supported agents preinstalled
FROM node:22-bookworm-slim
RUN apt-get update && apt-get install -y \
bash \
ca-certificates \
curl \
git && \
rm -rf /var/lib/apt/lists/*
COPY --from=builder /sandbox-agent /usr/local/bin/sandbox-agent
RUN chmod +x /usr/local/bin/sandbox-agent
RUN useradd -m -s /bin/bash sandbox
USER sandbox
WORKDIR /home/sandbox
# Runs as the sandbox user, so agent installs presumably land under
# /home/sandbox rather than root-owned paths — confirm against install-agent.
RUN sandbox-agent install-agent --all
EXPOSE 2468
ENTRYPOINT ["sandbox-agent"]
CMD ["server", "--host", "0.0.0.0", "--port", "2468"]

View file

@ -39,20 +39,24 @@ Notes:
## install-agent
Install or reinstall a single agent.
Install or reinstall a single agent, or every supported agent with `--all`.
```bash
sandbox-agent install-agent <AGENT> [OPTIONS]
sandbox-agent install-agent [<AGENT>] [OPTIONS]
```
| Option | Description |
|--------|-------------|
| `--all` | Install every supported agent |
| `-r, --reinstall` | Force reinstall |
| `--agent-version <VERSION>` | Override agent package version |
| `--agent-process-version <VERSION>` | Override agent process version |
| `--agent-version <VERSION>` | Override agent package version (conflicts with `--all`) |
| `--agent-process-version <VERSION>` | Override agent process version (conflicts with `--all`) |
Examples:
```bash
sandbox-agent install-agent claude --reinstall
sandbox-agent install-agent --all
```
## opencode (experimental)

View file

@ -9,18 +9,18 @@ Docker is not recommended for production isolation of untrusted workloads. Use d
## Quick start
Run Sandbox Agent with agents pre-installed:
Run the published full image with all supported agents pre-installed:
```bash
docker run --rm -p 3000:3000 \
-e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \
-e OPENAI_API_KEY="$OPENAI_API_KEY" \
alpine:latest sh -c "\
apk add --no-cache curl ca-certificates libstdc++ libgcc bash nodejs npm && \
curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh && \
sandbox-agent server --no-token --host 0.0.0.0 --port 3000"
rivetdev/sandbox-agent:0.3.1-full \
server --no-token --host 0.0.0.0 --port 3000
```
The `0.3.1-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image.
## TypeScript with dockerode
```typescript
@ -31,14 +31,8 @@ const docker = new Docker();
const PORT = 3000;
const container = await docker.createContainer({
Image: "node:22-bookworm-slim",
Cmd: ["sh", "-c", [
"apt-get update",
"DEBIAN_FRONTEND=noninteractive apt-get install -y curl ca-certificates bash libstdc++6",
"rm -rf /var/lib/apt/lists/*",
"curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh",
`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`,
].join(" && ")],
Image: "rivetdev/sandbox-agent:0.3.1-full",
Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", `${PORT}`],
Env: [
`ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}`,
`OPENAI_API_KEY=${process.env.OPENAI_API_KEY}`,
@ -60,6 +54,29 @@ const session = await sdk.createSession({ agent: "codex" });
await session.prompt([{ type: "text", text: "Summarize this repository." }]);
```
## Building a custom image with everything preinstalled
If you need to extend your own base image, install Sandbox Agent and preinstall every supported agent in one step:
```dockerfile
FROM node:22-bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
bash ca-certificates curl git && \
rm -rf /var/lib/apt/lists/*
RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh && \
sandbox-agent install-agent --all
RUN useradd -m -s /bin/bash sandbox
USER sandbox
WORKDIR /home/sandbox
EXPOSE 2468
ENTRYPOINT ["sandbox-agent"]
CMD ["server", "--host", "0.0.0.0", "--port", "2468"]
```
## Building from source
```bash

View file

@ -38,7 +38,7 @@ These values can be safely defaulted for local development:
- `APP_URL=http://localhost:4173`
- `BETTER_AUTH_URL=http://localhost:7741`
- `BETTER_AUTH_SECRET=sandbox-agent-foundry-development-only-change-me`
- `GITHUB_REDIRECT_URI=http://localhost:7741/v1/auth/github/callback`
- `GITHUB_REDIRECT_URI=http://localhost:7741/v1/auth/callback/github`
These should be treated as development-only values.

View file

@ -61,9 +61,11 @@ icon: "rocket"
<Tab title="Docker">
```bash
docker run -e ANTHROPIC_API_KEY="sk-ant-..." \
docker run -p 2468:2468 \
-e ANTHROPIC_API_KEY="sk-ant-..." \
-e OPENAI_API_KEY="sk-..." \
your-image
rivetdev/sandbox-agent:0.3.1-full \
server --no-token --host 0.0.0.0 --port 2468
```
</Tab>
</Tabs>
@ -215,10 +217,7 @@ icon: "rocket"
To preinstall agents:
```bash
sandbox-agent install-agent claude
sandbox-agent install-agent codex
sandbox-agent install-agent opencode
sandbox-agent install-agent amp
sandbox-agent install-agent --all
```
If agents are not installed up front, they are lazily installed when creating a session.

View file

@ -4,7 +4,6 @@
"type": "module",
"scripts": {
"start": "tsx src/index.ts",
"start:snapshot": "tsx src/daytona-with-snapshot.ts",
"typecheck": "tsc --noEmit"
},
"dependencies": {

View file

@ -1,39 +0,0 @@
import { Daytona, Image } from "@daytonaio/sdk";
import { SandboxAgent } from "sandbox-agent";
import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared";

const daytona = new Daytona();

// Forward provider credentials into the sandbox only when they are set locally.
const envVars: Record<string, string> = {};
for (const key of ["ANTHROPIC_API_KEY", "OPENAI_API_KEY"] as const) {
	const value = process.env[key];
	if (value) envVars[key] = value;
}

// Build a custom image with sandbox-agent pre-installed (slower first run, faster subsequent runs)
const image = Image.base("ubuntu:22.04").runCommands(
	"apt-get update && apt-get install -y curl ca-certificates",
	"curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh",
);

console.log("Creating Daytona sandbox (first run builds the base image and may take a few minutes, subsequent runs are fast)...");
const sandbox = await daytona.create({ envVars, image, autoStopInterval: 0 }, { timeout: 180 });

// Launch the agent server detached inside the sandbox; output goes to a log file.
await sandbox.process.executeCommand("nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &");

// Signed preview URL valid for four hours.
const preview = await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60);
const baseUrl = preview.url;

console.log("Connecting to server...");
const client = await SandboxAgent.connect({ baseUrl });
const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/home/daytona", mcpServers: [] } });
const sessionId = session.id;
console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`);
console.log(" Press Ctrl+C to stop.");

// Keep the process alive until interrupted, then tear the sandbox down.
const keepAlive = setInterval(() => {}, 60_000);
const cleanup = async () => {
	clearInterval(keepAlive);
	await sandbox.delete(60);
	process.exit(0);
};
process.once("SIGINT", cleanup);
process.once("SIGTERM", cleanup);

View file

@ -3,12 +3,13 @@ import fs from "node:fs";
import path from "node:path";
import { SandboxAgent } from "sandbox-agent";
import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared";
import { FULL_IMAGE } from "@sandbox-agent/example-shared/docker";
const IMAGE = "node:22-bookworm-slim";
const IMAGE = FULL_IMAGE;
const PORT = 3000;
const agent = detectAgent();
const codexAuthPath = process.env.HOME ? path.join(process.env.HOME, ".codex", "auth.json") : null;
const bindMounts = codexAuthPath && fs.existsSync(codexAuthPath) ? [`${codexAuthPath}:/root/.codex/auth.json:ro`] : [];
const bindMounts = codexAuthPath && fs.existsSync(codexAuthPath) ? [`${codexAuthPath}:/home/sandbox/.codex/auth.json:ro`] : [];
const docker = new Docker({ socketPath: "/var/run/docker.sock" });
@ -28,17 +29,7 @@ try {
console.log("Starting container...");
const container = await docker.createContainer({
Image: IMAGE,
Cmd: [
"sh",
"-c",
[
"apt-get update",
"DEBIAN_FRONTEND=noninteractive apt-get install -y curl ca-certificates bash libstdc++6",
"rm -rf /var/lib/apt/lists/*",
"curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh",
`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`,
].join(" && "),
],
Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", `${PORT}`],
Env: [
process.env.ANTHROPIC_API_KEY ? `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "",
process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "",
@ -56,7 +47,7 @@ await container.start();
const baseUrl = `http://127.0.0.1:${PORT}`;
const client = await SandboxAgent.connect({ baseUrl });
const session = await client.createSession({ agent, sessionInit: { cwd: "/root", mcpServers: [] } });
const session = await client.createSession({ agent, sessionInit: { cwd: "/home/sandbox", mcpServers: [] } });
const sessionId = session.id;
console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`);

View file

@ -7,7 +7,6 @@ const persist = new InMemorySessionPersistDriver();
console.log("Starting sandbox...");
const sandbox = await startDockerSandbox({
port: 3000,
setupCommands: ["sandbox-agent install-agent claude", "sandbox-agent install-agent codex"],
});
const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist });

View file

@ -66,7 +66,6 @@ try {
console.log("Starting sandbox...");
const sandbox = await startDockerSandbox({
port: 3000,
setupCommands: ["sandbox-agent install-agent claude", "sandbox-agent install-agent codex"],
});
const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist });

View file

@ -8,7 +8,6 @@ const persist = new SQLiteSessionPersistDriver({ filename: "./sessions.db" });
console.log("Starting sandbox...");
const sandbox = await startDockerSandbox({
port: 3000,
setupCommands: ["sandbox-agent install-agent claude", "sandbox-agent install-agent codex"],
});
const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist });

View file

@ -1,5 +0,0 @@
FROM node:22-bookworm-slim
RUN apt-get update -qq && apt-get install -y -qq --no-install-recommends ca-certificates > /dev/null 2>&1 && \
rm -rf /var/lib/apt/lists/* && \
npm install -g --silent @sandbox-agent/cli@latest && \
sandbox-agent install-agent claude

View file

@ -1,63 +0,0 @@
FROM node:22-bookworm-slim AS frontend
RUN corepack enable && corepack prepare pnpm@latest --activate
WORKDIR /build
# Copy workspace root config
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
# Copy packages needed for the inspector build chain:
# inspector -> sandbox-agent SDK -> acp-http-client, cli-shared, persist-indexeddb
COPY sdks/typescript/ sdks/typescript/
COPY sdks/acp-http-client/ sdks/acp-http-client/
COPY sdks/cli-shared/ sdks/cli-shared/
COPY sdks/persist-indexeddb/ sdks/persist-indexeddb/
COPY sdks/react/ sdks/react/
COPY frontend/packages/inspector/ frontend/packages/inspector/
COPY docs/openapi.json docs/
# Create stub package.json for workspace packages referenced in pnpm-workspace.yaml
# but not needed for the inspector build (avoids install errors).
RUN set -e; for dir in \
sdks/cli sdks/gigacode \
sdks/persist-postgres sdks/persist-sqlite sdks/persist-rivet \
resources/agent-schemas resources/vercel-ai-sdk-schemas \
scripts/release scripts/sandbox-testing \
examples/shared examples/docker examples/e2b examples/vercel \
examples/daytona examples/cloudflare examples/file-system \
examples/mcp examples/mcp-custom-tool \
examples/skills examples/skills-custom-tool \
frontend/packages/website; do \
mkdir -p "$dir"; \
printf '{"name":"@stub/%s","private":true,"version":"0.0.0"}\n' "$(basename "$dir")" > "$dir/package.json"; \
done; \
for parent in sdks/cli/platforms sdks/gigacode/platforms; do \
for plat in darwin-arm64 darwin-x64 linux-arm64 linux-x64 win32-x64; do \
mkdir -p "$parent/$plat"; \
printf '{"name":"@stub/%s-%s","private":true,"version":"0.0.0"}\n' "$(basename "$parent")" "$plat" > "$parent/$plat/package.json"; \
done; \
done
RUN pnpm install --no-frozen-lockfile
ENV SKIP_OPENAPI_GEN=1
RUN pnpm --filter sandbox-agent build && \
pnpm --filter @sandbox-agent/inspector build
FROM rust:1.88.0-bookworm AS builder
WORKDIR /build
COPY Cargo.toml Cargo.lock ./
COPY server/ ./server/
COPY gigacode/ ./gigacode/
COPY resources/agent-schemas/artifacts/ ./resources/agent-schemas/artifacts/
COPY scripts/agent-configs/ ./scripts/agent-configs/
COPY --from=frontend /build/frontend/packages/inspector/dist/ ./frontend/packages/inspector/dist/
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-agent --release && \
cp target/release/sandbox-agent /sandbox-agent
FROM node:22-bookworm-slim
RUN apt-get update -qq && apt-get install -y -qq --no-install-recommends ca-certificates > /dev/null 2>&1 && \
rm -rf /var/lib/apt/lists/*
COPY --from=builder /sandbox-agent /usr/local/bin/sandbox-agent
RUN sandbox-agent install-agent claude

View file

@ -6,10 +6,10 @@ import { PassThrough } from "node:stream";
import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const EXAMPLE_IMAGE = "sandbox-agent-examples:latest";
const EXAMPLE_IMAGE_DEV = "sandbox-agent-examples-dev:latest";
const DOCKERFILE_DIR = path.resolve(__dirname, "..");
const REPO_ROOT = path.resolve(DOCKERFILE_DIR, "../..");
const REPO_ROOT = path.resolve(__dirname, "..", "..", "..");
/** Pre-built Docker image with all agents installed. */
export const FULL_IMAGE = "rivetdev/sandbox-agent:0.3.1-full";
export interface DockerSandboxOptions {
/** Container port used by sandbox-agent inside Docker. */
@ -18,7 +18,7 @@ export interface DockerSandboxOptions {
hostPort?: number;
/** Additional shell commands to run before starting sandbox-agent. */
setupCommands?: string[];
/** Docker image to use. Defaults to the pre-built sandbox-agent-examples image. */
/** Docker image to use. Defaults to the pre-built full image. */
image?: string;
}
@ -131,33 +131,31 @@ function stripAnsi(value: string): string {
return value.replace(/[\u001B\u009B][[\]()#;?]*(?:(?:[a-zA-Z\d]*(?:;[a-zA-Z\d]*)*)?\u0007|(?:\d{1,4}(?:;\d{0,4})*)?[0-9A-ORZcf-nqry=><])/g, "");
}
async function ensureExampleImage(_docker: Docker): Promise<string> {
const dev = !!process.env.SANDBOX_AGENT_DEV;
const imageName = dev ? EXAMPLE_IMAGE_DEV : EXAMPLE_IMAGE;
if (dev) {
console.log(" Building sandbox image from source (may take a while, only runs once)...");
async function ensureImage(docker: Docker, image: string): Promise<void> {
if (process.env.SANDBOX_AGENT_DEV) {
console.log(" Building sandbox image from source (may take a while)...");
try {
execFileSync("docker", ["build", "-t", imageName, "-f", path.join(DOCKERFILE_DIR, "Dockerfile.dev"), REPO_ROOT], {
stdio: ["ignore", "ignore", "pipe"],
});
} catch (err: unknown) {
const stderr = err instanceof Error && "stderr" in err ? String((err as { stderr: unknown }).stderr) : "";
throw new Error(`Failed to build sandbox image: ${stderr}`);
}
} else {
console.log(" Building sandbox image (may take a while, only runs once)...");
try {
execFileSync("docker", ["build", "-t", imageName, DOCKERFILE_DIR], {
execFileSync("docker", ["build", "-t", image, "-f", path.join(REPO_ROOT, "docker/runtime/Dockerfile.full"), REPO_ROOT], {
stdio: ["ignore", "ignore", "pipe"],
});
} catch (err: unknown) {
const stderr = err instanceof Error && "stderr" in err ? String((err as { stderr: unknown }).stderr) : "";
throw new Error(`Failed to build sandbox image: ${stderr}`);
}
return;
}
return imageName;
try {
await docker.getImage(image).inspect();
} catch {
console.log(` Pulling ${image}...`);
await new Promise<void>((resolve, reject) => {
docker.pull(image, (err: Error | null, stream: NodeJS.ReadableStream) => {
if (err) return reject(err);
docker.modem.followProgress(stream, (err: Error | null) => (err ? reject(err) : resolve()));
});
});
}
}
/**
@ -166,8 +164,7 @@ async function ensureExampleImage(_docker: Docker): Promise<string> {
*/
export async function startDockerSandbox(opts: DockerSandboxOptions): Promise<DockerSandbox> {
const { port, hostPort } = opts;
const useCustomImage = !!opts.image;
let image = opts.image ?? EXAMPLE_IMAGE;
const image = opts.image ?? FULL_IMAGE;
// TODO: Replace setupCommands shell bootstrapping with native sandbox-agent exec API once available.
const setupCommands = [...(opts.setupCommands ?? [])];
const credentialEnv = collectCredentialEnv();
@ -197,27 +194,13 @@ export async function startDockerSandbox(opts: DockerSandboxOptions): Promise<Do
const docker = new Docker({ socketPath: "/var/run/docker.sock" });
if (useCustomImage) {
try {
await docker.getImage(image).inspect();
} catch {
console.log(` Pulling ${image}...`);
await new Promise<void>((resolve, reject) => {
docker.pull(image, (err: Error | null, stream: NodeJS.ReadableStream) => {
if (err) return reject(err);
docker.modem.followProgress(stream, (err: Error | null) => (err ? reject(err) : resolve()));
});
});
}
} else {
image = await ensureExampleImage(docker);
}
await ensureImage(docker, image);
const bootCommands = [...setupCommands, `sandbox-agent server --no-token --host 0.0.0.0 --port ${port}`];
const container = await docker.createContainer({
Image: image,
WorkingDir: "/root",
WorkingDir: "/home/sandbox",
Cmd: ["sh", "-c", bootCommands.join(" && ")],
Env: [...Object.entries(credentialEnv).map(([key, value]) => `${key}=${value}`), ...Object.entries(bootstrapEnv).map(([key, value]) => `${key}=${value}`)],
ExposedPorts: { [`${port}/tcp`]: {} },

View file

@ -31,16 +31,27 @@ Use `pnpm` workspaces and Turborepo.
- Foundry is the canonical name for this product tree. Do not introduce or preserve legacy pre-Foundry naming in code, docs, commands, or runtime paths.
- Install deps: `pnpm install`
- Full active-workspace validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test`
- Start the full dev stack: `just foundry-dev`
- Start the full dev stack (real backend + frontend): `just foundry-dev` — frontend on **port 4173**, backend on **port 7741** (Docker via `compose.dev.yaml`)
- Start the mock frontend stack (no backend): `just foundry-mock` — mock frontend on **port 4174** (Docker via `compose.mock.yaml`)
- Start the local production-build preview stack: `just foundry-preview`
- Start only the backend locally: `just foundry-backend-start`
- Start only the frontend locally: `pnpm --filter @sandbox-agent/foundry-frontend dev`
- Start the frontend against the mock workbench client: `FOUNDRY_FRONTEND_CLIENT_MODE=mock pnpm --filter @sandbox-agent/foundry-frontend dev`
- Start the mock frontend locally (no Docker): `just foundry-dev-mock` — mock frontend on **port 4174**
- Dev and mock stacks can run simultaneously on different ports (4173 and 4174).
- Stop the compose dev stack: `just foundry-dev-down`
- Tail compose logs: `just foundry-dev-logs`
- Tail compose dev logs: `just foundry-dev-logs`
- Stop the mock stack: `just foundry-mock-down`
- Tail mock logs: `just foundry-mock-logs`
- Stop the preview stack: `just foundry-preview-down`
- Tail preview logs: `just foundry-preview-logs`
## Dev Environment Setup
- `compose.dev.yaml` loads `foundry/.env` (optional) for credentials needed by the backend (GitHub OAuth, Stripe, Daytona, API keys, etc.).
- The canonical source for these credentials is `~/misc/the-foundry.env`. If `foundry/.env` does not exist, copy it: `cp ~/misc/the-foundry.env foundry/.env`
- `foundry/.env` is gitignored and must never be committed.
- The backend does **not** hot reload. Bun's `--hot` flag causes the server to re-bind on a different port (e.g. 6421 instead of 6420), breaking all client connections while the container still exposes the original port. After backend code changes, restart the backend container: `just foundry-dev-down && just foundry-dev`.
## Railway Logs
- Production Foundry Railway logs can be read from a linked workspace with `railway logs --deployment --lines 200` or `railway logs <deployment-id> --deployment --lines 200`.
@ -65,6 +76,69 @@ Use `pnpm` workspaces and Turborepo.
- When asked for screenshots, capture all relevant affected screens and modal states, not just a single viewport. Include empty, populated, success, and blocked/error states when they are part of the changed flow.
- If a screenshot catches a transition frame, blank modal, or otherwise misleading state, retake it before reporting it.
## Realtime Data Architecture
### Core pattern: fetch initial state + subscribe to deltas
All client data flows follow the same pattern:
1. **Connect** to the actor via WebSocket.
2. **Fetch initial state** via an action call to get the current materialized snapshot.
3. **Subscribe to events** on the connection. Events carry **full replacement payloads** for the changed entity (not empty notifications, not patches — the complete new state of the thing that changed).
4. **Unsubscribe** after a 30-second grace period when interest ends (screen navigation, component unmount). The grace period prevents thrashing during screen transitions and React double-renders.
Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or full-snapshot re-fetches on every mutation. Every mutation broadcasts the new absolute state of the changed entity to connected clients.
### Materialized state in coordinator actors
- **Workspace actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the workspace actor when they mutate. The workspace actor broadcasts the updated entity to connected clients. `getWorkspaceSummary` reads from local tables only — no fan-out to child actors.
- **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it.
- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session tab is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor).
- The expensive fan-out (querying every project/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path.
### Interest manager
The interest manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It:
- **Deduplicates** — multiple subscribers to the same topic share one connection and one cached state.
- **Grace period (30s)** — when the last subscriber leaves, the connection and state stay alive for 30 seconds before teardown. This keeps data warm for back-navigation and prevents thrashing.
- **Exposes a single hook** — `useInterest(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional interest).
- **Shared harness, separate implementations** — the `InterestManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both.
### Topics
Each topic maps to one actor connection and one event stream:
| Topic | Actor | Event | Data |
|---|---|---|---|
| `app` | Workspace `"app"` | `appUpdated` | Auth, orgs, onboarding |
| `workspace` | Workspace `{workspaceId}` | `workspaceUpdated` | Repo catalog, task summaries, repo summaries |
| `task` | Task `{workspaceId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree |
| `session` | Task `{workspaceId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state |
| `sandboxProcesses` | SandboxInstance | `processesUpdated` | Process list |
The client subscribes to `app` always, `workspace` when entering a workspace, `task` when viewing a task, and `session` when viewing a specific session tab. At most 4 actor connections at a time (app + workspace + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID.
### Rules
- Do not add `useQuery` with `refetchInterval` for data that should be push-based.
- Do not broadcast empty notification events. Events must carry the full new state of the changed entity.
- Do not re-fetch full snapshots after mutations. The mutation triggers a server-side broadcast with the new entity state; the client replaces it in local state.
- All event subscriptions go through the interest manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns.
- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent workspace actor, which broadcasts to workspace subscribers.
- Comment architecture-related code: add doc comments explaining the materialized state pattern, why deltas flow the way they do, and the relationship between parent/child actor broadcasts. New contributors should understand the data flow from comments alone.
## UI System
- Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation.
- The full `BaseUI` reference for available components and guidance on animations, customization, composition, and forms is at `https://base-ui.com/llms.txt`.
- Prefer existing `BaseUI` components and composition patterns whenever possible instead of building custom controls from scratch.
- Reuse the established Foundry theme/token layer for colors, typography, spacing, and surfaces instead of introducing ad hoc visual values.
- If the same UI pattern is shared with the Inspector or other consumers, prefer extracting or reusing it through `@sandbox-agent/react` rather than duplicating it in Foundry.
- If a requested UI cannot be implemented cleanly with an existing `BaseUI` component, stop and ask the user whether they are sure they want to diverge from the system.
- In that case, recommend the closest existing `BaseUI` components or compositions that could satisfy the need before proposing custom UI work.
- Only introduce custom UI primitives when `BaseUI` and existing Foundry patterns are not sufficient, or when the user explicitly confirms they want the divergence.
## Runtime Policy
- Runtime is Bun-native.
@ -122,11 +196,13 @@ For all Rivet/RivetKit implementation:
- Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes.
- Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs.
- `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`.
- Only use `send(..., { wait: true })` for short, bounded local mutations (e.g. a DB write that returns a result the caller needs). Never use `wait: true` for operations that depend on external readiness, polling actors, provider setup, repo/network I/O, sandbox sessions, GitHub API calls, or long-running queue drains.
- Never self-send with `wait: true` from inside a workflow handler — the workflow processes one message at a time, so the handler would deadlock waiting for the new message to be dequeued.
- When an action is void-returning and triggers external work, use `wait: false` and let the UI react to state changes pushed by the workflow.
- Request/action contract: wait only until the minimum resource needed for the client's next step exists. Example: task creation may wait for task actor creation/identity, but not for sandbox provisioning or session bootstrap.
- Read paths must not force refresh/sync work inline. Serve the latest cached projection, mark staleness explicitly, and trigger background refresh separately when needed.
- If a workflow needs to resume after some external work completes, model that as workflow state plus follow-up messages/events instead of holding the original request open.
- No retries: never add retry loops (`withRetries`, `setTimeout` retry, exponential backoff) anywhere in the codebase, and do not rely on retries for correctness or normal control flow. If an operation fails, surface the error immediately. If a dependency is not ready yet, model that explicitly with workflow state and resume from a push/event instead of polling or retry loops.
- Actor handle policy:
- Prefer explicit `get` or explicit `create` based on workflow intent; do not default to `getOrCreate`.
- Use `get`/`getForId` when the actor is expected to already exist; if missing, surface an explicit `Actor not found` error with recovery context.

View file

@ -7,6 +7,9 @@ services:
dockerfile: foundry/docker/backend.dev.Dockerfile
image: foundry-backend-dev
working_dir: /app
env_file:
- path: .env
required: false
environment:
HF_BACKEND_HOST: "0.0.0.0"
HF_BACKEND_PORT: "7741"
@ -41,6 +44,7 @@ services:
HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}"
HF_DAYTONA_API_KEY: "${HF_DAYTONA_API_KEY:-}"
ports:
- "6420:6420"
- "7741:7741"
volumes:
- "..:/app"

32
foundry/compose.mock.yaml Normal file
View file

@ -0,0 +1,32 @@
# Mock Foundry frontend stack: runs the frontend against the mock workbench
# client (FOUNDRY_FRONTEND_CLIENT_MODE=mock) on port 4174 with no backend.
# Uses a distinct compose project name and port so it can run alongside the
# real dev stack (port 4173) at the same time.
name: foundry-mock
services:
  frontend:
    build:
      context: ..
      dockerfile: foundry/docker/frontend.dev.Dockerfile
    working_dir: /app
    environment:
      # HOME is redirected to /tmp so pnpm's store/cache lives on a volume-
      # mounted path (see mock_pnpm_store below).
      HOME: "/tmp"
      FOUNDRY_FRONTEND_CLIENT_MODE: "mock"
    ports:
      - "4174:4174"
    # NOTE(review): this keeps `--force` on pnpm install while the dev backend
    # Dockerfile dropped it for faster startup — confirm whether the mock stack
    # should match.
    command: ["bash", "-lc", "pnpm install --force --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4174"]
    volumes:
      - "..:/app"
      - "./.foundry:/app/foundry/.foundry"
      - "../../../task/rivet-checkout:/task/rivet-checkout:ro"
      # Named volumes shadow host node_modules so container-built deps don't
      # clobber (or get clobbered by) the host install.
      - "mock_node_modules:/app/node_modules"
      - "mock_client_node_modules:/app/foundry/packages/client/node_modules"
      - "mock_frontend_errors_node_modules:/app/foundry/packages/frontend-errors/node_modules"
      - "mock_frontend_node_modules:/app/foundry/packages/frontend/node_modules"
      - "mock_shared_node_modules:/app/foundry/packages/shared/node_modules"
      - "mock_pnpm_store:/tmp/.local/share/pnpm/store"
volumes:
  mock_node_modules: {}
  mock_client_node_modules: {}
  mock_frontend_errors_node_modules: {}
  mock_frontend_node_modules: {}
  mock_shared_node_modules: {}
  mock_pnpm_store: {}

View file

ENV SANDBOX_AGENT_BIN="/root/.local/bin/sandbox-agent"

WORKDIR /app

# NOTE: Do NOT use `bun --hot` here. Bun's hot reloading re-initializes the
# server on a new port (e.g. 6421 instead of 6420) while the container still
# exposes the original port, breaking all client connections. Restart the
# backend container instead: `just foundry-dev-down && just foundry-dev`
#
# `--force` is intentionally omitted from `pnpm install` for faster startup.
# (An earlier duplicate CMD with `--force` was removed: Docker only honors the
# last CMD in a Dockerfile, so the first one was dead weight.)
CMD ["bash", "-lc", "git config --global --add safe.directory /app >/dev/null 2>&1 || true; pnpm install --frozen-lockfile --filter @sandbox-agent/foundry-backend... && exec bun foundry/packages/backend/src/index.ts start --host 0.0.0.0 --port 7741"]

View file

@ -19,6 +19,7 @@
"@iarna/toml": "^2.2.5",
"@sandbox-agent/foundry-shared": "workspace:*",
"@sandbox-agent/persist-rivet": "workspace:*",
"better-auth": "^1.5.5",
"drizzle-kit": "^0.31.8",
"drizzle-orm": "^0.44.5",
"hono": "^4.11.9",

View file

@ -0,0 +1,5 @@
// Per-actor SQLite database handle for the auth user actor, wired to the
// generated drizzle schema and migrations. NOTE(review): presumably rivetkit
// applies pending `migrations` when the actor's DB is opened — confirm.
import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";

export const authUserDb = db({ schema, migrations });

View file

@ -0,0 +1,80 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
// Journal mirrors drizzle-kit's meta/_journal.json: one entry per migration
// tag, in application order.
const journal = {
  entries: [
    {
      idx: 0,
      when: 1773446400000,
      tag: "0000_auth_user",
      breakpoints: true,
    },
  ],
} as const;

// Migrations are keyed as m<idx>; each value is the full SQL for that
// migration, with statements separated by drizzle's
// `--> statement-breakpoint` marker.
export default {
  journal,
  migrations: {
    m0000: `CREATE TABLE \`user\` (
\`id\` text PRIMARY KEY NOT NULL,
\`name\` text NOT NULL,
\`email\` text NOT NULL,
\`email_verified\` integer NOT NULL,
\`image\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`session\` (
\`id\` text PRIMARY KEY NOT NULL,
\`token\` text NOT NULL,
\`user_id\` text NOT NULL,
\`expires_at\` integer NOT NULL,
\`ip_address\` text,
\`user_agent\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE UNIQUE INDEX \`session_token_idx\` ON \`session\` (\`token\`);
--> statement-breakpoint
CREATE TABLE \`account\` (
\`id\` text PRIMARY KEY NOT NULL,
\`account_id\` text NOT NULL,
\`provider_id\` text NOT NULL,
\`user_id\` text NOT NULL,
\`access_token\` text,
\`refresh_token\` text,
\`id_token\` text,
\`access_token_expires_at\` integer,
\`refresh_token_expires_at\` integer,
\`scope\` text,
\`password\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE UNIQUE INDEX \`account_provider_account_idx\` ON \`account\` (\`provider_id\`, \`account_id\`);
--> statement-breakpoint
CREATE TABLE \`user_profiles\` (
\`user_id\` text PRIMARY KEY NOT NULL,
\`github_account_id\` text,
\`github_login\` text,
\`role_label\` text NOT NULL,
\`eligible_organization_ids_json\` text NOT NULL,
\`starter_repo_status\` text NOT NULL,
\`starter_repo_starred_at\` integer,
\`starter_repo_skipped_at\` integer,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`session_state\` (
\`session_id\` text PRIMARY KEY NOT NULL,
\`active_organization_id\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);`,
  } as const,
};

View file

@ -0,0 +1,70 @@
import { integer, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core";
// Better Auth core `user` table. Timestamps are stored as epoch-ms integers
// (see normalizeValue in the actor, which converts Dates to getTime()).
export const authUsers = sqliteTable("user", {
  id: text("id").notNull().primaryKey(),
  name: text("name").notNull(),
  email: text("email").notNull(),
  emailVerified: integer("email_verified").notNull(),
  image: text("image"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Better Auth `session` table; `token` is the opaque session credential and
// must be unique for token-based lookup.
export const authSessions = sqliteTable(
  "session",
  {
    id: text("id").notNull().primaryKey(),
    token: text("token").notNull(),
    userId: text("user_id").notNull(),
    expiresAt: integer("expires_at").notNull(),
    ipAddress: text("ip_address"),
    userAgent: text("user_agent"),
    createdAt: integer("created_at").notNull(),
    updatedAt: integer("updated_at").notNull(),
  },
  (table) => ({
    tokenIdx: uniqueIndex("session_token_idx").on(table.token),
  }),
);

// Better Auth `account` table: one row per linked provider credential.
// Carries provider tokens (access_token/refresh_token), which the backend
// reads directly for GitHub API access. Unique per (provider, account).
export const authAccounts = sqliteTable(
  "account",
  {
    id: text("id").notNull().primaryKey(),
    accountId: text("account_id").notNull(),
    providerId: text("provider_id").notNull(),
    userId: text("user_id").notNull(),
    accessToken: text("access_token"),
    refreshToken: text("refresh_token"),
    idToken: text("id_token"),
    accessTokenExpiresAt: integer("access_token_expires_at"),
    refreshTokenExpiresAt: integer("refresh_token_expires_at"),
    scope: text("scope"),
    password: text("password"),
    createdAt: integer("created_at").notNull(),
    updatedAt: integer("updated_at").notNull(),
  },
  (table) => ({
    providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId),
  }),
);

// Foundry-specific per-user profile: GitHub identity, org eligibility
// (JSON-encoded id array), and starter-repo onboarding progress.
export const userProfiles = sqliteTable("user_profiles", {
  userId: text("user_id").notNull().primaryKey(),
  githubAccountId: text("github_account_id"),
  githubLogin: text("github_login"),
  roleLabel: text("role_label").notNull(),
  eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(),
  starterRepoStatus: text("starter_repo_status").notNull(),
  starterRepoStarredAt: integer("starter_repo_starred_at"),
  starterRepoSkippedAt: integer("starter_repo_skipped_at"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Foundry-specific per-session state (currently just the active org).
export const sessionState = sqliteTable("session_state", {
  sessionId: text("session_id").notNull().primaryKey(),
  activeOrganizationId: text("active_organization_id"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

View file

@ -0,0 +1,353 @@
import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm";
import { actor } from "rivetkit";
import { authUserDb } from "./db/db.js";
import { authAccounts, authSessions, authUsers, sessionState, userProfiles } from "./db/schema.js";
// Better Auth model name -> drizzle table. Keys must match the model names
// Better Auth passes to the adapter ("user", "session", "account"), plus the
// Foundry-specific userProfiles/sessionState tables.
const tables = {
  user: authUsers,
  session: authSessions,
  account: authAccounts,
  userProfiles,
  sessionState,
} as const;
function tableFor(model: string) {
const table = tables[model as keyof typeof tables];
if (!table) {
throw new Error(`Unsupported auth user model: ${model}`);
}
return table as any;
}
/** Look up a drizzle column on `table` by Better Auth field name; throws if absent. */
function columnFor(table: any, field: string) {
  const col = table[field];
  if (col) {
    return col;
  }
  throw new Error(`Unsupported auth user field: ${field}`);
}
/**
 * Normalize a Better Auth value into SQLite-storable form: Dates become
 * epoch milliseconds, arrays are normalized element-wise, everything else
 * passes through untouched.
 */
function normalizeValue(value: unknown): unknown {
  if (Array.isArray(value)) {
    return value.map((entry) => normalizeValue(entry));
  }
  return value instanceof Date ? value.getTime() : value;
}
/**
 * Translate one Better Auth where-clause into a drizzle SQL expression.
 * Null values map to IS NULL / IS NOT NULL for eq/ne; scalar values passed
 * to in/not_in are wrapped in a single-element list; any unrecognized
 * operator falls back to equality.
 */
function clauseToExpr(table: any, clause: any) {
  const column = columnFor(table, clause.field);
  const value = normalizeValue(clause.value);
  const op = clause.operator;
  if (op === "ne") {
    return value === null ? isNotNull(column) : ne(column, value as any);
  }
  if (op === "lt") {
    return lt(column, value as any);
  }
  if (op === "lte") {
    return lte(column, value as any);
  }
  if (op === "gt") {
    return gt(column, value as any);
  }
  if (op === "gte") {
    return gte(column, value as any);
  }
  if (op === "in" || op === "not_in") {
    const list = Array.isArray(value) ? (value as any[]) : [value as any];
    return op === "in" ? inArray(column, list) : notInArray(column, list);
  }
  if (op === "contains") {
    return like(column, `%${String(value ?? "")}%`);
  }
  if (op === "starts_with") {
    return like(column, `${String(value ?? "")}%`);
  }
  if (op === "ends_with") {
    return like(column, `%${String(value ?? "")}`);
  }
  // "eq" and any unknown operator default to an equality test.
  return value === null ? isNull(column) : eq(column, value as any);
}
/**
 * Fold a Better Auth where-clause list into one drizzle expression.
 * Clauses combine strictly left-to-right; each clause's `connector`
 * ("OR", or AND by default) joins it to the accumulated expression.
 * Returns undefined for a missing/empty list so callers can skip `.where()`.
 */
function buildWhere(table: any, where: any[] | undefined) {
  if (!where?.length) {
    return undefined;
  }
  const [first, ...rest] = where;
  return rest.reduce((acc, clause) => {
    const next = clauseToExpr(table, clause);
    return clause.connector === "OR" ? or(acc, next) : and(acc, next);
  }, clauseToExpr(table, first));
}
function applyJoinToRow(c: any, model: string, row: any, join: any) {
if (!row || !join) {
return row;
}
if (model === "session" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.id, row.userId))
.get()
.then((user: any) => ({ ...row, user: user ?? null }));
}
if (model === "account" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.id, row.userId))
.get()
.then((user: any) => ({ ...row, user: user ?? null }));
}
if (model === "user" && join.account) {
return c.db
.select()
.from(authAccounts)
.where(eq(authAccounts.userId, row.id))
.all()
.then((accounts: any[]) => ({ ...row, account: accounts }));
}
return Promise.resolve(row);
}
/**
 * Attach a Better Auth `join` relation to a list of rows using batched
 * lookups: one IN query per relation rather than one query per row.
 *
 * The session and account branches were byte-for-byte duplicates in the
 * original; they are merged here since both join their owning user via
 * `userId`. Unrecognized joins and empty inputs pass through unchanged.
 */
async function applyJoinToRows(c: any, model: string, rows: any[], join: any) {
  if (!join || rows.length === 0) {
    return rows;
  }
  if ((model === "session" || model === "account") && join.user) {
    const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))];
    const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : [];
    const userMap = new Map(users.map((user: any) => [user.id, user]));
    return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null }));
  }
  if (model === "user" && join.account) {
    const userIds = rows.map((row) => row.id);
    const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : [];
    // Group account rows by owning user id.
    const accountsByUserId = new Map<string, any[]>();
    for (const account of accounts) {
      const entries = accountsByUserId.get(account.userId) ?? [];
      entries.push(account);
      accountsByUserId.set(account.userId, entries);
    }
    return rows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] }));
  }
  return rows;
}
/**
 * Per-user auth actor backing the Better Auth database adapter.
 *
 * Each user gets one actor (keyed by userId) whose SQLite holds the Better
 * Auth `user`/`session`/`account` rows plus Foundry's `user_profiles` and
 * `session_state` tables. The generic *AuthRecord actions implement the
 * adapter CRUD contract (create/findOne/findMany/update/updateMany/delete/
 * deleteMany/count); the remaining actions are Foundry-specific helpers
 * layered on the same tables.
 */
export const authUser = actor({
  db: authUserDb,
  options: {
    name: "Auth User",
    icon: "shield",
    actionTimeout: 60_000,
  },
  // Initial state only records which user this actor belongs to.
  createState: (_c, input: { userId: string }) => ({
    userId: input.userId,
  }),
  actions: {
    // Insert one row for `model` and return it, re-read by primary key so the
    // caller sees exactly what was persisted. Assumes `data.id` is set
    // (Better Auth supplies ids to the adapter).
    async createAuthRecord(c, input: { model: string; data: Record<string, unknown> }) {
      const table = tableFor(input.model);
      await c.db
        .insert(table)
        .values(input.data as any)
        .run();
      return await c.db
        .select()
        .from(table)
        .where(eq(columnFor(table, "id"), input.data.id as any))
        .get();
    },
    // Fetch a single row matching `where` (or the table's first row when the
    // clause list is empty), with the optional join relation attached.
    async findOneAuthRecord(c, input: { model: string; where: any[]; join?: any }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get();
      return await applyJoinToRow(c, input.model, row ?? null, input.join);
    },
    // List rows with optional filter, single-column sort, pagination, and
    // join relation (joins are batched — one IN query, not per-row).
    async findManyAuthRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      let query: any = c.db.select().from(table);
      if (predicate) {
        query = query.where(predicate);
      }
      if (input.sortBy?.field) {
        const column = columnFor(table, input.sortBy.field);
        query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column));
      }
      if (typeof input.limit === "number") {
        query = query.limit(input.limit);
      }
      if (typeof input.offset === "number") {
        query = query.offset(input.offset);
      }
      const rows = await query.all();
      return await applyJoinToRows(c, input.model, rows, input.join);
    },
    // Apply `update` to rows matching `where` and return one updated row.
    // NOTE(review): the post-update re-select reuses the original predicate,
    // so if the update changed a field referenced in the where clause the
    // returned row will be undefined — confirm Better Auth never does that.
    async updateAuthRecord(c, input: { model: string; where: any[]; update: Record<string, unknown> }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("updateAuthRecord requires a where clause");
      }
      await c.db
        .update(table)
        .set(input.update as any)
        .where(predicate)
        .run();
      return await c.db.select().from(table).where(predicate).get();
    },
    // Bulk update; returns the count of rows that match the predicate after
    // the update (same where-clause caveat as updateAuthRecord).
    async updateManyAuthRecords(c, input: { model: string; where: any[]; update: Record<string, unknown> }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("updateManyAuthRecords requires a where clause");
      }
      await c.db
        .update(table)
        .set(input.update as any)
        .where(predicate)
        .run();
      const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get();
      return row?.value ?? 0;
    },
    // Delete rows matching `where`; the where clause is mandatory so a
    // malformed call can never wipe a whole table.
    async deleteAuthRecord(c, input: { model: string; where: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("deleteAuthRecord requires a where clause");
      }
      await c.db.delete(table).where(predicate).run();
    },
    // Bulk delete; counts matching rows with a pre-select so the adapter can
    // report how many were removed.
    async deleteManyAuthRecords(c, input: { model: string; where: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("deleteManyAuthRecords requires a where clause");
      }
      const rows = await c.db.select().from(table).where(predicate).all();
      await c.db.delete(table).where(predicate).run();
      return rows.length;
    },
    // Count rows, optionally filtered; an empty/missing where counts the table.
    async countAuthRecords(c, input: { model: string; where?: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      const row = predicate
        ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get()
        : await c.db.select({ value: sqlCount() }).from(table).get();
      return row?.value ?? 0;
    },
    // Aggregate everything the app shell needs for one session in a single
    // actor call: session row, its user, Foundry profile, per-session state,
    // and all linked provider accounts (which carry provider token columns
    // such as access_token). Returns null for unknown session ids.
    async getAppAuthState(c, input: { sessionId: string }) {
      const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get();
      if (!session) {
        return null;
      }
      const [user, profile, currentSessionState, accounts] = await Promise.all([
        c.db.select().from(authUsers).where(eq(authUsers.id, session.userId)).get(),
        c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(),
        c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(),
        c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(),
      ]);
      return {
        session,
        user,
        profile: profile ?? null,
        sessionState: currentSessionState ?? null,
        accounts,
      };
    },
    // Insert-or-update the Foundry user profile. On first insert, missing
    // patch fields fall back to defaults; on conflict, only the fields
    // explicitly present in the patch overwrite existing values.
    async upsertUserProfile(
      c,
      input: {
        userId: string;
        patch: {
          githubAccountId?: string | null;
          githubLogin?: string | null;
          roleLabel?: string;
          eligibleOrganizationIdsJson?: string;
          starterRepoStatus?: string;
          starterRepoStarredAt?: number | null;
          starterRepoSkippedAt?: number | null;
        };
      },
    ) {
      const now = Date.now();
      await c.db
        .insert(userProfiles)
        .values({
          userId: input.userId,
          githubAccountId: input.patch.githubAccountId ?? null,
          githubLogin: input.patch.githubLogin ?? null,
          roleLabel: input.patch.roleLabel ?? "GitHub user",
          eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]",
          starterRepoStatus: input.patch.starterRepoStatus ?? "pending",
          starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null,
          starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null,
          createdAt: now,
          updatedAt: now,
        })
        .onConflictDoUpdate({
          target: userProfiles.userId,
          set: {
            // Spread-in only the keys actually provided so an omitted field
            // never clobbers stored data (explicit null still overwrites).
            ...(input.patch.githubAccountId !== undefined ? { githubAccountId: input.patch.githubAccountId } : {}),
            ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}),
            ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}),
            ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}),
            ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}),
            ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}),
            ...(input.patch.starterRepoSkippedAt !== undefined ? { starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}),
            updatedAt: now,
          },
        })
        .run();
      return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get();
    },
    // Insert-or-update the per-session state (active organization), returning
    // the stored row.
    async upsertSessionState(c, input: { sessionId: string; activeOrganizationId: string | null }) {
      const now = Date.now();
      await c.db
        .insert(sessionState)
        .values({
          sessionId: input.sessionId,
          activeOrganizationId: input.activeOrganizationId,
          createdAt: now,
          updatedAt: now,
        })
        .onConflictDoUpdate({
          target: sessionState.sessionId,
          set: {
            activeOrganizationId: input.activeOrganizationId,
            updatedAt: now,
          },
        })
        .run();
      return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get();
    },
  },
});

View file

@ -1,4 +1,14 @@
import { taskKey, taskStatusSyncKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, sandboxInstanceKey, workspaceKey } from "./keys.js";
import {
authUserKey,
taskKey,
taskStatusSyncKey,
historyKey,
projectBranchSyncKey,
projectKey,
projectPrSyncKey,
sandboxInstanceKey,
workspaceKey,
} from "./keys.js";
import type { ProviderId } from "@sandbox-agent/foundry-shared";
export function actorClient(c: any) {
@ -11,6 +21,16 @@ export async function getOrCreateWorkspace(c: any, workspaceId: string) {
});
}
// Get or lazily create the per-user auth actor; `userId` seeds its initial
// state on first creation.
export async function getOrCreateAuthUser(c: any, userId: string) {
  return await actorClient(c).authUser.getOrCreate(authUserKey(userId), {
    createWithInput: { userId },
  });
}
// Handle to an existing auth user actor. Per the actor-handle policy, use
// this (not getOrCreate) when the actor is expected to already exist.
export function getAuthUser(c: any, userId: string) {
  return actorClient(c).authUser.get(authUserKey(userId));
}
export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) {
return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), {
createWithInput: {
@ -125,3 +145,7 @@ export function selfProject(c: any) {
export function selfSandboxInstance(c: any) {
return actorClient(c).sandboxInstance.getForId(c.actorId);
}
// Handle to the auth user actor the current handler is running inside,
// resolved by the handler's own actor id.
export function selfAuthUser(c: any) {
  return actorClient(c).authUser.getForId(c.actorId);
}

View file

@ -1,3 +1,4 @@
import { authUser } from "./auth-user/index.js";
import { setup } from "rivetkit";
import { taskStatusSync } from "./task-status-sync/index.js";
import { task } from "./task/index.js";
@ -22,6 +23,7 @@ export const registry = setup({
baseLogger: logger,
},
use: {
authUser,
workspace,
project,
task,
@ -35,6 +37,7 @@ export const registry = setup({
export * from "./context.js";
export * from "./events.js";
export * from "./auth-user/index.js";
export * from "./task-status-sync/index.js";
export * from "./task/index.js";
export * from "./history/index.js";

View file

@ -4,6 +4,10 @@ export function workspaceKey(workspaceId: string): ActorKey {
return ["ws", workspaceId];
}
// Auth user actors are keyed per user under the "app" workspace namespace.
// The "user" segment keeps these keys disjoint from projectKey's
// ["ws", id, "project", repoId] shape, so no collision is possible even for
// workspaceId "app".
export function authUserKey(userId: string): ActorKey {
  return ["ws", "app", "user", userId];
}
export function projectKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId];
}

View file

@ -10,7 +10,7 @@ import { foundryRepoClonePath } from "../../services/foundry-paths.js";
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { expectQueueResponse } from "../../services/queue.js";
import { withRepoGitLock } from "../../services/repo-git-lock.js";
import { branches, taskIndex, prCache, repoMeta } from "./db/schema.js";
import { branches, taskIndex, prCache, repoActionJobs, repoMeta } from "./db/schema.js";
import { deriveFallbackTitle } from "../../services/create-flow.js";
import { normalizeBaseBranchName } from "../../integrations/git-spice/index.js";
import { sortBranchesForOverview } from "./stack-model.js";
@ -87,6 +87,7 @@ interface BranchSyncResult {
interface RepoOverviewCommand {}
interface RunRepoStackActionCommand {
jobId?: string;
action: RepoStackAction;
branchName?: string;
parentBranch?: string;
@ -133,6 +134,90 @@ async function ensureProjectSyncActors(c: any, localPath: string): Promise<void>
c.state.syncActorsStarted = true;
}
// Create the repo_action_jobs table if it does not exist yet.
// NOTE(review): presumably a guard for actor databases created before the
// repoActionJobs schema/migration existed — confirm; if all actor DBs are
// migrated, this raw DDL could be retired.
async function ensureRepoActionJobsTable(c: any): Promise<void> {
  await c.db.execute(`
CREATE TABLE IF NOT EXISTS repo_action_jobs (
job_id text PRIMARY KEY NOT NULL,
action text NOT NULL,
branch_name text,
parent_branch text,
status text NOT NULL,
message text NOT NULL,
created_at integer NOT NULL,
updated_at integer NOT NULL,
completed_at integer
)
`);
}
// Upsert one repo stack-action job row, keyed by jobId. New rows take
// createdAt from the input (or now); on conflict only status, message,
// updatedAt, and completedAt are refreshed.
// NOTE(review): on conflict, an omitted `completedAt` is written as null,
// resetting any previously stored completion time — confirm callers always
// pass completedAt when marking a job completed/error.
async function writeRepoActionJob(
  c: any,
  input: {
    jobId: string;
    action: RepoStackAction;
    branchName: string | null;
    parentBranch: string | null;
    status: "queued" | "running" | "completed" | "error";
    message: string;
    createdAt?: number;
    completedAt?: number | null;
  },
): Promise<void> {
  // Table may predate this actor's schema; create it on demand.
  await ensureRepoActionJobsTable(c);
  const now = Date.now();
  await c.db
    .insert(repoActionJobs)
    .values({
      jobId: input.jobId,
      action: input.action,
      branchName: input.branchName,
      parentBranch: input.parentBranch,
      status: input.status,
      message: input.message,
      createdAt: input.createdAt ?? now,
      updatedAt: now,
      completedAt: input.completedAt ?? null,
    })
    .onConflictDoUpdate({
      target: repoActionJobs.jobId,
      set: {
        status: input.status,
        message: input.message,
        updatedAt: now,
        completedAt: input.completedAt ?? null,
      },
    })
    .run();
}
/**
 * Returns the 20 most recently updated repo action job rows (newest first),
 * normalizing nullable columns to explicit nulls for the overview payload.
 */
async function listRepoActionJobRows(c: any): Promise<
  Array<{
    jobId: string;
    action: RepoStackAction;
    branchName: string | null;
    parentBranch: string | null;
    status: "queued" | "running" | "completed" | "error";
    message: string;
    createdAt: number;
    updatedAt: number;
    completedAt: number | null;
  }>
> {
  await ensureRepoActionJobsTable(c);
  const rows = await c.db
    .select()
    .from(repoActionJobs)
    .orderBy(desc(repoActionJobs.updatedAt))
    .limit(20)
    .all();
  return rows.map((row: any) => {
    return {
      jobId: row.jobId,
      action: row.action,
      branchName: row.branchName ?? null,
      parentBranch: row.parentBranch ?? null,
      status: row.status,
      message: row.message,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt,
      completedAt: row.completedAt ?? null,
    };
  });
}
async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise<void> {
try {
await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run();
@ -359,8 +444,6 @@ async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise<TaskR
const taskId = randomUUID();
if (onBranch) {
await forceProjectSync(c, localPath);
const branchRow = await c.db.select({ branchName: branches.branchName }).from(branches).where(eq(branches.branchName, onBranch)).get();
if (!branchRow) {
throw new Error(`Branch not found in repo snapshot: ${onBranch}`);
@ -573,14 +656,37 @@ async function runRepoStackActionMutation(c: any, cmd: RunRepoStackActionCommand
const { driver } = getActorRuntimeContext();
const at = Date.now();
const jobId = cmd.jobId ?? randomUUID();
const action = cmd.action;
const branchName = cmd.branchName?.trim() || null;
const parentBranch = cmd.parentBranch?.trim() || null;
await writeRepoActionJob(c, {
jobId,
action,
branchName,
parentBranch,
status: "running",
message: `Running ${action}`,
createdAt: at,
});
if (!(await driver.stack.available(localPath).catch(() => false))) {
await writeRepoActionJob(c, {
jobId,
action,
branchName,
parentBranch,
status: "error",
message: "git-spice is not available for this repo",
createdAt: at,
completedAt: Date.now(),
});
return {
jobId,
action,
executed: false,
status: "error",
message: "git-spice is not available for this repo",
at,
};
@ -615,48 +721,77 @@ async function runRepoStackActionMutation(c: any, cmd: RunRepoStackActionCommand
}
}
await withRepoGitLock(localPath, async () => {
if (action === "sync_repo") {
await driver.stack.syncRepo(localPath);
} else if (action === "restack_repo") {
await driver.stack.restackRepo(localPath);
} else if (action === "restack_subtree") {
await driver.stack.restackSubtree(localPath, branchName!);
} else if (action === "rebase_branch") {
await driver.stack.rebaseBranch(localPath, branchName!);
} else if (action === "reparent_branch") {
await driver.stack.reparentBranch(localPath, branchName!, parentBranch!);
} else {
throw new Error(`Unsupported repo stack action: ${action}`);
}
});
await forceProjectSync(c, localPath);
try {
const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId);
await history.append({
kind: "repo.stack_action",
branchName: branchName ?? null,
payload: {
action,
await withRepoGitLock(localPath, async () => {
if (action === "sync_repo") {
await driver.stack.syncRepo(localPath);
} else if (action === "restack_repo") {
await driver.stack.restackRepo(localPath);
} else if (action === "restack_subtree") {
await driver.stack.restackSubtree(localPath, branchName!);
} else if (action === "rebase_branch") {
await driver.stack.rebaseBranch(localPath, branchName!);
} else if (action === "reparent_branch") {
await driver.stack.reparentBranch(localPath, branchName!, parentBranch!);
} else {
throw new Error(`Unsupported repo stack action: ${action}`);
}
});
try {
const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId);
await history.append({
kind: "repo.stack_action",
branchName: branchName ?? null,
parentBranch: parentBranch ?? null,
},
payload: {
action,
branchName: branchName ?? null,
parentBranch: parentBranch ?? null,
jobId,
},
});
} catch (error) {
logActorWarning("project", "failed appending repo stack history event", {
workspaceId: c.state.workspaceId,
repoId: c.state.repoId,
action,
error: resolveErrorMessage(error),
});
}
await forceProjectSync(c, localPath);
await writeRepoActionJob(c, {
jobId,
action,
branchName,
parentBranch,
status: "completed",
message: `Completed ${action}`,
createdAt: at,
completedAt: Date.now(),
});
} catch (error) {
logActorWarning("project", "failed appending repo stack history event", {
workspaceId: c.state.workspaceId,
repoId: c.state.repoId,
const message = resolveErrorMessage(error);
await writeRepoActionJob(c, {
jobId,
action,
error: resolveErrorMessage(error),
branchName,
parentBranch,
status: "error",
message,
createdAt: at,
completedAt: Date.now(),
});
throw error;
}
return {
jobId,
action,
executed: true,
message: `stack action executed: ${action}`,
status: "completed",
message: `Completed ${action}`,
at,
};
}
@ -999,7 +1134,6 @@ export const projectActions = {
async getRepoOverview(c: any, _cmd?: RepoOverviewCommand): Promise<RepoOverview> {
const localPath = await ensureProjectReadyForRead(c);
await ensureTaskIndexHydratedForRead(c);
await forceProjectSync(c, localPath);
const { driver } = getActorRuntimeContext();
const now = Date.now();
@ -1118,6 +1252,9 @@ export const projectActions = {
};
});
const latestBranchSync = await c.db.select({ updatedAt: branches.updatedAt }).from(branches).orderBy(desc(branches.updatedAt)).limit(1).get();
const latestPrSync = await c.db.select({ updatedAt: prCache.updatedAt }).from(prCache).orderBy(desc(prCache.updatedAt)).limit(1).get();
return {
workspaceId: c.state.workspaceId,
repoId: c.state.repoId,
@ -1125,6 +1262,11 @@ export const projectActions = {
baseRef,
stackAvailable,
fetchedAt: now,
branchSyncAt: latestBranchSync?.updatedAt ?? null,
prSyncAt: latestPrSync?.updatedAt ?? null,
branchSyncStatus: latestBranchSync ? "synced" : "pending",
prSyncStatus: latestPrSync ? "synced" : "pending",
repoActionJobs: await listRepoActionJobRows(c),
branches: branchRows,
};
},
@ -1156,12 +1298,41 @@ export const projectActions = {
async runRepoStackAction(c: any, cmd: RunRepoStackActionCommand): Promise<RepoStackActionResult> {
const self = selfProject(c);
return expectQueueResponse<RepoStackActionResult>(
await self.send(projectWorkflowQueueName("project.command.runRepoStackAction"), cmd, {
wait: true,
timeout: 12 * 60_000,
}),
const jobId = randomUUID();
const at = Date.now();
const action = cmd.action;
const branchName = cmd.branchName?.trim() || null;
const parentBranch = cmd.parentBranch?.trim() || null;
await writeRepoActionJob(c, {
jobId,
action,
branchName,
parentBranch,
status: "queued",
message: `Queued ${action}`,
createdAt: at,
});
await self.send(
projectWorkflowQueueName("project.command.runRepoStackAction"),
{
...cmd,
jobId,
},
{
wait: false,
},
);
return {
jobId,
action,
executed: true,
status: "queued",
message: `Queued ${action}`,
at,
};
},
async applyPrSyncResult(c: any, body: PrSyncResult): Promise<void> {

View file

@ -42,3 +42,15 @@ export const taskIndex = sqliteTable("task_index", {
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
// Audit/history table for queued repo stack actions (sync/restack/rebase/reparent).
// One row per jobId; status values written by callers are
// "queued" | "running" | "completed" | "error".
export const repoActionJobs = sqliteTable("repo_action_jobs", {
  jobId: text("job_id").notNull().primaryKey(),
  action: text("action").notNull(),
  branchName: text("branch_name"),
  parentBranch: text("parent_branch"),
  status: text("status").notNull(),
  // Human-readable progress or error message surfaced in the UI.
  message: text("message").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
  // Null until the job reaches a terminal status.
  completedAt: integer("completed_at"),
});

View file

@ -278,10 +278,12 @@ async function getSandboxAgentClient(c: any) {
});
}
function broadcastProcessesUpdated(c: any): void {
/**
 * Fetches the current process list from the sandbox agent and broadcasts it to
 * connected clients as a "processesUpdated" event carrying the full list.
 */
async function broadcastProcessesUpdated(c: any): Promise<void> {
  const client = await getSandboxAgentClient(c);
  const listing = await client.listProcesses();
  c.broadcast("processesUpdated", {
    sandboxId: c.state.sandboxId,
    at: Date.now(),
    type: "processesUpdated",
    processes: listing.processes,
  });
}
@ -475,7 +477,7 @@ export const sandboxInstance = actor({
async createProcess(c: any, request: ProcessCreateRequest): Promise<ProcessInfo> {
const client = await getSandboxAgentClient(c);
const created = await client.createProcess(request);
broadcastProcessesUpdated(c);
await broadcastProcessesUpdated(c);
return created;
},
@ -492,21 +494,21 @@ export const sandboxInstance = actor({
async stopProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
const client = await getSandboxAgentClient(c);
const stopped = await client.stopProcess(request.processId, request.query);
broadcastProcessesUpdated(c);
await broadcastProcessesUpdated(c);
return stopped;
},
async killProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
const client = await getSandboxAgentClient(c);
const killed = await client.killProcess(request.processId, request.query);
broadcastProcessesUpdated(c);
await broadcastProcessesUpdated(c);
return killed;
},
async deleteProcess(c: any, request: { processId: string }): Promise<void> {
const client = await getSandboxAgentClient(c);
await client.deleteProcess(request.processId);
broadcastProcessesUpdated(c);
await broadcastProcessesUpdated(c);
},
async providerState(c: any): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> {

View file

@ -28,6 +28,10 @@ export const taskRuntime = sqliteTable(
activeSwitchTarget: text("active_switch_target"),
activeCwd: text("active_cwd"),
statusMessage: text("status_message"),
gitStateJson: text("git_state_json"),
gitStateUpdatedAt: integer("git_state_updated_at"),
provisionStage: text("provision_stage"),
provisionStageUpdatedAt: integer("provision_stage_updated_at"),
updatedAt: integer("updated_at").notNull(),
},
(table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)],
@ -46,8 +50,13 @@ export const taskSandboxes = sqliteTable("task_sandboxes", {
export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", {
sessionId: text("session_id").notNull().primaryKey(),
sandboxSessionId: text("sandbox_session_id"),
sessionName: text("session_name").notNull(),
model: text("model").notNull(),
status: text("status").notNull().default("ready"),
errorMessage: text("error_message"),
transcriptJson: text("transcript_json").notNull().default("[]"),
transcriptUpdatedAt: integer("transcript_updated_at"),
unread: integer("unread").notNull().default(0),
draftText: text("draft_text").notNull().default(""),
// Structured by the workbench composer attachment payload format.

View file

@ -19,7 +19,9 @@ import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
getWorkbenchTask,
getSessionDetail,
getTaskDetail,
getTaskSummary,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
@ -144,14 +146,9 @@ export const task = actor({
async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> {
const self = selfTask(c);
const result = await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, {
wait: true,
timeout: 30 * 60_000,
await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, {
wait: false,
});
const response = expectQueueResponse<{ ok: boolean; error?: string }>(result);
if (!response.ok) {
throw new Error(response.error ?? "task provisioning failed");
}
return { ok: true };
},
@ -180,47 +177,35 @@ export const task = actor({
async push(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.push"), cmd ?? {}, {
wait: true,
timeout: 180_000,
wait: false,
});
},
async sync(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.sync"), cmd ?? {}, {
wait: true,
timeout: 30_000,
wait: false,
});
},
async merge(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.merge"), cmd ?? {}, {
wait: true,
timeout: 30_000,
wait: false,
});
},
async archive(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
void self
.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, {
wait: true,
timeout: 60_000,
})
.catch((error: unknown) => {
c.log.warn({
msg: "archive command failed",
error: error instanceof Error ? error.message : String(error),
});
});
await self.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, {
wait: false,
});
},
async kill(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.kill"), cmd ?? {}, {
wait: true,
timeout: 60_000,
wait: false,
});
},
@ -228,8 +213,16 @@ export const task = actor({
return await getCurrentRecord({ db: c.db, state: c.state });
},
async getWorkbench(c) {
return await getWorkbenchTask(c);
async getTaskSummary(c) {
return await getTaskSummary(c);
},
async getTaskDetail(c) {
return await getTaskDetail(c);
},
async getSessionDetail(c, input: { sessionId: string }) {
return await getSessionDetail(c, input.sessionId);
},
async markWorkbenchUnread(c): Promise<void> {
@ -255,8 +248,7 @@ export const task = actor({
async renameWorkbenchBranch(c, input: TaskWorkbenchRenameInput): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.rename_branch"), { value: input.value } satisfies TaskWorkbenchValueCommand, {
wait: true,
timeout: 5 * 60_000,
wait: false,
});
},
@ -335,8 +327,7 @@ export const task = actor({
attachments: input.attachments,
} satisfies TaskWorkbenchSendMessageCommand,
{
wait: true,
timeout: 10 * 60_000,
wait: false,
},
);
},
@ -344,8 +335,7 @@ export const task = actor({
async stopWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
wait: true,
timeout: 5 * 60_000,
wait: false,
});
},
@ -360,8 +350,7 @@ export const task = actor({
async closeWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
wait: true,
timeout: 5 * 60_000,
wait: false,
});
},
@ -371,8 +360,7 @@ export const task = actor({
taskWorkflowQueueName("task.command.workbench.publish_pr"),
{},
{
wait: true,
timeout: 10 * 60_000,
wait: false,
},
);
},
@ -380,8 +368,7 @@ export const task = actor({
async revertWorkbenchFile(c, input: { path: string }): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.revert_file"), input, {
wait: true,
timeout: 5 * 60_000,
wait: false,
});
},
},

View file

@ -1,4 +1,5 @@
// @ts-nocheck
import { randomUUID } from "node:crypto";
import { basename } from "node:path";
import { asc, eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../context.js";
@ -6,15 +7,30 @@ import { getOrCreateTaskStatusSync, getOrCreateProject, getOrCreateWorkspace, ge
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { task as taskTable, taskRuntime, taskWorkbenchSessions } from "./db/schema.js";
import { getCurrentRecord } from "./workflow/common.js";
import { taskWorkflowQueueName } from "./workflow/queue.js";
const STATUS_SYNC_INTERVAL_MS = 1_000;
/** Canonical "no git data cached yet" payload shared by cache reads and parse fallbacks. */
function emptyGitState() {
  const state: {
    fileChanges: Array<any>;
    diffs: Record<string, string>;
    fileTree: Array<any>;
    updatedAt: number | null;
  } = {
    fileChanges: [],
    diffs: {},
    fileTree: [],
    updatedAt: null,
  };
  return state;
}
async function ensureWorkbenchSessionTable(c: any): Promise<void> {
await c.db.execute(`
CREATE TABLE IF NOT EXISTS task_workbench_sessions (
session_id text PRIMARY KEY NOT NULL,
sandbox_session_id text,
session_name text NOT NULL,
model text NOT NULL,
status text DEFAULT 'ready' NOT NULL,
error_message text,
transcript_json text DEFAULT '[]' NOT NULL,
transcript_updated_at integer,
unread integer DEFAULT 0 NOT NULL,
draft_text text DEFAULT '' NOT NULL,
draft_attachments_json text DEFAULT '[]' NOT NULL,
@ -26,6 +42,18 @@ async function ensureWorkbenchSessionTable(c: any): Promise<void> {
updated_at integer NOT NULL
)
`);
await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN sandbox_session_id text`).catch(() => {});
await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN status text DEFAULT 'ready' NOT NULL`).catch(() => {});
await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN error_message text`).catch(() => {});
await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_json text DEFAULT '[]' NOT NULL`).catch(() => {});
await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_updated_at integer`).catch(() => {});
}
/**
 * Best-effort migration: adds the cached-git-state and provision-stage columns
 * to task_runtime. ALTER failures (e.g. column already exists) are deliberately
 * swallowed so repeated calls stay idempotent.
 */
async function ensureTaskRuntimeCacheColumns(c: any): Promise<void> {
  const columnDdls = [
    "git_state_json text",
    "git_state_updated_at integer",
    "provision_stage text",
    "provision_stage_updated_at integer",
  ];
  for (const ddl of columnDdls) {
    await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN ${ddl}`).catch(() => {});
  }
}
function defaultModelForAgent(agentType: string | null | undefined) {
@ -74,6 +102,40 @@ function parseDraftAttachments(value: string | null | undefined): Array<any> {
}
}
/**
 * Parses a persisted transcript JSON column. Anything that is not a JSON array
 * (null/undefined/empty string, malformed JSON, non-array JSON) yields [].
 */
function parseTranscript(value: string | null | undefined): Array<any> {
  if (!value) {
    return [];
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(value);
  } catch {
    return [];
  }
  return Array.isArray(parsed) ? parsed : [];
}
/**
 * Parses the cached git-state JSON column into { fileChanges, diffs, fileTree }.
 * Missing or malformed payloads fall back to the shared empty state; individual
 * fields with unexpected shapes are replaced by empty defaults.
 */
function parseGitState(value: string | null | undefined): { fileChanges: Array<any>; diffs: Record<string, string>; fileTree: Array<any> } {
  if (!value) {
    return emptyGitState();
  }
  let parsed: { fileChanges?: unknown; diffs?: unknown; fileTree?: unknown };
  try {
    parsed = JSON.parse(value) as { fileChanges?: unknown; diffs?: unknown; fileTree?: unknown };
  } catch {
    return emptyGitState();
  }
  const diffs = parsed.diffs && typeof parsed.diffs === "object" ? (parsed.diffs as Record<string, string>) : {};
  return {
    fileChanges: Array.isArray(parsed.fileChanges) ? parsed.fileChanges : [],
    diffs,
    fileTree: Array.isArray(parsed.fileTree) ? parsed.fileTree : [],
  };
}
export function shouldMarkSessionUnreadForStatus(meta: { thinkingSinceMs?: number | null }, status: "running" | "idle" | "error"): boolean {
if (status === "running") {
return false;
@ -90,7 +152,13 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }
const mapped = rows.map((row: any) => ({
...row,
id: row.sessionId,
sessionId: row.sessionId,
sessionId: row.sandboxSessionId ?? null,
tabId: row.sessionId,
sandboxSessionId: row.sandboxSessionId ?? null,
status: row.status ?? "ready",
errorMessage: row.errorMessage ?? null,
transcript: parseTranscript(row.transcriptJson),
transcriptUpdatedAt: row.transcriptUpdatedAt ?? null,
draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
draftUpdatedAtMs: row.draftUpdatedAt ?? null,
unread: row.unread === 1,
@ -121,7 +189,13 @@ async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
return {
...row,
id: row.sessionId,
sessionId: row.sessionId,
sessionId: row.sandboxSessionId ?? null,
tabId: row.sessionId,
sandboxSessionId: row.sandboxSessionId ?? null,
status: row.status ?? "ready",
errorMessage: row.errorMessage ?? null,
transcript: parseTranscript(row.transcriptJson),
transcriptUpdatedAt: row.transcriptUpdatedAt ?? null,
draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
draftUpdatedAtMs: row.draftUpdatedAt ?? null,
unread: row.unread === 1,
@ -133,14 +207,18 @@ async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
async function ensureSessionMeta(
c: any,
params: {
sessionId: string;
tabId: string;
sandboxSessionId?: string | null;
model?: string;
sessionName?: string;
unread?: boolean;
created?: boolean;
status?: "pending_provision" | "pending_session_create" | "ready" | "error";
errorMessage?: string | null;
},
): Promise<any> {
await ensureWorkbenchSessionTable(c);
const existing = await readSessionMeta(c, params.sessionId);
const existing = await readSessionMeta(c, params.tabId);
if (existing) {
return existing;
}
@ -153,14 +231,19 @@ async function ensureSessionMeta(
await c.db
.insert(taskWorkbenchSessions)
.values({
sessionId: params.sessionId,
sessionId: params.tabId,
sandboxSessionId: params.sandboxSessionId ?? null,
sessionName,
model,
status: params.status ?? "ready",
errorMessage: params.errorMessage ?? null,
transcriptJson: "[]",
transcriptUpdatedAt: null,
unread: unread ? 1 : 0,
draftText: "",
draftAttachmentsJson: "[]",
draftUpdatedAt: null,
created: 1,
created: params.created === false ? 0 : 1,
closed: 0,
thinkingSinceMs: null,
createdAt: now,
@ -168,25 +251,40 @@ async function ensureSessionMeta(
})
.run();
return await readSessionMeta(c, params.sessionId);
return await readSessionMeta(c, params.tabId);
}
async function updateSessionMeta(c: any, sessionId: string, values: Record<string, unknown>): Promise<any> {
await ensureSessionMeta(c, { sessionId });
async function updateSessionMeta(c: any, tabId: string, values: Record<string, unknown>): Promise<any> {
await ensureSessionMeta(c, { tabId });
await c.db
.update(taskWorkbenchSessions)
.set({
...values,
updatedAt: Date.now(),
})
.where(eq(taskWorkbenchSessions.sessionId, sessionId))
.where(eq(taskWorkbenchSessions.sessionId, tabId))
.run();
return await readSessionMeta(c, sessionId);
return await readSessionMeta(c, tabId);
}
async function notifyWorkbenchUpdated(c: any): Promise<void> {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.notifyWorkbenchUpdated({});
/**
 * Resolves a session tab by its sandbox-side session id and returns the
 * normalized meta via readSessionMeta, or null when no tab maps to that id.
 */
async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise<any | null> {
  await ensureWorkbenchSessionTable(c);
  const row = await c.db
    .select()
    .from(taskWorkbenchSessions)
    .where(eq(taskWorkbenchSessions.sandboxSessionId, sandboxSessionId))
    .get();
  return row ? await readSessionMeta(c, row.sessionId) : null;
}
/**
 * Loads session meta for a tab and asserts it is usable: the tab must exist,
 * be in "ready" status, and already have a sandbox session attached.
 * Throws a user-facing error otherwise.
 */
async function requireReadySessionMeta(c: any, tabId: string): Promise<any> {
  const meta = await readSessionMeta(c, tabId);
  if (!meta) {
    throw new Error(`Unknown workbench tab: ${tabId}`);
  }
  const usable = meta.status === "ready" && Boolean(meta.sandboxSessionId);
  if (!usable) {
    throw new Error(meta.errorMessage ?? "This workbench tab is still preparing");
  }
  return meta;
}
function shellFragment(parts: string[]): string {
@ -333,17 +431,6 @@ async function collectWorkbenchGitState(c: any, record: any) {
label: "git diff numstat",
});
const numstat = parseNumstat(numstatResult.result);
const diffs: Record<string, string> = {};
for (const row of statusRows) {
const diffResult = await executeInSandbox(c, {
sandboxId: activeSandboxId,
cwd,
command: `if git ls-files --error-unmatch -- ${JSON.stringify(row.path)} >/dev/null 2>&1; then git diff -- ${JSON.stringify(row.path)}; else git diff --no-index -- /dev/null ${JSON.stringify(row.path)} || true; fi`,
label: `git diff ${row.path}`,
});
diffs[row.path] = diffResult.result;
}
const filesResult = await executeInSandbox(c, {
sandboxId: activeSandboxId,
@ -356,6 +443,17 @@ async function collectWorkbenchGitState(c: any, record: any) {
.map((line) => line.trim())
.filter(Boolean);
const diffs: Record<string, string> = {};
for (const row of statusRows) {
const diffResult = await executeInSandbox(c, {
sandboxId: activeSandboxId,
cwd,
command: `git diff -- ${JSON.stringify(row.path)}`,
label: `git diff ${row.path}`,
});
diffs[row.path] = diffResult.exitCode === 0 ? diffResult.result : "";
}
return {
fileChanges: statusRows.map((row) => {
const counts = numstat.get(row.path) ?? { added: 0, removed: 0 };
@ -371,6 +469,37 @@ async function collectWorkbenchGitState(c: any, record: any) {
};
}
/**
 * Reads the cached git state JSON from the task_runtime singleton row (id = 1)
 * and returns it with its last-updated timestamp (null when never cached).
 */
async function readCachedGitState(c: any): Promise<{ fileChanges: Array<any>; diffs: Record<string, string>; fileTree: Array<any>; updatedAt: number | null }> {
  await ensureTaskRuntimeCacheColumns(c);
  const row = await c.db
    .select({ gitStateJson: taskRuntime.gitStateJson, gitStateUpdatedAt: taskRuntime.gitStateUpdatedAt })
    .from(taskRuntime)
    .where(eq(taskRuntime.id, 1))
    .get();
  const state = parseGitState(row?.gitStateJson);
  return { ...state, updatedAt: row?.gitStateUpdatedAt ?? null };
}
/**
 * Serializes and stores the collected git state on the task_runtime singleton
 * row (id = 1), stamping both the cache timestamp and the row's updatedAt.
 */
async function writeCachedGitState(c: any, gitState: { fileChanges: Array<any>; diffs: Record<string, string>; fileTree: Array<any> }): Promise<void> {
  await ensureTaskRuntimeCacheColumns(c);
  const stampedAt = Date.now();
  const patch = {
    gitStateJson: JSON.stringify(gitState),
    gitStateUpdatedAt: stampedAt,
    updatedAt: stampedAt,
  };
  await c.db.update(taskRuntime).set(patch).where(eq(taskRuntime.id, 1)).run();
}
async function readSessionTranscript(c: any, record: any, sessionId: string) {
const sandboxId = record.activeSandboxId ?? record.sandboxes?.[0]?.sandboxId ?? null;
if (!sandboxId) {
@ -380,7 +509,7 @@ async function readSessionTranscript(c: any, record: any, sessionId: string) {
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId);
const page = await sandbox.listSessionEvents({
sessionId,
limit: 500,
limit: 100,
});
return page.items.map((event: any) => ({
id: event.id,
@ -393,14 +522,50 @@ async function readSessionTranscript(c: any, record: any, sessionId: string) {
}));
}
async function activeSessionStatus(c: any, record: any, sessionId: string) {
if (record.activeSessionId !== sessionId || !record.activeSandboxId) {
/** Persists a session tab's transcript as JSON and bumps its transcript timestamp. */
async function writeSessionTranscript(c: any, tabId: string, transcript: Array<any>): Promise<void> {
  const patch = {
    transcriptJson: JSON.stringify(transcript),
    transcriptUpdatedAt: Date.now(),
  };
  await updateSessionMeta(c, tabId, patch);
}
/**
 * Fire-and-forget enqueue of a workbench refresh command on this task's own
 * workflow queue. wait: false means callers never block on the refresh itself.
 */
async function enqueueWorkbenchRefresh(
  c: any,
  command: "task.command.workbench.refresh_derived" | "task.command.workbench.refresh_session_transcript",
  body: Record<string, unknown>,
): Promise<void> {
  await selfTask(c).send(command, body, { wait: false });
}
/**
 * Schedules background refreshes for cold caches:
 * - the derived git state, when a sandbox is active but nothing is cached yet;
 * - session transcripts, for ready sessions with an attached sandbox session
 *   that have never been fetched.
 * Closed, non-ready, unattached, or already-populated sessions are skipped.
 */
async function maybeScheduleWorkbenchRefreshes(c: any, record: any, sessions: Array<any>): Promise<void> {
  const cached = await readCachedGitState(c);
  const gitStateCold = Boolean(record.activeSandboxId) && !cached.updatedAt;
  if (gitStateCold) {
    await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {});
  }
  for (const session of sessions) {
    const transcriptCold =
      !session.closed && session.status === "ready" && Boolean(session.sandboxSessionId) && !session.transcriptUpdatedAt;
    if (transcriptCold) {
      await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
        sessionId: session.sandboxSessionId,
      });
    }
  }
}
function activeSessionStatus(record: any, sessionId: string) {
if (record.activeSessionId !== sessionId) {
return "idle";
}
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
const status = await sandbox.sessionStatus({ sessionId });
return status.status;
if (record.status === "running") {
return "running";
}
if (record.status === "error") {
return "error";
}
return "idle";
}
async function readPullRequestSummary(c: any, branchName: string | null) {
@ -417,51 +582,75 @@ async function readPullRequestSummary(c: any, branchName: string | null) {
}
export async function ensureWorkbenchSeeded(c: any): Promise<any> {
await ensureTaskRuntimeCacheColumns(c);
const record = await getCurrentRecord({ db: c.db, state: c.state });
if (record.activeSessionId) {
await ensureSessionMeta(c, {
sessionId: record.activeSessionId,
tabId: record.activeSessionId,
sandboxSessionId: record.activeSessionId,
model: defaultModelForAgent(record.agentType),
sessionName: "Session 1",
status: "ready",
});
}
return record;
}
export async function getWorkbenchTask(c: any): Promise<any> {
const record = await ensureWorkbenchSeeded(c);
const gitState = await collectWorkbenchGitState(c, record);
const sessions = await listSessionMetaRows(c);
const tabs = [];
for (const meta of sessions) {
const status = await activeSessionStatus(c, record, meta.sessionId);
let thinkingSinceMs = meta.thinkingSinceMs ?? null;
let unread = Boolean(meta.unread);
if (thinkingSinceMs && status !== "running") {
thinkingSinceMs = null;
unread = true;
}
tabs.push({
id: meta.id,
sessionId: meta.sessionId,
sessionName: meta.sessionName,
agent: agentKindForModel(meta.model),
model: meta.model,
status,
thinkingSinceMs: status === "running" ? thinkingSinceMs : null,
unread,
created: Boolean(meta.created),
draft: {
text: meta.draftText ?? "",
attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [],
updatedAtMs: meta.draftUpdatedAtMs ?? null,
},
transcript: await readSessionTranscript(c, record, meta.sessionId),
});
/**
 * Projects a session meta row into the summary shape pushed to the workspace
 * sidebar. Derives the sandbox session id (falling back to the record's active
 * session while provisioning), maps meta status to a run status, and — when a
 * thinking indicator is stale because the session stopped running — clears it
 * and marks the tab unread.
 */
function buildSessionSummary(record: any, meta: any): any {
  let sandboxSessionId = meta.sandboxSessionId ?? null;
  if (sandboxSessionId === null && meta.status === "pending_provision" && record.activeSessionId) {
    sandboxSessionId = record.activeSessionId;
  }
  let sessionStatus;
  if (meta.status === "ready" && sandboxSessionId) {
    sessionStatus = activeSessionStatus(record, sandboxSessionId);
  } else if (meta.status === "error") {
    sessionStatus = "error";
  } else {
    sessionStatus = "idle";
  }
  let thinkingSinceMs = meta.thinkingSinceMs ?? null;
  let unread = Boolean(meta.unread);
  if (thinkingSinceMs && sessionStatus !== "running") {
    thinkingSinceMs = null;
    unread = true;
  }
  return {
    id: meta.id,
    sessionId: sandboxSessionId,
    sessionName: meta.sessionName,
    agent: agentKindForModel(meta.model),
    model: meta.model,
    status: sessionStatus,
    thinkingSinceMs: sessionStatus === "running" ? thinkingSinceMs : null,
    unread,
    created: Boolean(meta.created || sandboxSessionId),
  };
}
/**
 * Expands a session meta row into the full detail payload sent to direct task
 * subscribers: the summary fields plus composer draft state and transcript.
 * Note the naming: `sessionId` here carries the tab id, while the summary's
 * derived sandbox-side id is exposed as `sandboxSessionId`.
 */
function buildSessionDetailFromMeta(record: any, meta: any): any {
  const summary = buildSessionSummary(record, meta);
  const draft = {
    text: meta.draftText ?? "",
    attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [],
    updatedAtMs: meta.draftUpdatedAtMs ?? null,
  };
  return {
    sessionId: meta.tabId,
    tabId: meta.tabId,
    sandboxSessionId: summary.sessionId,
    sessionName: summary.sessionName,
    agent: summary.agent,
    model: summary.model,
    status: summary.status,
    thinkingSinceMs: summary.thinkingSinceMs,
    unread: summary.unread,
    created: summary.created,
    draft,
    transcript: meta.transcript ?? [],
  };
}
/**
* Builds a WorkbenchTaskSummary from local task actor state. Task actors push
* this to the parent workspace actor so workspace sidebar reads stay local.
*/
export async function buildTaskSummary(c: any): Promise<any> {
const record = await ensureWorkbenchSeeded(c);
const sessions = await listSessionMetaRows(c);
await maybeScheduleWorkbenchRefreshes(c, record, sessions);
return {
id: c.state.taskId,
repoId: c.state.repoId,
@ -471,14 +660,112 @@ export async function getWorkbenchTask(c: any): Promise<any> {
updatedAtMs: record.updatedAt,
branch: record.branchName,
pullRequest: await readPullRequestSummary(c, record.branchName),
tabs,
sessionsSummary: sessions.map((meta) => buildSessionSummary(record, meta)),
};
}
/**
 * Builds a WorkbenchTaskDetail from local task actor state for direct task
 * subscribers. This is a full replacement payload, not a patch.
 *
 * NOTE(review): buildTaskSummary() below re-runs ensureWorkbenchSeeded,
 * listSessionMetaRows, and maybeScheduleWorkbenchRefreshes, so refresh
 * scheduling happens twice per call — presumably harmless since enqueues are
 * fire-and-forget, but worth confirming/deduplicating.
 */
export async function buildTaskDetail(c: any): Promise<any> {
  const record = await ensureWorkbenchSeeded(c);
  // Git state comes from the task_runtime cache, not a live sandbox read.
  const gitState = await readCachedGitState(c);
  const sessions = await listSessionMetaRows(c);
  await maybeScheduleWorkbenchRefreshes(c, record, sessions);
  const summary = await buildTaskSummary(c);
  return {
    ...summary,
    task: record.task,
    // Only the known agent types pass through; anything else is nulled.
    agentType: record.agentType === "claude" || record.agentType === "codex" ? record.agentType : null,
    runtimeStatus: record.status,
    statusMessage: record.statusMessage ?? null,
    activeSessionId: record.activeSessionId ?? null,
    diffStat: record.diffStat ?? null,
    prUrl: record.prUrl ?? null,
    reviewStatus: record.reviewStatus ?? null,
    fileChanges: gitState.fileChanges,
    diffs: gitState.diffs,
    fileTree: gitState.fileTree,
    minutesUsed: 0,
    sandboxes: (record.sandboxes ?? []).map((sandbox: any) => ({
      providerId: sandbox.providerId,
      sandboxId: sandbox.sandboxId,
      cwd: sandbox.cwd ?? null,
    })),
    activeSandboxId: record.activeSandboxId ?? null,
  };
}
/**
 * Builds a WorkbenchSessionDetail for a specific session tab.
 * Throws when the tab is unknown or has been closed.
 */
export async function buildSessionDetail(c: any, tabId: string): Promise<any> {
  const record = await ensureWorkbenchSeeded(c);
  const meta = await readSessionMeta(c, tabId);
  if (meta && !meta.closed) {
    return buildSessionDetailFromMeta(record, meta);
  }
  throw new Error(`Unknown workbench session tab: ${tabId}`);
}
/** Queue-facing accessor: returns the current task summary projection. */
export async function getTaskSummary(c: any): Promise<any> {
  const summary = await buildTaskSummary(c);
  return summary;
}
/** Queue-facing accessor: returns the full task detail payload. */
export async function getTaskDetail(c: any): Promise<any> {
  const detail = await buildTaskDetail(c);
  return detail;
}
/** Queue-facing accessor: returns the detail payload for one session tab. */
export async function getSessionDetail(c: any, tabId: string): Promise<any> {
  const session = await buildSessionDetail(c, tabId);
  return session;
}
/**
* Replaces the old notifyWorkbenchUpdated pattern.
*
* The task actor emits two kinds of updates:
* - Push summary state up to the parent workspace actor so the sidebar
* materialized projection stays current.
* - Broadcast full detail/session payloads down to direct task subscribers.
*/
export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise<void> {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) });
c.broadcast("taskUpdated", {
type: "taskDetailUpdated",
detail: await buildTaskDetail(c),
});
if (options?.sessionId) {
c.broadcast("sessionUpdated", {
type: "sessionUpdated",
session: await buildSessionDetail(c, options.sessionId),
});
}
}
/**
 * Recollects git-derived workbench state from the sandbox, persists it to the
 * local cache, and then broadcasts the refreshed task detail.
 */
export async function refreshWorkbenchDerivedState(c: any): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  const derived = await collectWorkbenchGitState(c, record);
  await writeCachedGitState(c, derived);
  await broadcastTaskUpdate(c);
}
/**
 * Re-reads the transcript for a session — resolved first by sandbox session
 * id, then by tab id — stores it against the tab, and broadcasts the update.
 * No-ops when the tab has no sandbox session attached yet.
 */
export async function refreshWorkbenchSessionTranscript(c: any, sessionId: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await readSessionMeta(c, sessionId));
  if (!meta?.sandboxSessionId) {
    return;
  }
  const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId);
  await writeSessionTranscript(c, meta.tabId, transcript);
  await broadcastTaskUpdate(c, { sessionId: meta.tabId });
}
export async function renameWorkbenchTask(c: any, value: string): Promise<void> {
const nextTitle = value.trim();
if (!nextTitle) {
@ -494,7 +781,7 @@ export async function renameWorkbenchTask(c: any, value: string): Promise<void>
.where(eq(taskTable.id, 1))
.run();
c.state.title = nextTitle;
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c);
}
export async function renameWorkbenchBranch(c: any, value: string): Promise<void> {
@ -545,55 +832,168 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise<void
taskId: c.state.taskId,
branchName: nextBranch,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c);
}
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
let record = await ensureWorkbenchSeeded(c);
if (!record.activeSandboxId) {
// Fire-and-forget: enqueue provisioning without waiting to avoid self-deadlock
// (this handler already runs inside the task workflow loop, so wait:true would deadlock).
const providerId = record.providerId ?? c.state.providerId ?? getActorRuntimeContext().providers.defaultProviderId();
await selfTask(c).provision({ providerId });
record = await ensureWorkbenchSeeded(c);
await selfTask(c).send(taskWorkflowQueueName("task.command.provision"), { providerId }, { wait: false });
throw new Error("sandbox is provisioning — retry shortly");
}
if (record.activeSessionId) {
const existingSessions = await listSessionMetaRows(c);
if (existingSessions.length === 0) {
await ensureSessionMeta(c, {
sessionId: record.activeSessionId,
tabId: record.activeSessionId,
sandboxSessionId: record.activeSessionId,
model: model ?? defaultModelForAgent(record.agentType),
sessionName: "Session 1",
status: "ready",
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId: record.activeSessionId });
return { tabId: record.activeSessionId };
}
}
if (!record.activeSandboxId) {
throw new Error("cannot create session without an active sandbox");
const tabId = `tab-${randomUUID()}`;
await ensureSessionMeta(c, {
tabId,
model: model ?? defaultModelForAgent(record.agentType),
status: record.activeSandboxId ? "pending_session_create" : "pending_provision",
created: false,
});
const providerId = record.providerId ?? c.state.providerId ?? getActorRuntimeContext().providers.defaultProviderId();
const self = selfTask(c);
if (!record.activeSandboxId && !String(record.status ?? "").startsWith("init_")) {
await self.send("task.command.provision", { providerId }, { wait: false });
}
await self.send(
"task.command.workbench.ensure_session",
{ tabId, ...(model ? { model } : {}) },
{
wait: false,
},
);
await broadcastTaskUpdate(c, { sessionId: tabId });
return { tabId };
}
export async function ensureWorkbenchSession(c: any, tabId: string, model?: string): Promise<void> {
const meta = await readSessionMeta(c, tabId);
if (!meta || meta.closed) {
return;
}
const record = await ensureWorkbenchSeeded(c);
if (!record.activeSandboxId) {
await updateSessionMeta(c, tabId, {
status: "pending_provision",
errorMessage: null,
});
return;
}
if (!meta.sandboxSessionId && record.activeSessionId && meta.status === "pending_provision") {
const existingTabForActiveSession = await readSessionMetaBySandboxSessionId(c, record.activeSessionId);
if (existingTabForActiveSession && existingTabForActiveSession.tabId !== tabId) {
await updateSessionMeta(c, existingTabForActiveSession.tabId, {
closed: 1,
});
}
await updateSessionMeta(c, tabId, {
sandboxSessionId: record.activeSessionId,
status: "ready",
errorMessage: null,
created: 1,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: record.activeSessionId,
});
await broadcastTaskUpdate(c, { sessionId: tabId });
return;
}
if (meta.sandboxSessionId) {
await updateSessionMeta(c, tabId, {
status: "ready",
errorMessage: null,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: meta.sandboxSessionId,
});
await broadcastTaskUpdate(c, { sessionId: tabId });
return;
}
const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
if (!cwd) {
throw new Error("cannot create session without a sandbox cwd");
await updateSessionMeta(c, tabId, {
status: "error",
errorMessage: "cannot create session without a sandbox cwd",
});
await broadcastTaskUpdate(c, { sessionId: tabId });
return;
}
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
const created = await sandbox.createSession({
prompt: "",
cwd,
agent: agentTypeForModel(model ?? defaultModelForAgent(record.agentType)),
await updateSessionMeta(c, tabId, {
status: "pending_session_create",
errorMessage: null,
});
if (!created.id) {
throw new Error(created.error ?? "sandbox-agent session creation failed");
try {
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
const created = await sandbox.createSession({
prompt: "",
cwd,
agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)),
});
if (!created.id) {
throw new Error(created.error ?? "sandbox-agent session creation failed");
}
await updateSessionMeta(c, tabId, {
sandboxSessionId: created.id,
status: "ready",
errorMessage: null,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: created.id,
});
} catch (error) {
await updateSessionMeta(c, tabId, {
status: "error",
errorMessage: error instanceof Error ? error.message : String(error),
});
}
await ensureSessionMeta(c, {
sessionId: created.id,
model: model ?? defaultModelForAgent(record.agentType),
});
await notifyWorkbenchUpdated(c);
return { tabId: created.id };
await broadcastTaskUpdate(c, { sessionId: tabId });
}
/**
 * Re-dispatches ensure_session commands for every open tab that has not yet
 * reached a terminal status ("ready" or "error"), e.g. after provisioning
 * completes. Commands are enqueued without waiting on their results.
 */
export async function enqueuePendingWorkbenchSessions(c: any): Promise<void> {
  const self = selfTask(c);
  const rows = await listSessionMetaRows(c, { includeClosed: true });
  for (const row of rows) {
    if (row.closed === true || row.status === "ready" || row.status === "error") {
      continue;
    }
    await self.send(
      "task.command.workbench.ensure_session",
      { tabId: row.tabId, model: row.model },
      { wait: false },
    );
  }
}
export async function renameWorkbenchSession(c: any, sessionId: string, title: string): Promise<void> {
@ -604,14 +1004,14 @@ export async function renameWorkbenchSession(c: any, sessionId: string, title: s
await updateSessionMeta(c, sessionId, {
sessionName: trimmed,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId });
}
export async function setWorkbenchSessionUnread(c: any, sessionId: string, unread: boolean): Promise<void> {
await updateSessionMeta(c, sessionId, {
unread: unread ? 1 : 0,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId });
}
export async function updateWorkbenchDraft(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
@ -620,14 +1020,14 @@ export async function updateWorkbenchDraft(c: any, sessionId: string, text: stri
draftAttachmentsJson: JSON.stringify(attachments),
draftUpdatedAt: Date.now(),
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId });
}
export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise<void> {
await updateSessionMeta(c, sessionId, {
model,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId });
}
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
@ -636,7 +1036,7 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri
throw new Error("cannot send message without an active sandbox");
}
await ensureSessionMeta(c, { sessionId });
const meta = await requireReadySessionMeta(c, sessionId);
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)]
.filter(Boolean)
@ -646,7 +1046,7 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri
}
await sandbox.sendPrompt({
sessionId,
sessionId: meta.sandboxSessionId,
prompt,
notification: true,
});
@ -663,25 +1063,28 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri
await c.db
.update(taskRuntime)
.set({
activeSessionId: sessionId,
activeSessionId: meta.sandboxSessionId,
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, 1))
.run();
const sync = await getOrCreateTaskStatusSync(c, c.state.workspaceId, c.state.repoId, c.state.taskId, record.activeSandboxId, sessionId, {
const sync = await getOrCreateTaskStatusSync(c, c.state.workspaceId, c.state.repoId, c.state.taskId, record.activeSandboxId, meta.sandboxSessionId, {
workspaceId: c.state.workspaceId,
repoId: c.state.repoId,
taskId: c.state.taskId,
providerId: c.state.providerId,
sandboxId: record.activeSandboxId,
sessionId,
sessionId: meta.sandboxSessionId,
intervalMs: STATUS_SYNC_INTERVAL_MS,
});
await sync.setIntervalMs({ intervalMs: STATUS_SYNC_INTERVAL_MS });
await sync.start();
await sync.force();
await notifyWorkbenchUpdated(c);
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: meta.sandboxSessionId,
});
await broadcastTaskUpdate(c, { sessionId });
}
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
@ -689,20 +1092,21 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise<v
if (!record.activeSandboxId) {
return;
}
const meta = await requireReadySessionMeta(c, sessionId);
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
await sandbox.cancelSession({ sessionId });
await sandbox.cancelSession({ sessionId: meta.sandboxSessionId });
await updateSessionMeta(c, sessionId, {
thinkingSinceMs: null,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId });
}
export async function syncWorkbenchSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise<void> {
const record = await ensureWorkbenchSeeded(c);
const meta = await ensureSessionMeta(c, { sessionId });
const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { tabId: sessionId, sandboxSessionId: sessionId }));
let changed = false;
if (record.activeSessionId === sessionId) {
if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) {
const mappedStatus = status === "running" ? "running" : status === "error" ? "error" : "idle";
if (record.status !== mappedStatus) {
await c.db
@ -753,27 +1157,36 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat
}
if (changed) {
await notifyWorkbenchUpdated(c);
if (status !== "running") {
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {});
}
await broadcastTaskUpdate(c, { sessionId: meta.tabId });
}
}
export async function closeWorkbenchSession(c: any, sessionId: string): Promise<void> {
const record = await ensureWorkbenchSeeded(c);
if (!record.activeSandboxId) {
return;
}
const sessions = await listSessionMetaRows(c);
if (sessions.filter((candidate) => candidate.closed !== true).length <= 1) {
return;
}
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
await sandbox.destroySession({ sessionId });
const meta = await readSessionMeta(c, sessionId);
if (!meta) {
return;
}
if (record.activeSandboxId && meta.sandboxSessionId) {
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
await sandbox.destroySession({ sessionId: meta.sandboxSessionId });
}
await updateSessionMeta(c, sessionId, {
closed: 1,
thinkingSinceMs: null,
});
if (record.activeSessionId === sessionId) {
if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) {
await c.db
.update(taskRuntime)
.set({
@ -783,7 +1196,7 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise<
.where(eq(taskRuntime.id, 1))
.run();
}
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c);
}
export async function markWorkbenchUnread(c: any): Promise<void> {
@ -792,10 +1205,10 @@ export async function markWorkbenchUnread(c: any): Promise<void> {
if (!latest) {
return;
}
await updateSessionMeta(c, latest.sessionId, {
await updateSessionMeta(c, latest.tabId, {
unread: 1,
});
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c, { sessionId: latest.tabId });
}
export async function publishWorkbenchPr(c: any): Promise<void> {
@ -816,7 +1229,7 @@ export async function publishWorkbenchPr(c: any): Promise<void> {
})
.where(eq(taskTable.id, 1))
.run();
await notifyWorkbenchUpdated(c);
await broadcastTaskUpdate(c);
}
export async function revertWorkbenchFile(c: any, path: string): Promise<void> {
@ -838,5 +1251,6 @@ export async function revertWorkbenchFile(c: any, path: string): Promise<void> {
if (result.exitCode !== 0) {
throw new Error(`file revert failed (${result.exitCode}): ${result.result}`);
}
await notifyWorkbenchUpdated(c);
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {});
await broadcastTaskUpdate(c);
}

View file

@ -1,9 +1,9 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared";
import { getOrCreateWorkspace } from "../../handles.js";
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
import { historyKey } from "../../keys.js";
import { broadcastTaskUpdate } from "../workbench.js";
export const TASK_ROW_ID = 1;
@ -83,8 +83,7 @@ export async function setTaskState(ctx: any, status: TaskStatus, statusMessage?:
.run();
}
const workspace = await getOrCreateWorkspace(ctx, ctx.state.workspaceId);
await workspace.notifyWorkbenchUpdated({});
await broadcastTaskUpdate(ctx);
}
export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
@ -176,6 +175,5 @@ export async function appendHistory(ctx: any, kind: string, payload: Record<stri
payload,
});
const workspace = await getOrCreateWorkspace(ctx, ctx.state.workspaceId);
await workspace.notifyWorkbenchUpdated({});
await broadcastTaskUpdate(ctx);
}

View file

@ -8,6 +8,7 @@ import {
initCompleteActivity,
initCreateSandboxActivity,
initCreateSessionActivity,
initEnqueueProvisionActivity,
initEnsureAgentActivity,
initEnsureNameActivity,
initExposeSandboxActivity,
@ -32,6 +33,9 @@ import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
ensureWorkbenchSession,
refreshWorkbenchDerivedState,
refreshWorkbenchSessionTranscript,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
@ -56,7 +60,7 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
const body = msg.body;
await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body));
await loopCtx.removed("init-enqueue-provision", "step");
await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body));
await loopCtx.removed("init-dispatch-provision-v2", "step");
const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx));
@ -164,12 +168,25 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
},
"task.command.workbench.create_session": async (loopCtx, msg) => {
const created = await loopCtx.step({
name: "workbench-create-session",
try {
const created = await loopCtx.step({
name: "workbench-create-session",
timeout: 30_000,
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
});
await msg.complete(created);
} catch (error) {
await msg.complete({ error: resolveErrorMessage(error) });
}
},
"task.command.workbench.ensure_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-ensure-session",
timeout: 5 * 60_000,
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
run: async () => ensureWorkbenchSession(loopCtx, msg.body.tabId, msg.body?.model),
});
await msg.complete(created);
await msg.complete({ ok: true });
},
"task.command.workbench.rename_session": async (loopCtx, msg) => {
@ -215,6 +232,24 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
await msg.complete({ ok: true });
},
"task.command.workbench.refresh_derived": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-refresh-derived",
timeout: 5 * 60_000,
run: async () => refreshWorkbenchDerivedState(loopCtx),
});
await msg.complete({ ok: true });
},
"task.command.workbench.refresh_session_transcript": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-refresh-session-transcript",
timeout: 60_000,
run: async () => refreshWorkbenchSessionTranscript(loopCtx, msg.body.sessionId),
});
await msg.complete({ ok: true });
},
"task.command.workbench.close_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-close-session",

View file

@ -8,6 +8,7 @@ import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, buildAgentPrompt, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
import { taskWorkflowQueueName } from "./queue.js";
import { enqueuePendingWorkbenchSessions } from "../workbench.js";
const DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS = 180_000;
@ -34,6 +35,13 @@ function debugInit(loopCtx: any, message: string, context?: Record<string, unkno
});
}
/**
 * Idempotently adds the cache columns used by the workbench projection to the
 * task_runtime table. Each ALTER TABLE failure (e.g. "duplicate column" when
 * the column already exists) is deliberately swallowed.
 */
async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
  const columns = [
    "git_state_json text",
    "git_state_updated_at integer",
    "provision_stage text",
    "provision_stage_updated_at integer",
  ];
  for (const column of columns) {
    await db.execute(`ALTER TABLE task_runtime ADD COLUMN ${column}`).catch(() => {});
  }
}
async function withActivityTimeout<T>(timeoutMs: number, label: string, run: () => Promise<T>): Promise<T> {
let timer: ReturnType<typeof setTimeout> | null = null;
try {
@ -60,6 +68,8 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";
try {
await ensureTaskRuntimeCacheColumns(db);
await db
.insert(taskTable)
.values({
@ -96,6 +106,10 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
activeSwitchTarget: null,
activeCwd: null,
statusMessage: initialStatusMessage,
gitStateJson: null,
gitStateUpdatedAt: null,
provisionStage: "queued",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
@ -106,6 +120,8 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
activeSwitchTarget: null,
activeCwd: null,
statusMessage: initialStatusMessage,
provisionStage: "queued",
provisionStageUpdatedAt: now,
updatedAt: now,
},
})
@ -118,19 +134,29 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
await setTaskState(loopCtx, "init_enqueue_provision", "provision queued");
const self = selfTask(loopCtx);
void self
.send(taskWorkflowQueueName("task.command.provision"), body, {
wait: false,
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "queued",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.catch((error: unknown) => {
logActorWarning("task.init", "background provision command failed", {
workspaceId: loopCtx.state.workspaceId,
repoId: loopCtx.state.repoId,
taskId: loopCtx.state.taskId,
error: resolveErrorMessage(error),
});
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
const self = selfTask(loopCtx);
try {
await self.send(taskWorkflowQueueName("task.command.provision"), body, {
wait: false,
});
} catch (error: unknown) {
logActorWarning("task.init", "background provision command failed", {
workspaceId: loopCtx.state.workspaceId,
repoId: loopCtx.state.repoId,
taskId: loopCtx.state.taskId,
error: resolveErrorMessage(error),
});
throw error;
}
}
export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
@ -197,6 +223,8 @@ export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
.update(taskRuntime)
.set({
statusMessage: "provisioning",
provisionStage: "repo_prepared",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
@ -222,6 +250,15 @@ export async function initAssertNameActivity(loopCtx: any): Promise<void> {
export async function initCreateSandboxActivity(loopCtx: any, body: any): Promise<any> {
await setTaskState(loopCtx, "init_create_sandbox", "creating sandbox");
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "sandbox_allocated",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
const { providers } = getActorRuntimeContext();
const providerId = body?.providerId ?? loopCtx.state.providerId;
const provider = providers.get(providerId);
@ -307,6 +344,15 @@ export async function initCreateSandboxActivity(loopCtx: any, body: any): Promis
export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox: any): Promise<any> {
await setTaskState(loopCtx, "init_ensure_agent", "ensuring sandbox agent");
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "agent_installing",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
const { providers } = getActorRuntimeContext();
const providerId = body?.providerId ?? loopCtx.state.providerId;
const provider = providers.get(providerId);
@ -318,6 +364,15 @@ export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox:
export async function initStartSandboxInstanceActivity(loopCtx: any, body: any, sandbox: any, agent: any): Promise<any> {
await setTaskState(loopCtx, "init_start_sandbox_instance", "starting sandbox runtime");
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "agent_starting",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
try {
const providerId = body?.providerId ?? loopCtx.state.providerId;
const sandboxInstance = await getOrCreateSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId, {
@ -350,6 +405,15 @@ export async function initStartSandboxInstanceActivity(loopCtx: any, body: any,
export async function initCreateSessionActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady: any): Promise<any> {
await setTaskState(loopCtx, "init_create_session", "creating agent session");
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "session_creating",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
if (!sandboxInstanceReady.ok) {
return {
id: null,
@ -481,6 +545,8 @@ export async function initWriteDbActivity(
activeSwitchTarget: sandbox.switchTarget,
activeCwd,
statusMessage,
provisionStage: sessionHealthy ? "ready" : "error",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
@ -491,6 +557,8 @@ export async function initWriteDbActivity(
activeSwitchTarget: sandbox.switchTarget,
activeCwd,
statusMessage,
provisionStage: sessionHealthy ? "ready" : "error",
provisionStageUpdatedAt: now,
updatedAt: now,
},
})
@ -535,6 +603,12 @@ export async function initCompleteActivity(loopCtx: any, body: any, sandbox: any
});
loopCtx.state.initialized = true;
await enqueuePendingWorkbenchSessions(loopCtx);
const self = selfTask(loopCtx);
await self.send(taskWorkflowQueueName("task.command.workbench.refresh_derived"), {}, { wait: false });
if (sessionId) {
await self.send(taskWorkflowQueueName("task.command.workbench.refresh_session_transcript"), { sessionId }, { wait: false });
}
return;
}
@ -591,6 +665,8 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
activeSwitchTarget: null,
activeCwd: null,
statusMessage: detail,
provisionStage: "error",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
@ -601,6 +677,8 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
activeSwitchTarget: null,
activeCwd: null,
statusMessage: detail,
provisionStage: "error",
provisionStageUpdatedAt: now,
updatedAt: now,
},
})

View file

@ -13,6 +13,7 @@ export const TASK_QUEUE_NAMES = [
"task.command.workbench.rename_task",
"task.command.workbench.rename_branch",
"task.command.workbench.create_session",
"task.command.workbench.ensure_session",
"task.command.workbench.rename_session",
"task.command.workbench.set_session_unread",
"task.command.workbench.update_draft",
@ -20,6 +21,8 @@ export const TASK_QUEUE_NAMES = [
"task.command.workbench.send_message",
"task.command.workbench.stop_session",
"task.command.workbench.sync_session_status",
"task.command.workbench.refresh_derived",
"task.command.workbench.refresh_session_transcript",
"task.command.workbench.close_session",
"task.command.workbench.publish_pr",
"task.command.workbench.revert_file",

View file

@ -4,6 +4,17 @@ import { Loop } from "rivetkit/workflow";
import type {
AddRepoInput,
CreateTaskInput,
HistoryEvent,
HistoryQueryInput,
ListTasksInput,
ProviderId,
RepoOverview,
RepoRecord,
RepoStackActionInput,
RepoStackActionResult,
StarSandboxAgentRepoInput,
StarSandboxAgentRepoResult,
SwitchResult,
TaskRecord,
TaskSummary,
TaskWorkbenchChangeModelInput,
@ -14,20 +25,13 @@ import type {
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchUpdateDraftInput,
HistoryEvent,
HistoryQueryInput,
ListTasksInput,
ProviderId,
RepoOverview,
RepoStackActionInput,
RepoStackActionResult,
RepoRecord,
StarSandboxAgentRepoInput,
StarSandboxAgentRepoResult,
SwitchResult,
WorkbenchRepoSummary,
WorkbenchSessionSummary,
WorkbenchTaskSummary,
WorkspaceEvent,
WorkspaceSummarySnapshot,
WorkspaceUseInput,
} from "@sandbox-agent/foundry-shared";
import { getActorRuntimeContext } from "../context.js";
@ -35,7 +39,7 @@ import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "
import { logActorWarning, resolveErrorMessage } from "../logging.js";
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { taskLookup, repos, providerProfiles } from "./db/schema.js";
import { taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js";
import { agentTypeForModel } from "../task/workbench.js";
import { expectQueueResponse } from "../../services/queue.js";
import { workspaceAppActions } from "./app-shell.js";
@ -109,6 +113,18 @@ async function upsertTaskLookupRow(c: any, taskId: string, repoId: string): Prom
.run();
}
/**
 * Safely parses a JSON column value. Returns `fallback` for null, undefined,
 * or empty strings, and for strings that fail to parse, instead of throwing.
 */
function parseJsonValue<T>(value: string | null | undefined, fallback: T): T {
  if (value) {
    try {
      return JSON.parse(value) as T;
    } catch {
      // Malformed persisted JSON degrades to the fallback rather than crashing.
      return fallback;
    }
  }
  return fallback;
}
async function collectAllTaskSummaries(c: any): Promise<TaskSummary[]> {
const repoRows = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all();
@ -145,17 +161,55 @@ function repoLabelFromRemote(remoteUrl: string): string {
return remoteUrl;
}
async function buildWorkbenchSnapshot(c: any): Promise<TaskWorkbenchSnapshot> {
/**
 * Derives the sidebar repo summary (display label, task count, most recent
 * activity timestamp) from a repo row and the full set of task summaries.
 * Activity is at least the repo row's own updatedAt.
 */
function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepoSummary {
  const tasksForRepo = taskRows.filter((task) => task.repoId === repoRow.repoId);
  let latestActivityMs = repoRow.updatedAt;
  for (const task of tasksForRepo) {
    latestActivityMs = Math.max(latestActivityMs, task.updatedAtMs);
  }
  return {
    id: repoRow.repoId,
    label: repoLabelFromRemote(repoRow.remoteUrl),
    taskCount: tasksForRepo.length,
    latestActivityMs,
  };
}
/**
 * Flattens a WorkbenchTaskSummary into the persisted task_summaries row
 * shape, serializing the nested pullRequest/sessionsSummary structures into
 * JSON string columns.
 */
function taskSummaryRowFromSummary(taskSummary: WorkbenchTaskSummary) {
  const { id, repoId, title, status, repoName, updatedAtMs, branch } = taskSummary;
  return {
    taskId: id,
    repoId,
    title,
    status,
    repoName,
    updatedAtMs,
    branch,
    pullRequestJson: JSON.stringify(taskSummary.pullRequest),
    sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary),
  };
}
/**
 * Rehydrates a WorkbenchTaskSummary from a `task_summaries` row.
 * Malformed or missing JSON columns degrade to safe defaults (null pull
 * request, empty session list) instead of throwing.
 */
function taskSummaryFromRow(row: any): WorkbenchTaskSummary {
  const pullRequest = parseJsonValue(row.pullRequestJson, null);
  const sessionsSummary = parseJsonValue<WorkbenchSessionSummary[]>(row.sessionsSummaryJson, []);
  return {
    id: row.taskId,
    repoId: row.repoId,
    title: row.title,
    status: row.status,
    repoName: row.repoName,
    updatedAtMs: row.updatedAtMs,
    // Column is nullable; normalize undefined to null for the API shape.
    branch: row.branch ?? null,
    pullRequest,
    sessionsSummary,
  };
}
async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySnapshot> {
const repoRows = await c.db
.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt })
.from(repos)
.orderBy(desc(repos.updatedAt))
.all();
const tasks: Array<any> = [];
const projects: Array<any> = [];
const taskRows: WorkbenchTaskSummary[] = [];
for (const row of repoRows) {
const projectTasks: Array<any> = [];
try {
const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
const summaries = await project.listTaskSummaries({ includeArchived: true });
@ -163,11 +217,18 @@ async function buildWorkbenchSnapshot(c: any): Promise<TaskWorkbenchSnapshot> {
try {
await upsertTaskLookupRow(c, summary.taskId, row.repoId);
const task = getTask(c, c.state.workspaceId, row.repoId, summary.taskId);
const snapshot = await task.getWorkbench({});
tasks.push(snapshot);
projectTasks.push(snapshot);
const taskSummary = await task.getTaskSummary({});
taskRows.push(taskSummary);
await c.db
.insert(taskSummaries)
.values(taskSummaryRowFromSummary(taskSummary))
.onConflictDoUpdate({
target: taskSummaries.taskId,
set: taskSummaryRowFromSummary(taskSummary),
})
.run();
} catch (error) {
logActorWarning("workspace", "failed collecting workbench task", {
logActorWarning("workspace", "failed collecting task summary during reconciliation", {
workspaceId: c.state.workspaceId,
repoId: row.repoId,
taskId: summary.taskId,
@ -175,17 +236,8 @@ async function buildWorkbenchSnapshot(c: any): Promise<TaskWorkbenchSnapshot> {
});
}
}
if (projectTasks.length > 0) {
projects.push({
id: row.repoId,
label: repoLabelFromRemote(row.remoteUrl),
updatedAtMs: projectTasks[0]?.updatedAtMs ?? row.updatedAt,
tasks: projectTasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs),
});
}
} catch (error) {
logActorWarning("workspace", "failed collecting workbench repo snapshot", {
logActorWarning("workspace", "failed collecting repo during workbench reconciliation", {
workspaceId: c.state.workspaceId,
repoId: row.repoId,
error: resolveErrorMessage(error),
@ -193,16 +245,11 @@ async function buildWorkbenchSnapshot(c: any): Promise<TaskWorkbenchSnapshot> {
}
}
tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
projects.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
taskRows.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
return {
workspaceId: c.state.workspaceId,
repos: repoRows.map((row) => ({
id: row.repoId,
label: repoLabelFromRemote(row.remoteUrl),
})),
projects,
tasks,
repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs),
taskSummaries: taskRows,
};
}
@ -211,6 +258,41 @@ async function requireWorkbenchTask(c: any, taskId: string) {
return getTask(c, c.state.workspaceId, repoId, taskId);
}
/**
 * Reads the workspace sidebar snapshot from the workspace actor's local SQLite
 * only. Task actors push summary updates into `task_summaries`, so clients do
 * not need this action to fan out to every child actor on the hot read path.
 */
async function getWorkspaceSummarySnapshot(c: any): Promise<WorkspaceSummarySnapshot> {
  const repoRows = await c.db
    .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt })
    .from(repos)
    .orderBy(desc(repos.updatedAt))
    .all();
  const summaryRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all();
  const summaries = summaryRows.map(taskSummaryFromRow);
  // Repos are ordered by most recent activity (repo row or any of its tasks).
  const repoSummaries = repoRows
    .map((row) => buildRepoSummary(row, summaries))
    .sort((left, right) => right.latestActivityMs - left.latestActivityMs);
  return {
    workspaceId: c.state.workspaceId,
    repos: repoSummaries,
    taskSummaries: summaries,
  };
}
async function broadcastRepoSummary(
c: any,
type: "repoAdded" | "repoUpdated",
repoRow: { repoId: string; remoteUrl: string; updatedAt: number },
): Promise<void> {
const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all();
const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow));
c.broadcast("workspaceUpdated", { type, repo } satisfies WorkspaceEvent);
}
async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord> {
assertWorkspace(c, input.workspaceId);
@ -225,6 +307,7 @@ async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord>
const repoId = repoIdFromRemote(remoteUrl);
const now = Date.now();
const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, repoId)).get();
await c.db
.insert(repos)
@ -243,7 +326,11 @@ async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord>
})
.run();
await workspaceActions.notifyWorkbenchUpdated(c);
await broadcastRepoSummary(c, existing ? "repoUpdated" : "repoAdded", {
repoId,
remoteUrl,
updatedAt: now,
});
return {
workspaceId: c.state.workspaceId,
repoId,
@ -306,10 +393,20 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskR
})
.run();
const task = getTask(c, c.state.workspaceId, repoId, created.taskId);
await task.provision({ providerId });
try {
const task = getTask(c, c.state.workspaceId, repoId, created.taskId);
await workspaceActions.applyTaskSummaryUpdate(c, {
taskSummary: await task.getTaskSummary({}),
});
} catch (error) {
logActorWarning("workspace", "failed seeding task summary after task creation", {
workspaceId: c.state.workspaceId,
repoId,
taskId: created.taskId,
error: resolveErrorMessage(error),
});
}
await workspaceActions.notifyWorkbenchUpdated(c);
return created;
}
@ -465,13 +562,37 @@ export const workspaceActions = {
};
},
async getWorkbench(c: any, input: WorkspaceUseInput): Promise<TaskWorkbenchSnapshot> {
assertWorkspace(c, input.workspaceId);
return await buildWorkbenchSnapshot(c);
/**
* Called by task actors when their summary-level state changes.
* This is the write path for the local materialized projection; clients read
* the projection via `getWorkspaceSummary`, but only task actors should push
* rows into it.
*/
async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise<void> {
await c.db
.insert(taskSummaries)
.values(taskSummaryRowFromSummary(input.taskSummary))
.onConflictDoUpdate({
target: taskSummaries.taskId,
set: taskSummaryRowFromSummary(input.taskSummary),
})
.run();
c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies WorkspaceEvent);
},
async notifyWorkbenchUpdated(c: any): Promise<void> {
c.broadcast("workbenchUpdated", { at: Date.now() });
async removeTaskSummary(c: any, input: { taskId: string }): Promise<void> {
await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run();
c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies WorkspaceEvent);
},
async getWorkspaceSummary(c: any, input: WorkspaceUseInput): Promise<WorkspaceSummarySnapshot> {
assertWorkspace(c, input.workspaceId);
return await getWorkspaceSummarySnapshot(c);
},
async reconcileWorkbenchState(c: any, input: WorkspaceUseInput): Promise<WorkspaceSummarySnapshot> {
assertWorkspace(c, input.workspaceId);
return await reconcileWorkbenchProjection(c);
},
async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; tabId?: string }> {
@ -483,11 +604,8 @@ export const workspaceActions = {
...(input.branch ? { explicitBranchName: input.branch } : {}),
...(input.model ? { agentType: agentTypeForModel(input.model) } : {}),
});
const task = await requireWorkbenchTask(c, created.taskId);
const snapshot = await task.getWorkbench({});
return {
taskId: created.taskId,
tabId: snapshot.tabs[0]?.id,
};
},

File diff suppressed because it is too large Load diff

View file

@ -10,6 +10,18 @@ const journal = {
tag: "0000_melted_viper",
breakpoints: true,
},
{
idx: 1,
when: 1773638400000,
tag: "0001_auth_index_tables",
breakpoints: true,
},
{
idx: 2,
when: 1773720000000,
tag: "0002_task_summaries",
breakpoints: true,
},
],
} as const;
@ -113,6 +125,49 @@ CREATE TABLE \`task_lookup\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL
);
`,
m0001: `CREATE TABLE IF NOT EXISTS \`auth_session_index\` (
\`session_id\` text PRIMARY KEY NOT NULL,
\`session_token\` text NOT NULL,
\`user_id\` text NOT NULL,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_email_index\` (
\`email\` text PRIMARY KEY NOT NULL,
\`user_id\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_account_index\` (
\`id\` text PRIMARY KEY NOT NULL,
\`provider_id\` text NOT NULL,
\`account_id\` text NOT NULL,
\`user_id\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_verification\` (
\`id\` text PRIMARY KEY NOT NULL,
\`identifier\` text NOT NULL,
\`value\` text NOT NULL,
\`expires_at\` integer NOT NULL,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0002: `CREATE TABLE IF NOT EXISTS \`task_summaries\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL,
\`title\` text NOT NULL,
\`status\` text NOT NULL,
\`repo_name\` text NOT NULL,
\`updated_at_ms\` integer NOT NULL,
\`branch\` text,
\`pull_request_json\` text,
\`sessions_summary_json\` text DEFAULT '[]' NOT NULL
);
`,
} as const,
};

View file

@ -20,6 +20,23 @@ export const taskLookup = sqliteTable("task_lookup", {
repoId: text("repo_id").notNull(),
});
/**
 * Materialized sidebar projection maintained by task actors.
 * The source of truth still lives on each task actor; this table exists so
 * workspace reads can stay local and avoid fan-out across child actors.
 */
export const taskSummaries = sqliteTable("task_summaries", {
  // One row per task; upserted keyed on task id.
  taskId: text("task_id").notNull().primaryKey(),
  // Owning repo; used to group tasks under per-repo sidebar summaries.
  repoId: text("repo_id").notNull(),
  title: text("title").notNull(),
  status: text("status").notNull(),
  repoName: text("repo_name").notNull(),
  // Last-activity timestamp (epoch ms); reads order by this descending.
  updatedAtMs: integer("updated_at_ms").notNull(),
  // Nullable: a task may not have an associated branch.
  branch: text("branch"),
  // JSON-serialized pull request metadata; null when the task has no PR.
  pullRequestJson: text("pull_request_json"),
  // JSON-serialized array of session summaries; defaults to an empty array.
  sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"),
});
export const organizationProfile = sqliteTable("organization_profile", {
id: text("id").notNull().primaryKey(),
kind: text("kind").notNull(),
@ -74,23 +91,33 @@ export const invoices = sqliteTable("invoices", {
createdAt: integer("created_at").notNull(),
});
export const appSessions = sqliteTable("app_sessions", {
// Index mapping Better Auth session ids/tokens to the owning user id, so the
// auth adapter can route session lookups to the correct auth-user actor.
export const authSessionIndex = sqliteTable("auth_session_index", {
  sessionId: text("session_id").notNull().primaryKey(),
  sessionToken: text("session_token").notNull(),
  userId: text("user_id").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
// Index from (lowercased) email to user id; lets the auth adapter resolve
// user-by-email queries without scanning per-user actors.
export const authEmailIndex = sqliteTable("auth_email_index", {
  email: text("email").notNull().primaryKey(),
  userId: text("user_id").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
export const authAccountIndex = sqliteTable("auth_account_index", {
id: text("id").notNull().primaryKey(),
currentUserId: text("current_user_id"),
currentUserName: text("current_user_name"),
currentUserEmail: text("current_user_email"),
currentUserGithubLogin: text("current_user_github_login"),
currentUserRoleLabel: text("current_user_role_label"),
// Structured as a JSON array of eligible organization ids for the session.
eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(),
activeOrganizationId: text("active_organization_id"),
githubAccessToken: text("github_access_token"),
githubScope: text("github_scope").notNull(),
starterRepoStatus: text("starter_repo_status").notNull(),
starterRepoStarredAt: integer("starter_repo_starred_at"),
starterRepoSkippedAt: integer("starter_repo_skipped_at"),
oauthState: text("oauth_state"),
oauthStateExpiresAt: integer("oauth_state_expires_at"),
providerId: text("provider_id").notNull(),
accountId: text("account_id").notNull(),
userId: text("user_id").notNull(),
updatedAt: integer("updated_at").notNull(),
});
// Better Auth "verification" model rows (identifier/value pairs with expiry).
// Stored at the workspace level: the adapter routes the verification model to
// workspace actions rather than to a per-user actor.
export const authVerification = sqliteTable("auth_verification", {
  id: text("id").notNull().primaryKey(),
  identifier: text("identifier").notNull(),
  value: text("value").notNull(),
  expiresAt: integer("expires_at").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

View file

@ -10,6 +10,7 @@ import { createDefaultDriver } from "./driver.js";
import { createProviderRegistry } from "./providers/index.js";
import { createClient } from "rivetkit/client";
import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared";
import { initBetterAuthService } from "./services/better-auth.js";
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
import { APP_SHELL_WORKSPACE_ID } from "./actors/workspace/app-shell.js";
import { logger } from "./logging.js";
@ -39,33 +40,15 @@ interface AppWorkspaceLogContext {
xRealIp?: string;
}
/** Drops a single trailing "/" so origin strings compare consistently. */
function stripTrailingSlash(value: string): string {
  return value.endsWith("/") ? value.slice(0, -1) : value;
}
/** True when the request targets the Rivet mount: `/v1/rivet` or any subpath of it. */
function isRivetRequest(request: Request): boolean {
  const pathname = new URL(request.url).pathname;
  return pathname === "/v1/rivet" || pathname.startsWith("/v1/rivet/");
}
/** Classifies transient actor-transport failures worth retrying (not-ready actor, dropped socket). */
function isRetryableAppActorError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error);
  const transientMarkers = ["Actor not ready", "socket connection was closed unexpectedly"];
  return transientMarkers.some((marker) => message.includes(marker));
}
/**
 * Runs `run`, retrying up to `attempts` times with a fixed `delayMs` pause
 * between tries. Only errors classified by `isRetryableAppActorError` are
 * retried; any other error — and the final failed attempt — is rethrown
 * immediately.
 */
async function withRetries<T>(run: () => Promise<T>, attempts = 20, delayMs = 250): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= attempts; attempt += 1) {
    try {
      return await run();
    } catch (error) {
      lastError = error;
      const isFinalAttempt = attempt === attempts;
      if (isFinalAttempt || !isRetryableAppActorError(error)) {
        throw error;
      }
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  // Only reachable when attempts <= 0: surface whatever we last captured.
  throw lastError instanceof Error ? lastError : new Error(String(lastError));
}
export async function startBackend(options: BackendStartOptions = {}): Promise<void> {
// sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth.
// Normalize to keep local dev + docker-compose simple.
@ -94,11 +77,16 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
const providers = createProviderRegistry(config, driver);
const backends = await createBackends(config.notify);
const notifications = createNotificationService(backends);
initActorRuntimeContext(config, providers, notifications, driver, createDefaultAppShellServices());
const appShellServices = createDefaultAppShellServices();
initActorRuntimeContext(config, providers, notifications, driver, appShellServices);
const actorClient = createClient({
endpoint: `http://127.0.0.1:${config.backend.port}/v1/rivet`,
}) as any;
const betterAuth = initBetterAuthService(actorClient, {
apiUrl: appShellServices.apiUrl,
appUrl: appShellServices.appUrl,
});
const requestHeaderContext = (c: any): AppWorkspaceLogContext => ({
cfConnectingIp: c.req.header("cf-connecting-ip") ?? undefined,
@ -131,29 +119,18 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
"x-rivet-total-slots",
"x-rivet-runner-name",
"x-rivet-namespace-name",
"x-foundry-session",
];
const exposeHeaders = ["Content-Type", "x-foundry-session", "x-rivet-ray-id"];
app.use(
"/v1/*",
cors({
origin: (origin) => origin ?? "*",
credentials: true,
allowHeaders,
allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
exposeHeaders,
}),
);
app.use(
"/v1",
cors({
origin: (origin) => origin ?? "*",
credentials: true,
allowHeaders,
allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
exposeHeaders,
}),
);
const exposeHeaders = ["Content-Type", "x-rivet-ray-id"];
const allowedOrigins = new Set([stripTrailingSlash(appShellServices.appUrl), stripTrailingSlash(appShellServices.apiUrl)]);
const corsConfig = {
origin: (origin: string) => (allowedOrigins.has(origin) ? origin : null) as string | undefined | null,
credentials: true,
allowHeaders,
allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
exposeHeaders,
};
app.use("/v1/*", cors(corsConfig));
app.use("/v1", cors(corsConfig));
app.use("*", async (c, next) => {
const requestId = c.req.header("x-request-id")?.trim() || randomUUID();
const start = performance.now();
@ -190,6 +167,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
);
});
// Cache the app workspace actor handle for the lifetime of this backend process.
// The "app" workspace is a singleton coordinator for auth indexes, org state, and
// billing. Caching avoids repeated getOrCreate round-trips on every HTTP request.
let cachedAppWorkspace: any | null = null;
const appWorkspace = async (context: AppWorkspaceLogContext = {}) => {
@ -197,12 +177,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
const start = performance.now();
try {
const handle = await withRetries(
async () =>
await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
createWithInput: APP_SHELL_WORKSPACE_ID,
}),
);
const handle = await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
createWithInput: APP_SHELL_WORKSPACE_ID,
});
cachedAppWorkspace = handle;
logger.info(
{
@ -253,68 +230,70 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
sessionId,
});
const resolveSessionId = async (c: any): Promise<string> => {
const requested = c.req.header("x-foundry-session");
const { sessionId } = await appWorkspaceAction(
"ensureAppSession",
async (workspace) => await workspace.ensureAppSession(requested && requested.trim().length > 0 ? { requestedSessionId: requested } : {}),
requestLogContext(c),
);
c.header("x-foundry-session", sessionId);
return sessionId;
const resolveSessionId = async (c: any): Promise<string | null> => {
const session = await betterAuth.resolveSession(c.req.raw.headers);
return session?.session?.id ?? null;
};
app.get("/v1/app/snapshot", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.json({
auth: { status: "signed_out", currentUserId: null },
activeOrganizationId: null,
onboarding: {
starterRepo: {
repoFullName: "rivet-dev/sandbox-agent",
repoUrl: "https://github.com/rivet-dev/sandbox-agent",
status: "pending",
starredAt: null,
skippedAt: null,
},
},
users: [],
organizations: [],
});
}
return c.json(
await appWorkspaceAction("getAppSnapshot", async (workspace) => await workspace.getAppSnapshot({ sessionId }), requestLogContext(c, sessionId)),
);
});
app.get("/v1/auth/github/start", async (c) => {
const sessionId = await resolveSessionId(c);
const result = await appWorkspaceAction(
"startAppGithubAuth",
async (workspace) => await workspace.startAppGithubAuth({ sessionId }),
requestLogContext(c, sessionId),
);
return Response.redirect(result.url, 302);
app.all("/v1/auth/*", async (c) => {
return await betterAuth.auth.handler(c.req.raw);
});
const handleGithubAuthCallback = async (c: any) => {
// TEMPORARY: dump all request headers to diagnose duplicate callback requests
// (Railway nginx proxy_next_upstream? Cloudflare retry? browser?)
// Remove once root cause is identified.
const allHeaders: Record<string, string> = {};
c.req.raw.headers.forEach((value: string, key: string) => {
allHeaders[key] = value;
});
logger.info({ headers: allHeaders, url: c.req.url }, "github_callback_headers");
const code = c.req.query("code");
const state = c.req.query("state");
if (!code || !state) {
return c.text("Missing GitHub OAuth callback parameters", 400);
}
const result = await appWorkspaceAction(
"completeAppGithubAuth",
async (workspace) => await workspace.completeAppGithubAuth({ code, state }),
requestLogContext(c),
);
c.header("x-foundry-session", result.sessionId);
return Response.redirect(result.redirectTo, 302);
};
app.get("/v1/auth/github/callback", handleGithubAuthCallback);
app.get("/api/auth/callback/github", handleGithubAuthCallback);
app.post("/v1/app/sign-out", async (c) => {
const sessionId = await resolveSessionId(c);
return c.json(await appWorkspaceAction("signOutApp", async (workspace) => await workspace.signOutApp({ sessionId }), requestLogContext(c, sessionId)));
if (sessionId) {
const signOutResponse = await betterAuth.signOut(c.req.raw.headers);
const setCookie = signOutResponse.headers.get("set-cookie");
if (setCookie) {
c.header("set-cookie", setCookie);
}
}
return c.json({
auth: { status: "signed_out", currentUserId: null },
activeOrganizationId: null,
onboarding: {
starterRepo: {
repoFullName: "rivet-dev/sandbox-agent",
repoUrl: "https://github.com/rivet-dev/sandbox-agent",
status: "pending",
starredAt: null,
skippedAt: null,
},
},
users: [],
organizations: [],
});
});
app.post("/v1/app/onboarding/starter-repo/skip", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await appWorkspaceAction("skipAppStarterRepo", async (workspace) => await workspace.skipAppStarterRepo({ sessionId }), requestLogContext(c, sessionId)),
);
@ -322,6 +301,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/starter-repo/star", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await appWorkspaceAction(
"starAppStarterRepo",
@ -337,6 +319,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/select", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await appWorkspaceAction(
"selectAppOrganization",
@ -352,6 +337,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.patch("/v1/app/organizations/:organizationId/profile", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
const body = await c.req.json();
return c.json(
await appWorkspaceAction(
@ -371,6 +359,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/import", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await appWorkspaceAction(
"triggerAppRepoImport",
@ -386,6 +377,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/reconnect", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await appWorkspaceAction(
"beginAppGithubInstall",
@ -401,6 +395,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/billing/checkout", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
const body = await c.req.json().catch(() => ({}));
const planId = body?.planId === "free" || body?.planId === "team" ? (body.planId as FoundryBillingPlanId) : "team";
return c.json(
@ -414,11 +411,14 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.get("/v1/billing/checkout/complete", async (c) => {
const organizationId = c.req.query("organizationId");
const sessionId = c.req.query("foundrySession");
const checkoutSessionId = c.req.query("session_id");
if (!organizationId || !sessionId || !checkoutSessionId) {
if (!organizationId || !checkoutSessionId) {
return c.text("Missing Stripe checkout completion parameters", 400);
}
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
const result = await (await appWorkspace(requestLogContext(c, sessionId))).finalizeAppCheckoutSession({
organizationId,
sessionId,
@ -429,6 +429,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/billing/portal", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await (await appWorkspace(requestLogContext(c, sessionId))).createAppBillingPortalSession({
sessionId,
@ -439,6 +442,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/billing/cancel", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await (await appWorkspace(requestLogContext(c, sessionId))).cancelAppScheduledRenewal({
sessionId,
@ -449,6 +455,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/organizations/:organizationId/billing/resume", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await (await appWorkspace(requestLogContext(c, sessionId))).resumeAppSubscription({
sessionId,
@ -459,6 +468,9 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/app/workspaces/:workspaceId/seat-usage", async (c) => {
const sessionId = await resolveSessionId(c);
if (!sessionId) {
return c.text("Unauthorized", 401);
}
return c.json(
await (await appWorkspace(requestLogContext(c, sessionId))).recordAppSeatUsage({
sessionId,

View file

@ -2,4 +2,5 @@ import { createFoundryLogger } from "@sandbox-agent/foundry-shared";
// Shared process-wide logger for the backend; emits logfmt-formatted lines
// tagged with the "foundry-backend" service name.
export const logger = createFoundryLogger({
  service: "foundry-backend",
  format: "logfmt",
});

View file

@ -262,11 +262,11 @@ export class GitHubAppClient {
}
async listOrganizations(accessToken: string): Promise<GitHubOrgIdentity[]> {
const organizations = await this.paginate<{ id: number; login: string; description?: string | null }>("/user/orgs?per_page=100", accessToken);
const organizations = await this.paginate<{ id: number; login: string; name?: string | null }>("/user/orgs?per_page=100", accessToken);
return organizations.map((organization) => ({
id: String(organization.id),
login: organization.login,
name: organization.description?.trim() || organization.login,
name: organization.name?.trim() || organization.login,
}));
}

View file

@ -0,0 +1,533 @@
import { betterAuth } from "better-auth";
import { createAdapterFactory } from "better-auth/adapters";
import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js";
import { authUserKey, workspaceKey } from "../actors/keys.js";
import { logger } from "../logging.js";
const AUTH_BASE_PATH = "/v1/auth";
const SESSION_COOKIE = "better-auth.session_token";
let betterAuthService: BetterAuthService | null = null;
/**
 * Reads a required environment variable, trimming surrounding whitespace.
 * Throws when the variable is unset or blank so misconfiguration fails fast.
 */
function requireEnv(name: string): string {
  const raw = process.env[name];
  const value = raw?.trim();
  if (!value) {
    throw new Error(`${name} is required`);
  }
  return value;
}
/** Normalizes a URL string by removing at most one trailing slash. */
function stripTrailingSlash(value: string): string {
  if (value.endsWith("/")) {
    return value.slice(0, value.length - 1);
  }
  return value;
}
/** Builds request Headers carrying the Better Auth session cookie for server-side auth calls. */
function buildCookieHeaders(sessionToken: string): Headers {
  const headers = new Headers();
  headers.set("cookie", `${SESSION_COOKIE}=${encodeURIComponent(sessionToken)}`);
  return headers;
}
/**
 * Reads a Response body leniently: null for an empty body, the parsed value
 * for valid JSON, and the raw text otherwise. Never throws on parse failure.
 */
async function readJsonSafe(response: Response): Promise<any> {
  const text = await response.text();
  if (!text) return null;
  try {
    return JSON.parse(text);
  } catch {
    return text;
  }
}
/** Dispatches a synthetic Request at the Better Auth handler (internal server-side call). */
async function callAuthEndpoint(auth: any, url: string, init?: RequestInit): Promise<Response> {
  const request = new Request(url, init);
  return await auth.handler(request);
}
/**
 * Extracts a user id from an auth index lookup result. Accepts either a bare
 * string id or an object exposing `userId`/`id` (checked in that order);
 * returns null when no non-empty id can be found.
 */
function resolveRouteUserId(workspace: any, resolved: any): string | null {
  if (!resolved) {
    return null;
  }
  if (typeof resolved === "string") {
    return resolved;
  }
  for (const key of ["userId", "id"]) {
    const candidate = resolved[key];
    if (typeof candidate === "string" && candidate.length > 0) {
      return candidate;
    }
  }
  return null;
}
/**
 * Server-side facade over the Better Auth instance plus the helper queries the
 * backend routes need (session resolution, sign-out, token retrieval).
 */
export interface BetterAuthService {
  // Underlying Better Auth instance; `auth.handler` serves the auth routes.
  auth: any;
  // Resolves the session and user from incoming request headers, or null when signed out.
  resolveSession(headers: Headers): Promise<{ session: any; user: any } | null>;
  // Signs out via Better Auth; the returned Response carries the clearing set-cookie header.
  signOut(headers: Headers): Promise<Response>;
  getAuthState(sessionId: string): Promise<any | null>;
  upsertUserProfile(userId: string, patch: Record<string, unknown>): Promise<any>;
  setActiveOrganization(sessionId: string, activeOrganizationId: string | null): Promise<any>;
  // Returns the GitHub access token + scopes for the session's account, or null when unavailable.
  getAccessTokenForSession(sessionId: string): Promise<{ accessToken: string; scopes: string[] } | null>;
}
export function initBetterAuthService(actorClient: any, options: { apiUrl: string; appUrl: string }): BetterAuthService {
if (betterAuthService) {
return betterAuthService;
}
// getOrCreate is intentional here: the adapter runs during Better Auth callbacks
// which can fire before any explicit create path. The app workspace and auth user
// actors must exist by the time the adapter needs them.
const appWorkspace = () =>
actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
createWithInput: APP_SHELL_WORKSPACE_ID,
});
// getOrCreate is intentional: Better Auth creates user records during OAuth
// callbacks, so the auth-user actor must be lazily provisioned on first access.
const getAuthUser = async (userId: string) =>
await actorClient.authUser.getOrCreate(authUserKey(userId), {
createWithInput: { userId },
});
const adapter = createAdapterFactory({
config: {
adapterId: "rivetkit-actor",
adapterName: "RivetKit Actor Adapter",
supportsBooleans: false,
supportsDates: false,
supportsJSON: false,
},
adapter: ({ transformInput, transformOutput, transformWhereClause }) => {
// Determines which user actor owns the record targeted by an adapter query.
// Resolution order per model: an owner id present directly in the where
// clauses or payload wins; otherwise the workspace-level routing indexes
// (email, session token, account id) are consulted. Returns null when no
// owner can be determined.
const resolveUserIdForQuery = async (model: string, where?: any[], data?: Record<string, unknown>): Promise<string | null> => {
  // First equality value for `field` among the where clauses, if any.
  const whereValue = (field: string): unknown => (where ?? []).find((entry) => entry.field === field)?.value;
  const nonEmptyString = (value: unknown): value is string => typeof value === "string" && value.length > 0;
  switch (model) {
    case "user": {
      const candidateId = whereValue("id") ?? data?.id;
      if (nonEmptyString(candidateId)) {
        return candidateId;
      }
      const email = whereValue("email");
      if (!nonEmptyString(email)) {
        return null;
      }
      // Emails are indexed lowercased at write time; match that here.
      const workspace = await appWorkspace();
      const route = await workspace.authFindEmailIndex({ email: email.toLowerCase() });
      return resolveRouteUserId(workspace, route);
    }
    case "session": {
      const ownerId = whereValue("userId") ?? data?.userId;
      if (nonEmptyString(ownerId)) {
        return ownerId;
      }
      const sessionId = whereValue("id") ?? data?.id;
      const sessionToken = whereValue("token") ?? data?.token;
      // Either identifier is enough to consult the session routing index.
      if (typeof sessionId !== "string" && typeof sessionToken !== "string") {
        return null;
      }
      const workspace = await appWorkspace();
      const route = await workspace.authFindSessionIndex({
        ...(typeof sessionId === "string" ? { sessionId } : {}),
        ...(typeof sessionToken === "string" ? { sessionToken } : {}),
      });
      return resolveRouteUserId(workspace, route);
    }
    case "account": {
      const ownerId = whereValue("userId") ?? data?.userId;
      if (nonEmptyString(ownerId)) {
        return ownerId;
      }
      const recordId = whereValue("id") ?? data?.id;
      const providerId = whereValue("providerId") ?? data?.providerId;
      const accountId = whereValue("accountId") ?? data?.accountId;
      const workspace = await appWorkspace();
      // Prefer the primary-key lookup; fall back to (provider, accountId).
      if (nonEmptyString(recordId)) {
        const route = await workspace.authFindAccountIndex({ id: recordId });
        return resolveRouteUserId(workspace, route);
      }
      if (nonEmptyString(providerId) && nonEmptyString(accountId)) {
        const route = await workspace.authFindAccountIndex({ providerId, accountId });
        return resolveRouteUserId(workspace, route);
      }
      return null;
    }
    default:
      return null;
  }
};
const ensureWorkspaceVerification = async (method: string, payload: Record<string, unknown>) => {
const workspace = await appWorkspace();
return await workspace[method](payload);
};
return {
options: {
useDatabaseGeneratedIds: false,
},
// Creates an auth record. Verification rows go to the shared workspace actor;
// user/session/account rows live on the owning user's actor, and the
// workspace-level routing indexes (email, session token, account id) are
// updated afterwards so later queries can locate the owning actor.
create: async ({ model, data }) => {
const transformed = await transformInput(data, model, "create", true);
if (model === "verification") {
return await ensureWorkspaceVerification("authCreateVerification", { data: transformed });
}
// Creation requires a resolvable owner (for "user" the id comes from the
// payload since useDatabaseGeneratedIds is false).
const userId = await resolveUserIdForQuery(model, undefined, transformed);
if (!userId) {
throw new Error(`Unable to resolve auth actor for create(${model})`);
}
const userActor = await getAuthUser(userId);
const created = await userActor.createAuthRecord({ model, data: transformed });
const workspace = await appWorkspace();
// Index writes happen after the record exists; emails are lowercased and
// ids/tokens stringified to keep index keys canonical.
if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) {
await workspace.authUpsertEmailIndex({
email: transformed.email.toLowerCase(),
userId,
});
}
if (model === "session") {
await workspace.authUpsertSessionIndex({
sessionId: String(created.id),
sessionToken: String(created.token),
userId,
});
}
if (model === "account") {
await workspace.authUpsertAccountIndex({
id: String(created.id),
providerId: String(created.providerId),
accountId: String(created.accountId),
userId,
});
}
return (await transformOutput(created, model)) as any;
},
// Looks up a single auth record. Verification rows are served by the shared
// workspace actor; other models are routed to the owning user's actor, or
// null is returned when no owner can be resolved.
findOne: async ({ model, where, join }) => {
  const scopedWhere = transformWhereClause({ model, where, action: "findOne" });
  if (model === "verification") {
    return await ensureWorkspaceVerification("authFindOneVerification", { where: scopedWhere, join });
  }
  const ownerId = await resolveUserIdForQuery(model, scopedWhere);
  if (!ownerId) {
    return null;
  }
  const actor = await getAuthUser(ownerId);
  const record = await actor.findOneAuthRecord({ model, where: scopedWhere, join });
  if (!record) {
    return null;
  }
  return (await transformOutput(record, model, undefined, join)) as any;
},
// Lists auth records. Verification rows go to the workspace actor. Session
// queries using `token in [...]` may span multiple users, so tokens are
// resolved to owners via the session index and fanned out per user actor.
// All other queries are routed to a single owning user actor.
findMany: async ({ model, where, limit, sortBy, offset, join }) => {
const transformedWhere = transformWhereClause({ model, where, action: "findMany" });
if (model === "verification") {
return await ensureWorkspaceVerification("authFindManyVerification", {
where: transformedWhere,
limit,
sortBy,
offset,
join,
});
}
if (model === "session") {
const tokenClause = transformedWhere?.find((entry: any) => entry.field === "token" && entry.operator === "in");
if (tokenClause && Array.isArray(tokenClause.value)) {
// Map every token to its owning user via the workspace session index.
const workspace = await appWorkspace();
const resolved = await Promise.all(
(tokenClause.value as string[]).map(async (sessionToken: string) => ({
sessionToken,
route: await workspace.authFindSessionIndex({ sessionToken }),
})),
);
// Group tokens by owner so each user actor is queried exactly once.
const byUser = new Map<string, string[]>();
for (const item of resolved) {
if (!item.route?.userId) {
continue;
}
const tokens = byUser.get(item.route.userId) ?? [];
tokens.push(item.sessionToken);
byUser.set(item.route.userId, tokens);
}
const rows = [];
for (const [userId, tokens] of byUser) {
const userActor = await getAuthUser(userId);
// Narrow the `in` clause to only this user's tokens.
const scopedWhere = transformedWhere.map((entry: any) =>
entry.field === "token" && entry.operator === "in" ? { ...entry, value: tokens } : entry,
);
const found = await userActor.findManyAuthRecords({ model, where: scopedWhere, limit, sortBy, offset, join });
rows.push(...found);
}
return await Promise.all(rows.map(async (row: any) => await transformOutput(row, model, undefined, join)));
}
// Session queries without a token-in clause fall through to the
// single-owner path below.
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
if (!userId) {
return [];
}
const userActor = await getAuthUser(userId);
const found = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit, sortBy, offset, join });
return await Promise.all(found.map(async (row: any) => await transformOutput(row, model, undefined, join)));
},
// Updates one auth record on the owning user's actor and keeps the
// workspace-level routing indexes in sync (email, session token, account id).
// Verification rows are delegated to the shared workspace actor. Returns the
// transformed updated record, or null when nothing matched / no owner found.
update: async ({ model, where, update }) => {
  const transformedWhere = transformWhereClause({ model, where, action: "update" });
  const transformedUpdate = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
  if (model === "verification") {
    return await ensureWorkspaceVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate });
  }
  const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate);
  if (!userId) {
    return null;
  }
  const userActor = await getAuthUser(userId);
  // Only the user model consumes the pre-update snapshot (to detect an email
  // change and retire the stale email index entry). The previous code also
  // pre-read account/session records through three identical ternary
  // branches but never used those results.
  const before = model === "user" ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) : null;
  const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate });
  const workspace = await appWorkspace();
  if (model === "user" && updated) {
    if (before?.email && before.email !== updated.email) {
      await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() });
    }
    if (updated.email) {
      await workspace.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId });
    }
  }
  if (model === "session" && updated) {
    await workspace.authUpsertSessionIndex({
      sessionId: String(updated.id),
      sessionToken: String(updated.token),
      userId,
    });
  }
  if (model === "account" && updated) {
    await workspace.authUpsertAccountIndex({
      id: String(updated.id),
      providerId: String(updated.providerId),
      accountId: String(updated.accountId),
      userId,
    });
  }
  return updated ? ((await transformOutput(updated, model)) as any) : null;
},
// Applies a bulk update. Verification rows are handled by the workspace
// actor; other models are updated on the owning user's actor. Returns the
// count of updated records, or 0 when no owner can be resolved.
updateMany: async ({ model, where, update }) => {
  const scopedWhere = transformWhereClause({ model, where, action: "updateMany" });
  const patch = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
  if (model === "verification") {
    return await ensureWorkspaceVerification("authUpdateManyVerification", { where: scopedWhere, update: patch });
  }
  const ownerId = await resolveUserIdForQuery(model, scopedWhere, patch);
  if (!ownerId) {
    return 0;
  }
  const actor = await getAuthUser(ownerId);
  return await actor.updateManyAuthRecords({ model, where: scopedWhere, update: patch });
},
// Deletes one auth record and removes its workspace routing-index entries.
// The record is read BEFORE deletion so its index keys (session token,
// account ids, email) are still available for cleanup afterwards.
delete: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "delete" });
if (model === "verification") {
await ensureWorkspaceVerification("authDeleteVerification", { where: transformedWhere });
return;
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
if (!userId) {
// No owner resolvable: nothing to delete.
return;
}
const userActor = await getAuthUser(userId);
const workspace = await appWorkspace();
// Snapshot the record first; after deleteAuthRecord it is gone.
const before = await userActor.findOneAuthRecord({ model, where: transformedWhere });
await userActor.deleteAuthRecord({ model, where: transformedWhere });
if (model === "session" && before) {
await workspace.authDeleteSessionIndex({
sessionId: before.id,
sessionToken: before.token,
});
}
if (model === "account" && before) {
await workspace.authDeleteAccountIndex({
id: before.id,
providerId: before.providerId,
accountId: before.accountId,
});
}
if (model === "user" && before?.email) {
// Email index keys are stored lowercased.
await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() });
}
},
// Bulk-deletes auth records. Verification rows are delegated to the shared
// workspace actor. For sessions, the matching records are captured BEFORE
// deletion so their workspace session-index entries can be removed after.
// Returns the number of deleted records, or 0 when no owner is resolvable.
// (Owner resolution was previously duplicated in the session and generic
// paths; it is now done once up front with identical behavior.)
deleteMany: async ({ model, where }) => {
  const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" });
  if (model === "verification") {
    return await ensureWorkspaceVerification("authDeleteManyVerification", { where: transformedWhere });
  }
  const userId = await resolveUserIdForQuery(model, transformedWhere);
  if (!userId) {
    return 0;
  }
  const userActor = await getAuthUser(userId);
  if (model === "session") {
    const workspace = await appWorkspace();
    // Snapshot (bounded to 5000) before deleting so index keys survive.
    const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 });
    const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere });
    for (const session of sessions) {
      await workspace.authDeleteSessionIndex({
        sessionId: session.id,
        sessionToken: session.token,
      });
    }
    return deleted;
  }
  return await userActor.deleteManyAuthRecords({ model, where: transformedWhere });
},
// Counts matching auth records: verification on the workspace actor,
// everything else on the owning user's actor (0 when no owner resolves).
count: async ({ model, where }) => {
  const scopedWhere = transformWhereClause({ model, where, action: "count" });
  if (model === "verification") {
    return await ensureWorkspaceVerification("authCountVerification", { where: scopedWhere });
  }
  const ownerId = await resolveUserIdForQuery(model, scopedWhere);
  if (!ownerId) {
    return 0;
  }
  const actor = await getAuthUser(ownerId);
  return await actor.countAuthRecords({ model, where: scopedWhere });
},
};
},
});
// Better Auth instance backed by the actor-routing adapter above.
const auth = betterAuth({
// BETTER_AUTH_URL overrides the API URL (e.g. behind a proxy).
baseURL: stripTrailingSlash(process.env.BETTER_AUTH_URL ?? options.apiUrl),
basePath: AUTH_BASE_PATH,
secret: requireEnv("BETTER_AUTH_SECRET"),
database: adapter,
// Both the app and API origins may initiate auth flows.
trustedOrigins: [stripTrailingSlash(options.appUrl), stripTrailingSlash(options.apiUrl)],
session: {
// Cache session data in a cookie for 5 minutes to avoid hitting the
// adapter (and therefore the actors) on every request.
cookieCache: {
enabled: true,
maxAge: 5 * 60,
strategy: "compact",
},
},
socialProviders: {
github: {
clientId: requireEnv("GITHUB_CLIENT_ID"),
clientSecret: requireEnv("GITHUB_CLIENT_SECRET"),
// read:org for org membership sync, repo for repository access.
scope: ["read:org", "repo"],
// Optional explicit callback override; falls back to Better Auth default.
redirectURI: process.env.GITHUB_REDIRECT_URI || undefined,
},
},
});
// Module-level singleton exposing the auth operations the app layer needs.
betterAuthService = {
auth,
// Resolves the session for an incoming request's headers (cookie-based).
async resolveSession(headers: Headers) {
return (await auth.api.getSession({ headers })) ?? null;
},
// Invokes Better Auth's sign-out endpoint with the caller's headers so the
// session cookie is cleared server-side.
async signOut(headers: Headers) {
return await callAuthEndpoint(auth, `${stripTrailingSlash(process.env.BETTER_AUTH_URL ?? options.apiUrl)}${AUTH_BASE_PATH}/sign-out`, {
method: "POST",
headers,
});
},
// Loads the app-level auth state for a session id by routing through the
// workspace session index to the owning user actor.
async getAuthState(sessionId: string) {
const workspace = await appWorkspace();
const route = await workspace.authFindSessionIndex({ sessionId });
if (!route?.userId) {
return null;
}
const userActor = await getAuthUser(route.userId);
return await userActor.getAppAuthState({ sessionId });
},
// Merges a partial profile patch into the user's stored profile.
async upsertUserProfile(userId: string, patch: Record<string, unknown>) {
const userActor = await getAuthUser(userId);
return await userActor.upsertUserProfile({ userId, patch });
},
// Records the organization the session is acting as (null clears it).
async setActiveOrganization(sessionId: string, activeOrganizationId: string | null) {
const authState = await this.getAuthState(sessionId);
if (!authState?.user?.id) {
throw new Error(`Unknown auth session ${sessionId}`);
}
const userActor = await getAuthUser(authState.user.id);
return await userActor.upsertSessionState({ sessionId, activeOrganizationId });
},
async getAccessTokenForSession(sessionId: string) {
// Read the GitHub access token directly from the account record stored in the
// auth user actor. Better Auth's internal /get-access-token endpoint requires
// session middleware resolution which fails for server-side internal calls (403),
// so we bypass it and read the stored token from our adapter layer directly.
const authState = await this.getAuthState(sessionId);
if (!authState?.user?.id || !authState?.accounts) {
return null;
}
const githubAccount = authState.accounts.find((account: any) => account.providerId === "github");
if (!githubAccount?.accessToken) {
logger.warn({ sessionId, userId: authState.user.id }, "get_access_token_no_github_account");
return null;
}
return {
accessToken: githubAccount.accessToken,
// GitHub stores scopes as a comma- and/or space-separated string.
scopes: githubAccount.scope ? githubAccount.scope.split(/[, ]+/) : [],
};
},
};
return betterAuthService;
}
/**
 * Returns the initialized BetterAuth service singleton.
 * Throws when called before initialization has completed.
 */
export function getBetterAuthService(): BetterAuthService {
  const service = betterAuthService;
  if (service) {
    return service;
  }
  throw new Error("BetterAuth service is not initialized");
}

View file

@ -15,10 +15,12 @@
},
"dependencies": {
"@sandbox-agent/foundry-shared": "workspace:*",
"react": "^19.1.1",
"rivetkit": "2.1.6",
"sandbox-agent": "workspace:*"
},
"devDependencies": {
"@types/react": "^19.1.12",
"tsup": "^8.5.0"
}
}

View file

@ -6,6 +6,9 @@ import type {
FoundryAppSnapshot,
FoundryBillingPlanId,
CreateTaskInput,
AppEvent,
SessionEvent,
SandboxProcessesEvent,
TaskRecord,
TaskSummary,
TaskWorkbenchChangeModelInput,
@ -20,6 +23,12 @@ import type {
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchUpdateDraftInput,
TaskEvent,
WorkbenchTaskDetail,
WorkbenchTaskSummary,
WorkbenchSessionDetail,
WorkspaceEvent,
WorkspaceSummarySnapshot,
HistoryEvent,
HistoryQueryInput,
ProviderId,
@ -34,18 +43,10 @@ import type {
} from "@sandbox-agent/foundry-shared";
import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
import { createMockBackendClient } from "./mock/backend-client.js";
import { sandboxInstanceKey, workspaceKey } from "./keys.js";
import { sandboxInstanceKey, taskKey, workspaceKey } from "./keys.js";
export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill";
type RivetMetadataResponse = {
runtime?: string;
actorNames?: Record<string, unknown>;
clientEndpoint?: string;
clientNamespace?: string;
clientToken?: string;
};
export interface SandboxSessionRecord {
id: string;
agent: string;
@ -68,7 +69,14 @@ export interface SandboxSessionEventRecord {
export type SandboxProcessRecord = ProcessInfo;
export interface ActorConn {
on(event: string, listener: (payload: any) => void): () => void;
onError(listener: (error: unknown) => void): () => void;
dispose(): Promise<void>;
}
interface WorkspaceHandle {
connect(): ActorConn;
addRepo(input: AddRepoInput): Promise<RepoRecord>;
listRepos(input: { workspaceId: string }): Promise<RepoRecord[]>;
createTask(input: CreateTaskInput): Promise<TaskRecord>;
@ -86,7 +94,10 @@ interface WorkspaceHandle {
killTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise<void>;
useWorkspace(input: { workspaceId: string }): Promise<{ workspaceId: string }>;
starSandboxAgentRepo(input: StarSandboxAgentRepoInput): Promise<StarSandboxAgentRepoResult>;
getWorkbench(input: { workspaceId: string }): Promise<TaskWorkbenchSnapshot>;
getWorkspaceSummary(input: { workspaceId: string }): Promise<WorkspaceSummarySnapshot>;
applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise<void>;
removeTaskSummary(input: { taskId: string }): Promise<void>;
reconcileWorkbenchState(input: { workspaceId: string }): Promise<WorkspaceSummarySnapshot>;
createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse>;
markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise<void>;
renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise<void>;
@ -103,7 +114,15 @@ interface WorkspaceHandle {
revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise<void>;
}
interface TaskHandle {
getTaskSummary(): Promise<WorkbenchTaskSummary>;
getTaskDetail(): Promise<WorkbenchTaskDetail>;
getSessionDetail(input: { sessionId: string }): Promise<WorkbenchSessionDetail>;
connect(): ActorConn;
}
interface SandboxInstanceHandle {
connect(): ActorConn;
createSession(input: {
prompt: string;
cwd?: string;
@ -127,6 +146,10 @@ interface RivetClient {
workspace: {
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): WorkspaceHandle;
};
task: {
get(key?: string | string[]): TaskHandle;
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskHandle;
};
sandboxInstance: {
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): SandboxInstanceHandle;
};
@ -138,16 +161,12 @@ export interface BackendClientOptions {
mode?: "remote" | "mock";
}
export interface BackendMetadata {
runtime?: string;
actorNames?: Record<string, unknown>;
clientEndpoint?: string;
clientNamespace?: string;
clientToken?: string;
}
export interface BackendClient {
getAppSnapshot(): Promise<FoundryAppSnapshot>;
connectWorkspace(workspaceId: string): Promise<ActorConn>;
connectTask(workspaceId: string, repoId: string, taskId: string): Promise<ActorConn>;
connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<ActorConn>;
subscribeApp(listener: () => void): () => void;
signInWithGithub(): Promise<void>;
signOutApp(): Promise<FoundryAppSnapshot>;
skipAppStarterRepo(): Promise<FoundryAppSnapshot>;
@ -237,6 +256,9 @@ export interface BackendClient {
sandboxId: string,
): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>;
getSandboxAgentConnection(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>;
getWorkspaceSummary(workspaceId: string): Promise<WorkspaceSummarySnapshot>;
getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise<WorkbenchTaskDetail>;
getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail>;
getWorkbench(workspaceId: string): Promise<TaskWorkbenchSnapshot>;
subscribeWorkbench(workspaceId: string, listener: () => void): () => void;
createWorkbenchTask(workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse>;
@ -295,118 +317,6 @@ function deriveBackendEndpoints(endpoint: string): { appEndpoint: string; rivetE
};
}
// True when the hostname refers to the local machine (case-insensitive).
function isLoopbackHost(hostname: string): boolean {
  const normalized = hostname.toLowerCase();
  return ["127.0.0.1", "localhost", "0.0.0.0", "::1"].includes(normalized);
}
// If the manager advertises a loopback endpoint, rewrite host/protocol to
// match fallbackOrigin so remote clients can reach it; the advertised port is
// preserved. Non-loopback endpoints pass through unchanged. A trailing slash
// is always stripped from the result.
function rewriteLoopbackClientEndpoint(clientEndpoint: string, fallbackOrigin: string): string {
  const target = new URL(clientEndpoint);
  if (isLoopbackHost(target.hostname)) {
    const origin = new URL(fallbackOrigin);
    // Keep the manager port from clientEndpoint; only host/protocol change.
    target.hostname = origin.hostname;
    target.protocol = origin.protocol;
  }
  return target.toString().replace(/\/$/, "");
}
// Fetches a URL and parses the body as JSON, aborting the request after
// timeoutMs. Throws on non-2xx responses and on abort; the timer is always
// cleared, including on the error paths.
async function fetchJsonWithTimeout(url: string, timeoutMs: number): Promise<unknown> {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), timeoutMs);
try {
const res = await fetch(url, { signal: controller.signal });
if (!res.ok) {
throw new Error(`request failed: ${res.status} ${res.statusText}`);
}
return (await res.json()) as unknown;
} finally {
clearTimeout(timeout);
}
}
// Polls the backend's /metadata endpoint until a request succeeds, retrying
// with exponential backoff (250ms doubling, capped at 2s) up to an overall
// opts.timeoutMs budget; the last error is rethrown once the budget expires.
// Unknown/malformed response bodies yield an empty metadata object.
async function fetchMetadataWithRetry(
endpoint: string,
namespace: string | undefined,
opts: { timeoutMs: number; requestTimeoutMs: number },
): Promise<RivetMetadataResponse> {
const base = new URL(endpoint);
base.pathname = base.pathname.replace(/\/$/, "") + "/metadata";
if (namespace) {
base.searchParams.set("namespace", namespace);
}
const start = Date.now();
let delayMs = 250;
// Keep this bounded: callers (UI/CLI) should not hang forever if the backend is down.
for (;;) {
try {
const json = await fetchJsonWithTimeout(base.toString(), opts.requestTimeoutMs);
if (!json || typeof json !== "object") return {};
const data = json as Record<string, unknown>;
// Only copy fields whose runtime type matches; anything else is dropped.
return {
runtime: typeof data.runtime === "string" ? data.runtime : undefined,
actorNames: data.actorNames && typeof data.actorNames === "object" ? (data.actorNames as Record<string, unknown>) : undefined,
clientEndpoint: typeof data.clientEndpoint === "string" ? data.clientEndpoint : undefined,
clientNamespace: typeof data.clientNamespace === "string" ? data.clientNamespace : undefined,
clientToken: typeof data.clientToken === "string" ? data.clientToken : undefined,
};
} catch (err) {
if (Date.now() - start > opts.timeoutMs) {
throw err;
}
await new Promise((r) => setTimeout(r, delayMs));
delayMs = Math.min(delayMs * 2, 2_000);
}
}
}
// Performs a single (no-retry) fetch of the backend's /metadata endpoint and
// returns a type-checked BackendMetadata; fields with unexpected runtime
// types are omitted, and a non-object body yields an empty result.
export async function readBackendMetadata(input: { endpoint: string; namespace?: string; timeoutMs?: number }): Promise<BackendMetadata> {
  const url = new URL(input.endpoint);
  url.pathname = `${url.pathname.replace(/\/$/, "")}/metadata`;
  if (input.namespace) {
    url.searchParams.set("namespace", input.namespace);
  }
  const json = await fetchJsonWithTimeout(url.toString(), input.timeoutMs ?? 4_000);
  if (!json || typeof json !== "object") {
    return {};
  }
  const data = json as Record<string, unknown>;
  const asString = (value: unknown): string | undefined => (typeof value === "string" ? value : undefined);
  return {
    runtime: asString(data.runtime),
    actorNames: data.actorNames && typeof data.actorNames === "object" ? (data.actorNames as Record<string, unknown>) : undefined,
    clientEndpoint: asString(data.clientEndpoint),
    clientNamespace: asString(data.clientNamespace),
    clientToken: asString(data.clientToken),
  };
}
// Health probe: the backend is considered up when its metadata reports the
// rivetkit runtime and a non-empty actor registry. Any fetch/parse error
// (timeout, bad status, malformed body) is treated as unhealthy.
export async function checkBackendHealth(input: { endpoint: string; namespace?: string; timeoutMs?: number }): Promise<boolean> {
  try {
    const { runtime, actorNames } = await readBackendMetadata(input);
    return runtime === "rivetkit" && Boolean(actorNames);
  } catch {
    return false;
  }
}
// Reachability check for a candidate endpoint: returns true when /metadata
// answers with parseable JSON within timeoutMs, false on any failure.
async function probeMetadataEndpoint(endpoint: string, namespace: string | undefined, timeoutMs: number): Promise<boolean> {
  try {
    const probeUrl = new URL(endpoint);
    probeUrl.pathname = `${probeUrl.pathname.replace(/\/$/, "")}/metadata`;
    if (namespace) {
      probeUrl.searchParams.set("namespace", namespace);
    }
    await fetchJsonWithTimeout(probeUrl.toString(), timeoutMs);
    return true;
  } catch {
    return false;
  }
}
export function createBackendClient(options: BackendClientOptions): BackendClient {
if (options.mode === "mock") {
return createMockBackendClient(options.defaultWorkspaceId);
@ -415,8 +325,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
const endpoints = deriveBackendEndpoints(options.endpoint);
const rivetApiEndpoint = endpoints.rivetEndpoint;
const appApiEndpoint = endpoints.appEndpoint;
let clientPromise: Promise<RivetClient> | null = null;
let appSessionId = typeof window !== "undefined" ? window.localStorage.getItem("sandbox-agent-foundry:remote-app-session") : null;
const client = createClient({ endpoint: rivetApiEndpoint }) as unknown as RivetClient;
const workbenchSubscriptions = new Map<
string,
{
@ -431,34 +340,13 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
disposeConnPromise: Promise<(() => Promise<void>) | null> | null;
}
>();
const persistAppSessionId = (nextSessionId: string | null): void => {
appSessionId = nextSessionId;
if (typeof window === "undefined") {
return;
}
if (nextSessionId) {
window.localStorage.setItem("sandbox-agent-foundry:remote-app-session", nextSessionId);
} else {
window.localStorage.removeItem("sandbox-agent-foundry:remote-app-session");
}
const appSubscriptions = {
listeners: new Set<() => void>(),
disposeConnPromise: null as Promise<(() => Promise<void>) | null> | null,
};
if (typeof window !== "undefined") {
const url = new URL(window.location.href);
const sessionFromUrl = url.searchParams.get("foundrySession");
if (sessionFromUrl) {
persistAppSessionId(sessionFromUrl);
url.searchParams.delete("foundrySession");
window.history.replaceState({}, document.title, `${url.pathname}${url.search}${url.hash}`);
}
}
const appRequest = async <T>(path: string, init?: RequestInit): Promise<T> => {
const headers = new Headers(init?.headers);
if (appSessionId) {
headers.set("x-foundry-session", appSessionId);
}
if (init?.body && !headers.has("Content-Type")) {
headers.set("Content-Type", "application/json");
}
@ -468,10 +356,6 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
headers,
credentials: "include",
});
const nextSessionId = res.headers.get("x-foundry-session");
if (nextSessionId) {
persistAppSessionId(nextSessionId);
}
if (!res.ok) {
throw new Error(`app request failed: ${res.status} ${res.statusText}`);
}
@ -485,51 +369,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
}
};
const getClient = async (): Promise<RivetClient> => {
if (clientPromise) {
return clientPromise;
}
clientPromise = (async () => {
// Use the serverless /metadata endpoint to discover the manager endpoint.
// If the server reports a loopback clientEndpoint (127.0.0.1), rewrite to the same host
// as the configured endpoint so remote browsers/clients can connect.
const configured = new URL(rivetApiEndpoint);
const configuredOrigin = `${configured.protocol}//${configured.host}`;
const initialNamespace = undefined;
const metadata = await fetchMetadataWithRetry(rivetApiEndpoint, initialNamespace, {
timeoutMs: 30_000,
requestTimeoutMs: 8_000,
});
// Candidate endpoint: manager endpoint if provided, otherwise stick to the configured endpoint.
const candidateEndpoint = metadata.clientEndpoint ? rewriteLoopbackClientEndpoint(metadata.clientEndpoint, configuredOrigin) : rivetApiEndpoint;
// If the manager port isn't reachable from this client (common behind reverse proxies),
// fall back to the configured serverless endpoint to avoid hanging requests.
const shouldUseCandidate = metadata.clientEndpoint ? await probeMetadataEndpoint(candidateEndpoint, metadata.clientNamespace, 1_500) : true;
const resolvedEndpoint = shouldUseCandidate ? candidateEndpoint : rivetApiEndpoint;
return createClient({
endpoint: resolvedEndpoint,
namespace: metadata.clientNamespace,
token: metadata.clientToken,
// Prevent rivetkit from overriding back to a loopback endpoint (or to an unreachable manager).
disableMetadataLookup: true,
}) as unknown as RivetClient;
})();
return clientPromise;
};
const workspace = async (workspaceId: string): Promise<WorkspaceHandle> =>
(await getClient()).workspace.getOrCreate(workspaceKey(workspaceId), {
client.workspace.getOrCreate(workspaceKey(workspaceId), {
createWithInput: workspaceId,
});
const task = async (workspaceId: string, repoId: string, taskId: string): Promise<TaskHandle> => client.task.get(taskKey(workspaceId, repoId, taskId));
const sandboxByKey = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<SandboxInstanceHandle> => {
const client = await getClient();
return (client as any).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId));
};
@ -557,7 +404,6 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
(sb as any).sandboxActorId.length > 0,
) as { sandboxActorId?: string } | undefined;
if (sandbox?.sandboxActorId) {
const client = await getClient();
return (client as any).sandboxInstance.getForId(sandbox.sandboxActorId);
}
} catch (error) {
@ -593,6 +439,91 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
}
};
// Opens a realtime connection to the workspace actor for workspaceId.
const connectWorkspace = async (workspaceId: string): Promise<ActorConn> => (await workspace(workspaceId)).connect() as ActorConn;
// Opens a realtime connection to the task actor identified by the triple.
const connectTask = async (workspaceId: string, repoId: string, taskIdValue: string): Promise<ActorConn> =>
  (await task(workspaceId, repoId, taskIdValue)).connect() as ActorConn;
// Connects to a sandbox actor. The deterministic key lookup is tried first;
// when that actor does not exist, fall back to resolving the actor id
// recorded on the task. Any non-"not found" error propagates unchanged.
const connectSandbox = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<ActorConn> => {
try {
return (await sandboxByKey(workspaceId, providerId, sandboxId)).connect() as ActorConn;
} catch (error) {
if (!isActorNotFoundError(error)) {
throw error;
}
const fallback = await sandboxByActorIdFromTask(workspaceId, providerId, sandboxId);
if (!fallback) {
// No fallback available: surface the original not-found error.
throw error;
}
return fallback.connect() as ActorConn;
}
};
// Compatibility shim: reconstructs the legacy TaskWorkbenchSnapshot shape by
// fanning out to the new per-task actors (summary -> task detail -> per-
// session detail) and stitching the results back together.
const getWorkbenchCompat = async (workspaceId: string): Promise<TaskWorkbenchSnapshot> => {
const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId });
const tasks = await Promise.all(
summary.taskSummaries.map(async (taskSummary) => {
const detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail();
// Fetch full session details in parallel, keyed by session id.
const sessionDetails = await Promise.all(
detail.sessionsSummary.map(async (session) => {
const full = await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id });
return [session.id, full] as const;
}),
);
const sessionDetailsById = new Map(sessionDetails);
return {
id: detail.id,
repoId: detail.repoId,
title: detail.title,
status: detail.status,
repoName: detail.repoName,
updatedAtMs: detail.updatedAtMs,
branch: detail.branch,
pullRequest: detail.pullRequest,
// Legacy "tabs" are session summaries enriched with the full detail's
// draft and transcript (empty defaults if the detail is missing).
tabs: detail.sessionsSummary.map((session) => {
const full = sessionDetailsById.get(session.id);
return {
id: session.id,
sessionId: session.sessionId,
sessionName: session.sessionName,
agent: session.agent,
model: session.model,
status: session.status,
thinkingSinceMs: session.thinkingSinceMs,
unread: session.unread,
created: session.created,
draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null },
transcript: full?.transcript ?? [],
};
}),
fileChanges: detail.fileChanges,
diffs: detail.diffs,
fileTree: detail.fileTree,
minutesUsed: detail.minutesUsed,
};
}),
);
// Group tasks by repo; a repo's updatedAtMs is the max of its tasks' times
// and its own latest activity. Repos with no tasks are dropped from
// projects (but still listed in `repos` below).
const projects = summary.repos
.map((repo) => ({
id: repo.id,
label: repo.label,
updatedAtMs: tasks.filter((task) => task.repoId === repo.id).reduce((latest, task) => Math.max(latest, task.updatedAtMs), repo.latestActivityMs),
tasks: tasks.filter((task) => task.repoId === repo.id).sort((left, right) => right.updatedAtMs - left.updatedAtMs),
}))
.filter((repo) => repo.tasks.length > 0);
return {
workspaceId,
repos: summary.repos.map((repo) => ({ id: repo.id, label: repo.label })),
projects,
tasks: tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs),
};
};
const subscribeWorkbench = (workspaceId: string, listener: () => void): (() => void) => {
let entry = workbenchSubscriptions.get(workspaceId);
if (!entry) {
@ -698,17 +629,74 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
};
};
// Subscribes a listener to app-level updates. All listeners share ONE actor
// connection to the "app" workspace: the connection is lazily created on the
// first subscriber and torn down when the last subscriber unsubscribes.
const subscribeApp = (listener: () => void): (() => void) => {
appSubscriptions.listeners.add(listener);
if (!appSubscriptions.disposeConnPromise) {
// First subscriber: open the shared connection. The promise resolves to
// a disposer (or null if connecting failed).
appSubscriptions.disposeConnPromise = (async () => {
const handle = await workspace("app");
const conn = (handle as any).connect();
const unsubscribeEvent = conn.on("appUpdated", () => {
// Copy the set so listeners that unsubscribe mid-notify are safe.
for (const currentListener of [...appSubscriptions.listeners]) {
currentListener();
}
});
const unsubscribeError = conn.onError(() => {});
return async () => {
unsubscribeEvent();
unsubscribeError();
await conn.dispose();
};
})().catch(() => null);
}
return () => {
appSubscriptions.listeners.delete(listener);
if (appSubscriptions.listeners.size > 0) {
return;
}
// Last subscriber gone: dispose the shared connection (fire-and-forget)
// and clear the slot so the next subscribe reconnects.
void appSubscriptions.disposeConnPromise?.then(async (disposeConn) => {
await disposeConn?.();
});
appSubscriptions.disposeConnPromise = null;
};
};
return {
async getAppSnapshot(): Promise<FoundryAppSnapshot> {
return await appRequest<FoundryAppSnapshot>("/app/snapshot");
},
async connectWorkspace(workspaceId: string): Promise<ActorConn> {
return await connectWorkspace(workspaceId);
},
async connectTask(workspaceId: string, repoId: string, taskIdValue: string): Promise<ActorConn> {
return await connectTask(workspaceId, repoId, taskIdValue);
},
async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<ActorConn> {
return await connectSandbox(workspaceId, providerId, sandboxId);
},
subscribeApp(listener: () => void): () => void {
return subscribeApp(listener);
},
async signInWithGithub(): Promise<void> {
const callbackURL = typeof window !== "undefined" ? `${window.location.origin}/organizations` : `${appApiEndpoint.replace(/\/$/, "")}/organizations`;
const response = await appRequest<{ url: string; redirect?: boolean }>("/auth/sign-in/social", {
method: "POST",
body: JSON.stringify({
provider: "github",
callbackURL,
disableRedirect: true,
}),
});
if (typeof window !== "undefined") {
window.location.assign(`${appApiEndpoint}/auth/github/start`);
return;
window.location.assign(response.url);
}
await redirectTo("/auth/github/start");
},
async signOutApp(): Promise<FoundryAppSnapshot> {
@ -1009,8 +997,20 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.sandboxAgentConnection());
},
async getWorkspaceSummary(workspaceId: string): Promise<WorkspaceSummarySnapshot> {
return (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId });
},
async getTaskDetail(workspaceId: string, repoId: string, taskIdValue: string): Promise<WorkbenchTaskDetail> {
return (await task(workspaceId, repoId, taskIdValue)).getTaskDetail();
},
async getSessionDetail(workspaceId: string, repoId: string, taskIdValue: string, sessionId: string): Promise<WorkbenchSessionDetail> {
return (await task(workspaceId, repoId, taskIdValue)).getSessionDetail({ sessionId });
},
async getWorkbench(workspaceId: string): Promise<TaskWorkbenchSnapshot> {
  // NOTE(review): an unreachable `return await getWorkbenchCompat(workspaceId)`
  // sat after this return (stripped-diff residue) and has been removed to keep
  // the executed behavior unchanged. Confirm whether the actor call kept here
  // or the compat shim is the intended implementation.
  return (await workspace(workspaceId)).getWorkbench({ workspaceId });
},
subscribeWorkbench(workspaceId: string, listener: () => void): () => void {

View file

@ -1,5 +1,10 @@
// Public surface of the foundry client package. Everything consumers need —
// the HTTP/actor backend client, the interest-manager subsystem (shared
// realtime subscriptions), mocks, cache keys, and view-model helpers — is
// re-exported from this single module.
export * from "./app-client.js";
export * from "./backend-client.js";
export * from "./interest/manager.js";
export * from "./interest/mock-manager.js";
export * from "./interest/remote-manager.js";
export * from "./interest/topics.js";
export * from "./interest/use-interest.js";
export * from "./keys.js";
export * from "./mock-app.js";
export * from "./view-model.js";

View file

@ -0,0 +1,24 @@
import type { TopicData, TopicKey, TopicParams } from "./topics.js";
// Lifecycle of a topic subscription: "loading" until the initial fetch
// resolves, "connected" once live, "error" after a connect/fetch failure.
export type TopicStatus = "loading" | "connected" | "error";
// Snapshot handed to subscribers of a single topic.
export interface TopicState<K extends TopicKey> {
  // Materialized topic data; undefined until the initial fetch completes.
  data: TopicData<K> | undefined;
  // Current connection/fetch status for the topic.
  status: TopicStatus;
  // Last error observed for the topic, or null when healthy.
  error: Error | null;
}
/**
 * The InterestManager owns all realtime actor connections and cached state.
 *
 * Multiple subscribers to the same topic share one connection and one cache
 * entry. After the last subscriber leaves, a short grace period keeps the
 * connection warm so navigation does not thrash actor connections.
 */
export interface InterestManager {
  // Registers interest in a topic; returns an unsubscribe callback. The
  // listener fires whenever the topic's data/status/error changes.
  subscribe<K extends TopicKey>(topicKey: K, params: TopicParams<K>, listener: () => void): () => void;
  // Current cached data for the topic, or undefined before the first fetch.
  getSnapshot<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicData<K> | undefined;
  // Current status; "loading" when the topic has no cache entry yet.
  getStatus<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicStatus;
  // Last error observed for the topic, or null.
  getError<K extends TopicKey>(topicKey: K, params: TopicParams<K>): Error | null;
  // Tears down every connection and clears all cached entries.
  dispose(): void;
}

View file

@ -0,0 +1,12 @@
import { createMockBackendClient } from "../mock/backend-client.js";
import { RemoteInterestManager } from "./remote-manager.js";
/**
 * In-memory variant of the interest manager. It reuses the full
 * RemoteInterestManager harness but wires it to the mock backend client,
 * which synthesizes actor events locally.
 */
export class MockInterestManager extends RemoteInterestManager {
  constructor() {
    const mockBackend = createMockBackendClient();
    super(mockBackend);
  }
}

View file

@ -0,0 +1,167 @@
import type { BackendClient } from "../backend-client.js";
import type { InterestManager, TopicStatus } from "./manager.js";
import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js";
const GRACE_PERIOD_MS = 30_000;
/**
 * Remote implementation of InterestManager.
 * Each cache entry owns one actor connection plus one materialized snapshot.
 */
export class RemoteInterestManager implements InterestManager {
  private entries = new Map<string, TopicEntry<any, any, any>>();

  constructor(private readonly backend: BackendClient) {}

  subscribe<K extends TopicKey>(topicKey: K, params: TopicParams<K>, listener: () => void): () => void {
    const definition = topicDefinitions[topicKey] as unknown as TopicDefinition<any, any, any>;
    const entryKey = definition.key(params as any);
    let entry = this.entries.get(entryKey);
    if (entry === undefined) {
      entry = new TopicEntry(definition, this.backend, params as any);
      this.entries.set(entryKey, entry);
    }
    // A resubscribe inside the grace period must abort any pending teardown.
    entry.cancelTeardown();
    entry.addListener(listener);
    entry.ensureStarted();
    return () => {
      const active = this.entries.get(entryKey);
      if (active === undefined) {
        return;
      }
      active.removeListener(listener);
      if (active.listenerCount !== 0) {
        return;
      }
      // Last subscriber left: keep the connection warm briefly, then drop it.
      active.scheduleTeardown(GRACE_PERIOD_MS, () => {
        this.entries.delete(entryKey);
      });
    };
  }

  getSnapshot<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicData<K> | undefined {
    return this.lookup(topicKey, params)?.data as TopicData<K> | undefined;
  }

  getStatus<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicStatus {
    return this.lookup(topicKey, params)?.status ?? "loading";
  }

  getError<K extends TopicKey>(topicKey: K, params: TopicParams<K>): Error | null {
    return this.lookup(topicKey, params)?.error ?? null;
  }

  dispose(): void {
    for (const entry of this.entries.values()) {
      entry.dispose();
    }
    this.entries.clear();
  }

  // Resolves the cache entry (if any) behind a topic/params pair.
  private lookup<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicEntry<any, any, any> | undefined {
    return this.entries.get((topicDefinitions[topicKey] as any).key(params));
  }
}
/**
 * One cached topic: a single actor connection, its materialized snapshot,
 * and the set of listeners sharing it.
 *
 * Fixes over the previous revision:
 * - dispose() during an in-flight start() no longer leaks the late-arriving
 *   connection or resurrects data/status on a disposed entry (start token).
 * - dispose() nulls its unsubscribe handles so a re-dispose cannot invoke
 *   stale callbacks.
 * - scheduleTeardown() cancels any pending timer first, so timers can never
 *   stack.
 */
class TopicEntry<TData, TParams, TEvent> {
  data: TData | undefined;
  status: TopicStatus = "loading";
  error: Error | null = null;
  listenerCount = 0;
  private readonly listeners = new Set<() => void>();
  private conn: Awaited<ReturnType<TopicDefinition<TData, TParams, TEvent>["connect"]>> | null = null;
  private unsubscribeEvent: (() => void) | null = null;
  private unsubscribeError: (() => void) | null = null;
  private teardownTimer: ReturnType<typeof setTimeout> | null = null;
  private startPromise: Promise<void> | null = null;
  private started = false;
  // Incremented on every dispose() so an in-flight start() can detect that it
  // has been superseded and must bail out instead of mutating disposed state.
  private startToken = 0;

  constructor(
    private readonly definition: TopicDefinition<TData, TParams, TEvent>,
    private readonly backend: BackendClient,
    private readonly params: TParams,
  ) {}

  addListener(listener: () => void): void {
    this.listeners.add(listener);
    this.listenerCount = this.listeners.size;
  }

  removeListener(listener: () => void): void {
    this.listeners.delete(listener);
    this.listenerCount = this.listeners.size;
  }

  // Starts connect + initial fetch unless already connected or in flight.
  // A failed start leaves started=false, so the next subscriber retries here.
  ensureStarted(): void {
    if (this.started || this.startPromise) {
      return;
    }
    this.startPromise = this.start().finally(() => {
      this.startPromise = null;
    });
  }

  // Schedules disposal after the grace period, replacing any pending timer.
  scheduleTeardown(ms: number, onTeardown: () => void): void {
    this.cancelTeardown();
    this.teardownTimer = setTimeout(() => {
      this.dispose();
      onTeardown();
    }, ms);
  }

  cancelTeardown(): void {
    if (this.teardownTimer) {
      clearTimeout(this.teardownTimer);
      this.teardownTimer = null;
    }
  }

  // Tears the entry down and resets it to the pristine "loading" state.
  dispose(): void {
    this.cancelTeardown();
    // Invalidate any start() still awaiting connect/fetch.
    this.startToken += 1;
    this.unsubscribeEvent?.();
    this.unsubscribeError?.();
    // Null the handles so a re-dispose cannot invoke stale unsubscribers.
    this.unsubscribeEvent = null;
    this.unsubscribeError = null;
    if (this.conn) {
      void this.conn.dispose();
    }
    this.conn = null;
    this.data = undefined;
    this.status = "loading";
    this.error = null;
    this.started = false;
  }

  private async start(): Promise<void> {
    const token = this.startToken;
    this.status = "loading";
    this.error = null;
    this.notify();
    try {
      const conn = await this.definition.connect(this.backend, this.params);
      if (token !== this.startToken) {
        // Disposed while connecting: release the late connection immediately.
        void conn.dispose();
        return;
      }
      this.conn = conn;
      this.unsubscribeEvent = this.conn.on(this.definition.event, (event: TEvent) => {
        // Events racing ahead of the initial fetch are dropped; the fetch
        // result below replaces the snapshot wholesale anyway.
        if (this.data === undefined) {
          return;
        }
        this.data = this.definition.applyEvent(this.data, event);
        this.notify();
      });
      this.unsubscribeError = this.conn.onError((error: unknown) => {
        this.status = "error";
        this.error = error instanceof Error ? error : new Error(String(error));
        this.notify();
      });
      const initial = await this.definition.fetchInitial(this.backend, this.params);
      if (token !== this.startToken) {
        // Disposed while fetching: do not resurrect state on a dead entry.
        return;
      }
      this.data = initial;
      this.status = "connected";
      this.started = true;
      this.notify();
    } catch (error) {
      if (token !== this.startToken) {
        return;
      }
      this.status = "error";
      this.error = error instanceof Error ? error : new Error(String(error));
      this.started = false;
      this.notify();
    }
  }

  private notify(): void {
    for (const listener of [...this.listeners]) {
      listener();
    }
  }
}

View file

@ -0,0 +1,131 @@
import type {
AppEvent,
FoundryAppSnapshot,
ProviderId,
SandboxProcessesEvent,
SessionEvent,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkspaceEvent,
WorkspaceSummarySnapshot,
} from "@sandbox-agent/foundry-shared";
import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-client.js";
/**
 * Topic definitions for the interest manager.
 *
 * Each topic describes one actor connection plus one materialized read model.
 * Events always carry full replacement payloads for the changed entity so the
 * client can replace cached state directly instead of reconstructing patches.
 */
export interface TopicDefinition<TData, TParams, TEvent> {
  // Derives the cache key used to dedupe subscriptions for these params.
  key: (params: TParams) => string;
  // Actor event name that carries this topic's replacement payloads.
  event: string;
  // Opens the actor connection that emits `event`.
  connect: (backend: BackendClient, params: TParams) => Promise<ActorConn>;
  // Fetches the initial materialized snapshot for the topic.
  fetchInitial: (backend: BackendClient, params: TParams) => Promise<TData>;
  // Folds an incoming event into the cached snapshot, returning the next one.
  applyEvent: (current: TData, event: TEvent) => TData;
}
// Parameter shapes for each topic. The app topic takes no parameters.
export interface AppTopicParams {}
export interface WorkspaceTopicParams {
  workspaceId: string;
}
export interface TaskTopicParams {
  workspaceId: string;
  repoId: string;
  taskId: string;
}
export interface SessionTopicParams {
  workspaceId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}
export interface SandboxProcessesTopicParams {
  workspaceId: string;
  providerId: ProviderId;
  sandboxId: string;
}
// Replaces (or appends) the entry matching `nextItem.id`, then re-sorts the
// result with the supplied comparator. Always returns a fresh array; the
// input is never mutated.
function upsertById<T extends { id: string }>(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] {
  const merged: T[] = [];
  for (const item of items) {
    if (item.id !== nextItem.id) {
      merged.push(item);
    }
  }
  merged.push(nextItem);
  merged.sort(sort);
  return merged;
}
// Registry of every realtime topic the interest manager can subscribe to.
// Each entry pairs one actor connection with the read model it maintains.
export const topicDefinitions = {
  // Whole-app snapshot; events carry a full replacement snapshot.
  app: {
    key: () => "app",
    event: "appUpdated",
    // The app topic rides on the reserved "app" workspace connection.
    connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectWorkspace("app"),
    fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(),
    applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot,
  } satisfies TopicDefinition<FoundryAppSnapshot, AppTopicParams, AppEvent>,
  // Per-workspace summary (repos + task summaries); events are granular
  // upserts/removals applied to the cached snapshot.
  workspace: {
    key: (params: WorkspaceTopicParams) => `workspace:${params.workspaceId}`,
    event: "workspaceUpdated",
    connect: (backend: BackendClient, params: WorkspaceTopicParams) => backend.connectWorkspace(params.workspaceId),
    fetchInitial: (backend: BackendClient, params: WorkspaceTopicParams) => backend.getWorkspaceSummary(params.workspaceId),
    applyEvent: (current: WorkspaceSummarySnapshot, event: WorkspaceEvent) => {
      switch (event.type) {
        case "taskSummaryUpdated":
          return {
            ...current,
            // Keep summaries sorted most-recently-updated first.
            taskSummaries: upsertById(current.taskSummaries, event.taskSummary, (left, right) => right.updatedAtMs - left.updatedAtMs),
          };
        case "taskRemoved":
          return {
            ...current,
            taskSummaries: current.taskSummaries.filter((task) => task.id !== event.taskId),
          };
        case "repoAdded":
        case "repoUpdated":
          return {
            ...current,
            // Keep repos sorted by most recent activity.
            repos: upsertById(current.repos, event.repo, (left, right) => right.latestActivityMs - left.latestActivityMs),
          };
        case "repoRemoved":
          return {
            ...current,
            repos: current.repos.filter((repo) => repo.id !== event.repoId),
          };
      }
    },
  } satisfies TopicDefinition<WorkspaceSummarySnapshot, WorkspaceTopicParams, WorkspaceEvent>,
  // Single-task detail; events replace the whole detail payload.
  task: {
    key: (params: TaskTopicParams) => `task:${params.workspaceId}:${params.taskId}`,
    event: "taskUpdated",
    connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId),
    fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.workspaceId, params.repoId, params.taskId),
    applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail,
  } satisfies TopicDefinition<WorkbenchTaskDetail, TaskTopicParams, TaskEvent>,
  // Single-session detail. It shares the task actor connection, so events for
  // sibling sessions arrive here too and are filtered out by sessionId.
  session: {
    key: (params: SessionTopicParams) => `session:${params.workspaceId}:${params.taskId}:${params.sessionId}`,
    event: "sessionUpdated",
    connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId),
    fetchInitial: (backend: BackendClient, params: SessionTopicParams) =>
      backend.getSessionDetail(params.workspaceId, params.repoId, params.taskId, params.sessionId),
    applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => {
      if (event.session.sessionId !== current.sessionId) {
        return current;
      }
      return event.session;
    },
  } satisfies TopicDefinition<WorkbenchSessionDetail, SessionTopicParams, SessionEvent>,
  // Live process list for one sandbox; events replace the whole list.
  sandboxProcesses: {
    key: (params: SandboxProcessesTopicParams) => `sandbox:${params.workspaceId}:${params.providerId}:${params.sandboxId}`,
    event: "processesUpdated",
    connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => backend.connectSandbox(params.workspaceId, params.providerId, params.sandboxId),
    fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) =>
      (await backend.listSandboxProcesses(params.workspaceId, params.providerId, params.sandboxId)).processes,
    applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes,
  } satisfies TopicDefinition<SandboxProcessRecord[], SandboxProcessesTopicParams, SandboxProcessesEvent>,
} as const;
// Union of topic names, plus helpers that derive each topic's param and data
// types from its fetchInitial signature.
export type TopicKey = keyof typeof topicDefinitions;
export type TopicParams<K extends TopicKey> = Parameters<(typeof topicDefinitions)[K]["fetchInitial"]>[1];
export type TopicData<K extends TopicKey> = Awaited<ReturnType<(typeof topicDefinitions)[K]["fetchInitial"]>>;

View file

@ -0,0 +1,56 @@
import { useMemo, useRef, useSyncExternalStore } from "react";
import type { InterestManager, TopicState } from "./manager.js";
import { topicDefinitions, type TopicKey, type TopicParams } from "./topics.js";
/**
 * React bridge for the interest manager.
 *
 * `null` params disable the subscription entirely, which is how screens express
 * conditional interest in task/session/sandbox topics.
 */
export function useInterest<K extends TopicKey>(manager: InterestManager, topicKey: K, params: TopicParams<K> | null): TopicState<K> {
  // Stable string identity for the params so the memo deps below change only
  // when the topic target actually changes, not on every fresh params object.
  const paramsKey = params ? (topicDefinitions[topicKey] as any).key(params) : null;
  // Ref keeps the latest params visible to the memoized closures without
  // forcing them to be recreated on each render.
  const paramsRef = useRef<TopicParams<K> | null>(params);
  paramsRef.current = params;
  const subscribe = useMemo(() => {
    return (listener: () => void) => {
      const currentParams = paramsRef.current;
      if (!currentParams) {
        // Disabled subscription: nothing to listen to, no-op unsubscribe.
        return () => {};
      }
      return manager.subscribe(topicKey, currentParams, listener);
    };
    // paramsKey is deliberately a dep: a new key yields a new subscribe
    // identity, which makes useSyncExternalStore resubscribe.
  }, [manager, topicKey, paramsKey]);
  const getSnapshot = useMemo(() => {
    let lastSnapshot: TopicState<K> | null = null;
    return (): TopicState<K> => {
      const currentParams = paramsRef.current;
      const nextSnapshot: TopicState<K> = currentParams
        ? {
            data: manager.getSnapshot(topicKey, currentParams),
            status: manager.getStatus(topicKey, currentParams),
            error: manager.getError(topicKey, currentParams),
          }
        : {
            data: undefined,
            status: "loading",
            error: null,
          };
      // `useSyncExternalStore` requires referentially-stable snapshots when the
      // underlying store has not changed. Reuse the previous object whenever
      // the topic data/status/error triplet is unchanged.
      if (lastSnapshot && lastSnapshot.data === nextSnapshot.data && lastSnapshot.status === nextSnapshot.status && lastSnapshot.error === nextSnapshot.error) {
        return lastSnapshot;
      }
      lastSnapshot = nextSnapshot;
      return nextSnapshot;
    };
  }, [manager, topicKey, paramsKey]);
  // getSnapshot is passed twice: the third argument doubles as the server
  // snapshot for SSR.
  return useSyncExternalStore(subscribe, getSnapshot, getSnapshot);
}

View file

@ -1,4 +1,5 @@
import { injectMockLatency } from "./mock/latency.js";
import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" };
export type MockBillingPlanId = "free" | "team";
export type MockBillingStatus = "active" | "trialing" | "past_due" | "scheduled_cancel";
@ -140,6 +141,69 @@ function syncStatusFromLegacy(value: unknown): MockGithubSyncStatus {
}
}
/**
 * Build the "rivet" mock organization from real public GitHub data.
 * Fixture sourced from: scripts/pull-org-data.ts (run against rivet-dev).
 * Members that don't exist in the public fixture get synthetic entries
 * so the mock still has realistic owner/admin/member role distribution.
 */
function buildRivetOrganization(): MockFoundryOrganization {
  const repoNames = rivetDevFixture.repos.map((repo) => repo.fullName);
  // Named personas come first so the mock always has an owner, even when the
  // public fixture omits those accounts.
  const personaMembers: MockFoundryOrganizationMember[] = [
    { id: "member-rivet-jamie", name: "Jamie", email: "jamie@rivet.dev", role: "owner", state: "active" },
    { id: "member-rivet-nathan", name: "Nathan", email: "nathan@acme.dev", role: "member", state: "active" },
  ];
  const personaIds = new Set(personaMembers.map((member) => member.id));
  // Every fixture login becomes a plain active member unless a persona with
  // the same derived id already covers it.
  const fixtureMembers: MockFoundryOrganizationMember[] = [];
  for (const fixtureMember of rivetDevFixture.members) {
    const login = fixtureMember.login;
    const memberId = `member-rivet-${login.toLowerCase()}`;
    if (personaIds.has(memberId)) {
      continue;
    }
    fixtureMembers.push({
      id: memberId,
      name: login,
      email: `${login.toLowerCase()}@rivet.dev`,
      role: "member" as const,
      state: "active" as const,
    });
  }
  return {
    id: "rivet",
    workspaceId: "rivet",
    kind: "organization",
    settings: {
      displayName: rivetDevFixture.name ?? rivetDevFixture.login,
      slug: "rivet",
      primaryDomain: "rivet.dev",
      seatAccrualMode: "first_prompt",
      defaultModel: "o3",
      autoImportRepos: true,
    },
    github: {
      connectedAccount: rivetDevFixture.login,
      installationStatus: "connected",
      syncStatus: "synced",
      importedRepoCount: repoNames.length,
      lastSyncLabel: "Synced just now",
      lastSyncAt: Date.now() - 60_000,
    },
    billing: {
      planId: "team",
      status: "trialing",
      seatsIncluded: 5,
      trialEndsAt: isoDate(12),
      renewalAt: isoDate(12),
      stripeCustomerId: "cus_mock_rivet_team",
      paymentMethodLabel: "Visa ending in 4242",
      invoices: [{ id: "inv-rivet-001", label: "Team pilot", issuedAt: "2026-03-04", amountUsd: 0, status: "paid" }],
    },
    members: [...personaMembers, ...fixtureMembers],
    seatAssignments: ["jamie@rivet.dev"],
    repoCatalog: repoNames,
  };
}
function buildDefaultSnapshot(): MockFoundryAppSnapshot {
return {
auth: {
@ -259,44 +323,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
seatAssignments: ["nathan@acme.dev", "maya@acme.dev"],
repoCatalog: ["acme/backend", "acme/frontend", "acme/infra"],
},
{
id: "rivet",
workspaceId: "rivet",
kind: "organization",
settings: {
displayName: "Rivet",
slug: "rivet",
primaryDomain: "rivet.dev",
seatAccrualMode: "first_prompt",
defaultModel: "o3",
autoImportRepos: true,
},
github: {
connectedAccount: "rivet-dev",
installationStatus: "reconnect_required",
syncStatus: "error",
importedRepoCount: 4,
lastSyncLabel: "Sync stalled 2 hours ago",
lastSyncAt: Date.now() - 2 * 60 * 60_000,
},
billing: {
planId: "team",
status: "trialing",
seatsIncluded: 5,
trialEndsAt: isoDate(12),
renewalAt: isoDate(12),
stripeCustomerId: "cus_mock_rivet_team",
paymentMethodLabel: "Visa ending in 4242",
invoices: [{ id: "inv-rivet-001", label: "Team pilot", issuedAt: "2026-03-04", amountUsd: 0, status: "paid" }],
},
members: [
{ id: "member-rivet-jamie", name: "Jamie", email: "jamie@rivet.dev", role: "owner", state: "active" },
{ id: "member-rivet-nathan", name: "Nathan", email: "nathan@acme.dev", role: "member", state: "active" },
{ id: "member-rivet-lena", name: "Lena", email: "lena@rivet.dev", role: "admin", state: "active" },
],
seatAssignments: ["jamie@rivet.dev"],
repoCatalog: ["rivet/dashboard", "rivet/agents", "rivet/billing", "rivet/infrastructure"],
},
buildRivetOrganization(),
{
id: "personal-jamie",
workspaceId: "personal-jamie",

View file

@ -1,7 +1,10 @@
import type {
AddRepoInput,
AppEvent,
CreateTaskInput,
FoundryAppSnapshot,
SandboxProcessesEvent,
SessionEvent,
TaskRecord,
TaskSummary,
TaskWorkbenchChangeModelInput,
@ -16,6 +19,12 @@ import type {
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchUpdateDraftInput,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkbenchTaskSummary,
WorkspaceEvent,
WorkspaceSummarySnapshot,
HistoryEvent,
HistoryQueryInput,
ProviderId,
@ -27,7 +36,7 @@ import type {
SwitchResult,
} from "@sandbox-agent/foundry-shared";
import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
import type { BackendClient, SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js";
import type { ActorConn, BackendClient, SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js";
import { getSharedMockWorkbenchClient } from "./workbench-client.js";
interface MockProcessRecord extends SandboxProcessRecord {
@ -86,6 +95,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
const workbench = getSharedMockWorkbenchClient();
const listenersBySandboxId = new Map<string, Set<() => void>>();
const processesBySandboxId = new Map<string, MockProcessRecord[]>();
const connectionListeners = new Map<string, Set<(payload: any) => void>>();
let nextPid = 4000;
let nextProcessId = 1;
@ -110,11 +120,174 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
// Notifies direct subscribers of a sandbox's process list, then mirrors the
// change onto the actor-connection channel so interest-manager subscribers
// see it too.
const notifySandbox = (sandboxId: string): void => {
  const listeners = listenersBySandboxId.get(sandboxId);
  if (!listeners) {
    // No direct listeners registered — still emit the connection event.
    emitSandboxProcessesUpdate(sandboxId);
    return;
  }
  for (const listener of [...listeners]) {
    listener();
  }
  emitSandboxProcessesUpdate(sandboxId);
};
// Builds the key under which listeners for a (scope, event) pair are stored.
const connectionChannel = (scope: string, event: string): string => `${scope}:${event}`;

// Fans a payload out to every listener registered on (scope, event).
const emitConnectionEvent = (scope: string, event: string, payload: any): void => {
  const subscribers = connectionListeners.get(connectionChannel(scope, event));
  if (!subscribers) {
    return;
  }
  for (const subscriber of [...subscribers]) {
    subscriber(payload);
  }
};

// Produces a mock ActorConn whose events are driven by emitConnectionEvent.
const createConn = (scope: string): ActorConn => ({
  on(event: string, listener: (payload: any) => void): () => void {
    const channel = connectionChannel(scope, event);
    const existing = connectionListeners.get(channel);
    const subscribers = existing ?? new Set<(payload: any) => void>();
    if (!existing) {
      connectionListeners.set(channel, subscribers);
    }
    subscribers.add(listener);
    return () => {
      const current = connectionListeners.get(channel);
      if (!current) {
        return;
      }
      current.delete(listener);
      if (current.size === 0) {
        // Drop empty channels so the map does not grow unboundedly.
        connectionListeners.delete(channel);
      }
    };
  },
  onError(): () => void {
    // The mock never produces connection errors.
    return () => {};
  },
  async dispose(): Promise<void> {},
});
// Projects a workbench task into the summary shape used in workspace lists.
const buildTaskSummary = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskSummary => ({
  id: task.id,
  repoId: task.repoId,
  title: task.title,
  status: task.status,
  repoName: task.repoName,
  updatedAtMs: task.updatedAtMs,
  branch: task.branch,
  pullRequest: task.pullRequest,
  // Each open tab becomes one session summary entry.
  sessionsSummary: task.tabs.map((tab) => ({
    id: tab.id,
    sessionId: tab.sessionId,
    sessionName: tab.sessionName,
    agent: tab.agent,
    model: tab.model,
    status: tab.status,
    thinkingSinceMs: tab.thinkingSinceMs,
    unread: tab.unread,
    created: tab.created,
  })),
});
// Expands a task into the full detail payload, synthesizing sandbox and diff
// metadata from mock workbench state.
const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({
  ...buildTaskSummary(task),
  task: task.title,
  // First tab's agent drives the agent type; anything non-Codex maps to claude.
  agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude",
  runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"),
  statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready",
  activeSessionId: task.tabs[0]?.sessionId ?? null,
  // Synthetic diff stat derived purely from the number of changed files.
  diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0",
  prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null,
  reviewStatus: null,
  fileChanges: task.fileChanges,
  diffs: task.diffs,
  fileTree: task.fileTree,
  minutesUsed: task.minutesUsed,
  // The mock exposes exactly one local sandbox whose id mirrors the task id.
  sandboxes: [
    {
      providerId: "local",
      sandboxId: task.id,
      cwd: mockCwd(task.repoName, task.id),
    },
  ],
  activeSandboxId: task.id,
});
// Resolves one tab of a task into the session-detail payload; throws for an
// unknown tab id so programming errors surface loudly.
const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], tabId: string): WorkbenchSessionDetail => {
  const tab = task.tabs.find((candidate) => candidate.id === tabId);
  if (!tab) {
    throw new Error(`Unknown mock tab ${tabId} for task ${task.id}`);
  }
  return {
    sessionId: tab.id,
    tabId: tab.id,
    sandboxSessionId: tab.sessionId,
    sessionName: tab.sessionName,
    agent: tab.agent,
    model: tab.model,
    status: tab.status,
    thinkingSinceMs: tab.thinkingSinceMs,
    unread: tab.unread,
    created: tab.created,
    draft: tab.draft,
    transcript: tab.transcript,
  };
};
// Assembles the workspace summary (repos + task summaries) from the current
// mock workbench snapshot.
const buildWorkspaceSummary = (): WorkspaceSummarySnapshot => {
  const snapshot = workbench.getSnapshot();
  const taskSummaries = snapshot.tasks.map(buildTaskSummary);
  return {
    workspaceId: defaultWorkspaceId,
    repos: snapshot.repos.map((repo) => {
      const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id);
      return {
        id: repo.id,
        label: repo.label,
        taskCount: repoTasks.length,
        // Latest activity = newest task update; 0 when the repo has no tasks.
        latestActivityMs: repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0),
      };
    }),
    taskSummaries,
  };
};
// Channel-scope builders shared by the mock actor connections.
const workspaceScope = (workspaceId: string): string => `workspace:${workspaceId}`;
const taskScope = (workspaceId: string, repoId: string, taskId: string): string => `task:${workspaceId}:${repoId}:${taskId}`;
const sandboxScope = (workspaceId: string, providerId: string, sandboxId: string): string => `sandbox:${workspaceId}:${providerId}:${sandboxId}`;

// Pushes the most recently updated task summary onto the workspace channel.
const emitWorkspaceSnapshot = (): void => {
  const summary = buildWorkspaceSummary();
  const sortedSummaries = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs);
  const latestTask = sortedSummaries[0] ?? null;
  if (!latestTask) {
    return;
  }
  emitConnectionEvent(workspaceScope(defaultWorkspaceId), "workspaceUpdated", {
    type: "taskSummaryUpdated",
    taskSummary: latestTask,
  } satisfies WorkspaceEvent);
};

// Publishes a full task-detail replacement on the task channel.
const emitTaskUpdate = (taskId: string): void => {
  const task = requireTask(taskId);
  emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "taskUpdated", {
    type: "taskDetailUpdated",
    detail: buildTaskDetail(task),
  } satisfies TaskEvent);
};

// Publishes a full session-detail replacement on the owning task's channel.
const emitSessionUpdate = (taskId: string, tabId: string): void => {
  const task = requireTask(taskId);
  emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "sessionUpdated", {
    type: "sessionUpdated",
    session: buildSessionDetail(task, tabId),
  } satisfies SessionEvent);
};

// Publishes the cloned process list for one sandbox on its channel.
const emitSandboxProcessesUpdate = (sandboxId: string): void => {
  emitConnectionEvent(sandboxScope(defaultWorkspaceId, "local", sandboxId), "processesUpdated", {
    type: "processesUpdated",
    processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)),
  } satisfies SandboxProcessesEvent);
};
const buildTaskRecord = (taskId: string): TaskRecord => {
@ -192,6 +365,22 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return unsupportedAppSnapshot();
},
async connectWorkspace(workspaceId: string): Promise<ActorConn> {
  // Hand out a mock connection bound to the workspace channel.
  const scope = workspaceScope(workspaceId);
  return createConn(scope);
},
async connectTask(workspaceId: string, repoId: string, taskId: string): Promise<ActorConn> {
  // Hand out a mock connection bound to the task channel.
  const scope = taskScope(workspaceId, repoId, taskId);
  return createConn(scope);
},
async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<ActorConn> {
  // Hand out a mock connection bound to the sandbox channel.
  const scope = sandboxScope(workspaceId, providerId, sandboxId);
  return createConn(scope);
},
subscribeApp(): () => void {
  // App-level push is not modeled in the mock; unsubscribing is a no-op.
  return () => {};
},
async signInWithGithub(): Promise<void> {
  // Auth flows are unsupported in the mock backend.
  notSupported("signInWithGithub");
},
@ -458,6 +647,18 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return { endpoint: "mock://terminal-unavailable" };
},
async getWorkspaceSummary(): Promise<WorkspaceSummarySnapshot> {
  // Derived fresh from the current workbench snapshot on every call.
  return buildWorkspaceSummary();
},
async getTaskDetail(_workspaceId: string, _repoId: string, taskId: string): Promise<WorkbenchTaskDetail> {
  const task = requireTask(taskId);
  return buildTaskDetail(task);
},
async getSessionDetail(_workspaceId: string, _repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail> {
  const task = requireTask(taskId);
  return buildSessionDetail(task, sessionId);
},
async getWorkbench(): Promise<TaskWorkbenchSnapshot> {
  const snapshot = workbench.getSnapshot();
  return snapshot;
},
@ -467,59 +668,99 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
},
async createWorkbenchTask(_workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
  // Create the task, then broadcast workspace/task/session events so
  // interest-manager subscribers see the new task immediately. (A stale early
  // `return await workbench.createTask(input)` used to sit here and made all
  // of the event emission unreachable.)
  const created = await workbench.createTask(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(created.taskId);
  if (created.tabId) {
    emitSessionUpdate(created.taskId, created.tabId);
  }
  return created;
},
// Task-scoped mutations: apply the change to workbench state, then broadcast
// the affected task detail plus the workspace summary.
async markWorkbenchUnread(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise<void> {
  await workbench.markTaskUnread(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async renameWorkbenchTask(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise<void> {
  await workbench.renameTask(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async renameWorkbenchBranch(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise<void> {
  await workbench.renameBranch(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async createWorkbenchSession(_workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> {
  // Add the tab, then broadcast so subscribers pick up the new session.
  // (Removed a stale early `return await workbench.addTab(input)` that made
  // the event emission below unreachable.)
  const created = await workbench.addTab(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, created.tabId);
  return created;
},
// Session-scoped mutations broadcast workspace, task, and session updates so
// every interest-manager topic touching the change refreshes.
async renameWorkbenchSession(_workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise<void> {
  await workbench.renameSession(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
async setWorkbenchSessionUnread(_workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
  await workbench.setSessionUnread(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
async updateWorkbenchDraft(_workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
  await workbench.updateDraft(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
async changeWorkbenchModel(_workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise<void> {
  await workbench.changeModel(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
async sendWorkbenchMessage(_workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise<void> {
  await workbench.sendMessage(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
async stopWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise<void> {
  await workbench.stopAgent(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
  emitSessionUpdate(input.taskId, input.tabId);
},
// Closing a tab removes the session, so no session update is emitted here.
async closeWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise<void> {
  await workbench.closeTab(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async publishWorkbenchPr(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise<void> {
  await workbench.publishPr(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async revertWorkbenchFile(_workspaceId: string, input: TaskWorkbenchDiffInput): Promise<void> {
  await workbench.revertFile(input);
  emitWorkspaceSnapshot();
  emitTaskUpdate(input.taskId);
},
async health(): Promise<{ ok: true }> {

View file

@ -25,7 +25,7 @@ class RemoteFoundryAppStore implements FoundryAppClient {
};
private readonly listeners = new Set<() => void>();
private refreshPromise: Promise<void> | null = null;
private syncPollTimeout: ReturnType<typeof setTimeout> | null = null;
private unsubscribeApp: (() => void) | null = null;
constructor(options: RemoteFoundryAppClientOptions) {
this.backend = options.backend;
@ -37,9 +37,13 @@ class RemoteFoundryAppStore implements FoundryAppClient {
// Registers a store listener, lazily starts the backend app subscription, and
// returns an unsubscribe callback.
subscribe(listener: () => void): () => void {
  this.listeners.add(listener);
  // NOTE(review): this eager refresh looks redundant — ensureStarted() also
  // triggers a refresh; confirm whether this line is leftover from the old
  // timer-polling implementation.
  void this.refresh();
  this.ensureStarted();
  return () => {
    this.listeners.delete(listener);
    // Last subscriber gone: detach from backend app updates so the store does
    // not keep the connection alive.
    if (this.listeners.size === 0 && this.unsubscribeApp) {
      this.unsubscribeApp();
      this.unsubscribeApp = null;
    }
  };
}
@ -66,7 +70,6 @@ class RemoteFoundryAppStore implements FoundryAppClient {
async selectOrganization(organizationId: string): Promise<void> {
  // Persist the selection server-side, adopt the snapshot the backend
  // returns, and notify subscribers. Sync progress now streams in through
  // the app subscription (this class holds `unsubscribeApp`), so the stale
  // `scheduleSyncPollingIfNeeded()` kickoff from the removed timer-polling
  // implementation was dropped.
  this.snapshot = await this.backend.selectAppOrganization(organizationId);
  this.notify();
}
async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<void> {
@ -77,7 +80,6 @@ class RemoteFoundryAppStore implements FoundryAppClient {
async triggerGithubSync(organizationId: string): Promise<void> {
  // Kick off the repo import and adopt the snapshot the backend returns.
  // Progress updates arrive via the app subscription; the stale
  // `scheduleSyncPollingIfNeeded()` call from the removed timer-polling
  // implementation was dropped.
  this.snapshot = await this.backend.triggerAppRepoImport(organizationId);
  this.notify();
}
async completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise<void> {
@ -107,20 +109,13 @@ class RemoteFoundryAppStore implements FoundryAppClient {
this.notify();
}
private scheduleSyncPollingIfNeeded(): void {
if (this.syncPollTimeout) {
clearTimeout(this.syncPollTimeout);
this.syncPollTimeout = null;
private ensureStarted(): void {
if (!this.unsubscribeApp) {
this.unsubscribeApp = this.backend.subscribeApp(() => {
void this.refresh();
});
}
if (!this.snapshot.organizations.some((organization) => organization.github.syncStatus === "syncing")) {
return;
}
this.syncPollTimeout = setTimeout(() => {
this.syncPollTimeout = null;
void this.refresh();
}, 500);
void this.refresh();
}
private async refresh(): Promise<void> {
@ -132,7 +127,6 @@ class RemoteFoundryAppStore implements FoundryAppClient {
this.refreshPromise = (async () => {
this.snapshot = await this.backend.getAppSnapshot();
this.notify();
this.scheduleSyncPollingIfNeeded();
})().finally(() => {
this.refreshPromise = null;
});

View file

@ -13,6 +13,7 @@ import type {
WorkbenchRepo,
WorkbenchTranscriptEvent as TranscriptEvent,
} from "@sandbox-agent/foundry-shared";
import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" };
export const MODEL_GROUPS: ModelGroup[] = [
{
@ -801,13 +802,13 @@ export function buildInitialTasks(): Task[] {
fileTree: [],
minutesUsed: 312,
},
// ── rivet-dev/cloud ──
// ── rivet-dev/vbare ──
{
id: "h6",
repoId: "cloud",
repoId: "vbare",
title: "Use full cloud run pool name for routing",
status: "idle",
repoName: "rivet-dev/cloud",
repoName: "rivet-dev/vbare",
updatedAtMs: minutesAgo(25),
branch: "fix-use-full-cloud-run-pool-name",
pullRequest: { number: 235, status: "ready" },
@ -910,13 +911,13 @@ export function buildInitialTasks(): Task[] {
],
minutesUsed: 0,
},
// ── rivet-dev/engine-ee ──
// ── rivet-dev/skills ──
{
id: "h7",
repoId: "engine-ee",
repoId: "skills",
title: "Route compute gateway path correctly",
status: "idle",
repoName: "rivet-dev/engine-ee",
repoName: "rivet-dev/skills",
updatedAtMs: minutesAgo(50),
branch: "fix-guard-support-https-targets",
pullRequest: { number: 125, status: "ready" },
@ -1024,13 +1025,13 @@ export function buildInitialTasks(): Task[] {
],
minutesUsed: 78,
},
// ── rivet-dev/engine-ee (archived) ──
// ── rivet-dev/skills (archived) ──
{
id: "h8",
repoId: "engine-ee",
repoId: "skills",
title: "Move compute gateway to guard",
status: "archived",
repoName: "rivet-dev/engine-ee",
repoName: "rivet-dev/skills",
updatedAtMs: minutesAgo(2 * 24 * 60),
branch: "chore-move-compute-gateway-to",
pullRequest: { number: 123, status: "ready" },
@ -1066,13 +1067,13 @@ export function buildInitialTasks(): Task[] {
fileTree: [],
minutesUsed: 15,
},
// ── rivet-dev/secure-exec ──
// ── rivet-dev/deploy-action ──
{
id: "h9",
repoId: "secure-exec",
repoId: "deploy-action",
title: "Harden namespace isolation for nested containers",
status: "idle",
repoName: "rivet-dev/secure-exec",
repoName: "rivet-dev/deploy-action",
updatedAtMs: minutesAgo(90),
branch: "fix/namespace-isolation",
pullRequest: null,
@ -1122,15 +1123,63 @@ export function buildInitialTasks(): Task[] {
];
}
/**
 * Build repos list from the rivet-dev fixture data (scripts/data/rivet-dev.json).
 * Uses real public repos so the mock sidebar matches what an actual rivet-dev
 * workspace would show after a GitHub sync.
 */
function buildMockRepos(): WorkbenchRepo[] {
  const repos: WorkbenchRepo[] = [];
  for (const repo of rivetDevFixture.repos) {
    repos.push({
      id: repoIdFromFullName(repo.fullName),
      label: repo.fullName,
    });
  }
  return repos;
}
/** Derive a stable short id from a "org/repo" full name (e.g. "rivet-dev/rivet" → "rivet"). */
function repoIdFromFullName(fullName: string): string {
  const lastSlash = fullName.lastIndexOf("/");
  // No separator at all → the full name already is the short id.
  return lastSlash === -1 ? fullName : fullName.slice(lastSlash + 1);
}
/**
 * Build task entries from open PR fixture data.
 * Maps to the backend's PR sync behavior (ProjectPrSyncActor) where PRs
 * appear as first-class sidebar items even without an associated task.
 * Each open PR gets a lightweight task entry so it shows in the sidebar.
 */
function buildPrTasks(): Task[] {
  // Branch names already claimed by hand-written tasks — skip those PRs to
  // avoid duplicate sidebar entries.
  const claimedBranches = new Set<string>();
  for (const task of buildInitialTasks()) {
    if (task.branch) {
      claimedBranches.add(task.branch);
    }
  }
  const prTasks: Task[] = [];
  for (const pr of rivetDevFixture.openPullRequests) {
    if (claimedBranches.has(pr.headRefName)) {
      continue;
    }
    const repoId = repoIdFromFullName(pr.repoFullName);
    prTasks.push({
      id: `pr-${repoId}-${pr.number}`,
      repoId,
      title: pr.title,
      status: "idle" as const,
      repoName: pr.repoFullName,
      updatedAtMs: new Date(pr.updatedAt).getTime(),
      branch: pr.headRefName,
      pullRequest: { number: pr.number, status: pr.draft ? ("draft" as const) : ("ready" as const) },
      tabs: [],
      fileChanges: [],
      diffs: {},
      fileTree: [],
      minutesUsed: 0,
    });
  }
  return prTasks;
}
export function buildInitialMockLayoutViewModel(): TaskWorkbenchSnapshot {
const repos: WorkbenchRepo[] = [
{ id: "sandbox-agent", label: "rivet-dev/sandbox-agent" },
{ id: "rivet", label: "rivet-dev/rivet" },
{ id: "cloud", label: "rivet-dev/cloud" },
{ id: "engine-ee", label: "rivet-dev/engine-ee" },
{ id: "secure-exec", label: "rivet-dev/secure-exec" },
];
const tasks = buildInitialTasks();
const repos = buildMockRepos();
const tasks = [...buildInitialTasks(), ...buildPrTasks()];
return {
workspaceId: "default",
repos,

View file

@ -0,0 +1,171 @@
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { WorkspaceEvent, WorkspaceSummarySnapshot } from "@sandbox-agent/foundry-shared";
import type { ActorConn, BackendClient } from "../src/backend-client.js";
import { RemoteInterestManager } from "../src/interest/remote-manager.js";
/**
 * In-memory ActorConn double for tests: tracks event listeners per event
 * name, lets tests push events/errors synchronously, and counts dispose
 * calls so teardown behavior can be asserted.
 */
class FakeActorConn implements ActorConn {
  private readonly listeners = new Map<string, Set<(payload: any) => void>>();
  private readonly errorListeners = new Set<(error: unknown) => void>();
  disposeCount = 0;

  on(event: string, listener: (payload: any) => void): () => void {
    // Reuse the existing bucket for this event, or register a fresh one.
    const bucket = this.listeners.get(event) ?? new Set<(payload: any) => void>();
    this.listeners.set(event, bucket);
    bucket.add(listener);
    return () => {
      bucket.delete(listener);
      // Drop the bucket entirely once its last listener is gone.
      if (bucket.size === 0) {
        this.listeners.delete(event);
      }
    };
  }

  onError(listener: (error: unknown) => void): () => void {
    this.errorListeners.add(listener);
    return () => {
      this.errorListeners.delete(listener);
    };
  }

  /** Synchronously invoke every listener registered for `event`. */
  emit(event: string, payload: unknown): void {
    const bucket = this.listeners.get(event);
    if (!bucket) {
      return;
    }
    bucket.forEach((listener) => listener(payload));
  }

  /** Synchronously invoke every error listener. */
  emitError(error: unknown): void {
    this.errorListeners.forEach((listener) => listener(error));
  }

  async dispose(): Promise<void> {
    this.disposeCount = this.disposeCount + 1;
  }
}
/** Canned workspace summary used as the backend stub's initial fetch result. */
function workspaceSnapshot(): WorkspaceSummarySnapshot {
  const initialTask = {
    id: "task-1",
    repoId: "repo-1",
    title: "Initial task",
    status: "idle" as const,
    repoName: "repo-1",
    updatedAtMs: 10,
    branch: "main",
    pullRequest: null,
    sessionsSummary: [],
  };
  return {
    workspaceId: "ws-1",
    repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }],
    taskSummaries: [initialTask],
  };
}
/**
 * Builds a BackendClient stub whose two methods used by the interest manager
 * resolve to the supplied fixtures; both are vi mocks so call counts can be
 * asserted.
 */
function createBackend(conn: FakeActorConn, snapshot: WorkspaceSummarySnapshot): BackendClient {
  const stub = {
    connectWorkspace: vi.fn(async () => conn),
    getWorkspaceSummary: vi.fn(async () => snapshot),
  };
  // Only these two methods are exercised by the tests below.
  return stub as unknown as BackendClient;
}
/** Yields to the microtask queue twice so chained promise callbacks settle. */
async function flushAsyncWork(): Promise<void> {
  for (let turn = 0; turn < 2; turn += 1) {
    await Promise.resolve();
  }
}
describe("RemoteInterestManager", () => {
  // Fake timers let the tests drive the manager's disconnect grace period
  // deterministically (see the 29s / 30s advances below).
  beforeEach(() => {
    vi.useFakeTimers();
  });
  afterEach(() => {
    vi.useRealTimers();
  });

  it("shares one connection per topic key and applies incoming events", async () => {
    const conn = new FakeActorConn();
    const backend = createBackend(conn, workspaceSnapshot());
    const manager = new RemoteInterestManager(backend);
    const params = { workspaceId: "ws-1" } as const;
    const listenerA = vi.fn();
    const listenerB = vi.fn();
    // Two subscribers to the same topic key...
    const unsubscribeA = manager.subscribe("workspace", params, listenerA);
    const unsubscribeB = manager.subscribe("workspace", params, listenerB);
    await flushAsyncWork();
    // ...must share a single connection and a single initial snapshot fetch.
    expect(backend.connectWorkspace).toHaveBeenCalledTimes(1);
    expect(backend.getWorkspaceSummary).toHaveBeenCalledTimes(1);
    expect(manager.getStatus("workspace", params)).toBe("connected");
    expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Initial task");
    // An incoming workspace event must be folded into the cached snapshot...
    conn.emit("workspaceUpdated", {
      type: "taskSummaryUpdated",
      taskSummary: {
        id: "task-1",
        repoId: "repo-1",
        title: "Updated task",
        status: "running",
        repoName: "repo-1",
        updatedAtMs: 20,
        branch: "feature/live",
        pullRequest: null,
        sessionsSummary: [],
      },
    } satisfies WorkspaceEvent);
    // ...and both listeners notified of the change.
    expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Updated task");
    expect(listenerA).toHaveBeenCalled();
    expect(listenerB).toHaveBeenCalled();
    unsubscribeA();
    unsubscribeB();
    manager.dispose();
  });

  it("keeps a topic warm during the grace period and tears it down afterwards", async () => {
    const conn = new FakeActorConn();
    const backend = createBackend(conn, workspaceSnapshot());
    const manager = new RemoteInterestManager(backend);
    const params = { workspaceId: "ws-1" } as const;
    const unsubscribeA = manager.subscribe("workspace", params, () => {});
    await flushAsyncWork();
    unsubscribeA();
    // Re-subscribing just inside the grace window (29s here) must reuse the
    // still-warm connection rather than dialing a new one.
    vi.advanceTimersByTime(29_000);
    const unsubscribeB = manager.subscribe("workspace", params, () => {});
    await flushAsyncWork();
    expect(backend.connectWorkspace).toHaveBeenCalledTimes(1);
    expect(conn.disposeCount).toBe(0);
    unsubscribeB();
    // Once the grace period fully elapses with no subscribers, the connection
    // is disposed and the cached snapshot is dropped.
    vi.advanceTimersByTime(30_000);
    expect(conn.disposeCount).toBe(1);
    expect(manager.getSnapshot("workspace", params)).toBeUndefined();
  });

  it("surfaces connection errors to subscribers", async () => {
    const conn = new FakeActorConn();
    const backend = createBackend(conn, workspaceSnapshot());
    const manager = new RemoteInterestManager(backend);
    const params = { workspaceId: "ws-1" } as const;
    manager.subscribe("workspace", params, () => {});
    await flushAsyncWork();
    // A transport error must flip the topic status and expose the error.
    conn.emitError(new Error("socket dropped"));
    expect(manager.getStatus("workspace", params)).toBe("error");
    expect(manager.getError("workspace", params)?.message).toBe("socket dropped");
  });
});

View file

@ -1,15 +1,12 @@
<!doctype html>
<html lang="en">
<head>
<!--
<script src="https://unpkg.com/react-scan/dist/auto.global.js" crossorigin="anonymous"></script>
<script type="module">
if (import.meta.env.DEV) {
import("react-grab");
import("@react-grab/mcp/client");
}
</script>
-->
<script>if(window.__TAURI_INTERNALS__)document.documentElement.dataset.tauri="1"</script>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />

View file

@ -1,6 +1,7 @@
import { type ReactNode, useEffect } from "react";
import { setFrontendErrorContext } from "@sandbox-agent/foundry-frontend-errors/client";
import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared";
import { useInterest } from "@sandbox-agent/foundry-client";
import { Navigate, Outlet, createRootRoute, createRoute, createRouter, useRouterState } from "@tanstack/react-router";
import { MockLayout } from "../components/mock-layout";
import {
@ -12,8 +13,8 @@ import {
MockSignInPage,
} from "../components/mock-onboarding";
import { defaultWorkspaceId, isMockFrontendClient } from "../lib/env";
import { interestManager } from "../lib/interest";
import { activeMockOrganization, getMockOrganizationById, isAppSnapshotBootstrapping, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app";
import { getTaskWorkbenchClient } from "../lib/workbench";
const rootRoute = createRootRoute({
component: RootLayout,
@ -324,7 +325,7 @@ function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; chil
}
function RepoRouteInner({ workspaceId, repoId }: { workspaceId: string; repoId: string }) {
const taskWorkbenchClient = getTaskWorkbenchClient(workspaceId);
const workspaceState = useInterest(interestManager, "workspace", { workspaceId });
useEffect(() => {
setFrontendErrorContext({
workspaceId,
@ -332,7 +333,7 @@ function RepoRouteInner({ workspaceId, repoId }: { workspaceId: string; repoId:
repoId,
});
}, [repoId, workspaceId]);
const activeTaskId = taskWorkbenchClient.getSnapshot().tasks.find((task) => task.repoId === repoId)?.id;
const activeTaskId = workspaceState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id;
if (!activeTaskId) {
return <Navigate to="/workspaces/$workspaceId" params={{ workspaceId }} replace />;
}

View file

@ -0,0 +1,379 @@
import { memo, useCallback, useEffect, useMemo, useState } from "react";
import { useStyletron } from "baseui";
import { useFoundryTokens } from "../app/theme";
import { isMockFrontendClient } from "../lib/env";
import type { FoundryOrganization, TaskWorkbenchSnapshot, WorkbenchTask } from "@sandbox-agent/foundry-shared";
interface DevPanelProps {
workspaceId: string;
snapshot: TaskWorkbenchSnapshot;
organization?: FoundryOrganization | null;
}
interface TopicInfo {
label: string;
key: string;
listenerCount: number;
hasConnection: boolean;
lastRefresh: number | null;
}
/** Formats a timestamp as a coarse relative age ("now", "42s", "7m", "3h"); null/0 → "never". */
function timeAgo(ts: number | null): string {
  if (!ts) return "never";
  const ageSeconds = Math.floor((Date.now() - ts) / 1000);
  if (ageSeconds < 5) return "now";
  if (ageSeconds < 60) return `${ageSeconds}s`;
  const ageMinutes = Math.floor(ageSeconds / 60);
  return ageMinutes < 60 ? `${ageMinutes}m` : `${Math.floor(ageMinutes / 60)}h`;
}
/** Display status for a task: archived wins, then any running tab, then the task's own status (default "idle"). */
function taskStatusLabel(task: WorkbenchTask): string {
  if (task.status === "archived") return "archived";
  const anyTabRunning = (task.tabs ?? []).some((tab) => tab.status === "running");
  return anyTabRunning ? "running" : (task.status ?? "idle");
}
/** Maps a task display status to its indicator color token. */
function statusColor(status: string, t: ReturnType<typeof useFoundryTokens>): string {
  if (status === "running") return t.statusSuccess;
  if (status === "archived") return t.textMuted;
  if (status === "error" || status === "failed") return t.statusError;
  return t.textTertiary;
}
/** Maps a GitHub sync status to its indicator color token. */
function syncStatusColor(status: string, t: ReturnType<typeof useFoundryTokens>): string {
  if (status === "synced") return t.statusSuccess;
  if (status === "syncing" || status === "pending") return t.statusWarning;
  if (status === "error") return t.statusError;
  return t.textMuted;
}
/** Maps a GitHub App installation status to its indicator color token. */
function installStatusColor(status: string, t: ReturnType<typeof useFoundryTokens>): string {
  if (status === "connected") return t.statusSuccess;
  if (status === "install_required") return t.statusWarning;
  if (status === "reconnect_required") return t.statusError;
  return t.textMuted;
}
/**
 * Floating dev-only diagnostics overlay (fixed bottom-right). Renders the
 * current interest topics, snapshot counts, up to 10 recent tasks, the
 * organization's GitHub app/sync state, and the workspace identity.
 * Visibility is controlled by the caller via useDevPanel (Shift+D toggle).
 */
export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organization }: DevPanelProps) {
  const [css] = useStyletron();
  const t = useFoundryTokens();
  const [now, setNow] = useState(Date.now());

  // Tick every 2s to keep relative timestamps fresh
  useEffect(() => {
    const id = setInterval(() => setNow(Date.now()), 2000);
    return () => clearInterval(id);
  }, []);

  // Rows for the "Interest Topics" section. NOTE(review): these are derived
  // locally from the snapshot (one workbench row plus one row per tab of each
  // non-archived task), not read from a live interest manager — the
  // listenerCount/hasConnection/lastRefresh values are approximations.
  const topics = useMemo((): TopicInfo[] => {
    const items: TopicInfo[] = [];
    // Workbench subscription topic
    items.push({
      label: "Workbench",
      key: `ws:${workspaceId}`,
      listenerCount: 1,
      hasConnection: true,
      lastRefresh: now,
    });
    // Per-task tab subscriptions
    for (const task of snapshot.tasks ?? []) {
      if (task.status === "archived") continue;
      for (const tab of task.tabs ?? []) {
        items.push({
          label: `Tab/${task.title?.slice(0, 16) || task.id.slice(0, 8)}/${tab.sessionName.slice(0, 10)}`,
          key: `${workspaceId}:${task.id}:${tab.id}`,
          listenerCount: 1,
          hasConnection: tab.status === "running",
          lastRefresh: tab.status === "running" ? now : null,
        });
      }
    }
    return items;
  }, [workspaceId, snapshot, now]);

  const tasks = snapshot.tasks ?? [];
  const repos = snapshot.repos ?? [];
  const projects = snapshot.projects ?? [];

  // Shared monospace style for ids, topic keys and timestamps.
  const mono = css({
    fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace",
    fontSize: "10px",
  });

  return (
    <div
      className={css({
        position: "fixed",
        bottom: "8px",
        right: "8px",
        width: "320px",
        maxHeight: "50vh",
        zIndex: 99999,
        backgroundColor: t.surfaceElevated,
        border: `1px solid ${t.borderMedium}`,
        borderRadius: "6px",
        boxShadow: t.shadow,
        overflow: "hidden",
        display: "flex",
        flexDirection: "column",
      })}
    >
      {/* Header */}
      <div
        className={css({
          display: "flex",
          alignItems: "center",
          justifyContent: "space-between",
          padding: "4px 8px",
          borderBottom: `1px solid ${t.borderSubtle}`,
          backgroundColor: t.surfaceTertiary,
          flexShrink: 0,
        })}
      >
        <span
          className={css({
            fontSize: "10px",
            fontWeight: 600,
            color: t.textSecondary,
            letterSpacing: "0.5px",
            textTransform: "uppercase",
            display: "flex",
            alignItems: "center",
            gap: "4px",
          })}
        >
          Dev
          {isMockFrontendClient && <span className={css({ fontSize: "8px", fontWeight: 600, color: t.statusWarning, letterSpacing: "0.3px" })}>MOCK</span>}
        </span>
        <span className={css({ fontSize: "9px", color: t.textMuted })}>Shift+D</span>
      </div>
      {/* Body */}
      <div className={css({ overflowY: "auto", padding: "6px" })}>
        {/* Interest Topics */}
        <Section label="Interest Topics" t={t} css={css}>
          {topics.map((topic) => (
            <div
              key={topic.key}
              className={css({
                display: "flex",
                alignItems: "center",
                gap: "6px",
                padding: "2px 0",
              })}
            >
              <span
                className={css({
                  width: "5px",
                  height: "5px",
                  borderRadius: "50%",
                  backgroundColor: topic.hasConnection ? t.statusSuccess : t.textMuted,
                  flexShrink: 0,
                })}
              />
              <span className={css({ fontSize: "10px", color: t.textPrimary, flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })}>
                {topic.label}
              </span>
              <span className={`${mono} ${css({ color: t.textMuted })}`}>{topic.key.length > 24 ? `...${topic.key.slice(-20)}` : topic.key}</span>
              <span className={`${mono} ${css({ color: t.textTertiary })}`}>{timeAgo(topic.lastRefresh)}</span>
            </div>
          ))}
          {topics.length === 0 && <span className={css({ fontSize: "10px", color: t.textMuted })}>No active subscriptions</span>}
        </Section>
        {/* Snapshot Summary */}
        <Section label="Snapshot" t={t} css={css}>
          <div className={css({ display: "flex", gap: "10px", fontSize: "10px" })}>
            <Stat label="repos" value={repos.length} t={t} css={css} />
            <Stat label="projects" value={projects.length} t={t} css={css} />
            <Stat label="tasks" value={tasks.length} t={t} css={css} />
          </div>
        </Section>
        {/* Tasks */}
        {tasks.length > 0 && (
          <Section label="Tasks" t={t} css={css}>
            {tasks.slice(0, 10).map((task) => {
              const status = taskStatusLabel(task);
              return (
                <div
                  key={task.id}
                  className={css({
                    display: "flex",
                    alignItems: "center",
                    gap: "6px",
                    padding: "1px 0",
                    fontSize: "10px",
                  })}
                >
                  <span
                    className={css({
                      width: "5px",
                      height: "5px",
                      borderRadius: "50%",
                      backgroundColor: statusColor(status, t),
                      flexShrink: 0,
                    })}
                  />
                  <span className={css({ color: t.textPrimary, flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })}>
                    {task.title || task.id.slice(0, 12)}
                  </span>
                  <span className={`${mono} ${css({ color: statusColor(status, t) })}`}>{status}</span>
                  <span className={`${mono} ${css({ color: t.textMuted })}`}>{task.tabs?.length ?? 0} tabs</span>
                </div>
              );
            })}
          </Section>
        )}
        {/* GitHub */}
        {organization && (
          <Section label="GitHub" t={t} css={css}>
            <div className={css({ display: "flex", flexDirection: "column", gap: "3px", fontSize: "10px" })}>
              <div className={css({ display: "flex", alignItems: "center", gap: "6px" })}>
                <span
                  className={css({
                    width: "5px",
                    height: "5px",
                    borderRadius: "50%",
                    backgroundColor: installStatusColor(organization.github.installationStatus, t),
                    flexShrink: 0,
                  })}
                />
                <span className={css({ color: t.textPrimary, flex: 1 })}>App</span>
                <span className={`${mono} ${css({ color: installStatusColor(organization.github.installationStatus, t) })}`}>
                  {organization.github.installationStatus.replace(/_/g, " ")}
                </span>
              </div>
              <div className={css({ display: "flex", alignItems: "center", gap: "6px" })}>
                <span
                  className={css({
                    width: "5px",
                    height: "5px",
                    borderRadius: "50%",
                    backgroundColor: syncStatusColor(organization.github.syncStatus, t),
                    flexShrink: 0,
                  })}
                />
                <span className={css({ color: t.textPrimary, flex: 1 })}>Sync</span>
                <span className={`${mono} ${css({ color: syncStatusColor(organization.github.syncStatus, t) })}`}>{organization.github.syncStatus}</span>
              </div>
              <div className={css({ display: "flex", gap: "10px", marginTop: "2px" })}>
                <Stat label="repos imported" value={organization.github.importedRepoCount} t={t} css={css} />
              </div>
              {organization.github.connectedAccount && (
                <div className={`${mono} ${css({ color: t.textMuted, marginTop: "1px" })}`}>@{organization.github.connectedAccount}</div>
              )}
              {organization.github.lastSyncLabel && (
                <div className={`${mono} ${css({ color: t.textMuted })}`}>last sync: {organization.github.lastSyncLabel}</div>
              )}
            </div>
          </Section>
        )}
        {/* Workspace */}
        <Section label="Workspace" t={t} css={css}>
          <div className={`${mono} ${css({ color: t.textTertiary })}`}>{workspaceId}</div>
          {organization && (
            <div className={`${mono} ${css({ color: t.textMuted, marginTop: "2px" })}`}>
              org: {organization.settings.displayName} ({organization.kind})
            </div>
          )}
        </Section>
      </div>
    </div>
  );
});
/** Labeled group for the dev panel: small uppercase heading above arbitrary children. */
function Section({
  label,
  t,
  css: cssFn,
  children,
}: {
  label: string;
  t: ReturnType<typeof useFoundryTokens>;
  css: ReturnType<typeof useStyletron>[0];
  children: React.ReactNode;
}) {
  const headingClass = cssFn({
    fontSize: "9px",
    fontWeight: 600,
    color: t.textMuted,
    textTransform: "uppercase",
    letterSpacing: "0.5px",
    marginBottom: "2px",
  });
  return (
    <div className={cssFn({ marginBottom: "6px" })}>
      <div className={headingClass}>{label}</div>
      {children}
    </div>
  );
}
/** Tiny "value label" pair used in the dev panel's summary rows. */
function Stat({
  label,
  value,
  t,
  css: cssFn,
}: {
  label: string;
  value: number;
  t: ReturnType<typeof useFoundryTokens>;
  css: ReturnType<typeof useStyletron>[0];
}) {
  const valueClass = cssFn({ fontWeight: 600, color: t.textPrimary });
  const labelClass = cssFn({ color: t.textTertiary, marginLeft: "2px" });
  return (
    <span>
      <span className={valueClass}>{value}</span>
      <span className={labelClass}>{label}</span>
    </span>
  );
}
/**
 * Dev-panel visibility hook.
 *
 * Toggled with Shift+D (ignored while focus is in an input/textarea/select).
 * The choice is persisted to localStorage so it survives reloads — the panel
 * is documented as persisting its visibility, which the previous
 * implementation (plain useState(true)) did not actually do.
 *
 * Returns the current visibility as a boolean; interface unchanged.
 */
export function useDevPanel() {
  // Key under which visibility is persisted ("1" visible, "0" hidden).
  const storageKey = "foundry-dev-panel-visible";
  const [visible, setVisible] = useState(() => {
    try {
      // Default to visible unless the user previously hid the panel.
      return window.localStorage.getItem(storageKey) !== "0";
    } catch {
      // Storage unavailable (e.g. privacy mode) — fall back to the old default.
      return true;
    }
  });
  useEffect(() => {
    try {
      window.localStorage.setItem(storageKey, visible ? "1" : "0");
    } catch {
      // Best-effort persistence; toggling still works for this session.
    }
  }, [visible]);
  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (e.shiftKey && e.key === "D" && !e.metaKey && !e.ctrlKey && !e.altKey) {
        // Don't hijack Shift+D while the user is typing in a form control.
        const tag = (e.target as HTMLElement)?.tagName;
        if (tag === "INPUT" || tag === "TEXTAREA" || tag === "SELECT") return;
        e.preventDefault();
        setVisible((prev) => !prev);
      }
    };
    window.addEventListener("keydown", handleKeyDown);
    return () => window.removeEventListener("keydown", handleKeyDown);
  }, []);
  return visible;
}

View file

@ -1,7 +1,14 @@
import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState, useSyncExternalStore, type PointerEvent as ReactPointerEvent } from "react";
import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState, type PointerEvent as ReactPointerEvent } from "react";
import { useNavigate } from "@tanstack/react-router";
import { useStyletron } from "baseui";
import { createErrorContext } from "@sandbox-agent/foundry-shared";
import {
createErrorContext,
type TaskWorkbenchSnapshot,
type WorkbenchSessionSummary,
type WorkbenchTaskDetail,
type WorkbenchTaskSummary,
} from "@sandbox-agent/foundry-shared";
import { useInterest } from "@sandbox-agent/foundry-client";
import { PanelLeft, PanelRight } from "lucide-react";
import { useFoundryTokens } from "../app/theme";
@ -16,6 +23,7 @@ import { TabStrip } from "./mock-layout/tab-strip";
import { TerminalPane } from "./mock-layout/terminal-pane";
import { TranscriptHeader } from "./mock-layout/transcript-header";
import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell } from "./mock-layout/ui";
import { DevPanel, useDevPanel } from "./dev-panel";
import {
buildDisplayMessages,
diffPath,
@ -30,7 +38,8 @@ import {
type ModelId,
} from "./mock-layout/view-model";
import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app";
import { getTaskWorkbenchClient } from "../lib/workbench";
import { backendClient } from "../lib/backend";
import { interestManager } from "../lib/interest";
function firstAgentTabId(task: Task): string | null {
return task.tabs[0]?.id ?? null;
@ -65,6 +74,81 @@ function sanitizeActiveTabId(task: Task, tabId: string | null | undefined, openD
return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentTabId;
}
/**
 * Adapts a session summary into the legacy tab shape the layout renders.
 * Draft and transcript come from the cached per-session detail when present;
 * otherwise empty placeholders are substituted.
 */
function toLegacyTab(
  summary: WorkbenchSessionSummary,
  sessionDetail?: { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] },
): Task["tabs"][number] {
  const emptyDraft = { text: "", attachments: [], updatedAtMs: null };
  return {
    id: summary.id,
    sessionId: summary.sessionId,
    sessionName: summary.sessionName,
    agent: summary.agent,
    model: summary.model,
    status: summary.status,
    thinkingSinceMs: summary.thinkingSinceMs,
    unread: summary.unread,
    created: summary.created,
    draft: sessionDetail?.draft ?? emptyDraft,
    transcript: sessionDetail?.transcript ?? [],
  };
}
/**
 * Adapts a task summary (plus optional detail and cached session state) into
 * the legacy Task shape. Detail fields win over summary fields when present;
 * detail-only fields (fileChanges/diffs/fileTree/minutesUsed) fall back to
 * empty values when no detail is loaded.
 */
function toLegacyTask(
  summary: WorkbenchTaskSummary,
  detail?: WorkbenchTaskDetail,
  sessionCache?: Map<string, { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }>,
): Task {
  const sessionSummaries = detail?.sessionsSummary ?? summary.sessionsSummary;
  const tabs = sessionSummaries.map((session) => toLegacyTab(session, sessionCache?.get(session.id)));
  return {
    id: summary.id,
    repoId: summary.repoId,
    title: detail?.title ?? summary.title,
    status: detail?.status ?? summary.status,
    repoName: detail?.repoName ?? summary.repoName,
    updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs,
    branch: detail?.branch ?? summary.branch,
    pullRequest: detail?.pullRequest ?? summary.pullRequest,
    tabs,
    fileChanges: detail?.fileChanges ?? [],
    diffs: detail?.diffs ?? {},
    fileTree: detail?.fileTree ?? [],
    minutesUsed: detail?.minutesUsed ?? 0,
  };
}
/**
 * Groups tasks under their repos for sidebar rendering.
 *
 * Each group's tasks are sorted newest-first; updatedAtMs is the latest
 * activity across the group's tasks (0 when none, matching the original
 * reduce seed). Repos with no tasks are dropped.
 */
function groupProjects(repos: Array<{ id: string; label: string }>, tasks: Task[]) {
  return repos
    .map((repo) => {
      // Filter once and reuse — the previous version filtered the full task
      // list twice per repo (once for the timestamp, once for the task list).
      const repoTasks = tasks
        .filter((task) => task.repoId === repo.id)
        .sort((left, right) => right.updatedAtMs - left.updatedAtMs);
      return {
        id: repo.id,
        label: repo.label,
        updatedAtMs: repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0),
        tasks: repoTasks,
      };
    })
    .filter((repo) => repo.tasks.length > 0);
}
/**
 * Command surface the layout uses to mutate workbench state for the current
 * workspace. NOTE(review): in this file it is implemented as thin wrappers
 * around backendClient calls; state changes are expected to flow back through
 * subscriptions rather than through these return values — confirm with the
 * backend client contract.
 */
interface WorkbenchActions {
  /** Create a task in a repo; resolves with the new task id (and first tab id, if one was opened). */
  createTask(input: { repoId: string; task: string; title?: string; branch?: string; model?: ModelId }): Promise<{ taskId: string; tabId?: string }>;
  /** Mark a whole task as unread. */
  markTaskUnread(input: { taskId: string }): Promise<void>;
  /** Rename a task. */
  renameTask(input: { taskId: string; value: string }): Promise<void>;
  /** Rename the task's working branch. */
  renameBranch(input: { taskId: string; value: string }): Promise<void>;
  /** Archive a task. */
  archiveTask(input: { taskId: string }): Promise<void>;
  /** Publish a pull request for the task. */
  publishPr(input: { taskId: string }): Promise<void>;
  /** Revert a single changed file in the task's workspace. */
  revertFile(input: { taskId: string; path: string }): Promise<void>;
  /** Persist the draft prompt text/attachments for a session tab. */
  updateDraft(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): Promise<void>;
  /** Send a message to the agent in a session tab. */
  sendMessage(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): Promise<void>;
  /** Stop the running agent in a session tab. */
  stopAgent(input: { taskId: string; tabId: string }): Promise<void>;
  /** Set a session tab's unread flag. */
  setSessionUnread(input: { taskId: string; tabId: string; unread: boolean }): Promise<void>;
  /** Rename a session tab. */
  renameSession(input: { taskId: string; tabId: string; title: string }): Promise<void>;
  /** Close a session tab. */
  closeTab(input: { taskId: string; tabId: string }): Promise<void>;
  /** Open a new session tab on a task; resolves with the created tab id. */
  addTab(input: { taskId: string; model?: string }): Promise<{ tabId: string }>;
  /** Switch the model used by a session tab. */
  changeModel(input: { taskId: string; tabId: string; model: ModelId }): Promise<void>;
}
const TranscriptPanel = memo(function TranscriptPanel({
taskWorkbenchClient,
task,
@ -83,7 +167,7 @@ const TranscriptPanel = memo(function TranscriptPanel({
onToggleRightSidebar,
onNavigateToUsage,
}: {
taskWorkbenchClient: ReturnType<typeof getTaskWorkbenchClient>;
taskWorkbenchClient: WorkbenchActions;
task: Task;
activeTabId: string | null;
lastAgentTabId: string | null;
@ -727,7 +811,7 @@ const RightRail = memo(function RightRail({
}, [clampTerminalHeight]);
const startResize = useCallback(
(event: ReactPointerEvent<HTMLDivElement>) => {
(event: ReactPointerEvent) => {
event.preventDefault();
const startY = event.clientY;
@ -902,19 +986,87 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
const [css] = useStyletron();
const t = useFoundryTokens();
const navigate = useNavigate();
const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(workspaceId), [workspaceId]);
const viewModel = useSyncExternalStore(
taskWorkbenchClient.subscribe.bind(taskWorkbenchClient),
taskWorkbenchClient.getSnapshot.bind(taskWorkbenchClient),
taskWorkbenchClient.getSnapshot.bind(taskWorkbenchClient),
const taskWorkbenchClient = useMemo<WorkbenchActions>(
() => ({
createTask: (input) => backendClient.createWorkbenchTask(workspaceId, input),
markTaskUnread: (input) => backendClient.markWorkbenchUnread(workspaceId, input),
renameTask: (input) => backendClient.renameWorkbenchTask(workspaceId, input),
renameBranch: (input) => backendClient.renameWorkbenchBranch(workspaceId, input),
archiveTask: async (input) => backendClient.runAction(workspaceId, input.taskId, "archive"),
publishPr: (input) => backendClient.publishWorkbenchPr(workspaceId, input),
revertFile: (input) => backendClient.revertWorkbenchFile(workspaceId, input),
updateDraft: (input) => backendClient.updateWorkbenchDraft(workspaceId, input),
sendMessage: (input) => backendClient.sendWorkbenchMessage(workspaceId, input),
stopAgent: (input) => backendClient.stopWorkbenchSession(workspaceId, input),
setSessionUnread: (input) => backendClient.setWorkbenchSessionUnread(workspaceId, input),
renameSession: (input) => backendClient.renameWorkbenchSession(workspaceId, input),
closeTab: (input) => backendClient.closeWorkbenchSession(workspaceId, input),
addTab: (input) => backendClient.createWorkbenchSession(workspaceId, input),
changeModel: (input) => backendClient.changeWorkbenchModel(workspaceId, input),
}),
[workspaceId],
);
const tasks = viewModel.tasks ?? [];
const rawProjects = viewModel.projects ?? [];
const workspaceState = useInterest(interestManager, "workspace", { workspaceId });
const workspaceRepos = workspaceState.data?.repos ?? [];
const taskSummaries = workspaceState.data?.taskSummaries ?? [];
const selectedTaskSummary = useMemo(
() => taskSummaries.find((task) => task.id === selectedTaskId) ?? taskSummaries[0] ?? null,
[selectedTaskId, taskSummaries],
);
const taskState = useInterest(
interestManager,
"task",
selectedTaskSummary
? {
workspaceId,
repoId: selectedTaskSummary.repoId,
taskId: selectedTaskSummary.id,
}
: null,
);
const sessionState = useInterest(
interestManager,
"session",
selectedTaskSummary && selectedSessionId
? {
workspaceId,
repoId: selectedTaskSummary.repoId,
taskId: selectedTaskSummary.id,
sessionId: selectedSessionId,
}
: null,
);
const tasks = useMemo(() => {
const sessionCache = new Map<string, { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }>();
if (selectedTaskSummary && taskState.data) {
for (const session of taskState.data.sessionsSummary) {
const cached =
(selectedSessionId && session.id === selectedSessionId ? sessionState.data : undefined) ??
interestManager.getSnapshot("session", {
workspaceId,
repoId: selectedTaskSummary.repoId,
taskId: selectedTaskSummary.id,
sessionId: session.id,
});
if (cached) {
sessionCache.set(session.id, {
draft: cached.draft,
transcript: cached.transcript,
});
}
}
}
return taskSummaries.map((summary) =>
summary.id === selectedTaskSummary?.id ? toLegacyTask(summary, taskState.data, sessionCache) : toLegacyTask(summary),
);
}, [selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummaries, workspaceId]);
const rawProjects = useMemo(() => groupProjects(workspaceRepos, tasks), [tasks, workspaceRepos]);
const appSnapshot = useMockAppSnapshot();
const activeOrg = activeMockOrganization(appSnapshot);
const navigateToUsage = useCallback(() => {
if (activeOrg) {
void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } });
void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } as never });
}
}, [activeOrg, navigate]);
const [projectOrder, setProjectOrder] = useState<string[] | null>(null);
@ -939,6 +1091,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
const [leftSidebarOpen, setLeftSidebarOpen] = useState(true);
const [rightSidebarOpen, setRightSidebarOpen] = useState(true);
const [leftSidebarPeeking, setLeftSidebarPeeking] = useState(false);
const showDevPanel = useDevPanel();
const peekTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const startPeek = useCallback(() => {
@ -1084,16 +1237,16 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
}, [activeTask, lastAgentTabIdByTask, selectedSessionId, syncRouteSession]);
useEffect(() => {
if (selectedNewTaskRepoId && viewModel.repos.some((repo) => repo.id === selectedNewTaskRepoId)) {
if (selectedNewTaskRepoId && workspaceRepos.some((repo) => repo.id === selectedNewTaskRepoId)) {
return;
}
const fallbackRepoId =
activeTask?.repoId && viewModel.repos.some((repo) => repo.id === activeTask.repoId) ? activeTask.repoId : (viewModel.repos[0]?.id ?? "");
activeTask?.repoId && workspaceRepos.some((repo) => repo.id === activeTask.repoId) ? activeTask.repoId : (workspaceRepos[0]?.id ?? "");
if (fallbackRepoId !== selectedNewTaskRepoId) {
setSelectedNewTaskRepoId(fallbackRepoId);
}
}, [activeTask?.repoId, selectedNewTaskRepoId, viewModel.repos]);
}, [activeTask?.repoId, selectedNewTaskRepoId, workspaceRepos]);
useEffect(() => {
if (!activeTask) {
@ -1123,35 +1276,38 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
},
"failed_to_auto_create_workbench_session",
);
} finally {
autoCreatingSessionForTaskRef.current.delete(activeTask.id);
// Keep the guard in the set on error to prevent retry storms.
// The guard is cleared when tabs appear (line above) or the task changes.
}
})();
}, [activeTask, selectedSessionId, syncRouteSession, taskWorkbenchClient]);
const createTask = useCallback(() => {
void (async () => {
const repoId = selectedNewTaskRepoId;
if (!repoId) {
throw new Error("Cannot create a task without an available repo");
}
const createTask = useCallback(
(overrideRepoId?: string) => {
void (async () => {
const repoId = overrideRepoId || selectedNewTaskRepoId;
if (!repoId) {
throw new Error("Cannot create a task without an available repo");
}
const { taskId, tabId } = await taskWorkbenchClient.createTask({
repoId,
task: "New task",
model: "gpt-4o",
title: "New task",
});
await navigate({
to: "/workspaces/$workspaceId/tasks/$taskId",
params: {
workspaceId,
taskId,
},
search: { sessionId: tabId ?? undefined },
});
})();
}, [navigate, selectedNewTaskRepoId, workspaceId]);
const { taskId, tabId } = await taskWorkbenchClient.createTask({
repoId,
task: "New task",
model: "gpt-4o",
title: "New task",
});
await navigate({
to: "/workspaces/$workspaceId/tasks/$taskId",
params: {
workspaceId,
taskId,
},
search: { sessionId: tabId ?? undefined },
});
})();
},
[navigate, selectedNewTaskRepoId, workspaceId],
);
const openDiffTab = useCallback(
(path: string) => {
@ -1283,7 +1439,11 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
const onDragMouseDown = useCallback((event: ReactPointerEvent) => {
if (event.button !== 0) return;
// Tauri v2 IPC: invoke start_dragging on the webview window
const ipc = (window as Record<string, unknown>).__TAURI_INTERNALS__ as { invoke: (cmd: string, args?: unknown) => Promise<unknown> } | undefined;
const ipc = (window as unknown as Record<string, unknown>).__TAURI_INTERNALS__ as
| {
invoke: (cmd: string, args?: unknown) => Promise<unknown>;
}
| undefined;
if (ipc?.invoke) {
ipc.invoke("plugin:window|start_dragging").catch(() => {});
}
@ -1359,10 +1519,10 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
transition: sidebarTransition,
}}
>
<div style={{ minWidth: `${leftWidth}px`, flex: 1, display: "flex", flexDirection: "column" }}>
<div style={{ minWidth: `${leftWidth}px`, flex: 1, minHeight: 0, display: "flex", flexDirection: "column" }}>
<Sidebar
projects={projects}
newTaskRepos={viewModel.repos}
newTaskRepos={workspaceRepos}
selectedNewTaskRepoId={selectedNewTaskRepoId}
activeId=""
onSelect={selectTask}
@ -1415,29 +1575,63 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
gap: "12px",
}}
>
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600 }}>Create your first task</h2>
<p style={{ margin: 0, opacity: 0.75 }}>
{viewModel.repos.length > 0
? "Start from the sidebar to create a task on the first available repo."
: "No repos are available in this workspace yet."}
</p>
<button
type="button"
onClick={createTask}
disabled={viewModel.repos.length === 0}
style={{
alignSelf: "center",
border: 0,
borderRadius: "999px",
padding: "10px 18px",
background: viewModel.repos.length > 0 ? t.borderMedium : t.textTertiary,
color: t.textPrimary,
cursor: viewModel.repos.length > 0 ? "pointer" : "not-allowed",
fontWeight: 600,
}}
>
New task
</button>
{activeOrg?.github.syncStatus === "syncing" || activeOrg?.github.syncStatus === "pending" ? (
<>
<div
className={css({
width: "24px",
height: "24px",
border: `2px solid ${t.borderSubtle}`,
borderTopColor: t.textSecondary,
borderRadius: "50%",
animationName: {
from: { transform: "rotate(0deg)" },
to: { transform: "rotate(360deg)" },
} as unknown as string,
animationDuration: "0.8s",
animationIterationCount: "infinite",
animationTimingFunction: "linear",
alignSelf: "center",
})}
/>
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600 }}>Syncing with GitHub</h2>
<p style={{ margin: 0, opacity: 0.75 }}>
Importing repos from @{activeOrg.github.connectedAccount || "GitHub"}...
{activeOrg.github.importedRepoCount > 0 && <> {activeOrg.github.importedRepoCount} repos imported so far.</>}
</p>
</>
) : activeOrg?.github.syncStatus === "error" ? (
<>
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600, color: t.statusError }}>GitHub sync failed</h2>
<p style={{ margin: 0, opacity: 0.75 }}>There was a problem syncing repos from GitHub. Check the dev panel for details.</p>
</>
) : (
<>
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600 }}>Create your first task</h2>
<p style={{ margin: 0, opacity: 0.75 }}>
{workspaceRepos.length > 0
? "Start from the sidebar to create a task on the first available repo."
: "No repos are available in this workspace yet."}
</p>
<button
type="button"
onClick={() => createTask()}
disabled={workspaceRepos.length === 0}
style={{
alignSelf: "center",
border: 0,
borderRadius: "999px",
padding: "10px 18px",
background: workspaceRepos.length > 0 ? t.borderMedium : t.textTertiary,
color: t.textPrimary,
cursor: workspaceRepos.length > 0 ? "pointer" : "not-allowed",
fontWeight: 600,
}}
>
New task
</button>
</>
)}
</div>
</div>
</ScrollBody>
@ -1460,6 +1654,47 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
</div>
</div>
</Shell>
{activeOrg && (activeOrg.github.installationStatus === "install_required" || activeOrg.github.installationStatus === "reconnect_required") && (
<div
className={css({
position: "fixed",
bottom: "8px",
left: "8px",
zIndex: 99998,
display: "flex",
alignItems: "center",
gap: "6px",
padding: "6px 12px",
backgroundColor: t.surfaceElevated,
border: `1px solid ${t.statusError}`,
borderRadius: "6px",
boxShadow: t.shadow,
fontSize: "11px",
color: t.textPrimary,
maxWidth: "360px",
})}
>
<span
className={css({
width: "6px",
height: "6px",
borderRadius: "50%",
backgroundColor: t.statusError,
flexShrink: 0,
})}
/>
<span>
GitHub App {activeOrg.github.installationStatus === "install_required" ? "not installed" : "needs reconnection"} repo sync is unavailable
</span>
</div>
)}
{showDevPanel && (
<DevPanel
workspaceId={workspaceId}
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
organization={activeOrg}
/>
)}
</>
);
}
@ -1479,10 +1714,10 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
transition: sidebarTransition,
}}
>
<div style={{ minWidth: `${leftWidth}px`, flex: 1, display: "flex", flexDirection: "column" }}>
<div style={{ minWidth: `${leftWidth}px`, flex: 1, minHeight: 0, display: "flex", flexDirection: "column" }}>
<Sidebar
projects={projects}
newTaskRepos={viewModel.repos}
newTaskRepos={workspaceRepos}
selectedNewTaskRepoId={selectedNewTaskRepoId}
activeId={activeTask.id}
onSelect={selectTask}
@ -1530,7 +1765,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
>
<Sidebar
projects={projects}
newTaskRepos={viewModel.repos}
newTaskRepos={workspaceRepos}
selectedNewTaskRepoId={selectedNewTaskRepoId}
activeId={activeTask.id}
onSelect={(id) => {
@ -1610,6 +1845,47 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
</div>
</div>
</div>
{activeOrg && (activeOrg.github.installationStatus === "install_required" || activeOrg.github.installationStatus === "reconnect_required") && (
<div
className={css({
position: "fixed",
bottom: "8px",
left: "8px",
zIndex: 99998,
display: "flex",
alignItems: "center",
gap: "6px",
padding: "6px 12px",
backgroundColor: t.surfaceElevated,
border: `1px solid ${t.statusError}`,
borderRadius: "6px",
boxShadow: t.shadow,
fontSize: "11px",
color: t.textPrimary,
maxWidth: "360px",
})}
>
<span
className={css({
width: "6px",
height: "6px",
borderRadius: "50%",
backgroundColor: t.statusError,
flexShrink: 0,
})}
/>
<span>
GitHub App {activeOrg.github.installationStatus === "install_required" ? "not installed" : "needs reconnection"} repo sync is unavailable
</span>
</div>
)}
{showDevPanel && (
<DevPanel
workspaceId={workspaceId}
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
organization={activeOrg}
/>
)}
</Shell>
</>
);

View file

@ -1,8 +1,9 @@
import { memo, useCallback, useEffect, useLayoutEffect, useRef, useState } from "react";
import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState } from "react";
import { createPortal } from "react-dom";
import { useNavigate } from "@tanstack/react-router";
import { useStyletron } from "baseui";
import { LabelSmall, LabelXSmall } from "baseui/typography";
import { Select, type Value } from "baseui/select";
import {
ChevronDown,
ChevronRight,
@ -26,6 +27,17 @@ import type { FoundryTokens } from "../../styles/tokens";
const PROJECT_COLORS = ["#6366f1", "#f59e0b", "#10b981", "#ef4444", "#8b5cf6", "#ec4899", "#06b6d4", "#f97316"];
/** Strip the org prefix (e.g. "rivet-dev/") when all repos share the same org. */
function stripCommonOrgPrefix(label: string, repos: Array<{ label: string }>): string {
  const separator = label.indexOf("/");
  if (separator === -1) {
    return label; // no org segment to strip
  }
  const orgPrefix = label.substring(0, separator + 1);
  for (const repo of repos) {
    if (!repo.label.startsWith(orgPrefix)) {
      // At least one repo lives under a different org — keep the full label.
      return label;
    }
  }
  return label.substring(separator + 1);
}
function projectInitial(label: string): string {
const parts = label.split("/");
const name = parts[parts.length - 1] ?? label;
@ -61,7 +73,7 @@ export const Sidebar = memo(function Sidebar({
selectedNewTaskRepoId: string;
activeId: string;
onSelect: (id: string) => void;
onCreate: () => void;
onCreate: (repoId?: string) => void;
onSelectNewTaskRepo: (repoId: string) => void;
onMarkUnread: (id: string) => void;
onRenameTask: (id: string) => void;
@ -137,19 +149,8 @@ export const Sidebar = memo(function Sidebar({
};
}, [drag, onReorderProjects, onReorderTasks]);
const [createMenuOpen, setCreateMenuOpen] = useState(false);
const createMenuRef = useRef<HTMLDivElement>(null);
useEffect(() => {
if (!createMenuOpen) return;
function handleClick(event: MouseEvent) {
if (createMenuRef.current && !createMenuRef.current.contains(event.target as Node)) {
setCreateMenuOpen(false);
}
}
document.addEventListener("mousedown", handleClick);
return () => document.removeEventListener("mousedown", handleClick);
}, [createMenuOpen]);
const [createSelectOpen, setCreateSelectOpen] = useState(false);
const selectOptions = useMemo(() => newTaskRepos.map((repo) => ({ id: repo.id, label: stripCommonOrgPrefix(repo.label, newTaskRepos) })), [newTaskRepos]);
return (
<SPanel>
@ -232,7 +233,99 @@ export const Sidebar = memo(function Sidebar({
<PanelLeft size={14} />
</div>
) : null}
<div ref={createMenuRef} className={css({ position: "relative", flexShrink: 0 })}>
{createSelectOpen ? (
<div className={css({ flex: 1, minWidth: 0 })}>
<Select
options={selectOptions}
value={[]}
placeholder="Search repos..."
type="search"
openOnClick
autoFocus
onChange={({ value }: { value: Value }) => {
const selected = value[0];
if (selected) {
onSelectNewTaskRepo(selected.id as string);
setCreateSelectOpen(false);
onCreate(selected.id as string);
}
}}
onClose={() => setCreateSelectOpen(false)}
overrides={{
Root: {
style: {
width: "100%",
},
},
ControlContainer: {
style: {
backgroundColor: t.surfaceTertiary,
borderTopColor: t.borderSubtle,
borderBottomColor: t.borderSubtle,
borderLeftColor: t.borderSubtle,
borderRightColor: t.borderSubtle,
borderTopWidth: "1px",
borderBottomWidth: "1px",
borderLeftWidth: "1px",
borderRightWidth: "1px",
borderTopLeftRadius: "6px",
borderTopRightRadius: "6px",
borderBottomLeftRadius: "6px",
borderBottomRightRadius: "6px",
minHeight: "28px",
paddingLeft: "8px",
},
},
ValueContainer: {
style: {
paddingTop: "0px",
paddingBottom: "0px",
},
},
Input: {
style: {
fontSize: "12px",
color: t.textPrimary,
},
},
Placeholder: {
style: {
fontSize: "12px",
color: t.textMuted,
},
},
Dropdown: {
style: {
backgroundColor: t.surfaceElevated,
borderTopColor: t.borderDefault,
borderBottomColor: t.borderDefault,
borderLeftColor: t.borderDefault,
borderRightColor: t.borderDefault,
maxHeight: "min(320px, 50vh)",
},
},
DropdownListItem: {
style: {
fontSize: "12px",
paddingTop: "6px",
paddingBottom: "6px",
},
},
IconsContainer: {
style: {
paddingRight: "4px",
},
},
SearchIconContainer: {
style: {
paddingLeft: "0px",
paddingRight: "4px",
},
},
}}
/>
</div>
) : (
<div
role="button"
tabIndex={0}
@ -241,9 +334,9 @@ export const Sidebar = memo(function Sidebar({
if (newTaskRepos.length === 0) return;
if (newTaskRepos.length === 1) {
onSelectNewTaskRepo(newTaskRepos[0]!.id);
onCreate();
onCreate(newTaskRepos[0]!.id);
} else {
setCreateMenuOpen((prev) => !prev);
setCreateSelectOpen(true);
}
}}
onKeyDown={(event) => {
@ -251,9 +344,9 @@ export const Sidebar = memo(function Sidebar({
if (event.key === "Enter" || event.key === " ") {
if (newTaskRepos.length === 1) {
onSelectNewTaskRepo(newTaskRepos[0]!.id);
onCreate();
onCreate(newTaskRepos[0]!.id);
} else {
setCreateMenuOpen((prev) => !prev);
setCreateSelectOpen(true);
}
}
}}
@ -275,80 +368,7 @@ export const Sidebar = memo(function Sidebar({
>
<Plus size={14} style={{ display: "block" }} />
</div>
{createMenuOpen && newTaskRepos.length > 1 ? (
<div
className={css({
position: "absolute",
top: "100%",
right: 0,
marginTop: "4px",
zIndex: 9999,
minWidth: "200px",
borderRadius: "10px",
border: `1px solid ${t.borderDefault}`,
backgroundColor: t.surfaceElevated,
boxShadow: `${t.shadow}, 0 0 0 1px ${t.interactiveSubtle}`,
padding: "4px",
display: "flex",
flexDirection: "column",
gap: "2px",
maxHeight: "240px",
overflowY: "auto",
})}
>
{newTaskRepos.map((repo) => (
<button
key={repo.id}
type="button"
onClick={() => {
onSelectNewTaskRepo(repo.id);
setCreateMenuOpen(false);
onCreate();
}}
className={css({
display: "flex",
alignItems: "center",
gap: "10px",
width: "100%",
padding: "8px 12px",
borderRadius: "6px",
border: "none",
background: "transparent",
color: t.textSecondary,
cursor: "pointer",
fontSize: "13px",
fontWeight: 400,
textAlign: "left",
transition: "background 200ms ease, color 200ms ease",
":hover": {
backgroundColor: t.interactiveHover,
color: t.textPrimary,
},
})}
>
<span
className={css({
width: "18px",
height: "18px",
borderRadius: "4px",
background: `linear-gradient(135deg, ${projectIconColor(repo.label)}, ${projectIconColor(repo.label + "x")})`,
display: "flex",
alignItems: "center",
justifyContent: "center",
fontSize: "9px",
fontWeight: 700,
color: t.textOnAccent,
flexShrink: 0,
})}
>
{projectInitial(repo.label)}
</span>
<span className={css({ flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })}>{repo.label}</span>
</button>
))}
</div>
) : null}
</div>
)}
</PanelHeaderBar>
<ScrollBody>
<div className={css({ padding: "8px", display: "flex", flexDirection: "column", gap: "4px" })}>
@ -458,7 +478,7 @@ export const Sidebar = memo(function Sidebar({
whiteSpace: "nowrap",
}}
>
{project.label}
{stripCommonOrgPrefix(project.label, projects)}
</LabelSmall>
</div>
<div className={css({ display: "flex", alignItems: "center", gap: "4px", flexShrink: 0 })}>
@ -468,7 +488,7 @@ export const Sidebar = memo(function Sidebar({
e.stopPropagation();
setHoveredProjectId(null);
onSelectNewTaskRepo(project.id);
onCreate();
onCreate(project.id);
}}
onMouseDown={(e) => e.stopPropagation()}
className={css({

View file

@ -1,4 +1,4 @@
import type { SandboxProcessRecord } from "@sandbox-agent/foundry-client";
import { type SandboxProcessRecord, useInterest } from "@sandbox-agent/foundry-client";
import { ProcessTerminal } from "@sandbox-agent/react";
import { useQuery } from "@tanstack/react-query";
import { useStyletron } from "baseui";
@ -7,6 +7,7 @@ import { ChevronDown, ChevronUp, Plus, SquareTerminal, Trash2 } from "lucide-rea
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { SandboxAgent } from "sandbox-agent";
import { backendClient } from "../../lib/backend";
import { interestManager } from "../../lib/interest";
interface TerminalPaneProps {
workspaceId: string;
@ -135,6 +136,9 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
setProcessTabs((prev) => {
const next = [...prev];
const [moved] = next.splice(d.fromIdx, 1);
if (!moved) {
return prev;
}
next.splice(d.overIdx!, 0, moved);
return next;
});
@ -180,28 +184,31 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
[listWidth],
);
const taskQuery = useQuery({
queryKey: ["mock-layout", "task", workspaceId, taskId],
enabled: Boolean(taskId),
staleTime: 1_000,
refetchOnWindowFocus: true,
refetchInterval: (query) => (query.state.data?.activeSandboxId ? false : 2_000),
queryFn: async () => {
if (!taskId) {
throw new Error("Cannot load terminal state without a task.");
}
return await backendClient.getTask(workspaceId, taskId);
},
});
const workspaceState = useInterest(interestManager, "workspace", { workspaceId });
const taskSummary = useMemo(
() => (taskId ? (workspaceState.data?.taskSummaries.find((task) => task.id === taskId) ?? null) : null),
[taskId, workspaceState.data?.taskSummaries],
);
const taskState = useInterest(
interestManager,
"task",
taskSummary
? {
workspaceId,
repoId: taskSummary.repoId,
taskId: taskSummary.id,
}
: null,
);
const activeSandbox = useMemo(() => {
const task = taskQuery.data;
const task = taskState.data;
if (!task?.activeSandboxId) {
return null;
}
return task.sandboxes.find((sandbox) => sandbox.sandboxId === task.activeSandboxId) ?? null;
}, [taskQuery.data]);
}, [taskState.data]);
const connectionQuery = useQuery({
queryKey: ["mock-layout", "sandbox-agent-connection", workspaceId, activeSandbox?.providerId ?? "", activeSandbox?.sandboxId ?? ""],
@ -217,30 +224,17 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
},
});
const processesQuery = useQuery({
queryKey: ["mock-layout", "sandbox-processes", workspaceId, activeSandbox?.providerId ?? "", activeSandbox?.sandboxId ?? ""],
enabled: Boolean(activeSandbox?.sandboxId),
staleTime: 0,
refetchOnWindowFocus: true,
refetchInterval: activeSandbox?.sandboxId ? 3_000 : false,
queryFn: async () => {
if (!activeSandbox) {
throw new Error("Cannot load processes without an active sandbox.");
}
return await backendClient.listSandboxProcesses(workspaceId, activeSandbox.providerId, activeSandbox.sandboxId);
},
});
useEffect(() => {
if (!activeSandbox?.sandboxId) {
return;
}
return backendClient.subscribeSandboxProcesses(workspaceId, activeSandbox.providerId, activeSandbox.sandboxId, () => {
void processesQuery.refetch();
});
}, [activeSandbox?.providerId, activeSandbox?.sandboxId, processesQuery, workspaceId]);
const processesState = useInterest(
interestManager,
"sandboxProcesses",
activeSandbox
? {
workspaceId,
providerId: activeSandbox.providerId,
sandboxId: activeSandbox.sandboxId,
}
: null,
);
useEffect(() => {
if (!connectionQuery.data) {
@ -311,7 +305,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
setProcessTabs([]);
}, [taskId]);
const processes = processesQuery.data?.processes ?? [];
const processes = processesState.data ?? [];
const openTerminalTab = useCallback((process: SandboxProcessRecord) => {
setProcessTabs((current) => {
@ -357,12 +351,11 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
sandboxId: activeSandbox.sandboxId,
request: defaultShellRequest(activeSandbox.cwd),
});
await processesQuery.refetch();
openTerminalTab(created);
} finally {
setCreatingProcess(false);
}
}, [activeSandbox, openTerminalTab, processesQuery, workspaceId]);
}, [activeSandbox, openTerminalTab, workspaceId]);
const processTabsById = useMemo(() => new Map(processTabs.map((tab) => [tab.id, tab])), [processTabs]);
const activeProcessTab = activeTabId ? (processTabsById.get(activeTabId) ?? null) : null;
@ -462,9 +455,6 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
height: "100%",
padding: "18px 16px 14px",
}}
onExit={() => {
void processesQuery.refetch();
}}
/>
</div>
);
@ -481,7 +471,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
);
}
if (taskQuery.isLoading) {
if (taskState.status === "loading") {
return (
<div className={emptyBodyClassName}>
<div className={emptyCopyClassName}>
@ -491,12 +481,12 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl
);
}
if (taskQuery.error) {
if (taskState.error) {
return (
<div className={emptyBodyClassName}>
<div className={emptyCopyClassName}>
<strong>Could not load task state.</strong>
<span>{taskQuery.error.message}</span>
<span>{taskState.error.message}</span>
</div>
</div>
);

View file

@ -56,7 +56,11 @@ function DesktopDragRegion() {
const isDesktop = !!import.meta.env.VITE_DESKTOP;
const onDragMouseDown = useCallback((event: React.PointerEvent) => {
if (event.button !== 0) return;
const ipc = (window as Record<string, unknown>).__TAURI_INTERNALS__ as { invoke: (cmd: string, args?: unknown) => Promise<unknown> } | undefined;
const ipc = (window as unknown as Record<string, unknown>).__TAURI_INTERNALS__ as
| {
invoke: (cmd: string, args?: unknown) => Promise<unknown>;
}
| undefined;
if (ipc?.invoke) {
ipc.invoke("plugin:window|start_dragging").catch(() => {});
}

View file

@ -1,6 +1,6 @@
import { useEffect, useMemo, useState, type ReactNode } from "react";
import type { AgentType, TaskRecord, TaskSummary, RepoBranchRecord, RepoOverview, RepoStackAction } from "@sandbox-agent/foundry-shared";
import { groupTaskStatus, type SandboxSessionEventRecord } from "@sandbox-agent/foundry-client";
import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared";
import { useInterest } from "@sandbox-agent/foundry-client";
import { useMutation, useQuery } from "@tanstack/react-query";
import { Link, useNavigate } from "@tanstack/react-router";
import { Button } from "baseui/button";
@ -17,6 +17,7 @@ import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizon
import { formatDiffStat } from "../features/tasks/model";
import { buildTranscript, resolveSessionSelection } from "../features/sessions/model";
import { backendClient } from "../lib/backend";
import { interestManager } from "../lib/interest";
interface WorkspaceDashboardProps {
workspaceId: string;
@ -96,11 +97,9 @@ const AGENT_OPTIONS: SelectItem[] = [
{ id: "claude", label: "claude" },
];
function statusKind(status: TaskSummary["status"]): StatusTagKind {
const group = groupTaskStatus(status);
if (group === "running") return "positive";
if (group === "queued") return "warning";
if (group === "error") return "negative";
// Map a coarse workbench task status onto the badge color used by StatusTag.
function statusKind(status: WorkbenchTaskStatus): StatusTagKind {
  switch (status) {
    case "running":
      return "positive";
    case "new":
      return "warning";
    default:
      return "neutral";
  }
}
@ -135,26 +134,6 @@ function branchTestIdToken(value: string): string {
return token || "branch";
}
function useSessionEvents(
task: TaskRecord | null,
sessionId: string | null,
): ReturnType<typeof useQuery<{ items: SandboxSessionEventRecord[]; nextCursor?: string }, Error>> {
return useQuery({
queryKey: ["workspace", task?.workspaceId ?? "", "session", task?.taskId ?? "", sessionId ?? ""],
enabled: Boolean(task?.activeSandboxId && sessionId),
refetchInterval: 2_500,
queryFn: async () => {
if (!task?.activeSandboxId || !sessionId) {
return { items: [] };
}
return backendClient.listSandboxSessionEvents(task.workspaceId, task.providerId, task.activeSandboxId, {
sessionId,
limit: 120,
});
},
});
}
function repoSummary(overview: RepoOverview | undefined): {
total: number;
mapped: number;
@ -382,37 +361,26 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
});
const [createError, setCreateError] = useState<string | null>(null);
const tasksQuery = useQuery({
queryKey: ["workspace", workspaceId, "tasks"],
queryFn: async () => backendClient.listTasks(workspaceId),
refetchInterval: 2_500,
});
const taskDetailQuery = useQuery({
queryKey: ["workspace", workspaceId, "task-detail", selectedTaskId],
enabled: Boolean(selectedTaskId && !repoOverviewMode),
refetchInterval: 2_500,
queryFn: async () => {
if (!selectedTaskId) {
throw new Error("No task selected");
}
return backendClient.getTask(workspaceId, selectedTaskId);
},
});
const reposQuery = useQuery({
queryKey: ["workspace", workspaceId, "repos"],
queryFn: async () => backendClient.listRepos(workspaceId),
refetchInterval: 10_000,
});
const repos = reposQuery.data ?? [];
const workspaceState = useInterest(interestManager, "workspace", { workspaceId });
const repos = workspaceState.data?.repos ?? [];
const rows = workspaceState.data?.taskSummaries ?? [];
const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rows, selectedTaskId]);
const taskState = useInterest(
interestManager,
"task",
!repoOverviewMode && selectedSummary
? {
workspaceId,
repoId: selectedSummary.repoId,
taskId: selectedSummary.id,
}
: null,
);
const activeRepoId = selectedRepoId ?? createRepoId;
const repoOverviewQuery = useQuery({
queryKey: ["workspace", workspaceId, "repo-overview", activeRepoId],
enabled: Boolean(repoOverviewMode && activeRepoId),
refetchInterval: 5_000,
queryFn: async () => {
if (!activeRepoId) {
throw new Error("No repo selected");
@ -427,7 +395,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
return;
}
if (!createRepoId && repos.length > 0) {
setCreateRepoId(repos[0]!.repoId);
setCreateRepoId(repos[0]!.id);
}
}, [createRepoId, repoOverviewMode, repos, selectedRepoId]);
@ -439,9 +407,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
}
}, [newAgentType]);
const rows = tasksQuery.data ?? [];
const repoGroups = useMemo(() => {
const byRepo = new Map<string, TaskSummary[]>();
const byRepo = new Map<string, typeof rows>();
for (const row of rows) {
const bucket = byRepo.get(row.repoId);
if (bucket) {
@ -453,12 +420,12 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
return repos
.map((repo) => {
const tasks = [...(byRepo.get(repo.repoId) ?? [])].sort((a, b) => b.updatedAt - a.updatedAt);
const latestTaskAt = tasks[0]?.updatedAt ?? 0;
const tasks = [...(byRepo.get(repo.id) ?? [])].sort((a, b) => b.updatedAtMs - a.updatedAtMs);
const latestTaskAt = tasks[0]?.updatedAtMs ?? 0;
return {
repoId: repo.repoId,
repoRemote: repo.remoteUrl,
latestActivityAt: Math.max(repo.updatedAt, latestTaskAt),
repoId: repo.id,
repoLabel: repo.label,
latestActivityAt: Math.max(repo.latestActivityMs, latestTaskAt),
tasks,
};
})
@ -466,13 +433,11 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
if (a.latestActivityAt !== b.latestActivityAt) {
return b.latestActivityAt - a.latestActivityAt;
}
return a.repoRemote.localeCompare(b.repoRemote);
return a.repoLabel.localeCompare(b.repoLabel);
});
}, [repos, rows]);
const selectedSummary = useMemo(() => rows.find((row) => row.taskId === selectedTaskId) ?? rows[0] ?? null, [rows, selectedTaskId]);
const selectedForSession = repoOverviewMode ? null : (taskDetailQuery.data ?? null);
const selectedForSession = repoOverviewMode ? null : (taskState.data ?? null);
const activeSandbox = useMemo(() => {
if (!selectedForSession) return null;
@ -488,7 +453,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
to: "/workspaces/$workspaceId/tasks/$taskId",
params: {
workspaceId,
taskId: rows[0]!.taskId,
taskId: rows[0]!.id,
},
search: { sessionId: undefined },
replace: true,
@ -499,35 +464,39 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
useEffect(() => {
setActiveSessionId(null);
setDraft("");
}, [selectedForSession?.taskId]);
}, [selectedForSession?.id]);
const sessionsQuery = useQuery({
queryKey: ["workspace", workspaceId, "sandbox", activeSandbox?.sandboxId ?? "", "sessions"],
enabled: Boolean(activeSandbox?.sandboxId && selectedForSession),
refetchInterval: 3_000,
queryFn: async () => {
if (!activeSandbox?.sandboxId || !selectedForSession) {
return { items: [] };
}
return backendClient.listSandboxSessions(workspaceId, activeSandbox.providerId, activeSandbox.sandboxId, {
limit: 30,
});
},
});
const sessionRows = sessionsQuery.data?.items ?? [];
const sessionRows = selectedForSession?.sessionsSummary ?? [];
const sessionSelection = useMemo(
() =>
resolveSessionSelection({
explicitSessionId: activeSessionId,
taskSessionId: selectedForSession?.activeSessionId ?? null,
sessions: sessionRows,
sessions: sessionRows.map((session) => ({
id: session.id,
agent: session.agent,
agentSessionId: session.sessionId ?? "",
lastConnectionId: "",
createdAt: 0,
status: session.status,
})),
}),
[activeSessionId, selectedForSession?.activeSessionId, sessionRows],
);
const resolvedSessionId = sessionSelection.sessionId;
const staleSessionId = sessionSelection.staleSessionId;
const eventsQuery = useSessionEvents(selectedForSession, resolvedSessionId);
const sessionState = useInterest(
interestManager,
"session",
selectedForSession && resolvedSessionId
? {
workspaceId,
repoId: selectedForSession.repoId,
taskId: selectedForSession.id,
sessionId: resolvedSessionId,
}
: null,
);
const canStartSession = Boolean(selectedForSession && activeSandbox?.sandboxId);
const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => {
@ -546,9 +515,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
const createSession = useMutation({
mutationFn: async () => startSessionFromTask(),
onSuccess: async (session) => {
onSuccess: (session) => {
setActiveSessionId(session.id);
await Promise.all([sessionsQuery.refetch(), eventsQuery.refetch()]);
},
});
@ -558,7 +526,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
}
const created = await startSessionFromTask();
setActiveSessionId(created.id);
await sessionsQuery.refetch();
return created.id;
};
@ -576,13 +543,12 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
prompt,
});
},
onSuccess: async () => {
onSuccess: () => {
setDraft("");
await Promise.all([sessionsQuery.refetch(), eventsQuery.refetch()]);
},
});
const transcript = buildTranscript(eventsQuery.data?.items ?? []);
const transcript = buildTranscript(sessionState.data?.transcript ?? []);
const canCreateTask = createRepoId.trim().length > 0 && newTask.trim().length > 0;
const createTask = useMutation({
@ -613,8 +579,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
setNewBranchName("");
setCreateOnBranch(null);
setCreateTaskOpen(false);
await tasksQuery.refetch();
await repoOverviewQuery.refetch();
await navigate({
to: "/workspaces/$workspaceId/tasks/$taskId",
params: {
@ -641,7 +605,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
setAddRepoError(null);
setAddRepoRemote("");
setAddRepoOpen(false);
await reposQuery.refetch();
setCreateRepoId(created.repoId);
if (repoOverviewMode) {
await navigate({
@ -679,7 +642,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
setStackActionMessage(null);
setStackActionError(result.message);
}
await Promise.all([repoOverviewQuery.refetch(), tasksQuery.refetch()]);
await repoOverviewQuery.refetch();
},
onError: (error) => {
setStackActionMessage(null);
@ -698,7 +661,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
setCreateTaskOpen(true);
};
const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.repoId, label: repo.remoteUrl })), [repos]);
const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [repos]);
const selectedRepoOption = repoOptions.find((option) => option.id === createRepoId) ?? null;
const selectedAgentOption = useMemo(() => createOption(AGENT_OPTIONS.find((option) => option.id === newAgentType) ?? AGENT_OPTIONS[0]!), [newAgentType]);
const selectedFilterOption = useMemo(
@ -706,7 +669,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
[overviewFilter],
);
const sessionOptions = useMemo(
() => sessionRows.map((session) => createOption({ id: session.id, label: `${session.id} (${session.status ?? "running"})` })),
() => sessionRows.map((session) => createOption({ id: session.id, label: `${session.sessionName} (${session.status})` })),
[sessionRows],
);
const selectedSessionOption = sessionOptions.find((option) => option.id === resolvedSessionId) ?? null;
@ -839,13 +802,15 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
</PanelHeader>
<ScrollBody>
{tasksQuery.isLoading ? (
{workspaceState.status === "loading" ? (
<>
<Skeleton rows={3} height="72px" />
</>
) : null}
{!tasksQuery.isLoading && repoGroups.length === 0 ? <EmptyState>No repos or tasks yet. Add a repo to start a workspace.</EmptyState> : null}
{workspaceState.status !== "loading" && repoGroups.length === 0 ? (
<EmptyState>No repos or tasks yet. Add a repo to start a workspace.</EmptyState>
) : null}
{repoGroups.map((group) => (
<section
@ -876,7 +841,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
})}
data-testid={group.repoId === activeRepoId ? "repo-overview-open" : `repo-overview-open-${group.repoId}`}
>
{group.repoRemote}
{group.repoLabel}
</Link>
<div
@ -887,14 +852,14 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
})}
>
{group.tasks
.filter((task) => task.status !== "archived" || task.taskId === selectedSummary?.taskId)
.filter((task) => task.status !== "archived" || task.id === selectedSummary?.id)
.map((task) => {
const isActive = !repoOverviewMode && task.taskId === selectedSummary?.taskId;
const isActive = !repoOverviewMode && task.id === selectedSummary?.id;
return (
<Link
key={task.taskId}
key={task.id}
to="/workspaces/$workspaceId/tasks/$taskId"
params={{ workspaceId, taskId: task.taskId }}
params={{ workspaceId, taskId: task.id }}
search={{ sessionId: undefined }}
className={css({
display: "block",
@ -927,7 +892,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
color="contentSecondary"
overrides={{ Block: { style: { overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" } } }}
>
{task.branchName ?? "Determining branch..."}
{task.branch ?? "Determining branch..."}
</ParagraphSmall>
<StatusPill kind={statusKind(task.status)}>{task.status}</StatusPill>
</div>
@ -1396,11 +1361,11 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
backgroundColor: theme.colors.backgroundPrimary,
})}
>
{eventsQuery.isLoading ? <Skeleton rows={2} height="90px" /> : null}
{resolvedSessionId && sessionState.status === "loading" ? <Skeleton rows={2} height="90px" /> : null}
{transcript.length === 0 && !eventsQuery.isLoading ? (
{transcript.length === 0 && !(resolvedSessionId && sessionState.status === "loading") ? (
<EmptyState testId="session-transcript-empty">
{groupTaskStatus(selectedForSession.status) === "error" && selectedForSession.statusMessage
{selectedForSession.runtimeStatus === "error" && selectedForSession.statusMessage
? `Session failed: ${selectedForSession.statusMessage}`
: !activeSandbox?.sandboxId
? selectedForSession.statusMessage
@ -1597,7 +1562,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
gap: theme.sizing.scale300,
})}
>
<MetaRow label="Task" value={selectedForSession.taskId} mono />
<MetaRow label="Task" value={selectedForSession.id} mono />
<MetaRow label="Sandbox" value={selectedForSession.activeSandboxId ?? "-"} mono />
<MetaRow label="Session" value={resolvedSessionId ?? "-"} mono />
</div>
@ -1615,7 +1580,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
gap: theme.sizing.scale300,
})}
>
<MetaRow label="Branch" value={selectedForSession.branchName ?? "-"} mono />
<MetaRow label="Branch" value={selectedForSession.branch ?? "-"} mono />
<MetaRow label="Diff" value={formatDiffStat(selectedForSession.diffStat)} />
<MetaRow label="PR" value={selectedForSession.prUrl ?? "-"} />
<MetaRow label="Review" value={selectedForSession.reviewStatus ?? "-"} />
@ -1641,7 +1606,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
</div>
</section>
{groupTaskStatus(selectedForSession.status) === "error" ? (
{selectedForSession.runtimeStatus === "error" ? (
<div
className={css({
padding: "12px",

View file

@ -0,0 +1,5 @@
import { MockInterestManager, RemoteInterestManager } from "@sandbox-agent/foundry-client";
import { backendClient } from "./backend";
import { frontendClientMode } from "./env";
// Module-level singleton: in "mock" mode use the in-memory interest manager,
// otherwise a remote manager bound to the shared backend client.
export const interestManager = frontendClientMode === "mock" ? new MockInterestManager() : new RemoteInterestManager(backendClient);

View file

@ -1,23 +1,100 @@
import { useSyncExternalStore } from "react";
import {
createFoundryAppClient,
useInterest,
currentFoundryOrganization,
currentFoundryUser,
eligibleFoundryOrganizations,
type FoundryAppClient,
} from "@sandbox-agent/foundry-client";
import type { FoundryAppSnapshot, FoundryOrganization } from "@sandbox-agent/foundry-shared";
import type { FoundryAppSnapshot, FoundryBillingPlanId, FoundryOrganization, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared";
import { backendClient } from "./backend";
import { interestManager } from "./interest";
import { frontendClientMode } from "./env";
const REMOTE_APP_SESSION_STORAGE_KEY = "sandbox-agent-foundry:remote-app-session";
const appClient: FoundryAppClient = createFoundryAppClient({
// Fallback snapshot used before the "app" topic delivers real data:
// signed out, no users/organizations, starter-repo onboarding still pending.
const EMPTY_APP_SNAPSHOT: FoundryAppSnapshot = {
  auth: { status: "signed_out", currentUserId: null },
  activeOrganizationId: null,
  onboarding: {
    starterRepo: {
      repoFullName: "rivet-dev/sandbox-agent",
      repoUrl: "https://github.com/rivet-dev/sandbox-agent",
      status: "pending",
      starredAt: null,
      skippedAt: null,
    },
  },
  users: [],
  organizations: [],
};
// Client built via the factory; the backend client is only passed through
// in "remote" mode, all other modes construct a local/mock client.
const legacyAppClient: FoundryAppClient = createFoundryAppClient({
  mode: frontendClientMode,
  backend: frontendClientMode === "remote" ? backendClient : undefined,
});
// App client backed by the realtime interest manager: reads come from the
// cached "app" topic snapshot, while every mutation delegates straight to
// the corresponding backendClient call.
const remoteAppClient: FoundryAppClient = {
  getSnapshot(): FoundryAppSnapshot {
    // Until the app topic delivers its first payload, serve the empty snapshot.
    return interestManager.getSnapshot("app", {}) ?? EMPTY_APP_SNAPSHOT;
  },
  subscribe(listener: () => void): () => void {
    return interestManager.subscribe("app", {}, listener);
  },
  async signInWithGithub(userId?: string): Promise<void> {
    // userId is part of the FoundryAppClient interface but unused by the remote flow.
    void userId;
    await backendClient.signInWithGithub();
  },
  async signOut(): Promise<void> {
    await backendClient.signOutApp();
  },
  async skipStarterRepo(): Promise<void> {
    await backendClient.skipAppStarterRepo();
  },
  async starStarterRepo(organizationId: string): Promise<void> {
    await backendClient.starAppStarterRepo(organizationId);
  },
  async selectOrganization(organizationId: string): Promise<void> {
    await backendClient.selectAppOrganization(organizationId);
  },
  async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<void> {
    await backendClient.updateAppOrganizationProfile(input);
  },
  async triggerGithubSync(organizationId: string): Promise<void> {
    // Note the naming difference: "GitHub sync" on the client maps to repo import on the backend.
    await backendClient.triggerAppRepoImport(organizationId);
  },
  async completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise<void> {
    await backendClient.completeAppHostedCheckout(organizationId, planId);
  },
  async openBillingPortal(organizationId: string): Promise<void> {
    await backendClient.openAppBillingPortal(organizationId);
  },
  async cancelScheduledRenewal(organizationId: string): Promise<void> {
    await backendClient.cancelAppScheduledRenewal(organizationId);
  },
  async resumeSubscription(organizationId: string): Promise<void> {
    await backendClient.resumeAppSubscription(organizationId);
  },
  async reconnectGithub(organizationId: string): Promise<void> {
    await backendClient.reconnectAppGithub(organizationId);
  },
  async recordSeatUsage(workspaceId: string): Promise<void> {
    await backendClient.recordAppSeatUsage(workspaceId);
  },
};
const appClient: FoundryAppClient = frontendClientMode === "remote" ? remoteAppClient : legacyAppClient;
/**
 * React hook returning the current app snapshot.
 * Remote mode reads from the interest manager topic; other modes subscribe
 * to the legacy app client as an external store.
 */
export function useMockAppSnapshot(): FoundryAppSnapshot {
  if (frontendClientMode === "remote") {
    // NOTE(review): hook called inside a conditional — acceptable only because
    // frontendClientMode is a module-level constant that never changes between
    // renders; confirm that assumption holds.
    const app = useInterest(interestManager, "app", {});
    if (app.status !== "loading") {
      // NOTE(review): firstSnapshotDelivered is not declared anywhere in this
      // view — confirm it exists at module scope or remove this assignment.
      firstSnapshotDelivered = true;
    }
    return app.data ?? EMPTY_APP_SNAPSHOT;
  }
  // Non-remote: bridge the app client into React via useSyncExternalStore
  // (same getSnapshot is used for the server-snapshot argument).
  return useSyncExternalStore(appClient.subscribe.bind(appClient), appClient.getSnapshot.bind(appClient), appClient.getSnapshot.bind(appClient));
}

View file

@ -1,20 +0,0 @@
import { createTaskWorkbenchClient, type TaskWorkbenchClient } from "@sandbox-agent/foundry-client";
import { backendClient } from "./backend";
import { frontendClientMode } from "./env";
/** Per-workspace cache so repeat lookups reuse one client instance. */
const workbenchClients = new Map<string, TaskWorkbenchClient>();
/**
 * Returns the TaskWorkbenchClient for the given workspace, creating and
 * caching a new one on first use.
 */
export function getTaskWorkbenchClient(workspaceId: string): TaskWorkbenchClient {
  const cached = workbenchClients.get(workspaceId);
  if (cached !== undefined) {
    return cached;
  }
  const client = createTaskWorkbenchClient({
    mode: frontendClientMode,
    backend: backendClient,
    workspaceId,
  });
  workbenchClients.set(workspaceId, client);
  return client;
}

View file

@ -12,6 +12,7 @@
},
"dependencies": {
"pino": "^10.3.1",
"sandbox-agent": "workspace:*",
"zod": "^4.1.5"
},
"devDependencies": {

View file

@ -172,6 +172,23 @@ export const RepoOverviewSchema = z.object({
baseRef: z.string().nullable(),
stackAvailable: z.boolean(),
fetchedAt: z.number().int(),
branchSyncAt: z.number().int().nullable(),
prSyncAt: z.number().int().nullable(),
branchSyncStatus: z.enum(["pending", "syncing", "synced", "error"]),
prSyncStatus: z.enum(["pending", "syncing", "synced", "error"]),
repoActionJobs: z.array(
z.object({
jobId: z.string().min(1),
action: z.enum(["sync_repo", "restack_repo", "restack_subtree", "rebase_branch", "reparent_branch"]),
branchName: z.string().nullable(),
parentBranch: z.string().nullable(),
status: z.enum(["queued", "running", "completed", "error"]),
message: z.string().min(1),
createdAt: z.number().int(),
updatedAt: z.number().int(),
completedAt: z.number().int().nullable(),
}),
),
branches: z.array(RepoBranchRecordSchema),
});
export type RepoOverview = z.infer<typeof RepoOverviewSchema>;
@ -189,8 +206,10 @@ export const RepoStackActionInputSchema = z.object({
export type RepoStackActionInput = z.infer<typeof RepoStackActionInputSchema>;
export const RepoStackActionResultSchema = z.object({
jobId: z.string().min(1).nullable().optional(),
action: RepoStackActionSchema,
executed: z.boolean(),
status: z.enum(["queued", "running", "completed", "error"]).optional(),
message: z.string().min(1),
at: z.number().int(),
});

View file

@ -2,5 +2,6 @@ export * from "./app-shell.js";
export * from "./contracts.js";
export * from "./config.js";
export * from "./logging.js";
export * from "./realtime-events.js";
export * from "./workbench.js";
export * from "./workspace.js";

View file

@ -4,10 +4,14 @@ export interface FoundryLoggerOptions {
service: string;
bindings?: Record<string, unknown>;
level?: string;
format?: "json" | "logfmt";
}
type ProcessLike = {
env?: Record<string, string | undefined>;
stdout?: {
write?: (chunk: string) => unknown;
};
};
function resolveEnvVar(name: string): string | undefined {
@ -28,6 +32,116 @@ function isBrowserRuntime(): boolean {
return typeof window !== "undefined" && typeof document !== "undefined";
}
/**
 * Coerce an arbitrary value into a logfmt-safe primitive.
 * null/undefined -> null; primitives pass through; bigint -> decimal string;
 * Errors -> JSON of {name, message, stack}; everything else -> JSON, with a
 * "[unserializable]" sentinel when JSON.stringify throws (e.g. cycles).
 */
function serializeLogValue(value: unknown): string | number | boolean | null {
  if (value == null) {
    return null;
  }
  switch (typeof value) {
    case "string":
    case "number":
    case "boolean":
      return value;
    case "bigint":
      return value.toString();
  }
  if (value instanceof Error) {
    return JSON.stringify({
      name: value.name,
      message: value.message,
      stack: value.stack,
    });
  }
  try {
    return JSON.stringify(value);
  } catch {
    // JSON.stringify throws on circular structures and some BigInt payloads.
    return "[unserializable]";
  }
}
/**
 * Render one serialized primitive as logfmt text.
 * Numbers/booleans print bare; strings print bare only when non-empty and free
 * of whitespace, '=', '"' and '\'; otherwise they are double-quoted with
 * backslash, quote, and newline escapes. null renders as bare `null`.
 */
function formatLogfmtValue(value: string | number | boolean | null): string {
  if (typeof value === "number" || typeof value === "boolean") {
    return String(value);
  }
  const text = value === null ? "null" : value;
  const mustQuote = text.length === 0 || /[\s="\\]/.test(text);
  if (!mustQuote) {
    return text;
  }
  const escaped = text.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n");
  return `"${escaped}"`;
}
/**
 * Serialize a flat record into one logfmt line: `key=value` pairs joined by
 * single spaces, skipping keys whose value is undefined.
 */
function formatLogfmtLine(record: Record<string, unknown>): string {
  const parts: string[] = [];
  for (const [key, value] of Object.entries(record)) {
    if (value === undefined) {
      continue;
    }
    parts.push(`${key}=${formatLogfmtValue(serializeLogValue(value))}`);
  }
  return parts.join(" ");
}
/**
 * Convert a single log-call argument into message text: strings pass through
 * unchanged, anything else is serialized and stringified.
 */
function stringifyMessagePart(value: unknown): string {
  if (typeof value === "string") {
    return value;
  }
  const coerced = serializeLogValue(value);
  return typeof coerced === "string" ? coerced : String(coerced);
}
/**
 * Assemble a flat log record from the level label, the logger's bound fields,
 * and pino-style call arguments (optional leading object/Error, then message
 * parts). Reserved keys (time/level/msg) can never be overridden by bindings
 * or merge objects.
 */
function buildLogRecord(level: string, bindings: Record<string, unknown>, args: Parameters<Logger["info"]>): Record<string, unknown> {
  const record: Record<string, unknown> = {
    time: new Date().toISOString(),
    level,
  };
  // Copy bound fields, excluding reserved keys and undefined values.
  for (const [key, value] of Object.entries(bindings)) {
    if (key !== "time" && key !== "level" && key !== "msg" && value !== undefined) {
      record[key] = value;
    }
  }
  if (args.length === 0) {
    return record;
  }
  const [first, ...rest] = args;
  if (first && typeof first === "object") {
    if (first instanceof Error) {
      // Errors are flattened under the conventional `err` key.
      record.err = {
        name: first.name,
        message: first.message,
        stack: first.stack,
      };
    } else {
      // A leading plain object contributes structured fields (reserved keys excluded).
      for (const [key, value] of Object.entries(first)) {
        if (key !== "time" && key !== "level" && key !== "msg" && value !== undefined) {
          record[key] = value;
        }
      }
    }
    // Remaining arguments form the human-readable message.
    if (rest.length > 0) {
      record.msg = rest.map(stringifyMessagePart).join(" ");
    }
    return record;
  }
  // No leading object: every argument is part of the message.
  record.msg = [first, ...rest].map(stringifyMessagePart).join(" ");
  return record;
}
/**
 * Emit one logfmt line followed by a newline. Prefers process.stdout.write
 * when a process global exists (Node-like runtimes), otherwise falls back to
 * console.log (browsers / stripped environments).
 */
function writeLogfmtLine(line: string): void {
  const proc = (globalThis as { process?: ProcessLike }).process;
  const stdout = proc?.stdout;
  if (stdout?.write) {
    // Called as a method so the stream keeps its `this` binding.
    stdout.write(`${line}\n`);
    return;
  }
  console.log(line);
}
export function createFoundryLogger(options: FoundryLoggerOptions): Logger {
const browser = isBrowserRuntime();
const loggerOptions: LoggerOptions = {
@ -44,6 +158,15 @@ export function createFoundryLogger(options: FoundryLoggerOptions): Logger {
};
} else {
loggerOptions.timestamp = pino.stdTimeFunctions.isoTime;
if (options.format === "logfmt") {
loggerOptions.hooks = {
logMethod(this: Logger, args, _method, level) {
const levelLabel = this.levels.labels[level] ?? "info";
const record = buildLogRecord(levelLabel, this.bindings(), args);
writeLogfmtLine(formatLogfmtLine(record));
},
};
}
}
return pino(loggerOptions);

View file

@ -0,0 +1,36 @@
import type { FoundryAppSnapshot } from "./app-shell.js";
import type { WorkbenchRepoSummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js";
/** Point-in-time view of one process tracked by a sandbox instance. */
export interface SandboxProcessSnapshot {
  id: string;
  command: string;
  args: string[];
  createdAtMs: number;
  cwd?: string | null;
  // Present once the process has exited; see `status`.
  exitCode?: number | null;
  exitedAtMs?: number | null;
  interactive: boolean;
  pid?: number | null;
  status: "running" | "exited";
  tty: boolean;
}
/** Workspace-level events broadcast by the workspace actor. */
export type WorkspaceEvent =
  | { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary }
  | { type: "taskRemoved"; taskId: string }
  | { type: "repoAdded"; repo: WorkbenchRepoSummary }
  | { type: "repoUpdated"; repo: WorkbenchRepoSummary }
  | { type: "repoRemoved"; repoId: string };
/** Task-level events broadcast by the task actor. */
export type TaskEvent = { type: "taskDetailUpdated"; detail: WorkbenchTaskDetail };
/** Session-level events broadcast by the task actor and filtered by sessionId on the client. */
export type SessionEvent = { type: "sessionUpdated"; session: WorkbenchSessionDetail };
/** App-level events broadcast by the app workspace actor. */
export type AppEvent = { type: "appUpdated"; snapshot: FoundryAppSnapshot };
/** Sandbox process events broadcast by the sandbox instance actor. */
export type SandboxProcessesEvent = { type: "processesUpdated"; processes: SandboxProcessSnapshot[] };

View file

@ -1,3 +1,5 @@
import type { AgentType, ProviderId, TaskStatus } from "./contracts.js";
export type WorkbenchTaskStatus = "running" | "idle" | "new" | "archived";
export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor";
export type WorkbenchModelId = "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3";
@ -18,7 +20,8 @@ export interface WorkbenchComposerDraft {
updatedAtMs: number | null;
}
export interface WorkbenchAgentTab {
/** Session metadata without transcript content. */
export interface WorkbenchSessionSummary {
id: string;
sessionId: string | null;
sessionName: string;
@ -28,6 +31,21 @@ export interface WorkbenchAgentTab {
thinkingSinceMs: number | null;
unread: boolean;
created: boolean;
}
/** Full session content — only fetched when viewing a specific session tab. */
export interface WorkbenchSessionDetail {
  /** Stable UI tab id used for the session topic key and routing. */
  sessionId: string;
  tabId: string;
  // Nullable: underlying sandbox session id (distinct from the UI sessionId above).
  sandboxSessionId: string | null;
  sessionName: string;
  agent: WorkbenchAgentKind;
  model: WorkbenchModelId;
  status: "running" | "idle" | "error";
  thinkingSinceMs: number | null;
  unread: boolean;
  created: boolean;
  draft: WorkbenchComposerDraft;
  // Transcript events for this session — the heavyweight payload that keeps
  // this type separate from WorkbenchSessionSummary.
  transcript: WorkbenchTranscriptEvent[];
}
@ -76,6 +94,73 @@ export interface WorkbenchPullRequestSummary {
status: "draft" | "ready";
}
/** Identifies one provisioned sandbox for a task. */
export interface WorkbenchSandboxSummary {
  providerId: ProviderId;
  sandboxId: string;
  cwd: string | null;
}
/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */
export interface WorkbenchTaskSummary {
  id: string;
  repoId: string;
  title: string;
  status: WorkbenchTaskStatus;
  repoName: string;
  updatedAtMs: number;
  branch: string | null;
  pullRequest: WorkbenchPullRequestSummary | null;
  /** Summary of sessions — no transcript content. */
  sessionsSummary: WorkbenchSessionSummary[];
}
/** Full task detail — only fetched when viewing a specific task. */
export interface WorkbenchTaskDetail extends WorkbenchTaskSummary {
  /** Original task prompt/instructions shown in the detail view. */
  task: string;
  /** Agent choice used when creating new sandbox sessions for this task. */
  agentType: AgentType | null;
  /** Underlying task runtime status preserved for detail views and error handling. */
  runtimeStatus: TaskStatus;
  statusMessage: string | null;
  activeSessionId: string | null;
  diffStat: string | null;
  prUrl: string | null;
  reviewStatus: string | null;
  fileChanges: WorkbenchFileChange[];
  // Diff text per entry — presumably keyed by file path; confirm against producer.
  diffs: Record<string, string>;
  fileTree: WorkbenchFileTreeNode[];
  minutesUsed: number;
  /** Sandbox info for this task. */
  sandboxes: WorkbenchSandboxSummary[];
  activeSandboxId: string | null;
}
/** Repo-level summary for workspace sidebar. */
export interface WorkbenchRepoSummary {
  id: string;
  label: string;
  /** Aggregated branch/task overview state (replaces getRepoOverview polling). */
  taskCount: number;
  latestActivityMs: number;
}
/** Workspace-level snapshot — initial fetch for the workspace topic. */
export interface WorkspaceSummarySnapshot {
  workspaceId: string;
  repos: WorkbenchRepoSummary[];
  taskSummaries: WorkbenchTaskSummary[];
}
/**
 * Deprecated compatibility aliases for older mock/view-model code.
 * New code should use the summary/detail/topic-specific types above.
 */
export interface WorkbenchAgentTab extends WorkbenchSessionSummary {
  draft: WorkbenchComposerDraft;
  transcript: WorkbenchTranscriptEvent[];
}
export interface WorkbenchTask {
id: string;
repoId: string;

View file

@ -0,0 +1,29 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { createFoundryLogger } from "../src/logging.js";
describe("createFoundryLogger", () => {
  afterEach(() => {
    // Undo the process.stdout.write spy so later tests see real stdout.
    vi.restoreAllMocks();
  });
  it("emits logfmt output when requested", () => {
    const writes: string[] = [];
    // Capture everything written to stdout (string or byte chunks) instead of printing.
    const write = vi.fn((chunk: string | Uint8Array) => {
      writes.push(typeof chunk === "string" ? chunk : Buffer.from(chunk).toString("utf8"));
      return true;
    });
    vi.spyOn(process.stdout, "write").mockImplementation(write as typeof process.stdout.write);
    const logger = createFoundryLogger({
      service: "foundry-backend",
      format: "logfmt",
    }).child({
      requestId: "req-123",
    });
    logger.info({ count: 2, nested: { ok: true } }, "backend started");
    // One call -> one logfmt line; field order is time, level, bindings
    // (service, requestId), structured fields from the merge object, then msg.
    expect(write).toHaveBeenCalledTimes(1);
    expect(writes[0]).toMatch(/^time=\S+ level=info service=foundry-backend requestId=req-123 count=2 nested="\{\\"ok\\":true\}" msg="backend started"\n$/);
  });
});

View file

@ -0,0 +1,919 @@
# Realtime Interest Manager — Implementation Spec
## Overview
Replace the current polling + empty-notification + full-refetch architecture with a push-based realtime system. The client subscribes to topics, receives the initial state, and then receives full replacement payloads for changed entities over WebSocket. No polling. No re-fetching.
This spec covers three layers: backend (materialized state + broadcast), client library (interest manager), and frontend (hook consumption). Comment architecture-related code throughout so new contributors can understand the data flow from comments alone.
---
## 1. Data Model: What Changes
### 1.1 Split `WorkbenchTask` into summary and detail types
**File:** `packages/shared/src/workbench.ts`
Currently `WorkbenchTask` is a single flat type carrying everything (sidebar fields + transcripts + diffs + file tree). Split it:
```typescript
/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */
export interface WorkbenchTaskSummary {
id: string;
repoId: string;
title: string;
status: WorkbenchTaskStatus;
repoName: string;
updatedAtMs: number;
branch: string | null;
pullRequest: WorkbenchPullRequestSummary | null;
/** Summary of sessions — no transcript content. */
sessionsSummary: WorkbenchSessionSummary[];
}
/** Session metadata without transcript content. */
export interface WorkbenchSessionSummary {
id: string;
sessionId: string | null;
sessionName: string;
agent: WorkbenchAgentKind;
model: WorkbenchModelId;
status: "running" | "idle" | "error";
thinkingSinceMs: number | null;
unread: boolean;
created: boolean;
}
/** Repo-level summary for workspace sidebar. */
export interface WorkbenchRepoSummary {
id: string;
label: string;
/** Aggregated branch/task overview state (replaces getRepoOverview polling). */
taskCount: number;
latestActivityMs: number;
}
/** Full task detail — only fetched when viewing a specific task. */
export interface WorkbenchTaskDetail {
id: string;
repoId: string;
title: string;
status: WorkbenchTaskStatus;
repoName: string;
updatedAtMs: number;
branch: string | null;
pullRequest: WorkbenchPullRequestSummary | null;
sessionsSummary: WorkbenchSessionSummary[];
fileChanges: WorkbenchFileChange[];
diffs: Record<string, string>;
fileTree: WorkbenchFileTreeNode[];
minutesUsed: number;
/** Sandbox info for this task. */
sandboxes: WorkbenchSandboxSummary[];
activeSandboxId: string | null;
}
export interface WorkbenchSandboxSummary {
providerId: string;
sandboxId: string;
cwd: string | null;
}
/** Full session content — only fetched when viewing a specific session tab. */
export interface WorkbenchSessionDetail {
sessionId: string;
tabId: string;
sessionName: string;
agent: WorkbenchAgentKind;
model: WorkbenchModelId;
status: "running" | "idle" | "error";
thinkingSinceMs: number | null;
unread: boolean;
draft: WorkbenchComposerDraft;
transcript: WorkbenchTranscriptEvent[];
}
/** Workspace-level snapshot — initial fetch for the workspace topic. */
export interface WorkspaceSummarySnapshot {
workspaceId: string;
repos: WorkbenchRepoSummary[];
taskSummaries: WorkbenchTaskSummary[];
}
```
Remove the old `TaskWorkbenchSnapshot` type and `WorkbenchTask` type once migration is complete.
### 1.2 Event payload types
**File:** `packages/shared/src/realtime-events.ts` (new file)
Each event carries the full new state of the changed entity — not a patch, not an empty notification.
```typescript
/** Workspace-level events broadcast by the workspace actor. */
export type WorkspaceEvent =
| { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary }
| { type: "taskRemoved"; taskId: string }
| { type: "repoAdded"; repo: WorkbenchRepoSummary }
| { type: "repoUpdated"; repo: WorkbenchRepoSummary }
| { type: "repoRemoved"; repoId: string };
/** Task-level events broadcast by the task actor. */
export type TaskEvent =
| { type: "taskDetailUpdated"; detail: WorkbenchTaskDetail };
/** Session-level events broadcast by the task actor, filtered by sessionId on the client. */
export type SessionEvent =
| { type: "sessionUpdated"; session: WorkbenchSessionDetail };
/** App-level events broadcast by the app workspace actor. */
export type AppEvent =
| { type: "appUpdated"; snapshot: FoundryAppSnapshot };
/** Sandbox process events broadcast by the sandbox instance actor. */
export type SandboxProcessesEvent =
  | { type: "processesUpdated"; processes: SandboxProcessSnapshot[] };
```
---
## 2. Backend: Materialized State + Broadcasts
### 2.1 Workspace actor — materialized sidebar state
**Files:**
- `packages/backend/src/actors/workspace/db/schema.ts` — add tables
- `packages/backend/src/actors/workspace/actions.ts` — replace `buildWorkbenchSnapshot`, add delta handlers
Add to workspace actor SQLite schema:
```typescript
export const taskSummaries = sqliteTable("task_summaries", {
taskId: text("task_id").primaryKey(),
repoId: text("repo_id").notNull(),
title: text("title").notNull(),
status: text("status").notNull(), // WorkbenchTaskStatus
repoName: text("repo_name").notNull(),
updatedAtMs: integer("updated_at_ms").notNull(),
branch: text("branch"),
pullRequestJson: text("pull_request_json"), // JSON-serialized WorkbenchPullRequestSummary | null
sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"), // JSON array of WorkbenchSessionSummary
});
```
New workspace actions:
```typescript
/**
* Called by task actors when their summary-level state changes.
* Upserts the task summary row and broadcasts the update to all connected clients.
*
* This is the core of the materialized state pattern: task actors push their
* summary changes here instead of requiring clients to fan out to every task.
*/
async applyTaskSummaryUpdate(c, input: { taskSummary: WorkbenchTaskSummary }) {
// Upsert into taskSummaries table
await c.db.insert(taskSummaries).values(toRow(input.taskSummary))
.onConflictDoUpdate({ target: taskSummaries.taskId, set: toRow(input.taskSummary) }).run();
// Broadcast to connected clients
c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary });
}
async removeTaskSummary(c, input: { taskId: string }) {
await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run();
c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId });
}
/**
* Initial fetch for the workspace topic.
* Reads entirely from local SQLite — no fan-out to child actors.
*/
async getWorkspaceSummary(c, input: { workspaceId: string }): Promise<WorkspaceSummarySnapshot> {
const repoRows = await c.db.select().from(repos).orderBy(desc(repos.updatedAt)).all();
const taskRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all();
return {
workspaceId: c.state.workspaceId,
repos: repoRows.map(toRepoSummary),
taskSummaries: taskRows.map(toTaskSummary),
};
}
```
Replace `buildWorkbenchSnapshot` (the fan-out) — keep it only as a `reconcileWorkbenchState` background action for recovery/rebuild.
### 2.2 Task actor — push summaries to workspace + broadcast detail
**Files:**
- `packages/backend/src/actors/task/workbench.ts` — replace `notifyWorkbenchUpdated` calls
Every place that currently calls `notifyWorkbenchUpdated(c)` (there are ~20 call sites) must instead:
1. Build the current `WorkbenchTaskSummary` from local state.
2. Push it to the workspace actor: `workspace.applyTaskSummaryUpdate({ taskSummary })`.
3. Build the current `WorkbenchTaskDetail` from local state.
4. Broadcast to directly-connected clients: `c.broadcast("taskUpdated", { type: "taskDetailUpdated", detail })`.
5. If session state changed, also broadcast: `c.broadcast("sessionUpdated", { type: "sessionUpdated", session: buildSessionDetail(c, sessionId) })`.
Add helper functions:
```typescript
/**
* Builds a WorkbenchTaskSummary from local task actor state.
* This is what gets pushed to the workspace actor for sidebar materialization.
*/
function buildTaskSummary(c: any): WorkbenchTaskSummary { ... }
/**
* Builds a WorkbenchTaskDetail from local task actor state.
* This is broadcast to clients directly connected to this task.
*/
function buildTaskDetail(c: any): WorkbenchTaskDetail { ... }
/**
* Builds a WorkbenchSessionDetail for a specific session.
* Broadcast to clients subscribed to this session's updates.
*/
function buildSessionDetail(c: any, sessionId: string): WorkbenchSessionDetail { ... }
/**
* Replaces the old notifyWorkbenchUpdated pattern.
* Pushes summary to workspace actor + broadcasts detail to direct subscribers.
*/
async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }) {
// Push summary to parent workspace actor
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyTaskSummaryUpdate({ taskSummary: buildTaskSummary(c) });
// Broadcast detail to clients connected to this task
c.broadcast("taskUpdated", { type: "taskDetailUpdated", detail: buildTaskDetail(c) });
// If a specific session changed, broadcast session detail
if (options?.sessionId) {
c.broadcast("sessionUpdated", {
type: "sessionUpdated",
session: buildSessionDetail(c, options.sessionId),
});
}
}
```
### 2.3 Task actor — new actions for initial fetch
```typescript
/**
* Initial fetch for the task topic.
* Reads from local SQLite only — no cross-actor calls.
*/
async getTaskDetail(c): Promise<WorkbenchTaskDetail> { ... }
/**
* Initial fetch for the session topic.
* Returns full session content including transcript.
*/
async getSessionDetail(c, input: { sessionId: string }): Promise<WorkbenchSessionDetail> { ... }
```
### 2.4 App workspace actor
**File:** `packages/backend/src/actors/workspace/app-shell.ts`
Change `c.broadcast("appUpdated", { at: Date.now(), sessionId })` to:
```typescript
c.broadcast("appUpdated", { type: "appUpdated", snapshot: await buildAppSnapshot(c, sessionId) });
```
### 2.5 Sandbox instance actor
**File:** `packages/backend/src/actors/sandbox-instance/index.ts`
Change `broadcastProcessesUpdated` to include the process list:
```typescript
function broadcastProcessesUpdated(c: any): void {
const processes = /* read from local DB */;
c.broadcast("processesUpdated", { type: "processesUpdated", processes });
}
```
---
## 3. Client Library: Interest Manager
### 3.1 Topic definitions
**File:** `packages/client/src/interest/topics.ts` (new)
```typescript
/**
* Topic definitions for the interest manager.
*
* Each topic defines how to connect to an actor, fetch initial state,
* which event to listen for, and how to apply incoming events to cached state.
*
* The interest manager uses these definitions to manage WebSocket connections,
* cached state, and subscriptions for all realtime data flows.
*/
export interface TopicDefinition<TData, TParams, TEvent> {
/** Derive a unique cache key from params. */
key: (params: TParams) => string;
/** Which broadcast event name to listen for on the actor connection. */
event: string;
/** Open a WebSocket connection to the actor. */
connect: (backend: BackendClient, params: TParams) => Promise<ActorConn>;
/** Fetch the initial snapshot from the actor. */
fetchInitial: (backend: BackendClient, params: TParams) => Promise<TData>;
/** Apply an incoming event to the current cached state. Returns the new state. */
applyEvent: (current: TData, event: TEvent) => TData;
}
export interface AppTopicParams {}
export interface WorkspaceTopicParams { workspaceId: string }
export interface TaskTopicParams { workspaceId: string; repoId: string; taskId: string }
export interface SessionTopicParams { workspaceId: string; repoId: string; taskId: string; sessionId: string }
export interface SandboxProcessesTopicParams { workspaceId: string; providerId: string; sandboxId: string }
export const topicDefinitions = {
app: {
key: () => "app",
event: "appUpdated",
connect: (b, _p) => b.connectWorkspace("app"),
fetchInitial: (b, _p) => b.getAppSnapshot(),
applyEvent: (_current, event: AppEvent) => event.snapshot,
} satisfies TopicDefinition<FoundryAppSnapshot, AppTopicParams, AppEvent>,
workspace: {
key: (p) => `workspace:${p.workspaceId}`,
event: "workspaceUpdated",
connect: (b, p) => b.connectWorkspace(p.workspaceId),
fetchInitial: (b, p) => b.getWorkspaceSummary(p.workspaceId),
applyEvent: (current, event: WorkspaceEvent) => {
switch (event.type) {
case "taskSummaryUpdated":
return {
...current,
taskSummaries: upsertById(current.taskSummaries, event.taskSummary),
};
case "taskRemoved":
return {
...current,
taskSummaries: current.taskSummaries.filter(t => t.id !== event.taskId),
};
case "repoAdded":
case "repoUpdated":
return {
...current,
repos: upsertById(current.repos, event.repo),
};
case "repoRemoved":
return {
...current,
repos: current.repos.filter(r => r.id !== event.repoId),
};
}
},
} satisfies TopicDefinition<WorkspaceSummarySnapshot, WorkspaceTopicParams, WorkspaceEvent>,
task: {
key: (p) => `task:${p.workspaceId}:${p.taskId}`,
event: "taskUpdated",
connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId),
fetchInitial: (b, p) => b.getTaskDetail(p.workspaceId, p.repoId, p.taskId),
applyEvent: (_current, event: TaskEvent) => event.detail,
} satisfies TopicDefinition<WorkbenchTaskDetail, TaskTopicParams, TaskEvent>,
session: {
key: (p) => `session:${p.workspaceId}:${p.taskId}:${p.sessionId}`,
event: "sessionUpdated",
// Reuses the task actor connection — same actor, different event.
connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId),
fetchInitial: (b, p) => b.getSessionDetail(p.workspaceId, p.repoId, p.taskId, p.sessionId),
applyEvent: (current, event: SessionEvent) => {
// Filter: only apply if this event is for our session
if (event.session.sessionId !== current.sessionId) return current;
return event.session;
},
} satisfies TopicDefinition<WorkbenchSessionDetail, SessionTopicParams, SessionEvent>,
sandboxProcesses: {
key: (p) => `sandbox:${p.workspaceId}:${p.sandboxId}`,
event: "processesUpdated",
connect: (b, p) => b.connectSandbox(p.workspaceId, p.providerId, p.sandboxId),
fetchInitial: (b, p) => b.listSandboxProcesses(p.workspaceId, p.providerId, p.sandboxId),
applyEvent: (_current, event: SandboxProcessesEvent) => event.processes,
} satisfies TopicDefinition<SandboxProcessRecord[], SandboxProcessesTopicParams, SandboxProcessesEvent>,
} as const;
/** Derive TypeScript types from the topic registry. */
export type TopicKey = keyof typeof topicDefinitions;
export type TopicParams<K extends TopicKey> = Parameters<(typeof topicDefinitions)[K]["fetchInitial"]>[1];
export type TopicData<K extends TopicKey> = Awaited<ReturnType<(typeof topicDefinitions)[K]["fetchInitial"]>>;
```
### 3.2 Interest manager interface
**File:** `packages/client/src/interest/manager.ts` (new)
```typescript
/**
* The InterestManager owns all realtime actor connections and cached state.
*
* Architecture:
* - Each topic (app, workspace, task, session, sandboxProcesses) maps to an actor + event.
* - On first subscription, the manager opens a WebSocket connection, fetches initial state,
* and listens for events. Events carry full replacement payloads for the changed entity.
* - Multiple subscribers to the same topic share one connection and one cached state.
* - When the last subscriber leaves, a 30-second grace period keeps the connection alive
* to avoid thrashing during screen navigation or React double-renders.
* - The interface is identical for mock and remote implementations.
*/
export interface InterestManager {
/**
* Subscribe to a topic. Returns an unsubscribe function.
* On first subscriber: opens connection, fetches initial state, starts listening.
* On last unsubscribe: starts 30s grace period before teardown.
*/
subscribe<K extends TopicKey>(
topicKey: K,
params: TopicParams<K>,
listener: () => void,
): () => void;
/** Get the current cached state for a topic. Returns undefined if not yet loaded. */
getSnapshot<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicData<K> | undefined;
/** Get the connection/loading status for a topic. */
getStatus<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicStatus;
/** Get the error (if any) for a topic. */
getError<K extends TopicKey>(topicKey: K, params: TopicParams<K>): Error | null;
/** Dispose all connections and cached state. */
dispose(): void;
}
export type TopicStatus = "loading" | "connected" | "error";
export interface TopicState<K extends TopicKey> {
data: TopicData<K> | undefined;
status: TopicStatus;
error: Error | null;
}
```
### 3.3 Remote implementation
**File:** `packages/client/src/interest/remote-manager.ts` (new)
```typescript
const GRACE_PERIOD_MS = 30_000;
/**
* Remote implementation of InterestManager.
* Manages WebSocket connections to RivetKit actors via BackendClient.
*/
export class RemoteInterestManager implements InterestManager {
private entries = new Map<string, TopicEntry<any, any, any>>();
constructor(private backend: BackendClient) {}
subscribe<K extends TopicKey>(topicKey: K, params: TopicParams<K>, listener: () => void): () => void {
const def = topicDefinitions[topicKey];
const cacheKey = def.key(params);
let entry = this.entries.get(cacheKey);
if (!entry) {
entry = new TopicEntry(def, this.backend, params);
this.entries.set(cacheKey, entry);
}
entry.cancelTeardown();
entry.addListener(listener);
entry.ensureStarted();
return () => {
entry!.removeListener(listener);
if (entry!.listenerCount === 0) {
entry!.scheduleTeardown(GRACE_PERIOD_MS, () => {
this.entries.delete(cacheKey);
});
}
};
}
getSnapshot<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicData<K> | undefined {
const cacheKey = topicDefinitions[topicKey].key(params);
return this.entries.get(cacheKey)?.data;
}
getStatus<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicStatus {
const cacheKey = topicDefinitions[topicKey].key(params);
return this.entries.get(cacheKey)?.status ?? "loading";
}
getError<K extends TopicKey>(topicKey: K, params: TopicParams<K>): Error | null {
const cacheKey = topicDefinitions[topicKey].key(params);
return this.entries.get(cacheKey)?.error ?? null;
}
dispose(): void {
for (const entry of this.entries.values()) {
entry.dispose();
}
this.entries.clear();
}
}
/**
* Internal entry managing one topic's connection, state, and listeners.
*
* Lifecycle:
* 1. ensureStarted() — opens WebSocket, fetches initial state, subscribes to events.
* 2. Events arrive — applyEvent() updates cached state, notifies listeners.
* 3. Last listener leaves — scheduleTeardown() starts 30s timer.
* 4. Timer fires or dispose() called — closes WebSocket, drops state.
* 5. If a new subscriber arrives during grace period — cancelTeardown(), reuse connection.
*/
class TopicEntry<TData, TParams, TEvent> {
data: TData | undefined = undefined;
status: TopicStatus = "loading";
error: Error | null = null;
listenerCount = 0;
private listeners = new Set<() => void>();
private conn: ActorConn | null = null;
private unsubscribeEvent: (() => void) | null = null;
private teardownTimer: ReturnType<typeof setTimeout> | null = null;
private started = false;
private startPromise: Promise<void> | null = null;
constructor(
private def: TopicDefinition<TData, TParams, TEvent>,
private backend: BackendClient,
private params: TParams,
) {}
addListener(listener: () => void) {
this.listeners.add(listener);
this.listenerCount = this.listeners.size;
}
removeListener(listener: () => void) {
this.listeners.delete(listener);
this.listenerCount = this.listeners.size;
}
ensureStarted() {
if (this.started || this.startPromise) return;
this.startPromise = this.start().finally(() => { this.startPromise = null; });
}
private async start() {
try {
// Open connection
this.conn = await this.def.connect(this.backend, this.params);
// Subscribe to events
this.unsubscribeEvent = this.conn.on(this.def.event, (event: TEvent) => {
if (this.data !== undefined) {
this.data = this.def.applyEvent(this.data, event);
this.notify();
}
});
// Fetch initial state
this.data = await this.def.fetchInitial(this.backend, this.params);
this.status = "connected";
this.started = true;
this.notify();
} catch (err) {
this.status = "error";
this.error = err instanceof Error ? err : new Error(String(err));
this.notify();
}
}
scheduleTeardown(ms: number, onTeardown: () => void) {
this.teardownTimer = setTimeout(() => {
this.dispose();
onTeardown();
}, ms);
}
cancelTeardown() {
if (this.teardownTimer) {
clearTimeout(this.teardownTimer);
this.teardownTimer = null;
}
}
dispose() {
this.cancelTeardown();
this.unsubscribeEvent?.();
if (this.conn) {
void (this.conn as any).dispose?.();
}
this.conn = null;
this.data = undefined;
this.status = "loading";
this.started = false;
}
private notify() {
for (const listener of [...this.listeners]) {
listener();
}
}
}
```
### 3.4 Mock implementation
**File:** `packages/client/src/interest/mock-manager.ts` (new)
Same `InterestManager` interface. Uses in-memory state. Topic definitions provide mock data. Mutations call `applyEvent` directly on the entry to simulate broadcasts. No WebSocket connections.
### 3.5 React hook
**File:** `packages/client/src/interest/use-interest.ts` (new)
```typescript
import { useSyncExternalStore, useMemo } from "react";
/**
* Subscribe to a realtime topic. Returns the current state, loading status, and error.
*
* - Pass `null` as params to disable the subscription (conditional interest).
* - Data is cached for 30 seconds after the last subscriber leaves.
* - Multiple components subscribing to the same topic share one connection.
*
* @example
* // Subscribe to workspace sidebar data
* const workspace = useInterest(interestManager, "workspace", { workspaceId });
*
* // Subscribe to task detail (only when viewing a task)
* const task = useInterest(interestManager, "task", selectedTaskId ? { workspaceId, repoId, taskId } : null);
*
* // Subscribe to active session content
* const session = useInterest(interestManager, "session", activeSessionId ? { workspaceId, repoId, taskId, sessionId: activeSessionId } : null);
*/
export function useInterest<K extends TopicKey>(
manager: InterestManager,
topicKey: K,
params: TopicParams<K> | null,
): TopicState<K> {
// Stabilize params reference to avoid unnecessary resubscriptions
const paramsKey = params ? topicDefinitions[topicKey].key(params) : null;
const subscribe = useMemo(() => {
return (listener: () => void) => {
if (!params) return () => {};
return manager.subscribe(topicKey, params, listener);
};
}, [manager, topicKey, paramsKey]);
const getSnapshot = useMemo(() => {
return (): TopicState<K> => {
if (!params) return { data: undefined, status: "loading", error: null };
return {
data: manager.getSnapshot(topicKey, params),
status: manager.getStatus(topicKey, params),
error: manager.getError(topicKey, params),
};
};
}, [manager, topicKey, paramsKey]);
return useSyncExternalStore(subscribe, getSnapshot, getSnapshot);
}
```
### 3.6 BackendClient additions
**File:** `packages/client/src/backend-client.ts`
Add to the `BackendClient` interface:
```typescript
// New connection methods (return WebSocket-based ActorConn)
connectWorkspace(workspaceId: string): Promise<ActorConn>;
connectTask(workspaceId: string, repoId: string, taskId: string): Promise<ActorConn>;
connectSandbox(workspaceId: string, providerId: string, sandboxId: string): Promise<ActorConn>;
// New fetch methods (read from materialized state)
getWorkspaceSummary(workspaceId: string): Promise<WorkspaceSummarySnapshot>;
getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise<WorkbenchTaskDetail>;
getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail>;
```
Remove:
- `subscribeWorkbench`, `subscribeApp`, `subscribeSandboxProcesses` (replaced by interest manager)
- `getWorkbench` (replaced by `getWorkspaceSummary` + `getTaskDetail`)
---
## 4. Frontend: Hook Consumption
### 4.1 Provider setup
**File:** `packages/frontend/src/lib/interest.ts` (new)
```typescript
import { RemoteInterestManager } from "@sandbox-agent/foundry-client";
import { backendClient } from "./backend";
export const interestManager = new RemoteInterestManager(backendClient);
```
Or for mock mode:
```typescript
import { MockInterestManager } from "@sandbox-agent/foundry-client";
export const interestManager = new MockInterestManager();
```
### 4.2 Replace MockLayout workbench subscription
**File:** `packages/frontend/src/components/mock-layout.tsx`
Before:
```typescript
const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(workspaceId), [workspaceId]);
const viewModel = useSyncExternalStore(
taskWorkbenchClient.subscribe.bind(taskWorkbenchClient),
taskWorkbenchClient.getSnapshot.bind(taskWorkbenchClient),
);
const tasks = viewModel.tasks ?? [];
```
After:
```typescript
const workspace = useInterest(interestManager, "workspace", { workspaceId });
const taskSummaries = workspace.data?.taskSummaries ?? [];
const repos = workspace.data?.repos ?? [];
```
### 4.3 Replace MockLayout task detail
When a task is selected, subscribe to its detail:
```typescript
const taskDetail = useInterest(interestManager, "task",
selectedTaskId ? { workspaceId, repoId: activeRepoId, taskId: selectedTaskId } : null
);
```
### 4.4 Replace session subscription
When a session tab is active:
```typescript
const sessionDetail = useInterest(interestManager, "session",
activeSessionId ? { workspaceId, repoId, taskId, sessionId: activeSessionId } : null
);
```
### 4.5 Replace workspace-dashboard.tsx polling
Remove ALL `useQuery` with `refetchInterval` in this file:
- `tasksQuery` (2.5s polling) → `useInterest("workspace", ...)`
- `taskDetailQuery` (2.5s polling) → `useInterest("task", ...)`
- `reposQuery` (10s polling) → `useInterest("workspace", ...)`
- `repoOverviewQuery` (5s polling) → `useInterest("workspace", ...)`
- `sessionsQuery` (3s polling) → `useInterest("task", ...)` (sessionsSummary field)
- `eventsQuery` (2.5s polling) → `useInterest("session", ...)`
### 4.6 Replace terminal-pane.tsx polling
- `taskQuery` (2s polling) → `useInterest("task", ...)`
- `processesQuery` (3s polling) → `useInterest("sandboxProcesses", ...)`
- Remove `subscribeSandboxProcesses` useEffect
### 4.7 Replace app client subscription
**File:** `packages/frontend/src/lib/mock-app.ts`
Before:
```typescript
export function useMockAppSnapshot(): FoundryAppSnapshot {
return useSyncExternalStore(appClient.subscribe.bind(appClient), appClient.getSnapshot.bind(appClient));
}
```
After:
```typescript
export function useAppSnapshot(): FoundryAppSnapshot {
const app = useInterest(interestManager, "app", {});
return app.data ?? DEFAULT_APP_SNAPSHOT;
}
```
### 4.8 Mutations
Mutations (`createTask`, `renameTask`, `sendMessage`, etc.) no longer need manual `refetch()` or `refresh()` calls after completion. The backend mutation triggers a broadcast, which the interest manager receives and applies automatically.
Before:
```typescript
const createSession = useMutation({
mutationFn: async () => startSessionFromTask(),
onSuccess: async (session) => {
setActiveSessionId(session.id);
await Promise.all([sessionsQuery.refetch(), eventsQuery.refetch()]);
},
});
```
After:
```typescript
const createSession = useMutation({
mutationFn: async () => startSessionFromTask(),
onSuccess: (session) => {
setActiveSessionId(session.id);
// No refetch needed — server broadcast updates the task and session topics automatically
},
});
```
---
## 5. Files to Delete / Remove
| File/Code | Reason |
|---|---|
| `packages/client/src/remote/workbench-client.ts` | Replaced by interest manager `workspace` + `task` topics |
| `packages/client/src/remote/app-client.ts` | Replaced by interest manager `app` topic |
| `packages/client/src/workbench-client.ts` | Factory for above — no longer needed |
| `packages/client/src/app-client.ts` | Factory for above — no longer needed |
| `packages/frontend/src/lib/workbench.ts` | Workbench client singleton — replaced by interest manager |
| `subscribeWorkbench` in `backend-client.ts` | Replaced by `connectWorkspace` + interest manager |
| `subscribeSandboxProcesses` in `backend-client.ts` | Replaced by `connectSandbox` + interest manager |
| `subscribeApp` in `backend-client.ts` | Replaced by `connectWorkspace("app")` + interest manager |
| `buildWorkbenchSnapshot` in `workspace/actions.ts` | Replaced by `getWorkspaceSummary` (local reads). Keep as `reconcileWorkbenchState` for recovery only. |
| `notifyWorkbenchUpdated` in `workspace/actions.ts` | Replaced by `applyTaskSummaryUpdate` + `c.broadcast` with payload |
| `notifyWorkbenchUpdated` in `task/workbench.ts` | Replaced by `broadcastTaskUpdate` helper |
| `TaskWorkbenchSnapshot` in `shared/workbench.ts` | Replaced by `WorkspaceSummarySnapshot` + `WorkbenchTaskDetail` |
| `WorkbenchTask` in `shared/workbench.ts` | Split into `WorkbenchTaskSummary` + `WorkbenchTaskDetail` |
| `getWorkbench` action on workspace actor | Replaced by `getWorkspaceSummary` |
| `TaskWorkbenchClient` interface | Replaced by `InterestManager` + `useInterest` hook |
| All `useQuery` with `refetchInterval` in `workspace-dashboard.tsx` | Replaced by `useInterest` |
| All `useQuery` with `refetchInterval` in `terminal-pane.tsx` | Replaced by `useInterest` |
| Mock workbench client (`packages/client/src/mock/workbench-client.ts`) | Replaced by `MockInterestManager` |
---
## 6. Migration Order
Implement in this order to keep the system working at each step:
### Phase 1: Types and backend materialization
1. Add new types to `packages/shared` (`WorkbenchTaskSummary`, `WorkbenchTaskDetail`, `WorkbenchSessionSummary`, `WorkbenchSessionDetail`, `WorkspaceSummarySnapshot`, event types).
2. Add `taskSummaries` table to workspace actor schema.
3. Add `applyTaskSummaryUpdate`, `removeTaskSummary`, `getWorkspaceSummary` actions to workspace actor.
4. Add `getTaskDetail`, `getSessionDetail` actions to task actor.
5. Replace all `notifyWorkbenchUpdated` call sites with `broadcastTaskUpdate` that pushes summary + broadcasts detail with payload.
6. Change app actor broadcast to include snapshot payload.
7. Change sandbox actor broadcast to include process list payload.
8. Add one-time reconciliation action to populate `taskSummaries` table from existing task actors (run on startup or on-demand).
### Phase 2: Client interest manager
9. Add `InterestManager` interface, `RemoteInterestManager`, `MockInterestManager` to `packages/client`.
10. Add topic definitions registry.
11. Add `useInterest` hook.
12. Add `connectWorkspace`, `connectTask`, `connectSandbox`, `getWorkspaceSummary`, `getTaskDetail`, `getSessionDetail` to `BackendClient`.
### Phase 3: Frontend migration
13. Replace `useMockAppSnapshot` with `useInterest("app", ...)`.
14. Replace `MockLayout` workbench subscription with `useInterest("workspace", ...)`.
15. Replace task detail view with `useInterest("task", ...)` + `useInterest("session", ...)`.
16. Replace `workspace-dashboard.tsx` polling queries with `useInterest`.
17. Replace `terminal-pane.tsx` polling queries with `useInterest`.
18. Remove manual `refetch()` calls from mutations.
### Phase 4: Cleanup
19. Delete old files (workbench-client, app-client, old subscribe functions, old types).
20. Remove `buildWorkbenchSnapshot` from hot path (keep as `reconcileWorkbenchState`).
21. Verify `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` pass.
---
## 7. Architecture Comments
Add doc comments at these locations:
- **Topic definitions** — explain the materialized state pattern, why events carry full entity state instead of patches, and the relationship between topics.
- **`broadcastTaskUpdate` helper** — explain the dual-broadcast pattern (push summary to workspace + broadcast detail to direct subscribers).
- **`InterestManager` interface** — explain the grace period, deduplication, and why mock/remote share the same interface.
- **`useInterest` hook** — explain `useSyncExternalStore` integration, null params for conditional interest, and how params key stabilization works.
- **Workspace actor `taskSummaries` table** — explain this is a materialized read projection maintained by task actor pushes, not a source of truth.
- **`applyTaskSummaryUpdate` action** — explain this is the write path for the materialized projection, called by task actors, not by clients.
- **`getWorkspaceSummary` action** — explain this reads from local SQLite only, no fan-out, and why that's the correct pattern.
---
## 8. Testing
- Interest manager unit tests: subscribe/unsubscribe lifecycle, grace period, deduplication, event application.
- Mock implementation tests: verify same behavior as remote through shared test suite against the `InterestManager` interface.
- Backend integration: verify `applyTaskSummaryUpdate` correctly materializes and broadcasts.
- E2E: verify that a task mutation (e.g. rename) updates the sidebar in realtime without polling.

Binary file not shown.

After

Width:  |  Height:  |  Size: 672 KiB

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,290 @@
#!/usr/bin/env bun
/**
* Pull public GitHub organization data into a JSON fixture file.
*
* This script mirrors the sync logic in the backend workspace actor
* (see: packages/backend/src/actors/workspace/app-shell.ts syncGithubOrganizations
* and syncGithubOrganizationRepos). Keep the two in sync: when the backend
* sync workflow changes what data it fetches or how it structures organizations,
* update this script to match.
*
* Key difference from the backend sync: this script only fetches **public** data
* from the GitHub API (no auth token required, no private repos). It is used to
* populate realistic mock/test data for the Foundry frontend without needing
* GitHub OAuth credentials or a GitHub App installation.
*
* Usage:
* bun foundry/scripts/pull-org-data.ts <org-login> [--out <path>]
*
* Examples:
* bun foundry/scripts/pull-org-data.ts rivet-gg
* bun foundry/scripts/pull-org-data.ts rivet-gg --out foundry/scripts/data/rivet-gg.json
*/
import { parseArgs } from "node:util";
import { writeFileSync, mkdirSync } from "node:fs";
import { dirname, resolve } from "node:path";
// ── Types matching the backend sync output ──
// See: packages/shared/src/app-shell.ts
/** One public repository in the fixture, mapped from the GitHub repos API response. */
interface OrgFixtureRepo {
/** "owner/name" identifier (GitHub `full_name`). */
fullName: string;
/** HTTPS clone URL (GitHub `clone_url`). */
cloneUrl: string;
/** Repo description as set on GitHub; null when unset. */
description: string | null;
/** Primary language reported by GitHub; null when undetected. */
language: string | null;
/** Stargazer count at pull time (GitHub `stargazers_count`). */
stars: number;
/** ISO timestamp of the repo's last update (GitHub `updated_at`). */
updatedAt: string;
}
/** One organization member in the fixture, mapped from the GitHub members API response. */
interface OrgFixtureMember {
/** GitHub numeric user id, stringified. */
id: string;
/** GitHub username. */
login: string;
/** Avatar image URL (GitHub `avatar_url`). */
avatarUrl: string;
/** Role in the org. The public members API does not expose roles, so this script always records "member". */
role: "admin" | "member";
}
/** One open pull request in the fixture, mapped from the GitHub pulls API response. */
interface OrgFixturePullRequest {
/** PR number within its repository. */
number: number;
/** PR title. */
title: string;
/** Always "open" — only open PRs are fetched (state=open). */
state: "open";
/** Whether the PR is a draft. */
draft: boolean;
/** Name of the PR's head branch (GitHub `head.ref`). */
headRefName: string;
/** Author login; "unknown" when the user record is missing. */
author: string;
/** "owner/name" of the repository the PR belongs to. */
repoFullName: string;
/** ISO timestamp of the PR's last update (GitHub `updated_at`). */
updatedAt: string;
}
/**
 * Top-level shape of the JSON fixture written by this script.
 * Mirrors the backend sync output (see file header) and is used to
 * populate mock/test data for the Foundry frontend.
 */
interface OrgFixture {
/** ISO timestamp of when this data was pulled */
pulledAt: string;
/** GitHub organization login (e.g. "rivet-gg") */
login: string;
/** GitHub numeric ID */
id: number;
/** Display name */
name: string | null;
/** Organization description */
description: string | null;
/** Public email */
email: string | null;
/** Blog/website URL */
blog: string | null;
/** Avatar URL */
avatarUrl: string;
/** Public repositories (excludes forks by default) */
repos: OrgFixtureRepo[];
/** Public members (only those with public membership) */
members: OrgFixtureMember[];
/** Open pull requests across all public repos */
openPullRequests: OrgFixturePullRequest[];
}
// ── GitHub API helpers ──
// Mirrors the pagination approach in packages/backend/src/services/app-github.ts
// Base URL for all GitHub REST API requests.
const API_BASE = "https://api.github.com";
// Optional token (GITHUB_TOKEN preferred, GH_TOKEN fallback) used for auth headers;
// null means unauthenticated requests (lower rate limits, public data only).
const GITHUB_TOKEN = process.env.GITHUB_TOKEN ?? process.env.GH_TOKEN ?? null;
/**
 * Build the request headers sent with every GitHub API call:
 * JSON media type, API version pin, and a User-Agent. A Bearer
 * token is attached only when GITHUB_TOKEN/GH_TOKEN is set.
 */
function authHeaders(): Record<string, string> {
  const base: Record<string, string> = {
    Accept: "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
    "User-Agent": "foundry-pull-org-data/1.0",
  };
  // Spread in the auth header rather than mutating the base object.
  return GITHUB_TOKEN ? { ...base, Authorization: `Bearer ${GITHUB_TOKEN}` } : base;
}
/**
 * GET a single GitHub API resource and parse its JSON body as T.
 * Throws with the status code and a truncated response body on any
 * non-2xx response.
 */
async function githubGet<T>(url: string): Promise<T> {
  const response = await fetch(url, { headers: authHeaders() });
  if (response.ok) {
    return (await response.json()) as T;
  }
  const body = await response.text().catch(() => "");
  throw new Error(`GitHub API ${response.status}: ${url}\n${body.slice(0, 500)}`);
}
/**
 * Extract the rel="next" URL from an HTTP Link response header.
 * Returns null when the header is absent or contains no next page.
 */
function parseNextLink(linkHeader: string | null): string | null {
  if (!linkHeader) return null;
  // Each comma-separated entry looks like: <https://...>; rel="next"
  const hit = linkHeader
    .split(",")
    .map((entry) => entry.split(";").map((piece) => piece.trim()))
    .find((parts) => Boolean(parts[0]) && parts[1]?.includes('rel="next"') === true);
  // Strip the surrounding angle brackets from the URL portion.
  return hit ? hit[0]!.replace(/^<|>$/g, "") : null;
}
/**
 * GET every page of a paginated GitHub collection endpoint, following
 * rel="next" Link headers until exhausted, and concatenate the results.
 * Throws with the status code and a truncated body on the first non-2xx
 * response.
 */
async function githubPaginate<T>(path: string): Promise<T[]> {
  const normalized = path.startsWith("/") ? path : `/${path}`;
  const collected: T[] = [];
  let pageUrl: string | null = `${API_BASE}${normalized}`;
  while (pageUrl !== null) {
    const response = await fetch(pageUrl, { headers: authHeaders() });
    if (!response.ok) {
      const body = await response.text().catch(() => "");
      throw new Error(`GitHub API ${response.status}: ${pageUrl}\n${body.slice(0, 500)}`);
    }
    collected.push(...((await response.json()) as T[]));
    pageUrl = parseNextLink(response.headers.get("link"));
  }
  return collected;
}
// ── Main ──
/**
 * Pull an organization's public profile, repos, members, and open PRs from
 * the GitHub REST API and assemble them into an OrgFixture.
 *
 * Steps (mirroring the backend sync workflow described in the file header):
 *   1. org profile, 2. public repos, 3. public members, 4. open PRs per repo.
 *
 * @param orgLogin GitHub organization login, e.g. "rivet-gg".
 * @returns The assembled fixture, ready to serialize to JSON.
 * @throws Error when any GitHub API request returns a non-2xx status.
 */
async function pullOrgData(orgLogin: string): Promise<OrgFixture> {
  console.log(`Fetching organization: ${orgLogin}`);

  // 1. Fetch org profile
  // Backend equivalent: getViewer() + listOrganizations() derive org identity
  const org = await githubGet<{
    id: number;
    login: string;
    name: string | null;
    description: string | null;
    email: string | null;
    blog: string | null;
    avatar_url: string;
    public_repos: number;
    public_members_url: string;
  }>(`${API_BASE}/orgs/${orgLogin}`);
  // Fix: the org name and repo count were previously concatenated with no
  // separator (e.g. "rivet-gg12 public repos"); insert a dash between them.
  console.log(` ${org.name ?? org.login} — ${org.public_repos} public repos`);

  // 2. Fetch public repos (non-fork, non-archived)
  // Backend equivalent: listInstallationRepositories() or listUserRepositories()
  // Key difference: we only fetch public repos here (type=public)
  const rawRepos = await githubPaginate<{
    full_name: string;
    clone_url: string;
    description: string | null;
    language: string | null;
    stargazers_count: number;
    updated_at: string;
    fork: boolean;
    archived: boolean;
    private: boolean;
  }>(`/orgs/${orgLogin}/repos?per_page=100&type=public&sort=updated`);
  const repos: OrgFixtureRepo[] = rawRepos
    // Defensive: `private` should already be excluded by type=public, but filter anyway.
    .filter((r) => !r.fork && !r.archived && !r.private)
    .map((r) => ({
      fullName: r.full_name,
      cloneUrl: r.clone_url,
      description: r.description,
      language: r.language,
      stars: r.stargazers_count,
      updatedAt: r.updated_at,
    }))
    // Most-starred first so fixtures surface the org's flagship repos.
    .sort((a, b) => b.stars - a.stars);
  console.log(` ${repos.length} public repos (excluding forks/archived)`);

  // 3. Fetch public members
  // Backend equivalent: members are derived from the OAuth user + org membership
  // Here we can only see members with public membership visibility; the public
  // API exposes no role information, so everyone is recorded as "member".
  const rawMembers = await githubPaginate<{
    id: number;
    login: string;
    avatar_url: string;
  }>(`/orgs/${orgLogin}/members?per_page=100`);
  const members: OrgFixtureMember[] = rawMembers.map((m) => ({
    id: String(m.id),
    login: m.login,
    avatarUrl: m.avatar_url,
    role: "member" as const,
  }));
  console.log(` ${members.length} public members`);

  // 4. Fetch open PRs across all public repos (one repo at a time — the awaits
  // are sequential, presumably to keep rate-limit pressure low).
  // Backend equivalent: ProjectPrSyncActor polls GitHub for open PRs per repo
  // and stores them in the pr_cache table on the project actor
  const openPullRequests: OrgFixturePullRequest[] = [];
  for (const repo of repos) {
    const rawPrs = await githubPaginate<{
      number: number;
      title: string;
      state: string;
      draft: boolean;
      head: { ref: string };
      user: { login: string } | null;
      updated_at: string;
    }>(`/repos/${repo.fullName}/pulls?state=open&per_page=100`);
    for (const pr of rawPrs) {
      openPullRequests.push({
        number: pr.number,
        title: pr.title,
        state: "open",
        draft: pr.draft,
        headRefName: pr.head.ref,
        author: pr.user?.login ?? "unknown",
        repoFullName: repo.fullName,
        updatedAt: pr.updated_at,
      });
    }
    if (rawPrs.length > 0) {
      console.log(` ${repo.fullName}: ${rawPrs.length} open PRs`);
    }
  }
  console.log(` ${openPullRequests.length} total open PRs`);

  return {
    pulledAt: new Date().toISOString(),
    login: org.login,
    id: org.id,
    name: org.name,
    description: org.description,
    email: org.email,
    blog: org.blog,
    avatarUrl: org.avatar_url,
    repos,
    members,
    openPullRequests,
  };
}
// ── CLI ──
// Parse CLI flags; the first positional is the org login.
const { values: flags, positionals: args } = parseArgs({
  args: process.argv.slice(2),
  options: {
    out: { type: "string", short: "o" },
    help: { type: "boolean", short: "h" },
  },
  allowPositionals: true,
});
const wantsHelp = flags.help === true;
// Print usage when asked for help or when the org login is missing.
// A missing login exits 1; an explicit --help exits 0.
if (wantsHelp || args.length === 0) {
  console.log("Usage: bun foundry/scripts/pull-org-data.ts <org-login> [--out <path>]");
  console.log("");
  console.log("Pulls public GitHub organization data into a JSON fixture file.");
  console.log("Set GITHUB_TOKEN or GH_TOKEN to avoid rate limits.");
  process.exit(args.length === 0 && !wantsHelp ? 1 : 0);
}
const orgArg = args[0]!;
// Default output location: <script-dir>/data/<org>.json, overridable with --out.
const fixtureDir = resolve(import.meta.dirname ?? ".", "data");
const outPath = flags.out ?? resolve(fixtureDir, `${orgArg}.json`);
try {
  const fixture = await pullOrgData(orgArg);
  mkdirSync(dirname(outPath), { recursive: true });
  writeFileSync(outPath, JSON.stringify(fixture, null, 2) + "\n");
  console.log(`\nWrote ${outPath}`);
} catch (error) {
  console.error(error instanceof Error ? error.message : String(error));
  process.exit(1);
}

View file

@ -127,13 +127,13 @@ foundry-check:
foundry-dev:
pnpm install
mkdir -p foundry/.foundry/logs
HF_DOCKER_UID="$(id -u)" HF_DOCKER_GID="$(id -g)" docker compose -f foundry/compose.dev.yaml up --build --force-recreate -d
HF_DOCKER_UID="$(id -u)" HF_DOCKER_GID="$(id -g)" docker compose --env-file .env -f foundry/compose.dev.yaml up --build --force-recreate -d
[group('foundry')]
foundry-preview:
pnpm install
mkdir -p foundry/.foundry/logs
HF_DOCKER_UID="$(id -u)" HF_DOCKER_GID="$(id -g)" docker compose -f foundry/compose.preview.yaml up --build --force-recreate -d
HF_DOCKER_UID="$(id -u)" HF_DOCKER_GID="$(id -g)" docker compose --env-file .env -f foundry/compose.preview.yaml up --build --force-recreate -d
[group('foundry')]
foundry-frontend-dev host='127.0.0.1' port='4173' backend='http://127.0.0.1:7741/api/rivet':
@ -141,61 +141,49 @@ foundry-frontend-dev host='127.0.0.1' port='4173' backend='http://127.0.0.1:7741
VITE_HF_BACKEND_ENDPOINT="{{backend}}" pnpm --filter @sandbox-agent/foundry-frontend dev -- --host {{host}} --port {{port}}
[group('foundry')]
foundry-dev-mock host='127.0.0.1' port='4173':
foundry-dev-mock host='127.0.0.1' port='4174':
pnpm install
FOUNDRY_FRONTEND_CLIENT_MODE=mock pnpm --filter @sandbox-agent/foundry-frontend dev -- --host {{host}} --port {{port}}
[group('foundry')]
foundry-mock:
pnpm install
mkdir -p foundry/.foundry/logs
docker compose -f foundry/compose.mock.yaml up --build --force-recreate -d
[group('foundry')]
foundry-mock-down:
docker compose -f foundry/compose.mock.yaml down
[group('foundry')]
foundry-mock-logs:
docker compose -f foundry/compose.mock.yaml logs -f --tail=200
[group('foundry')]
foundry-dev-turbo:
pnpm exec turbo run dev --parallel --filter=@sandbox-agent/foundry-*
[group('foundry')]
foundry-dev-down:
docker compose -f foundry/compose.dev.yaml down
docker compose --env-file .env -f foundry/compose.dev.yaml down
[group('foundry')]
foundry-dev-logs:
docker compose -f foundry/compose.dev.yaml logs -f --tail=200
docker compose --env-file .env -f foundry/compose.dev.yaml logs -f --tail=200
[group('foundry')]
foundry-preview-down:
docker compose -f foundry/compose.preview.yaml down
docker compose --env-file .env -f foundry/compose.preview.yaml down
[group('foundry')]
foundry-preview-logs:
docker compose -f foundry/compose.preview.yaml logs -f --tail=200
docker compose --env-file .env -f foundry/compose.preview.yaml logs -f --tail=200
[group('foundry')]
foundry-format:
prettier --write foundry
[group('foundry')]
foundry-backend-start host='127.0.0.1' port='7741':
pnpm install
pnpm --filter @sandbox-agent/foundry-backend build
pnpm --filter @sandbox-agent/foundry-backend start -- --host {{host}} --port {{port}}
[group('foundry')]
foundry-hf *ARGS:
@echo "CLI package is disabled in this repo; use frontend workflows instead." >&2
@exit 1
[group('foundry')]
foundry-docker-build tag='foundry:local':
docker build -f foundry/docker/backend.Dockerfile -t {{tag}} .
[group('foundry')]
foundry-desktop-dev:
pnpm --filter @sandbox-agent/foundry-desktop dev
[group('foundry')]
foundry-desktop-build:
pnpm --filter @sandbox-agent/foundry-desktop build:all
[group('foundry')]
foundry-railway-up:
npx -y @railway/cli up --detach
[group('foundry')]
foundry-railway-status:
npx -y @railway/cli status --json

pnpm-lock.yaml (generated) — 1024 changed lines

File diff suppressed because it is too large Load diff

View file

@ -16,34 +16,46 @@ export async function tagDocker(opts: ReleaseOpts) {
console.log(`==> Source commit: ${sourceCommit}`);
}
// Check both architecture images exist using manifest inspect
console.log(`==> Checking images exist: ${IMAGE}:${sourceCommit}-{amd64,arm64}`);
try {
console.log(`==> Inspecting ${IMAGE}:${sourceCommit}-amd64`);
await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}-amd64`;
console.log(`==> Inspecting ${IMAGE}:${sourceCommit}-arm64`);
await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}-arm64`;
console.log(`==> Both images exist`);
await ensureArchImagesExist(sourceCommit, "");
} catch (error) {
console.warn(`⚠️ Docker images ${IMAGE}:${sourceCommit}-{amd64,arm64} not found - skipping Docker tagging`);
console.warn(` To enable Docker tagging, build and push images first, then retry the release.`);
return;
}
// Create and push manifest with version
await createManifest(sourceCommit, opts.version);
// Create and push manifest with latest
if (opts.latest) {
await createManifest(sourceCommit, "latest");
await createManifest(sourceCommit, opts.minorVersionChannel);
}
try {
await ensureArchImagesExist(sourceCommit, "-full");
await createManifest(sourceCommit, `${opts.version}-full`, "-full");
if (opts.latest) {
await createManifest(sourceCommit, `${opts.minorVersionChannel}-full`, "-full");
await createManifest(sourceCommit, "full", "-full");
}
} catch (error) {
console.warn(`⚠️ Full Docker images ${IMAGE}:${sourceCommit}-full-{amd64,arm64} not found - skipping full Docker tagging`);
console.warn(` To enable full Docker tagging, build and push full images first, then retry the release.`);
}
}
async function createManifest(from: string, to: string) {
console.log(`==> Creating manifest: ${IMAGE}:${to} from ${IMAGE}:${from}-{amd64,arm64}`);
// Use buildx imagetools to create and push multi-arch manifest
// This works with manifest lists as inputs (unlike docker manifest create)
await $({ stdio: "inherit" })`docker buildx imagetools create --tag ${IMAGE}:${to} ${IMAGE}:${from}-amd64 ${IMAGE}:${from}-arm64`;
/**
 * Verifies that both per-architecture images exist in the registry for the
 * given source commit by running `docker manifest inspect` on the amd64 and
 * arm64 tags. Throws (rejects) if either inspect command fails, which callers
 * use to skip tagging for a missing variant.
 *
 * @param sourceCommit - Commit SHA used in the image tag.
 * @param variantSuffix - `""` for the base image, `"-full"` for the full variant.
 */
async function ensureArchImagesExist(sourceCommit: string, variantSuffix: "" | "-full") {
console.log(`==> Checking images exist: ${IMAGE}:${sourceCommit}${variantSuffix}-{amd64,arm64}`);
console.log(`==> Inspecting ${IMAGE}:${sourceCommit}${variantSuffix}-amd64`);
await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}${variantSuffix}-amd64`;
console.log(`==> Inspecting ${IMAGE}:${sourceCommit}${variantSuffix}-arm64`);
await $({ stdio: "inherit" })`docker manifest inspect ${IMAGE}:${sourceCommit}${variantSuffix}-arm64`;
console.log(`==> Both images exist`);
}
/**
 * Creates and pushes a multi-arch manifest `IMAGE:to` from the per-arch tags
 * `IMAGE:from<variantSuffix>-{amd64,arm64}` using `docker buildx imagetools
 * create` (which, unlike `docker manifest create`, accepts manifest lists as
 * inputs).
 *
 * @param from - Source tag prefix (typically a commit SHA).
 * @param to - Destination tag to create (e.g. a version, channel, or "latest").
 * @param variantSuffix - `""` for the base image, `"-full"` for the full variant.
 */
async function createManifest(from: string, to: string, variantSuffix: "" | "-full" = "") {
console.log(`==> Creating manifest: ${IMAGE}:${to} from ${IMAGE}:${from}${variantSuffix}-{amd64,arm64}`);
await $({
stdio: "inherit",
})`docker buildx imagetools create --tag ${IMAGE}:${to} ${IMAGE}:${from}${variantSuffix}-amd64 ${IMAGE}:${from}${variantSuffix}-arm64`;
}

View file

@ -79,7 +79,7 @@ pub enum Command {
Opencode(OpencodeArgs),
/// Manage the sandbox-agent background daemon.
Daemon(DaemonArgs),
/// Install or reinstall an agent without running the server.
/// Install or reinstall one agent, or `all` supported agents, without running the server.
InstallAgent(InstallAgentArgs),
/// Inspect locally discovered credentials.
Credentials(CredentialsArgs),
@ -295,7 +295,10 @@ pub struct AcpCloseArgs {
#[derive(Args, Debug)]
pub struct InstallAgentArgs {
agent: String,
#[arg(required_unless_present = "all", conflicts_with = "all")]
agent: Option<String>,
#[arg(long, conflicts_with = "agent")]
all: bool,
#[arg(long, short = 'r')]
reinstall: bool,
#[arg(long = "agent-version")]
@ -946,24 +949,73 @@ fn load_json_payload(
}
fn install_agent_local(args: &InstallAgentArgs) -> Result<(), CliError> {
let agent_id = AgentId::parse(&args.agent)
.ok_or_else(|| CliError::Server(format!("unsupported agent: {}", args.agent)))?;
if args.all && (args.agent_version.is_some() || args.agent_process_version.is_some()) {
return Err(CliError::Server(
"--agent-version and --agent-process-version are only supported for single-agent installs"
.to_string(),
));
}
let agents = resolve_install_agents(args)?;
let manager = AgentManager::new(default_install_dir())
.map_err(|err| CliError::Server(err.to_string()))?;
let result = manager
.install(
agent_id,
InstallOptions {
reinstall: args.reinstall,
version: args.agent_version.clone(),
agent_process_version: args.agent_process_version.clone(),
},
)
.map_err(|err| CliError::Server(err.to_string()))?;
if agents.len() == 1 {
let result = manager
.install(
agents[0],
InstallOptions {
reinstall: args.reinstall,
version: args.agent_version.clone(),
agent_process_version: args.agent_process_version.clone(),
},
)
.map_err(|err| CliError::Server(err.to_string()))?;
let output = install_result_json(result);
return write_stdout_line(&serde_json::to_string_pretty(&output)?);
}
let output = json!({
let mut results = Vec::with_capacity(agents.len());
for agent_id in agents {
let result = manager
.install(
agent_id,
InstallOptions {
reinstall: args.reinstall,
version: None,
agent_process_version: None,
},
)
.map_err(|err| CliError::Server(err.to_string()))?;
results.push(json!({
"agent": agent_id.as_str(),
"result": install_result_json(result),
}));
}
write_stdout_line(&serde_json::to_string_pretty(
&json!({ "agents": results }),
)?)
}
/// Resolves which agents an `install-agent` invocation targets.
///
/// `--all` expands to every supported agent; otherwise the positional
/// `<AGENT>` argument is required and must parse to a known [`AgentId`].
/// Returns a `CliError::Server` when the agent is missing or unrecognized.
fn resolve_install_agents(args: &InstallAgentArgs) -> Result<Vec<AgentId>, CliError> {
    if args.all {
        return Ok(AgentId::all().to_vec());
    }
    match args.agent.as_deref() {
        None => Err(CliError::Server(
            "missing agent: provide <AGENT> or --all".to_string(),
        )),
        Some(agent) => match AgentId::parse(agent) {
            Some(agent_id) => Ok(vec![agent_id]),
            None => Err(CliError::Server(format!("unsupported agent: {agent}"))),
        },
    }
}
fn install_result_json(result: sandbox_agent_agent_management::agents::InstallResult) -> Value {
json!({
"alreadyInstalled": result.already_installed,
"artifacts": result.artifacts.into_iter().map(|artifact| json!({
"kind": format!("{:?}", artifact.kind),
@ -971,9 +1023,7 @@ fn install_agent_local(args: &InstallAgentArgs) -> Result<(), CliError> {
"source": format!("{:?}", artifact.source),
"version": artifact.version,
})).collect::<Vec<_>>()
});
write_stdout_line(&serde_json::to_string_pretty(&output)?)
})
}
#[derive(Serialize)]
@ -1416,6 +1466,60 @@ fn write_stderr_line(text: &str) -> Result<(), CliError> {
mod tests {
use super::*;
#[test]
// `--all` with no positional agent expands to the full supported-agent list.
fn resolve_install_agents_expands_all() {
assert_eq!(
resolve_install_agents(&InstallAgentArgs {
agent: None,
all: true,
reinstall: false,
agent_version: None,
agent_process_version: None,
})
.unwrap(),
AgentId::all().to_vec()
);
}
#[test]
// A single known agent name resolves to a one-element vector of its AgentId.
fn resolve_install_agents_supports_single_agent() {
assert_eq!(
resolve_install_agents(&InstallAgentArgs {
agent: Some("codex".to_string()),
all: false,
reinstall: false,
agent_version: None,
agent_process_version: None,
})
.unwrap(),
vec![AgentId::Codex]
);
}
#[test]
// An unrecognized agent name is an error rather than a silent no-op.
fn resolve_install_agents_rejects_unknown_agent() {
assert!(resolve_install_agents(&InstallAgentArgs {
agent: Some("nope".to_string()),
all: false,
reinstall: false,
agent_version: None,
agent_process_version: None,
})
.is_err());
}
#[test]
// The literal string "all" as a positional is rejected; only the --all flag
// (not a positional) expands to every agent.
fn resolve_install_agents_rejects_positional_all() {
assert!(resolve_install_agents(&InstallAgentArgs {
agent: Some("all".to_string()),
all: false,
reinstall: false,
agent_version: None,
agent_process_version: None,
})
.is_err());
}
#[test]
fn apply_last_event_id_header_sets_header_when_provided() {
let client = HttpClient::builder().build().expect("build client");