From 0fbea6ce61ae176519864bfe2ae6b448671dc451 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 20:28:41 -0700 Subject: [PATCH] wip --- README.md | 2 +- foundry/CLAUDE.md | 87 +- foundry/README.md | 6 +- foundry/compose.dev.yaml | 5 +- foundry/compose.preview.yaml | 2 - foundry/docker/backend.dev.Dockerfile | 15 - foundry/docker/backend.preview.Dockerfile | 15 - foundry/packages/backend/CLAUDE.md | 19 +- foundry/packages/backend/src/actors/events.ts | 40 +- .../src/actors/github-data/db/migrations.ts | 22 + .../src/actors/github-data/db/schema.ts | 9 + .../backend/src/actors/github-data/index.ts | 198 +- .../packages/backend/src/actors/handles.ts | 73 +- .../backend/src/actors/history/index.ts | 6 +- foundry/packages/backend/src/actors/index.ts | 15 +- foundry/packages/backend/src/actors/keys.ts | 30 +- .../packages/backend/src/actors/logging.ts | 6 +- .../{workspace => organization}/actions.ts | 425 ++-- .../{workspace => organization}/app-shell.ts | 401 ++-- .../actors/{project => organization}/db/db.ts | 2 +- .../actors/organization/db/drizzle.config.ts | 6 + .../db/drizzle/0000_melted_viper.sql | 6 - .../db/drizzle/meta/0000_snapshot.json | 31 - .../db/drizzle/meta/_journal.json | 0 .../db/migrations.ts | 14 +- .../{workspace => organization}/db/schema.ts | 10 +- .../backend/src/actors/organization/index.ts | 19 + .../src/actors/project-branch-sync/index.ts | 178 -- .../backend/src/actors/project/actions.ts | 1261 ----------- .../src/actors/project/db/drizzle.config.ts | 6 - .../db/drizzle/0000_useful_la_nuit.sql | 40 - .../db/drizzle/meta/0000_snapshot.json | 265 --- .../src/actors/project/db/migrations.ts | 46 - .../backend/src/actors/project/db/schema.ts | 41 - .../backend/src/actors/project/index.ts | 30 - .../backend/src/actors/project/stack-model.ts | 69 - .../backend/src/actors/repository/actions.ts | 557 +++++ .../actors/{workspace => repository}/db/db.ts | 2 +- .../actors/repository/db/drizzle.config.ts | 6 + 
.../db/drizzle/0000_useful_la_nuit.sql | 12 + .../db/drizzle/meta/0000_snapshot.json | 87 + .../db/drizzle/meta/_journal.json | 0 .../src/actors/repository/db/migrations.ts | 43 + .../src/actors/repository/db/schema.ts | 16 + .../backend/src/actors/repository/index.ts | 27 + .../backend/src/actors/sandbox/index.ts | 54 +- .../backend/src/actors/task/db/migrations.ts | 12 +- .../backend/src/actors/task/db/schema.ts | 4 +- .../packages/backend/src/actors/task/index.ts | 43 +- .../backend/src/actors/task/workbench.ts | 198 +- .../src/actors/task/workflow/commands.ts | 8 +- .../src/actors/task/workflow/common.ts | 14 +- .../backend/src/actors/task/workflow/index.ts | 15 +- .../backend/src/actors/task/workflow/init.ts | 20 +- .../backend/src/actors/task/workflow/push.ts | 6 +- .../src/actors/workspace/db/drizzle.config.ts | 6 - .../backend/src/actors/workspace/index.ts | 19 - .../backend/src/config/organization.ts | 13 + .../packages/backend/src/config/workspace.ts | 13 - foundry/packages/backend/src/driver.ts | 86 +- foundry/packages/backend/src/index.ts | 40 +- .../src/integrations/git-spice/index.ts | 223 -- .../backend/src/integrations/git/index.ts | 327 --- .../backend/src/integrations/github/index.ts | 292 +-- .../src/integrations/graphite/index.ts | 140 -- .../packages/backend/src/sandbox-config.ts | 12 +- .../backend/src/services/app-github.ts | 35 + .../backend/src/services/better-auth.ts | 96 +- .../backend/src/services/foundry-paths.ts | 20 - .../backend/src/services/github-auth.ts | 14 +- .../backend/src/services/repo-git-lock.ts | 45 - foundry/packages/backend/src/services/repo.ts | 27 + .../packages/backend/test/git-spice.test.ts | 129 -- .../backend/test/git-validate-remote.test.ts | 40 - .../backend/test/helpers/test-context.ts | 4 +- .../backend/test/helpers/test-driver.ts | 37 +- foundry/packages/backend/test/keys.test.ts | 11 +- ...test.ts => organization-isolation.test.ts} | 42 +- ...anization-star-sandbox-agent-repo.test.ts} | 8 +- 
.../backend/test/sandbox-config.test.ts | 6 +- .../packages/backend/test/stack-model.test.ts | 34 - .../backend/test/workbench-unread.test.ts | 8 +- foundry/packages/cli/src/index.ts | 257 ++- .../src/{workspace => organization}/config.ts | 6 +- foundry/packages/cli/src/theme.ts | 8 +- foundry/packages/cli/src/tui.ts | 27 +- .../packages/cli/test/backend-manager.test.ts | 6 +- ...ig.test.ts => organization-config.test.ts} | 14 +- foundry/packages/cli/test/theme.test.ts | 4 +- foundry/packages/cli/test/tui-format.test.ts | 6 +- foundry/packages/client/src/app-client.ts | 2 +- foundry/packages/client/src/backend-client.ts | 624 +++--- foundry/packages/client/src/index.ts | 10 +- .../client/src/interest/mock-manager.ts | 12 - foundry/packages/client/src/keys.ts | 28 +- foundry/packages/client/src/mock-app.ts | 16 +- .../client/src/mock/backend-client.ts | 237 +- .../client/src/mock/workbench-client.ts | 126 +- .../packages/client/src/remote/app-client.ts | 4 +- .../client/src/remote/workbench-client.ts | 58 +- .../src/{interest => subscription}/manager.ts | 8 +- .../client/src/subscription/mock-manager.ts | 12 + .../remote-manager.ts | 10 +- .../src/{interest => subscription}/topics.ts | 55 +- .../use-subscription.ts} | 8 +- foundry/packages/client/src/view-model.ts | 2 +- .../packages/client/src/workbench-client.ts | 18 +- .../packages/client/src/workbench-model.ts | 77 +- .../test/e2e/full-integration-e2e.test.ts | 44 +- .../client/test/e2e/github-pr-e2e.test.ts | 53 +- foundry/packages/client/test/e2e/helpers.ts | 84 + .../client/test/e2e/workbench-e2e.test.ts | 313 +-- .../test/e2e/workbench-load-e2e.test.ts | 53 +- foundry/packages/client/test/keys.test.ts | 12 +- ...r.test.ts => subscription-manager.test.ts} | 70 +- .../packages/client/test/view-model.test.ts | 12 +- .../src-tauri/gen/schemas/acl-manifests.json | 1923 ++++++++++++++++- .../src-tauri/gen/schemas/desktop-schema.json | 90 +- .../src-tauri/gen/schemas/macOS-schema.json | 90 +- 
foundry/packages/frontend/src/app/router.tsx | 84 +- .../frontend/src/components/dev-panel.tsx | 90 +- .../frontend/src/components/mock-layout.tsx | 653 +++--- .../components/mock-layout/message-list.tsx | 70 +- .../components/mock-layout/right-sidebar.tsx | 6 +- .../{tab-strip.tsx => session-strip.tsx} | 80 +- .../src/components/mock-layout/sidebar.tsx | 567 +++-- .../components/mock-layout/terminal-pane.tsx | 54 +- .../mock-layout/transcript-header.tsx | 14 +- .../src/components/mock-layout/ui.tsx | 10 +- .../components/mock-layout/view-model.test.ts | 10 +- .../src/components/mock-layout/view-model.ts | 28 +- .../src/components/mock-onboarding.tsx | 22 +- ...shboard.tsx => organization-dashboard.tsx} | 445 +--- .../frontend/src/features/tasks/model.test.ts | 6 +- foundry/packages/frontend/src/lib/backend.ts | 4 +- foundry/packages/frontend/src/lib/env.ts | 4 +- foundry/packages/frontend/src/lib/interest.ts | 5 - foundry/packages/frontend/src/lib/mock-app.ts | 14 +- .../packages/frontend/src/lib/subscription.ts | 5 + foundry/packages/shared/src/app-shell.ts | 2 +- foundry/packages/shared/src/config.ts | 4 +- foundry/packages/shared/src/contracts.ts | 105 +- foundry/packages/shared/src/index.ts | 2 +- foundry/packages/shared/src/organization.ts | 13 + .../packages/shared/src/realtime-events.ts | 12 +- foundry/packages/shared/src/workbench.ts | 64 +- foundry/packages/shared/src/workspace.ts | 13 - ...workspace.test.ts => organization.test.ts} | 16 +- foundry/research/friction/general.mdx | 40 +- foundry/research/friction/rivet.mdx | 46 +- foundry/research/friction/sandboxes.mdx | 6 +- .../realtime-interest-manager-spec.md | 248 +-- .../00-end-to-end-async-realtime-plan.md | 34 +- .../01-task-creation-bootstrap-only.md | 14 +- ...02-repo-overview-from-cached-projection.md | 30 +- ...03-repo-actions-via-background-workflow.md | 24 +- ...on-creation-without-inline-provisioning.md | 4 +- ...5-workbench-snapshot-from-derived-state.md | 4 +- 
.../07-auth-identity-simplification.md | 48 +- .../specs/async-action-fixes/README.md | 6 +- foundry/research/specs/frontend.md | 4 +- foundry/research/specs/github-data-actor.md | 44 +- .../specs/rivetkit-opentui-migration-plan.md | 174 +- foundry/scripts/build-test-image.sh | 2 +- foundry/scripts/data/rivet-dev.json | 12 +- foundry/scripts/pull-org-data.ts | 8 +- 166 files changed, 6675 insertions(+), 7105 deletions(-) rename foundry/packages/backend/src/actors/{workspace => organization}/actions.ts (64%) rename foundry/packages/backend/src/actors/{workspace => organization}/app-shell.ts (84%) rename foundry/packages/backend/src/actors/{project => organization}/db/db.ts (68%) create mode 100644 foundry/packages/backend/src/actors/organization/db/drizzle.config.ts rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/0000_melted_viper.sql (94%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/meta/0000_snapshot.json (95%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/meta/_journal.json (100%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/migrations.ts (96%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/schema.ts (91%) create mode 100644 foundry/packages/backend/src/actors/organization/index.ts delete mode 100644 foundry/packages/backend/src/actors/project-branch-sync/index.ts delete mode 100644 foundry/packages/backend/src/actors/project/actions.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json delete mode 100644 foundry/packages/backend/src/actors/project/db/migrations.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/schema.ts delete mode 100644 
foundry/packages/backend/src/actors/project/index.ts delete mode 100644 foundry/packages/backend/src/actors/project/stack-model.ts create mode 100644 foundry/packages/backend/src/actors/repository/actions.ts rename foundry/packages/backend/src/actors/{workspace => repository}/db/db.ts (68%) create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle.config.ts create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json rename foundry/packages/backend/src/actors/{project => repository}/db/drizzle/meta/_journal.json (100%) create mode 100644 foundry/packages/backend/src/actors/repository/db/migrations.ts create mode 100644 foundry/packages/backend/src/actors/repository/db/schema.ts create mode 100644 foundry/packages/backend/src/actors/repository/index.ts delete mode 100644 foundry/packages/backend/src/actors/workspace/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/workspace/index.ts create mode 100644 foundry/packages/backend/src/config/organization.ts delete mode 100644 foundry/packages/backend/src/config/workspace.ts delete mode 100644 foundry/packages/backend/src/integrations/git-spice/index.ts delete mode 100644 foundry/packages/backend/src/integrations/git/index.ts delete mode 100644 foundry/packages/backend/src/integrations/graphite/index.ts delete mode 100644 foundry/packages/backend/src/services/foundry-paths.ts delete mode 100644 foundry/packages/backend/src/services/repo-git-lock.ts delete mode 100644 foundry/packages/backend/test/git-spice.test.ts delete mode 100644 foundry/packages/backend/test/git-validate-remote.test.ts rename foundry/packages/backend/test/{workspace-isolation.test.ts => organization-isolation.test.ts} (63%) rename foundry/packages/backend/test/{workspace-star-sandbox-agent-repo.test.ts => organization-star-sandbox-agent-repo.test.ts} (80%) delete mode 
100644 foundry/packages/backend/test/stack-model.test.ts rename foundry/packages/cli/src/{workspace => organization}/config.ts (71%) rename foundry/packages/cli/test/{workspace-config.test.ts => organization-config.test.ts} (59%) delete mode 100644 foundry/packages/client/src/interest/mock-manager.ts rename foundry/packages/client/src/{interest => subscription}/manager.ts (82%) create mode 100644 foundry/packages/client/src/subscription/mock-manager.ts rename foundry/packages/client/src/{interest => subscription}/remote-manager.ts (94%) rename foundry/packages/client/src/{interest => subscription}/topics.ts (73%) rename foundry/packages/client/src/{interest/use-interest.ts => subscription/use-subscription.ts} (85%) create mode 100644 foundry/packages/client/test/e2e/helpers.ts rename foundry/packages/client/test/{interest-manager.test.ts => subscription-manager.test.ts} (60%) rename foundry/packages/frontend/src/components/mock-layout/{tab-strip.tsx => session-strip.tsx} (78%) rename foundry/packages/frontend/src/components/{workspace-dashboard.tsx => organization-dashboard.tsx} (79%) delete mode 100644 foundry/packages/frontend/src/lib/interest.ts create mode 100644 foundry/packages/frontend/src/lib/subscription.ts create mode 100644 foundry/packages/shared/src/organization.ts delete mode 100644 foundry/packages/shared/src/workspace.ts rename foundry/packages/shared/test/{workspace.test.ts => organization.test.ts} (59%) diff --git a/README.md b/README.md index d4bfc61..eb427d7 100644 --- a/README.md +++ b/README.md @@ -277,7 +277,7 @@ Coding agents expect interactive terminals with proper TTY handling. SSH with pi - **Storage of sessions on disk**: Sessions are already stored by the respective coding agents on disk. It's assumed that the consumer is streaming data from this machine to an external storage, such as Postgres, ClickHouse, or Rivet. 
- **Direct LLM wrappers**: Use the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction) if you want to implement your own agent from scratch. - **Git Repo Management**: Just use git commands or the features provided by your sandbox provider of choice. -- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this project with sandbox providers. +- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this repository with sandbox providers. ## Roadmap diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index 7d2bb37..e347a60 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -12,10 +12,10 @@ Use TypeScript for all source code. Use `pnpm` workspaces and Turborepo. -- Workspace root uses `pnpm-workspace.yaml` and `turbo.json`. +- Repository root uses `pnpm-workspace.yaml` and `turbo.json`. - Packages live in `packages/*`. - `core` is renamed to `shared`. -- `packages/cli` is disabled and excluded from active workspace validation. +- `packages/cli` is disabled and excluded from active monorepo validation. - Integrations and providers live under `packages/backend/src/{integrations,providers}`. ## CLI Status @@ -23,14 +23,14 @@ Use `pnpm` workspaces and Turborepo. - `packages/cli` is fully disabled for active development. - Do not implement new behavior in `packages/cli` unless explicitly requested. - Frontend is the primary product surface; prioritize `packages/frontend` + supporting `packages/client`/`packages/backend`. -- Workspace `build`, `typecheck`, and `test` intentionally exclude `@sandbox-agent/foundry-cli`. -- `pnpm-workspace.yaml` excludes `packages/cli` from workspace package resolution. 
+- Monorepo `build`, `typecheck`, and `test` intentionally exclude `@sandbox-agent/foundry-cli`. +- `pnpm-workspace.yaml` excludes `packages/cli` from monorepo package resolution. ## Common Commands - Foundry is the canonical name for this product tree. Do not introduce or preserve legacy pre-Foundry naming in code, docs, commands, or runtime paths. - Install deps: `pnpm install` -- Full active-workspace validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` +- Full active-monorepo validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` - Start the full dev stack (real backend + frontend): `just foundry-dev` — frontend on **port 4173**, backend on **port 7741** (Docker via `compose.dev.yaml`) - Start the mock frontend stack (no backend): `just foundry-mock` — mock frontend on **port 4174** (Docker via `compose.mock.yaml`) - Start the local production-build preview stack: `just foundry-preview` @@ -59,9 +59,9 @@ Use `pnpm` workspaces and Turborepo. ## Railway Logs -- Production Foundry Railway logs can be read from a linked workspace with `railway logs --deployment --lines 200` or `railway logs --deployment --lines 200`. +- Production Foundry Railway logs can be read from a linked checkout with `railway logs --deployment --lines 200`. - Production deploys should go through `git push` to the deployment branch/workflow. Do not use `railway up` for Foundry deploys. -- If Railway logs fail because the workspace is not linked to the correct project/service/environment, run: +- If Railway logs fail because the checkout is not linked to the correct Railway project/service/environment, run: `railway link --project 33e3e2df-32c5-41c5-a4af-dca8654acb1d --environment cf387142-61fd-4668-8cf7-b3559e0983cb --service 91c7e450-d6d2-481a-b2a4-0a916f4160fc` - That links this directory to the `sandbox-agent` project, `production` environment, and `foundry-api` service. 
- Production proxy chain: `api.sandboxagent.dev` routes through Cloudflare → Fastly/Varnish → Railway. When debugging request duplication, timeouts, or retry behavior, check headers like `cf-ray`, `x-varnish`, `x-railway-edge`, and `cdn-loop` to identify which layer is involved. @@ -96,19 +96,19 @@ Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or ### Materialized state in coordinator actors -- **Workspace actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the workspace actor when they mutate. The workspace actor broadcasts the updated entity to connected clients. `getWorkspaceSummary` reads from local tables only — no fan-out to child actors. +- **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors. - **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it. -- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. 
Clients subscribe to the `session` topic for whichever session tab is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). -- The expensive fan-out (querying every project/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. +- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). +- The expensive fan-out (querying every repository/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. -### Interest manager +### Subscription manager -The interest manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It: +The subscription manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It: - **Deduplicates** — multiple subscribers to the same topic share one connection and one cached state. - **Grace period (30s)** — when the last subscriber leaves, the connection and state stay alive for 30 seconds before teardown. This keeps data warm for back-navigation and prevents thrashing. -- **Exposes a single hook** — `useInterest(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional interest). -- **Shared harness, separate implementations** — the `InterestManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both. 
+- **Exposes a single hook** — `useSubscription(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional subscription). +- **Shared harness, separate implementations** — the `SubscriptionManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both. ### Topics @@ -116,23 +116,31 @@ Each topic maps to one actor connection and one event stream: | Topic | Actor | Event | Data | |---|---|---|---| -| `app` | Workspace `"app"` | `appUpdated` | Auth, orgs, onboarding | -| `workspace` | Workspace `{workspaceId}` | `workspaceUpdated` | Repo catalog, task summaries, repo summaries | -| `task` | Task `{workspaceId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree | -| `session` | Task `{workspaceId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state | +| `app` | Organization `"app"` | `appUpdated` | Auth, orgs, onboarding | +| `organization` | Organization `{organizationId}` | `organizationUpdated` | Repo catalog, task summaries, repo summaries | +| `task` | Task `{organizationId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree | +| `session` | Task `{organizationId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state | | `sandboxProcesses` | SandboxInstance | `processesUpdated` | Process list | -The client subscribes to `app` always, `workspace` when entering a workspace, `task` when viewing a task, and `session` when viewing a specific session tab. At most 4 actor connections at a time (app + workspace + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID. 
+The client subscribes to `app` always, `organization` when entering an organization, `task` when viewing a task, and `session` when viewing a specific session. At most 4 actor connections at a time (app + organization + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID. ### Rules - Do not add `useQuery` with `refetchInterval` for data that should be push-based. - Do not broadcast empty notification events. Events must carry the full new state of the changed entity. - Do not re-fetch full snapshots after mutations. The mutation triggers a server-side broadcast with the new entity state; the client replaces it in local state. -- All event subscriptions go through the interest manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns. -- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent workspace actor, which broadcasts to workspace subscribers. +- All event subscriptions go through the subscription manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns. +- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent organization actor, which broadcasts to organization subscribers. - Comment architecture-related code: add doc comments explaining the materialized state pattern, why deltas flow the way they do, and the relationship between parent/child actor broadcasts. New contributors should understand the data flow from comments alone. +## Git State Policy + +- The backend stores zero git state. No local clones, no refs, no working trees, and no git-spice. +- Repository metadata (branches, default branch, pull requests) comes from GitHub API data and webhook events already flowing into the system. +- All git operations that require a working tree run inside the task's sandbox via `executeInSandbox()`. 
+- Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. If you need git data, either read stored GitHub metadata or run the command inside a sandbox. +- The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. + ## UI System - Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation. @@ -166,14 +174,14 @@ For all Rivet/RivetKit implementation: 2. SQLite is **per actor instance** (per actor key), not a shared backend-global database: - Each actor instance gets its own SQLite DB. - Schema design should assume a single actor instance owns the entire DB. - - Do not add `workspaceId`/`repoId`/`taskId` columns just to "namespace" rows for a given actor instance; use actor state and/or the actor key instead. - - Example: the `task` actor instance already represents `(workspaceId, repoId, taskId)`, so its SQLite tables should not need those columns for primary keys. + - Do not add `organizationId`/`repoId`/`taskId` columns just to "namespace" rows for a given actor instance; use actor state and/or the actor key instead. + - Example: the `task` actor instance already represents `(organizationId, repoId, taskId)`, so its SQLite tables should not need those columns for primary keys. 3. Do not use backend-global SQLite singletons; database access must go through actor `db` providers (`c.db`). -4. The default dependency source for RivetKit is the published `rivetkit` package so workspace installs and CI remain self-contained. +4. The default dependency source for RivetKit is the published `rivetkit` package so monorepo installs and CI remain self-contained. 5. When working on coordinated RivetKit changes, you may temporarily relink to a local checkout instead of the published package. 
- - Dedicated local checkout for this workspace: `/Users/nathan/conductor/workspaces/task/rivet-checkout` + - Dedicated local checkout for this repo: `/Users/nathan/conductor/workspaces/task/rivet-checkout` - Preferred local link target: `../rivet-checkout/rivetkit-typescript/packages/rivetkit` - - Sub-packages (`@rivetkit/sqlite-vfs`, etc.) resolve transitively from the RivetKit workspace when using the local checkout. + - Sub-packages (`@rivetkit/sqlite-vfs`, etc.) resolve transitively from the RivetKit monorepo when using the local checkout. 6. Before using a local checkout, build RivetKit in the rivet repo: ```bash cd ../rivet-checkout/rivetkit-typescript @@ -187,17 +195,17 @@ For all Rivet/RivetKit implementation: - Do not add an extra proxy or manager-specific route layer in the backend. - Let RivetKit own metadata/public endpoint behavior for `/v1/rivet`. -## Workspace + Actor Rules +## Organization + Actor Rules -- Everything is scoped to a workspace. -- Workspace resolution order: `--workspace` flag -> config default -> `"default"`. -- `ControlPlaneActor` is replaced by `WorkspaceActor` (workspace coordinator). -- Every actor key must be prefixed with workspace namespace (`["ws", workspaceId, ...]`). +- Everything is scoped to an organization. +- Organization resolution order: `--organization` flag -> config default -> `"default"`. +- `ControlPlaneActor` is replaced by `OrganizationActor` (organization coordinator). +- Every actor key must be prefixed with organization namespace (`["org", organizationId, ...]`). - CLI/TUI/GUI must use `@sandbox-agent/foundry-client` (`packages/client`) for backend access; `rivetkit/client` imports are only allowed inside `packages/client`. - Do not add custom backend REST endpoints (no `/v1/*` shim layer). - We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them. - Keep strict single-writer ownership: each table/row has exactly one actor writer. 
-- Parent actors (`workspace`, `project`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. +- Parent actors (`organization`, `repository`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. - Periodic syncing lives in dedicated child actors with one timeout cadence each. - Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. - Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. @@ -218,7 +226,7 @@ Action handlers must return fast. The pattern: 3. **Validating preconditions** — check state synchronously in the action handler *before* enqueuing. If a precondition isn't met (e.g. session not ready, task not initialized), throw an error immediately. Do not implicitly provision missing dependencies or poll for readiness inside the action handler. It is the client's responsibility to ensure preconditions are met before calling the action. Examples: -- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via workspace events. +- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events. - `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. - `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. 
@@ -232,11 +240,11 @@ All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any ### Task creation: resolve metadata before creating the actor -When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (project) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (repository) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. - Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. -- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against remote branches and the project's task index. -- The project actor already has the repo clone and task index. Do the git fetch + name resolution there. +- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index. +- The repository actor already has the task index and GitHub-backed default branch metadata. Resolve the branch name there without local git fetches. - Do not defer naming to a background provision workflow. Do not poll for names to become available. - The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. - Actor handle policy: @@ -245,8 +253,7 @@ When creating a task, all deterministic metadata (title, branch name) must be re - Use create semantics only on explicit provisioning/create paths where creating a new actor instance is intended. - `getOrCreate` is a last resort for create paths when an explicit create API is unavailable; never use it in read/command paths. 
- For long-lived cross-actor links (for example sandbox/session runtime access), persist actor identity (`actorId`) and keep a fallback lookup path by actor id. -- Docker dev: `compose.dev.yaml` mounts a named volume at `/root/.local/share/foundry/repos` to persist backend-managed git clones across restarts. Code must still work if this volume is not present (create directories as needed). -- RivetKit actor `c.state` is durable, but in Docker it is stored under `/root/.local/share/rivetkit`. If that path is not persisted, actor state-derived indexes (for example, in `project` actor state) can be lost after container recreation even when other data still exists. +- RivetKit actor `c.state` is durable, but in Docker it is stored under `/root/.local/share/rivetkit`. If that path is not persisted, actor state-derived indexes can be lost after container recreation even when other data still exists. - Workflow history divergence policy: - Production: never auto-delete actor state to resolve `HistoryDivergedError`; ship explicit workflow migrations (`ctx.removed(...)`, step compatibility). - Development: manual local state reset is allowed as an operator recovery path when migrations are not yet available. @@ -265,9 +272,9 @@ When creating a task, all deterministic metadata (title, branch name) must be re - Secrets (e.g. `OPENAI_API_KEY`, `GITHUB_TOKEN`/`GH_TOKEN`) must be provided via environment variables, never hardcoded in the repo. - `~/misc/env.txt` and `~/misc/the-foundry.env` contain the expected local OpenAI + GitHub OAuth/App config for dev. - For local GitHub webhook development, use the configured Smee proxy (`SMEE_URL`) to forward deliveries into `POST /v1/webhooks/github`. Check `.env` / `foundry/.env` if you need the current channel URL. - - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the workspace and that webhook delivery is enabled and healthy. 
Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. + - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the organization and that webhook delivery is enabled and healthy. Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. - Do not assume `gh auth token` is sufficient for Foundry task provisioning against private repos. Sandbox/bootstrap git clone, push, and PR flows require a repo-capable `GITHUB_TOKEN`/`GH_TOKEN` in the backend container. - - Preferred product behavior for org workspaces is to mint a GitHub App installation token from the workspace installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. + - Preferred product behavior for organizations is to mint a GitHub App installation token from the organization installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. - Treat client E2E tests in `packages/client/test` as the primary end-to-end source of truth for product behavior. - Keep backend tests small and targeted. Only retain backend-only tests for invariants or persistence rules that are not well-covered through client E2E. - Do not keep large browser E2E suites around in a broken state. If a frontend browser E2E is not maintained and producing signal, remove it until it can be replaced with a reliable test. diff --git a/foundry/README.md b/foundry/README.md index f65d93e..47501ef 100644 --- a/foundry/README.md +++ b/foundry/README.md @@ -1,6 +1,6 @@ # Foundry -TypeScript workspace task system powered by RivetKit actors, SQLite/Drizzle state, and OpenTUI. +TypeScript organization task system powered by RivetKit actors, SQLite/Drizzle state, and OpenTUI. 
**Documentation**: see `../docs/` in the repository root @@ -12,12 +12,12 @@ pnpm install pnpm -w build ``` -## Project Goals +## Goals - **Simple**: There's one screen. It has everything you need. You can use it blindfolded. - **Fast**: No waiting around. - **Collaborative**: Built for fast moving teams that need code reviewed & shipped fast. -- **Pluggable**: Works for small side projects to enterprise teams. +- **Pluggable**: Works for small side projects to enterprise teams. ## License diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml index d425654..b96805e 100644 --- a/foundry/compose.dev.yaml +++ b/foundry/compose.dev.yaml @@ -68,8 +68,6 @@ services: - "foundry_backend_persist_rivet_node_modules:/app/sdks/persist-rivet/node_modules" - "foundry_backend_typescript_node_modules:/app/sdks/typescript/node_modules" - "foundry_backend_pnpm_store:/root/.local/share/pnpm/store" - # Persist backend-managed local git clones across container restarts. - - "foundry_git_repos:/root/.local/share/foundry/repos" # Persist RivetKit local storage across container restarts. - "foundry_rivetkit_storage:/root/.local/share/foundry/rivetkit" @@ -90,7 +88,7 @@ services: # Ensure logs in .foundry/ persist on the host even if we change source mounts later. - "./.foundry:/app/foundry/.foundry" - "../../../task/rivet-checkout:/task/rivet-checkout:ro" - # Use Linux-native workspace dependencies inside the container instead of host node_modules. + # Use Linux-native monorepo dependencies inside the container instead of host node_modules.
- "foundry_node_modules:/app/node_modules" - "foundry_client_node_modules:/app/foundry/packages/client/node_modules" - "foundry_frontend_node_modules:/app/foundry/packages/frontend/node_modules" @@ -125,7 +123,6 @@ volumes: foundry_backend_persist_rivet_node_modules: {} foundry_backend_typescript_node_modules: {} foundry_backend_pnpm_store: {} - foundry_git_repos: {} foundry_rivetkit_storage: {} foundry_node_modules: {} foundry_client_node_modules: {} diff --git a/foundry/compose.preview.yaml b/foundry/compose.preview.yaml index 6213885..aa43b52 100644 --- a/foundry/compose.preview.yaml +++ b/foundry/compose.preview.yaml @@ -24,7 +24,6 @@ services: - "7841:7841" volumes: - "${HOME}/.codex:/root/.codex" - - "foundry_preview_git_repos:/root/.local/share/foundry/repos" - "foundry_preview_rivetkit_storage:/root/.local/share/foundry/rivetkit" frontend: @@ -38,5 +37,4 @@ services: - "4273:4273" volumes: - foundry_preview_git_repos: {} foundry_preview_rivetkit_storage: {} diff --git a/foundry/docker/backend.dev.Dockerfile b/foundry/docker/backend.dev.Dockerfile index 0182aa5..46177c3 100644 --- a/foundry/docker/backend.dev.Dockerfile +++ b/foundry/docker/backend.dev.Dockerfile @@ -2,7 +2,6 @@ FROM oven/bun:1.3 -ARG GIT_SPICE_VERSION=v0.23.0 ARG SANDBOX_AGENT_VERSION=0.3.0 RUN apt-get update \ @@ -18,20 +17,6 @@ RUN apt-get update \ RUN npm install -g pnpm@10.28.2 -RUN set -eux; \ - arch="$(dpkg --print-architecture)"; \ - case "$arch" in \ - amd64) spice_arch="x86_64" ;; \ - arm64) spice_arch="aarch64" ;; \ - *) echo "Unsupported architecture for git-spice: $arch" >&2; exit 1 ;; \ - esac; \ - tmpdir="$(mktemp -d)"; \ - curl -fsSL "https://github.com/abhinav/git-spice/releases/download/${GIT_SPICE_VERSION}/git-spice.Linux-${spice_arch}.tar.gz" -o "${tmpdir}/git-spice.tgz"; \ - tar -xzf "${tmpdir}/git-spice.tgz" -C "${tmpdir}"; \ - install -m 0755 "${tmpdir}/gs" /usr/local/bin/gs; \ - ln -sf /usr/local/bin/gs /usr/local/bin/git-spice; \ - rm -rf "${tmpdir}" - RUN curl -fsSL 
"https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh ENV PATH="/root/.local/bin:${PATH}" diff --git a/foundry/docker/backend.preview.Dockerfile b/foundry/docker/backend.preview.Dockerfile index 8c30ae0..00774f2 100644 --- a/foundry/docker/backend.preview.Dockerfile +++ b/foundry/docker/backend.preview.Dockerfile @@ -2,7 +2,6 @@ FROM oven/bun:1.3 -ARG GIT_SPICE_VERSION=v0.23.0 ARG SANDBOX_AGENT_VERSION=0.3.0 RUN apt-get update \ @@ -17,20 +16,6 @@ RUN apt-get update \ && npm install -g pnpm@10.28.2 \ && rm -rf /var/lib/apt/lists/* -RUN set -eux; \ - arch="$(dpkg --print-architecture)"; \ - case "$arch" in \ - amd64) spice_arch="x86_64" ;; \ - arm64) spice_arch="aarch64" ;; \ - *) echo "Unsupported architecture for git-spice: $arch" >&2; exit 1 ;; \ - esac; \ - tmpdir="$(mktemp -d)"; \ - curl -fsSL "https://github.com/abhinav/git-spice/releases/download/${GIT_SPICE_VERSION}/git-spice.Linux-${spice_arch}.tar.gz" -o "${tmpdir}/git-spice.tgz"; \ - tar -xzf "${tmpdir}/git-spice.tgz" -C "${tmpdir}"; \ - install -m 0755 "${tmpdir}/gs" /usr/local/bin/gs; \ - ln -sf /usr/local/bin/gs /usr/local/bin/git-spice; \ - rm -rf "${tmpdir}" - RUN curl -fsSL "https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh ENV PATH="/root/.local/bin:${PATH}" diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md index 949db90..aef6cfd 100644 --- a/foundry/packages/backend/CLAUDE.md +++ b/foundry/packages/backend/CLAUDE.md @@ -5,30 +5,29 @@ Keep the backend actor tree aligned with this shape unless we explicitly decide to change it: ```text -WorkspaceActor -├─ HistoryActor(workspace-scoped global feed) -├─ ProjectActor(repo) -│ ├─ ProjectBranchSyncActor -│ ├─ ProjectPrSyncActor +OrganizationActor +├─ HistoryActor(organization-scoped global feed) +├─ GithubDataActor +├─ RepositoryActor(repo) │ └─ TaskActor(task) │ ├─ TaskSessionActor(session) × N │ │ └─ SessionStatusSyncActor(session) × 0..1 │ └─ Task-local 
workbench state -└─ SandboxInstanceActor(providerId, sandboxId) × N +└─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N ``` ## Ownership Rules -- `WorkspaceActor` is the workspace coordinator and lookup/index owner. -- `HistoryActor` is workspace-scoped. There is one workspace-level history feed. -- `ProjectActor` is the repo coordinator and owns repo-local caches/indexes. +- `OrganizationActor` is the organization coordinator and lookup/index owner. +- `HistoryActor` is organization-scoped. There is one organization-level history feed. +- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes. - `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized. - `TaskActor` can have many sessions. - `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time. - Session unread state and draft prompts are backend-owned workbench state, not frontend-local state. - Branch rename is a real git operation, not just metadata. - `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. -- Sync actors are polling workers only. They feed parent actors and should not become the source of truth. +- The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. - When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. 
## Maintenance diff --git a/foundry/packages/backend/src/actors/events.ts b/foundry/packages/backend/src/actors/events.ts index 8872dfa..4a514ad 100644 --- a/foundry/packages/backend/src/actors/events.ts +++ b/foundry/packages/backend/src/actors/events.ts @@ -1,51 +1,51 @@ -import type { TaskStatus, ProviderId } from "@sandbox-agent/foundry-shared"; +import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared"; export interface TaskCreatedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; branchName: string; title: string; } export interface TaskStatusEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; status: TaskStatus; message: string; } -export interface ProjectSnapshotEvent { - workspaceId: string; +export interface RepositorySnapshotEvent { + organizationId: string; repoId: string; updatedAt: number; } export interface AgentStartedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface AgentIdleEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface AgentErrorEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; message: string; } export interface PrCreatedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -53,7 +53,7 @@ export interface PrCreatedEvent { } export interface PrClosedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -61,7 +61,7 @@ export interface PrClosedEvent { } export interface PrReviewEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -70,7 +70,7 @@ export interface PrReviewEvent { } export interface CiStatusChangedEvent { - 
workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -81,7 +81,7 @@ export type TaskStepName = "auto_commit" | "push" | "pr_submit"; export type TaskStepStatus = "started" | "completed" | "skipped" | "failed"; export interface TaskStepEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; step: TaskStepName; @@ -90,23 +90,15 @@ export interface TaskStepEvent { } export interface BranchSwitchedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; branchName: string; } export interface SessionAttachedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } - -export interface BranchSyncedEvent { - workspaceId: string; - repoId: string; - taskId: string; - branchName: string; - strategy: string; -} diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts index 528e8fa..87cc76f 100644 --- a/foundry/packages/backend/src/actors/github-data/db/migrations.ts +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -6,6 +6,18 @@ const journal = { tag: "0000_github_data", breakpoints: true, }, + { + idx: 1, + when: 1773810002000, + tag: "0001_default_branch", + breakpoints: true, + }, + { + idx: 2, + when: 1773810300000, + tag: "0002_github_branches", + breakpoints: true, + }, ], } as const; @@ -56,6 +68,16 @@ CREATE TABLE \`github_pull_requests\` ( \`is_draft\` integer NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE \`github_repositories\` ADD \`default_branch\` text NOT NULL DEFAULT 'main'; +`, + m0002: `CREATE TABLE \`github_branches\` ( + \`branch_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text NOT NULL, + \`commit_sha\` text NOT NULL, + \`updated_at\` integer NOT NULL +); `, } as const, }; diff --git 
a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts index 9527fc6..fe37863 100644 --- a/foundry/packages/backend/src/actors/github-data/db/schema.ts +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -16,6 +16,15 @@ export const githubRepositories = sqliteTable("github_repositories", { fullName: text("full_name").notNull(), cloneUrl: text("clone_url").notNull(), private: integer("private").notNull(), + defaultBranch: text("default_branch").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubBranches = sqliteTable("github_branches", { + branchId: text("branch_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + branchName: text("branch_name").notNull(), + commitSha: text("commit_sha").notNull(), updatedAt: integer("updated_at").notNull(), }); diff --git a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts index 6b7d1f8..accfb5d 100644 --- a/foundry/packages/backend/src/actors/github-data/index.ts +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -3,16 +3,16 @@ import { eq } from "drizzle-orm"; import { actor } from "rivetkit"; import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateWorkspace, getTask } from "../handles.js"; +import { getOrCreateOrganization, getTask } from "../handles.js"; import { repoIdFromRemote } from "../../services/repo.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; import { githubDataDb } from "./db/db.js"; -import { githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; +import { githubBranches, githubMembers, githubMeta, githubPullRequests, githubRepositories } from 
"./db/schema.js"; const META_ROW_ID = 1; interface GithubDataInput { - workspaceId: string; + organizationId: string; } interface GithubMemberRecord { @@ -28,6 +28,13 @@ interface GithubRepositoryRecord { fullName: string; cloneUrl: string; private: boolean; + defaultBranch: string; +} + +interface GithubBranchRecord { + repoId: string; + branchName: string; + commitSha: string; } interface GithubPullRequestRecord { @@ -156,21 +163,21 @@ async function writeMeta(c: any, patch: Partial [row.prId, row])); const afterById = new Map(afterRows.map((row) => [row.prId, row])); @@ -258,7 +282,7 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (!changed) { continue; } - await workspace.applyOpenPullRequestUpdate({ + await organization.applyOpenPullRequestUpdate({ pullRequest: pullRequestSummaryFromRow(row), }); await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); @@ -268,14 +292,14 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (afterById.has(prId)) { continue; } - await workspace.removeOpenPullRequest({ prId }); + await organization.removeOpenPullRequest({ prId }); await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); } } async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - const match = await workspace.findTaskForGithubBranch({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + const match = await organization.findTaskForGithubBranch({ repoId: row.repoId, branchName: row.headRefName, }); @@ -283,7 +307,7 @@ async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { return; } try { - const task = getTask(c, c.state.workspaceId, row.repoId, match.taskId); + const task = getTask(c, c.state.organizationId, row.repoId, match.taskId); await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }); } catch { // Best-effort only. 
Task summary refresh will still clear the PR state. @@ -391,6 +415,69 @@ async function resolvePullRequests( })); } +async function listRepositoryBranchesForContext( + context: Awaited>, + repository: GithubRepositoryRecord, +): Promise { + const { appShell } = getActorRuntimeContext(); + let branches: Array<{ name: string; commitSha: string }> = []; + + if (context.installationId != null) { + try { + branches = await appShell.github.listInstallationRepositoryBranches(context.installationId, repository.fullName); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + + if (branches.length === 0 && context.accessToken) { + branches = await appShell.github.listUserRepositoryBranches(context.accessToken, repository.fullName); + } + + const repoId = repoIdFromRemote(repository.cloneUrl); + return branches.map((branch) => ({ + repoId, + branchName: branch.name, + commitSha: branch.commitSha, + })); +} + +async function resolveBranches( + _c: any, + context: Awaited>, + repositories: GithubRepositoryRecord[], +): Promise { + return (await Promise.all(repositories.map((repository) => listRepositoryBranchesForContext(context, repository)))).flat(); +} + +async function refreshRepositoryBranches( + c: any, + context: Awaited>, + repository: GithubRepositoryRecord, + updatedAt: number, +): Promise { + const nextBranches = await listRepositoryBranchesForContext(context, repository); + await c.db + .delete(githubBranches) + .where(eq(githubBranches.repoId, repoIdFromRemote(repository.cloneUrl))) + .run(); + + for (const branch of nextBranches) { + await c.db + .insert(githubBranches) + .values({ + branchId: `${branch.repoId}:${branch.branchName}`, + repoId: branch.repoId, + branchName: branch.branchName, + commitSha: branch.commitSha, + updatedAt, + }) + .run(); + } +} + async function readAllPullRequestRows(c: any) { return await c.db.select().from(githubPullRequests).all(); } @@ -409,15 +496,17 @@ async function runFullSync(c: any, input: 
FullSyncInput = {}) { }); const repositories = await resolveRepositories(c, context); + const branches = await resolveBranches(c, context, repositories); const members = await resolveMembers(c, context); const pullRequests = await resolvePullRequests(c, context, repositories); await replaceRepositories(c, repositories, startedAt); + await replaceBranches(c, branches, startedAt); await replaceMembers(c, members, startedAt); await replacePullRequests(c, pullRequests); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubDataProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubDataProjection({ connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, @@ -455,16 +544,18 @@ export const githubData = actor({ actionTimeout: 5 * 60_000, }, createState: (_c, input: GithubDataInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, }), actions: { async getSummary(c) { const repositories = await c.db.select().from(githubRepositories).all(); + const branches = await c.db.select().from(githubBranches).all(); const members = await c.db.select().from(githubMembers).all(); const pullRequests = await c.db.select().from(githubPullRequests).all(); return { ...(await readMeta(c)), repositoryCount: repositories.length, + branchCount: branches.length, memberCount: members.length, pullRequestCount: pullRequests.length, }; @@ -477,14 +568,39 @@ export const githubData = actor({ fullName: row.fullName, cloneUrl: row.cloneUrl, private: Boolean(row.private), + defaultBranch: row.defaultBranch, })); }, + async getRepository(c, input: { repoId: string }) { + const row = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!row) { + return null; + } + return { + repoId: row.repoId, + fullName: row.fullName, + cloneUrl: 
row.cloneUrl, + private: Boolean(row.private), + defaultBranch: row.defaultBranch, + }; + }, + async listPullRequestsForRepository(c, input: { repoId: string }) { const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); return rows.map(pullRequestSummaryFromRow); }, + async listBranchesForRepository(c, input: { repoId: string }) { + const rows = await c.db.select().from(githubBranches).where(eq(githubBranches.repoId, input.repoId)).all(); + return rows + .map((row) => ({ + branchName: row.branchName, + commitSha: row.commitSha, + })) + .sort((left, right) => left.branchName.localeCompare(right.branchName)); + }, + async listOpenPullRequests(c) { const rows = await c.db.select().from(githubPullRequests).all(); return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs); @@ -539,6 +655,7 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private ? 1 : 0, + defaultBranch: repository.defaultBranch, updatedAt, }) .onConflictDoUpdate({ @@ -547,13 +664,25 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, updatedAt, }, }) .run(); + await refreshRepositoryBranches( + c, + context, + { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }, + updatedAt, + ); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubRepositoryProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubRepositoryProjection({ repoId: input.repoId, remoteUrl: repository.cloneUrl, }); @@ -562,6 +691,7 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private, + defaultBranch: repository.defaultBranch, }; }, @@ -656,6 +786,7 @@ export const githubData = actor({ async clearState(c, input: ClearStateInput) { const beforeRows = await readAllPullRequestRows(c); await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubBranches).run(); await c.db.delete(githubRepositories).run(); await c.db.delete(githubMembers).run(); await writeMeta(c, { @@ -667,8 +798,8 @@ export const githubData = actor({ lastSyncAt: null, }); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubDataProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubDataProjection({ connectedAccount: input.connectedAccount, installationStatus: input.installationStatus, installationId: input.installationId, @@ -683,6 +814,7 @@ export const githubData = actor({ async handlePullRequestWebhook(c, input: PullRequestWebhookInput) { const beforeRows = await readAllPullRequestRows(c); const repoId = repoIdFromRemote(input.repository.cloneUrl); + const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); const updatedAt = Date.now(); const 
state = normalizePrStatus(input.pullRequest); const prId = `${repoId}#${input.pullRequest.number}`; @@ -694,6 +826,7 @@ export const githubData = actor({ fullName: input.repository.fullName, cloneUrl: input.repository.cloneUrl, private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", updatedAt, }) .onConflictDoUpdate({ @@ -702,6 +835,7 @@ export const githubData = actor({ fullName: input.repository.fullName, cloneUrl: input.repository.cloneUrl, private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", updatedAt, }, }) @@ -753,8 +887,8 @@ export const githubData = actor({ lastSyncAt: updatedAt, }); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubRepositoryProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubRepositoryProjection({ repoId, remoteUrl: input.repository.cloneUrl, }); diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index b488bf7..bd17fb0 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,12 +1,12 @@ -import { authUserKey, githubDataKey, taskKey, historyKey, projectBranchSyncKey, projectKey, taskSandboxKey, workspaceKey } from "./keys.js"; +import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); } -export async function getOrCreateWorkspace(c: any, workspaceId: string) { - return await actorClient(c).workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, +export async function getOrCreateOrganization(c: any, organizationId: string) { + return await 
actorClient(c).organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); } @@ -20,76 +20,61 @@ export function getAuthUser(c: any, userId: string) { return actorClient(c).authUser.get(authUserKey(userId)); } -export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) { - return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), { +export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) { + return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), { createWithInput: { - workspaceId, + organizationId, repoId, remoteUrl, }, }); } -export function getProject(c: any, workspaceId: string, repoId: string) { - return actorClient(c).project.get(projectKey(workspaceId, repoId)); +export function getRepository(c: any, organizationId: string, repoId: string) { + return actorClient(c).repository.get(repositoryKey(organizationId, repoId)); } -export function getTask(c: any, workspaceId: string, repoId: string, taskId: string) { - return actorClient(c).task.get(taskKey(workspaceId, repoId, taskId)); +export function getTask(c: any, organizationId: string, repoId: string, taskId: string) { + return actorClient(c).task.get(taskKey(organizationId, repoId, taskId)); } -export async function getOrCreateTask(c: any, workspaceId: string, repoId: string, taskId: string, createWithInput: Record) { - return await actorClient(c).task.getOrCreate(taskKey(workspaceId, repoId, taskId), { +export async function getOrCreateTask(c: any, organizationId: string, repoId: string, taskId: string, createWithInput: Record) { + return await actorClient(c).task.getOrCreate(taskKey(organizationId, repoId, taskId), { createWithInput, }); } -export async function getOrCreateHistory(c: any, workspaceId: string, repoId: string) { - return await actorClient(c).history.getOrCreate(historyKey(workspaceId, repoId), { 
+export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) { + return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), { createWithInput: { - workspaceId, + organizationId, repoId, }, }); } -export async function getOrCreateGithubData(c: any, workspaceId: string) { - return await actorClient(c).githubData.getOrCreate(githubDataKey(workspaceId), { +export async function getOrCreateGithubData(c: any, organizationId: string) { + return await actorClient(c).githubData.getOrCreate(githubDataKey(organizationId), { createWithInput: { - workspaceId, + organizationId, }, }); } -export function getGithubData(c: any, workspaceId: string) { - return actorClient(c).githubData.get(githubDataKey(workspaceId)); +export function getGithubData(c: any, organizationId: string) { + return actorClient(c).githubData.get(githubDataKey(organizationId)); } -export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) { - return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), { - createWithInput: { - workspaceId, - repoId, - repoPath, - intervalMs, - }, - }); +export function getTaskSandbox(c: any, organizationId: string, sandboxId: string) { + return actorClient(c).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); } -export function getTaskSandbox(c: any, workspaceId: string, sandboxId: string) { - return actorClient(c).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); -} - -export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandboxId: string, createWithInput?: Record) { - return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(workspaceId, sandboxId), { +export async function getOrCreateTaskSandbox(c: any, organizationId: string, sandboxId: string, createWithInput?: Record) { + return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(organizationId, 
sandboxId), { createWithInput, }); } -export function selfProjectBranchSync(c: any) { - return actorClient(c).projectBranchSync.getForId(c.actorId); -} - export function selfHistory(c: any) { return actorClient(c).history.getForId(c.actorId); } @@ -98,12 +83,12 @@ export function selfTask(c: any) { return actorClient(c).task.getForId(c.actorId); } -export function selfWorkspace(c: any) { - return actorClient(c).workspace.getForId(c.actorId); +export function selfOrganization(c: any) { + return actorClient(c).organization.getForId(c.actorId); } -export function selfProject(c: any) { - return actorClient(c).project.getForId(c.actorId); +export function selfRepository(c: any) { + return actorClient(c).repository.getForId(c.actorId); } export function selfAuthUser(c: any) { diff --git a/foundry/packages/backend/src/actors/history/index.ts b/foundry/packages/backend/src/actors/history/index.ts index d2caa12..fa1373b 100644 --- a/foundry/packages/backend/src/actors/history/index.ts +++ b/foundry/packages/backend/src/actors/history/index.ts @@ -8,7 +8,7 @@ import { historyDb } from "./db/db.js"; import { events } from "./db/schema.js"; export interface HistoryInput { - workspaceId: string; + organizationId: string; repoId: string; } @@ -70,7 +70,7 @@ export const history = actor({ icon: "database", }, createState: (_c, input: HistoryInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, repoId: input.repoId, }), actions: { @@ -106,7 +106,7 @@ export const history = actor({ return rows.map((row) => ({ ...row, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repoId: c.state.repoId, })); }, diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 4f67459..2f9e566 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -3,10 +3,9 @@ import { setup } from "rivetkit"; import { githubData } from 
"./github-data/index.js"; import { task } from "./task/index.js"; import { history } from "./history/index.js"; -import { projectBranchSync } from "./project-branch-sync/index.js"; -import { project } from "./project/index.js"; +import { repository } from "./repository/index.js"; import { taskSandbox } from "./sandbox/index.js"; -import { workspace } from "./workspace/index.js"; +import { organization } from "./organization/index.js"; import { logger } from "../logging.js"; const RUNNER_VERSION = Math.floor(Date.now() / 1000); @@ -23,13 +22,12 @@ export const registry = setup({ }, use: { authUser, - workspace, - project, + organization, + repository, task, taskSandbox, history, githubData, - projectBranchSync, }, }); @@ -40,7 +38,6 @@ export * from "./github-data/index.js"; export * from "./task/index.js"; export * from "./history/index.js"; export * from "./keys.js"; -export * from "./project-branch-sync/index.js"; -export * from "./project/index.js"; +export * from "./repository/index.js"; export * from "./sandbox/index.js"; -export * from "./workspace/index.js"; +export * from "./organization/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts b/foundry/packages/backend/src/actors/keys.ts index 1dfaa48..59e669e 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -1,33 +1,29 @@ export type ActorKey = string[]; -export function workspaceKey(workspaceId: string): ActorKey { - return ["ws", workspaceId]; +export function organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; } export function authUserKey(userId: string): ActorKey { - return ["ws", "app", "user", userId]; + return ["org", "app", "user", userId]; } -export function projectKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId]; +export function repositoryKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, 
"repository", repoId]; } -export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "task", taskId]; +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "task", taskId]; } -export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "sandbox", sandboxId]; +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "history"]; +export function historyKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "history"]; } -export function githubDataKey(workspaceId: string): ActorKey { - return ["ws", workspaceId, "github-data"]; -} - -export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "branch-sync"]; +export function githubDataKey(organizationId: string): ActorKey { + return ["org", organizationId, "github-data"]; } diff --git a/foundry/packages/backend/src/actors/logging.ts b/foundry/packages/backend/src/actors/logging.ts index 6a4616a..afc7d37 100644 --- a/foundry/packages/backend/src/actors/logging.ts +++ b/foundry/packages/backend/src/actors/logging.ts @@ -2,7 +2,11 @@ import { logger } from "../logging.js"; export function resolveErrorMessage(error: unknown): string { if (error instanceof Error) { - return error.message; + let msg = error.message; + if (error.cause) { + msg += ` [cause: ${resolveErrorMessage(error.cause)}]`; + } + return msg; } return String(error); } diff --git a/foundry/packages/backend/src/actors/workspace/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts 
similarity index 64% rename from foundry/packages/backend/src/actors/workspace/actions.ts rename to foundry/packages/backend/src/actors/organization/actions.ts index f4ee4db..d83e776 100644 --- a/foundry/packages/backend/src/actors/workspace/actions.ts +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -2,16 +2,13 @@ import { desc, eq } from "drizzle-orm"; import { Loop } from "rivetkit/workflow"; import type { - AddRepoInput, CreateTaskInput, HistoryEvent, HistoryQueryInput, ListTasksInput, - ProviderId, + SandboxProviderId, RepoOverview, RepoRecord, - RepoStackActionInput, - RepoStackActionResult, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult, SwitchResult, @@ -25,37 +22,33 @@ import type { TaskWorkbenchSelectInput, TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, WorkbenchOpenPrSummary, - WorkbenchRepoSummary, + WorkbenchRepositorySummary, WorkbenchSessionSummary, WorkbenchTaskSummary, - WorkspaceEvent, - WorkspaceSummarySnapshot, - WorkspaceUseInput, + OrganizationEvent, + OrganizationSummarySnapshot, + OrganizationUseInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js"; +import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateRepository, selfOrganization } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; -import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js"; -import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; -import { organizationProfile, taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js"; +import { 
defaultSandboxProviderId } from "../../sandbox-config.js"; +import { repoIdFromRemote } from "../../services/repo.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { organizationProfile, taskLookup, repos, taskSummaries } from "./db/schema.js"; import { agentTypeForModel } from "../task/workbench.js"; import { expectQueueResponse } from "../../services/queue.js"; -import { workspaceAppActions } from "./app-shell.js"; +import { organizationAppActions } from "./app-shell.js"; -interface WorkspaceState { - workspaceId: string; -} - -interface RefreshProviderProfilesCommand { - providerId?: ProviderId; +interface OrganizationState { + organizationId: string; } interface GetTaskInput { - workspaceId: string; + organizationId: string; taskId: string; } @@ -64,32 +57,30 @@ interface TaskProxyActionInput extends GetTaskInput { } interface RepoOverviewInput { - workspaceId: string; + organizationId: string; repoId: string; } -const WORKSPACE_QUEUE_NAMES = [ - "workspace.command.addRepo", - "workspace.command.createTask", - "workspace.command.refreshProviderProfiles", - "workspace.command.syncGithubOrganizationRepos", - "workspace.command.syncGithubSession", +const ORGANIZATION_QUEUE_NAMES = [ + "organization.command.createTask", + "organization.command.syncGithubOrganizationRepos", + "organization.command.syncGithubSession", ] as const; const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; -type WorkspaceQueueName = (typeof WORKSPACE_QUEUE_NAMES)[number]; +type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; -export { WORKSPACE_QUEUE_NAMES }; +export { ORGANIZATION_QUEUE_NAMES }; -export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQueueName { +export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { return name; } const ORGANIZATION_PROFILE_ROW_ID = "profile"; -function assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void { 
- if (workspaceId !== c.state.workspaceId) { - throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} command=${workspaceId}`); +function assertOrganization(c: { state: OrganizationState }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} command=${organizationId}`); } } @@ -135,12 +126,12 @@ async function collectAllTaskSummaries(c: any): Promise { const all: TaskSummary[] = []; for (const row of repoRows) { try { - const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl); - const snapshot = await project.listTaskSummaries({ includeArchived: true }); + const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); + const snapshot = await repository.listTaskSummaries({ includeArchived: true }); all.push(...snapshot); } catch (error) { - logActorWarning("workspace", "failed collecting tasks for repo", { - workspaceId: c.state.workspaceId, + logActorWarning("organization", "failed collecting tasks for repo", { + organizationId: c.state.organizationId, repoId: row.repoId, error: resolveErrorMessage(error), }); @@ -165,7 +156,7 @@ function repoLabelFromRemote(remoteUrl: string): string { return remoteUrl; } -function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepoSummary { +function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepositorySummary { const repoTasks = taskRows.filter((task) => task.repoId === repoRow.repoId); const latestActivityMs = repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), repoRow.updatedAt); @@ -206,14 +197,14 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary { } async function listOpenPullRequestsSnapshot(c: any, taskRows: 
WorkbenchTaskSummary[]): Promise { - const githubData = getGithubData(c, c.state.workspaceId); + const githubData = getGithubData(c, c.state.organizationId); const openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []); const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`)); return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`)); } -async function reconcileWorkbenchProjection(c: any): Promise { +async function reconcileWorkbenchProjection(c: any): Promise { const repoRows = await c.db .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }) .from(repos) @@ -223,12 +214,12 @@ async function reconcileWorkbenchProjection(c: any): Promise right.updatedAtMs - left.updatedAtMs); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: taskRows, openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows), @@ -268,15 +259,15 @@ async function reconcileWorkbenchProjection(c: any): Promise { +async function getOrganizationSummarySnapshot(c: any): Promise { const repoRows = await c.db .select({ repoId: repos.repoId, @@ -290,7 +281,7 @@ async function getWorkspaceSummarySnapshot(c: any): Promise buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: summaries, openPullRequests: await listOpenPullRequestsSnapshot(c, summaries), @@ -304,61 +295,14 @@ async function broadcastRepoSummary( ): Promise { const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all(); const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow)); - 
c.broadcast("workspaceUpdated", { type, repo } satisfies WorkspaceEvent); -} - -async function addRepoMutation(c: any, input: AddRepoInput): Promise { - assertWorkspace(c, input.workspaceId); - - const remoteUrl = normalizeRemoteUrl(input.remoteUrl); - if (!remoteUrl) { - throw new Error("remoteUrl is required"); - } - - const { driver } = getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - await driver.git.validateRemote(remoteUrl, { githubToken: auth?.githubToken ?? null }); - - const repoId = repoIdFromRemote(remoteUrl); - const now = Date.now(); - const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, repoId)).get(); - - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - - await broadcastRepoSummary(c, existing ? "repoUpdated" : "repoAdded", { - repoId, - remoteUrl, - updatedAt: now, - }); - return { - workspaceId: c.state.workspaceId, - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }; + c.broadcast("organizationUpdated", { type, repo } satisfies OrganizationEvent); } async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const { config } = getActorRuntimeContext(); - const providerId = input.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = input.sandboxProviderId ?? defaultSandboxProviderId(config); const repoId = input.repoId; const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); @@ -367,27 +311,11 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - const body = command ?? 
{}; - const { config } = getActorRuntimeContext(); - const providerIds: ProviderId[] = body.providerId ? [body.providerId] : availableSandboxProviderIds(config); - - for (const providerId of providerIds) { - await c.db - .insert(providerProfiles) - .values({ - providerId, - profileJson: JSON.stringify({ providerId }), - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: providerProfiles.providerId, - set: { - profileJson: JSON.stringify({ providerId }), - updatedAt: Date.now(), - }, - }) - .run(); - } -} - -export async function runWorkspaceWorkflow(ctx: any): Promise { - await ctx.loop("workspace-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-workspace-command", { - names: [...WORKSPACE_QUEUE_NAMES], +export async function runOrganizationWorkflow(ctx: any): Promise { + await ctx.loop("organization-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-organization-command", { + names: [...ORGANIZATION_QUEUE_NAMES], completable: true, }); if (!msg) { @@ -458,19 +362,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { } try { - if (msg.name === "workspace.command.addRepo") { + if (msg.name === "organization.command.createTask") { const result = await loopCtx.step({ - name: "workspace-add-repo", - timeout: 60_000, - run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "workspace.command.createTask") { - const result = await loopCtx.step({ - name: "workspace-create-task", + name: "organization-create-task", timeout: 5 * 60_000, run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), }); @@ -478,17 +372,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "workspace.command.refreshProviderProfiles") { - await loopCtx.step("workspace-refresh-provider-profiles", async () => - 
refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand), - ); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "workspace.command.syncGithubSession") { + if (msg.name === "organization.command.syncGithubSession") { await loopCtx.step({ - name: "workspace-sync-github-session", + name: "organization-sync-github-session", timeout: 60_000, run: async () => { const { syncGithubOrganizations } = await import("./app-shell.js"); @@ -499,9 +385,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "workspace.command.syncGithubOrganizationRepos") { + if (msg.name === "organization.command.syncGithubOrganizationRepos") { await loopCtx.step({ - name: "workspace-sync-github-organization-repos", + name: "organization-sync-github-organization-repos", timeout: 60_000, run: async () => { const { syncGithubOrganizationRepos } = await import("./app-shell.js"); @@ -513,14 +399,12 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { } } catch (error) { const message = resolveErrorMessage(error); - logActorWarning("workspace", "workspace workflow command failed", { - workspaceId: loopCtx.state.workspaceId, + logActorWarning("organization", "organization workflow command failed", { queueName: msg.name, error: message, }); await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("workspace", "workspace workflow failed completing error response", { - workspaceId: loopCtx.state.workspaceId, + logActorWarning("organization", "organization workflow failed completing error response", { queueName: msg.name, error: resolveErrorMessage(completeError), }); @@ -531,25 +415,15 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { }); } -export const workspaceActions = { - ...workspaceAppActions, - async useWorkspace(c: any, input: WorkspaceUseInput): Promise<{ workspaceId: string }> { - 
assertWorkspace(c, input.workspaceId); - return { workspaceId: c.state.workspaceId }; +export const organizationActions = { + ...organizationAppActions, + async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> { + assertOrganization(c, input.organizationId); + return { organizationId: c.state.organizationId }; }, - async addRepo(c: any, input: AddRepoInput): Promise { - const self = selfWorkspace(c); - return expectQueueResponse( - await self.send(workspaceWorkflowQueueName("workspace.command.addRepo"), input, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listRepos(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); + async listRepos(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); const rows = await c.db .select({ @@ -563,7 +437,7 @@ export const workspaceActions = { .all(); return rows.map((row) => ({ - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repoId: row.repoId, remoteUrl: row.remoteUrl, createdAt: row.createdAt, @@ -572,9 +446,9 @@ export const workspaceActions = { }, async createTask(c: any, input: CreateTaskInput): Promise { - const self = selfWorkspace(c); + const self = selfOrganization(c); return expectQueueResponse( - await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, { + await self.send(organizationWorkflowQueueName("organization.command.createTask"), input, { wait: true, timeout: 10_000, }), @@ -582,9 +456,12 @@ export const workspaceActions = { }, async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const { driver } = getActorRuntimeContext(); - await driver.github.starRepository(SANDBOX_AGENT_REPO); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.starRepository(SANDBOX_AGENT_REPO, { + 
githubToken: auth?.githubToken ?? null, + }); return { repo: SANDBOX_AGENT_REPO, starredAt: Date.now(), @@ -594,7 +471,7 @@ export const workspaceActions = { /** * Called by task actors when their summary-level state changes. * This is the write path for the local materialized projection; clients read - * the projection via `getWorkspaceSummary`, but only task actors should push + * the projection via `getOrganizationSummary`, but only task actors should push * rows into it. */ async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise { @@ -606,12 +483,12 @@ export const workspaceActions = { set: taskSummaryRowFromSummary(input.taskSummary), }) .run(); - c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies OrganizationEvent); }, async removeTaskSummary(c: any, input: { taskId: string }): Promise { await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies OrganizationEvent); }, async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { @@ -626,13 +503,13 @@ export const workspaceActions = { for (const summary of matches) { try { - const task = getTask(c, c.state.workspaceId, input.repoId, summary.taskId); - await workspaceActions.applyTaskSummaryUpdate(c, { + const task = getTask(c, c.state.organizationId, input.repoId, summary.taskId); + await organizationActions.applyTaskSummaryUpdate(c, { taskSummary: await task.getTaskSummary({}), }); } catch (error) { - logActorWarning("workspace", "failed refreshing task summary for GitHub branch", { - workspaceId: 
c.state.workspaceId, + logActorWarning("organization", "failed refreshing task summary for GitHub branch", { + organizationId: c.state.organizationId, repoId: input.repoId, branchName: input.branchName, taskId: summary.taskId, @@ -647,11 +524,11 @@ export const workspaceActions = { if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) { return; } - c.broadcast("workspaceUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies OrganizationEvent); }, async removeOpenPullRequest(c: any, input: { prId: string }): Promise { - c.broadcast("workspaceUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies OrganizationEvent); }, async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise { @@ -728,7 +605,7 @@ export const workspaceActions = { continue; } await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run(); - c.broadcast("workspaceUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies OrganizationEvent); } const profile = await c.db @@ -756,13 +633,13 @@ export const workspaceActions = { async recordGithubWebhookReceipt( c: any, input: { - workspaceId: string; + organizationId: string; event: string; action?: string | null; receivedAt?: number; }, ): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const profile = await c.db .select({ id: organizationProfile.id }) @@ -783,20 +660,20 @@ export const workspaceActions = { .run(); }, - async getWorkspaceSummary(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); - return 
await getWorkspaceSummarySnapshot(c); + async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); + return await getOrganizationSummarySnapshot(c); }, - async reconcileWorkbenchState(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); + async reconcileWorkbenchState(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); return await reconcileWorkbenchProjection(c); }, - async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; tabId?: string }> { + async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { // Step 1: Create the task record (wait: true — local state mutations only). - const created = await workspaceActions.createTask(c, { - workspaceId: c.state.workspaceId, + const created = await organizationActions.createTask(c, { + organizationId: c.state.organizationId, repoId: input.repoId, task: input.task, ...(input.title ? { explicitTitle: input.title } : {}), @@ -807,7 +684,7 @@ export const workspaceActions = { // Step 2: Enqueue session creation + initial message (wait: false). // The task workflow creates the session record and sends the message in // the background. The client observes progress via push events on the - // task interest topic. + // task subscription topic. 
const task = await requireWorkbenchTask(c, created.taskId); await task.createWorkbenchSessionAndSend({ model: input.model, @@ -832,7 +709,7 @@ export const workspaceActions = { await task.renameWorkbenchBranch(input); }, - async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { + async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { const task = await requireWorkbenchTask(c, input.taskId); return await task.createWorkbenchSession({ ...(input.model ? { model: input.model } : {}) }); }, @@ -862,12 +739,12 @@ export const workspaceActions = { await task.sendWorkbenchMessage(input); }, - async stopWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise { + async stopWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { const task = await requireWorkbenchTask(c, input.taskId); await task.stopWorkbenchSession(input); }, - async closeWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise { + async closeWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { const task = await requireWorkbenchTask(c, input.taskId); await task.closeWorkbenchSession(input); }, @@ -883,23 +760,23 @@ export const workspaceActions = { }, async reloadGithubOrganization(c: any): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadOrganization({}); + await getOrCreateGithubData(c, c.state.organizationId).reloadOrganization({}); }, async reloadGithubPullRequests(c: any): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadAllPullRequests({}); + await getOrCreateGithubData(c, c.state.organizationId).reloadAllPullRequests({}); }, async reloadGithubRepository(c: any, input: { repoId: string }): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadRepository(input); + await getOrCreateGithubData(c, c.state.organizationId).reloadRepository(input); }, async 
reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadPullRequest(input); + await getOrCreateGithubData(c, c.state.organizationId).reloadPullRequest(input); }, async listTasks(c: any, input: ListTasksInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); if (input.repoId) { const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); @@ -907,67 +784,41 @@ export const workspaceActions = { throw new Error(`Unknown repo: ${input.repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl); - return await project.listTaskSummaries({ includeArchived: true }); + const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); + return await repository.listTaskSummaries({ includeArchived: true }); } return await collectAllTaskSummaries(c); }, async getRepoOverview(c: any, input: RepoOverviewInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); if (!repoRow) { throw new Error(`Unknown repo: ${input.repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl); - await project.ensure({ remoteUrl: repoRow.remoteUrl }); - return await project.getRepoOverview({}); - }, - - async runRepoStackAction(c: any, input: RepoStackActionInput): Promise { - assertWorkspace(c, input.workspaceId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const project = await getOrCreateProject(c, c.state.workspaceId, 
input.repoId, repoRow.remoteUrl); - await project.ensure({ remoteUrl: repoRow.remoteUrl }); - return await project.runRepoStackAction({ - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - }); + const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); + return await repository.getRepoOverview({}); }, async switchTask(c: any, taskId: string): Promise { const repoId = await resolveRepoId(c, taskId); - const h = getTask(c, c.state.workspaceId, repoId, taskId); + const h = getTask(c, c.state.organizationId, repoId, taskId); const record = await h.get(); const switched = await h.switch(); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, taskId, - providerId: record.providerId, + sandboxProviderId: record.sandboxProviderId, switchTarget: switched.switchTarget, }; }, - async refreshProviderProfiles(c: any, command?: RefreshProviderProfilesCommand): Promise { - const self = selfWorkspace(c); - await self.send(workspaceWorkflowQueueName("workspace.command.refreshProviderProfiles"), command ?? {}, { - wait: true, - timeout: 10_000, - }); - }, - async history(c: any, input: HistoryQueryInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const limit = input.limit ?? 
20; const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all(); @@ -976,7 +827,7 @@ export const workspaceActions = { for (const row of repoRows) { try { - const hist = await getOrCreateHistory(c, c.state.workspaceId, row.repoId); + const hist = await getOrCreateHistory(c, c.state.organizationId, row.repoId); const items = await hist.list({ branch: input.branch, taskId: input.taskId, @@ -984,8 +835,8 @@ export const workspaceActions = { }); allEvents.push(...items); } catch (error) { - logActorWarning("workspace", "history lookup failed for repo", { - workspaceId: c.state.workspaceId, + logActorWarning("organization", "history lookup failed for repo", { + organizationId: c.state.organizationId, repoId: row.repoId, error: resolveErrorMessage(error), }); @@ -997,7 +848,7 @@ export const workspaceActions = { }, async getTask(c: any, input: GetTaskInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); @@ -1006,49 +857,49 @@ export const workspaceActions = { throw new Error(`Unknown repo: ${repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, repoId, repoRow.remoteUrl); - return await project.getTaskEnriched({ taskId: input.taskId }); + const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, repoRow.remoteUrl); + return await repository.getTaskEnriched({ taskId: input.taskId }); }, async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); return await h.attach({ reason: input.reason }); }, async pushTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, 
input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.push({ reason: input.reason }); }, async syncTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.sync({ reason: input.reason }); }, async mergeTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.merge({ reason: input.reason }); }, async archiveTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.archive({ reason: input.reason }); }, async killTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.kill({ reason: input.reason }); }, }; diff --git a/foundry/packages/backend/src/actors/workspace/app-shell.ts b/foundry/packages/backend/src/actors/organization/app-shell.ts similarity index 
84% rename from foundry/packages/backend/src/actors/workspace/app-shell.ts rename to foundry/packages/backend/src/actors/organization/app-shell.ts index d9a5dfa..20febfd 100644 --- a/foundry/packages/backend/src/actors/workspace/app-shell.ts +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -10,7 +10,7 @@ import type { UpdateFoundryOrganizationProfileInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateGithubData, getOrCreateWorkspace, selfWorkspace } from "../handles.js"; +import { getOrCreateGithubData, getOrCreateOrganization, selfOrganization } from "../handles.js"; import { GitHubAppError } from "../../services/app-github.js"; import { getBetterAuthService } from "../../services/better-auth.js"; import { repoIdFromRemote, repoLabelFromRemote } from "../../services/repo.js"; @@ -28,13 +28,13 @@ import { stripeLookup, } from "./db/schema.js"; -export const APP_SHELL_WORKSPACE_ID = "app"; +export const APP_SHELL_ORGANIZATION_ID = "app"; // ── Better Auth adapter where-clause helpers ── // These convert the adapter's `{ field, value, operator }` clause arrays into -// Drizzle predicates for workspace-level auth index / verification tables. +// Drizzle predicates for organization-level auth index / verification tables. 
-function workspaceAuthColumn(table: any, field: string): any { +function organizationAuthColumn(table: any, field: string): any { const column = table[field]; if (!column) { throw new Error(`Unknown auth table field: ${field}`); @@ -52,8 +52,8 @@ function normalizeAuthValue(value: unknown): unknown { return value; } -function workspaceAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { - const column = workspaceAuthColumn(table, clause.field); +function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { + const column = organizationAuthColumn(table, clause.field); const value = normalizeAuthValue(clause.value); switch (clause.operator) { case "ne": @@ -82,13 +82,13 @@ function workspaceAuthClause(table: any, clause: { field: string; value: unknown } } -function workspaceAuthWhere(table: any, clauses: any[] | undefined): any { +function organizationAuthWhere(table: any, clauses: any[] | undefined): any { if (!clauses || clauses.length === 0) { return undefined; } - let expr = workspaceAuthClause(table, clauses[0]); + let expr = organizationAuthClause(table, clauses[0]); for (const clause of clauses.slice(1)) { - const next = workspaceAuthClause(table, clause); + const next = organizationAuthClause(table, clause); expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); } return expr; @@ -104,15 +104,15 @@ function roundDurationMs(start: number): number { return Math.round((performance.now() - start) * 100) / 100; } -function assertAppWorkspace(c: any): void { - if (c.state.workspaceId !== APP_SHELL_WORKSPACE_ID) { - throw new Error(`App shell action requires workspace ${APP_SHELL_WORKSPACE_ID}, got ${c.state.workspaceId}`); +function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); } } -function assertOrganizationWorkspace(c: any): void { - if (c.state.workspaceId === APP_SHELL_WORKSPACE_ID) { - throw new Error("Organization action cannot run on the reserved app workspace"); +function assertOrganizationShell(c: any): void { + if (c.state.organizationId === APP_SHELL_ORGANIZATION_ID) { + throw new Error("Organization action cannot run on the reserved app organization"); } } @@ -124,12 +124,12 @@ function slugify(value: string): string { .replace(/^-+|-+$/g, ""); } -function personalWorkspaceId(login: string): string { +function personalOrganizationId(login: string): string { return `personal-${slugify(login)}`; } -function organizationWorkspaceId(kind: FoundryOrganization["kind"], login: string): string { - return kind === "personal" ? personalWorkspaceId(login) : slugify(login); +function organizationOrganizationId(kind: FoundryOrganization["kind"], login: string): string { + return kind === "personal" ? 
personalOrganizationId(login) : slugify(login); } function hasRepoScope(scopes: string[]): boolean { @@ -217,12 +217,12 @@ function stripeWebhookSubscription(event: any) { }; } -async function getOrganizationState(workspace: any) { - return await workspace.getOrganizationShellState({}); +async function getOrganizationState(organization: any) { + return await organization.getOrganizationShellState({}); } -async function getOrganizationStateIfInitialized(workspace: any) { - return await workspace.getOrganizationShellStateIfInitialized({}); +async function getOrganizationStateIfInitialized(organization: any) { + return await organization.getOrganizationShellStateIfInitialized({}); } async function listSnapshotOrganizations(c: any, sessionId: string, organizationIds: string[]) { @@ -230,13 +230,13 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization organizationIds.map(async (organizationId) => { const organizationStartedAt = performance.now(); try { - const workspace = await getOrCreateWorkspace(c, organizationId); - const organizationState = await getOrganizationStateIfInitialized(workspace); + const organization = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationStateIfInitialized(organization); if (!organizationState) { logger.warn( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -247,7 +247,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.info( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -260,7 +260,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.error( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, 
organizationId, durationMs: roundDurationMs(organizationStartedAt), errorMessage: message, @@ -273,7 +273,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.info( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -291,7 +291,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization } async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const startedAt = performance.now(); const auth = getBetterAuthService(); let authState = await auth.getAuthState(sessionId); @@ -318,7 +318,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, eligibleOrganizationCount: eligibleOrganizationIds.length, eligibleOrganizationIds, }, @@ -333,7 +333,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, organizationIds: uninitializedOrganizationIds, }, "build_app_snapshot_repairing_organizations", @@ -344,7 +344,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.warn( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, organizationIds: uninitializedOrganizationIds, }, "build_app_snapshot_repair_skipped_no_access_token", @@ -393,7 +393,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, eligibleOrganizationCount: eligibleOrganizationIds.length, organizationCount: organizations.length, durationMs: 
roundDurationMs(startedAt), @@ -439,7 +439,7 @@ function requireEligibleOrganization(session: any, organizationId: string): void } async function upsertStripeLookupEntries(c: any, organizationId: string, customerId: string | null, subscriptionId: string | null): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); for (const lookupKey of [customerId ? `customer:${customerId}` : null, subscriptionId ? `subscription:${subscriptionId}` : null]) { if (!lookupKey) { @@ -464,7 +464,7 @@ async function upsertStripeLookupEntries(c: any, organizationId: string, custome } async function findOrganizationIdForStripeEvent(c: any, customerId: string | null, subscriptionId: string | null): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const customerLookup = customerId ? await c.db .select({ organizationId: stripeLookup.organizationId }) @@ -511,7 +511,7 @@ async function safeListInstallations(accessToken: string): Promise { } /** - * Slow path: list GitHub orgs + installations, sync each org workspace, + * Slow path: list GitHub orgs + installations, sync each org organization, * and update the session's eligible organization list. Called from the * workflow queue so it runs in the background after the callback has * already returned a redirect to the browser. 
@@ -521,7 +521,7 @@ export async function syncGithubOrganizations(c: any, input: { sessionId: string } async function syncGithubOrganizationsInternal(c: any, input: { sessionId: string; accessToken: string }, options: { broadcast: boolean }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const auth = getBetterAuthService(); const { appShell } = getActorRuntimeContext(); const { sessionId, accessToken } = input; @@ -554,10 +554,10 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin ]; for (const account of accounts) { - const organizationId = organizationWorkspaceId(account.kind, account.githubLogin); + const organizationId = organizationOrganizationId(account.kind, account.githubLogin); const installation = installations.find((candidate) => candidate.accountLogin === account.githubLogin) ?? null; - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.syncOrganizationShellFromGithub({ + const organization = await getOrCreateOrganization(c, organizationId); + await organization.syncOrganizationShellFromGithub({ userId: githubUserId, userName: viewer.name || viewer.login, userEmail: viewer.email ?? 
`${viewer.login}@users.noreply.github.com`, @@ -597,22 +597,22 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin } export async function syncGithubOrganizationRepos(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); const githubData = await getOrCreateGithubData(c, input.organizationId); try { await githubData.fullSync({ accessToken: session.githubAccessToken, - connectedAccount: organization.snapshot.github.connectedAccount, - installationId: organization.githubInstallationId, - installationStatus: organization.snapshot.github.installationStatus, - githubLogin: organization.githubLogin, - kind: organization.snapshot.kind, + connectedAccount: organizationState.snapshot.github.connectedAccount, + installationId: organizationState.githubInstallationId, + installationStatus: organizationState.snapshot.github.installationStatus, + githubLogin: organizationState.githubLogin, + kind: organizationState.snapshot.kind, label: "Importing repository catalog...", }); @@ -625,8 +625,8 @@ export async function syncGithubOrganizationRepos(c: any, input: { sessionId: st const installationStatus = error instanceof GitHubAppError && (error.status === 403 || error.status === 404) ? "reconnect_required" - : organization.snapshot.github.installationStatus; - await workspace.markOrganizationSyncFailed({ + : organizationState.snapshot.github.installationStatus; + await organizationHandle.markOrganizationSyncFailed({ message: error instanceof Error ? 
error.message : "GitHub import failed", installationStatus, }); @@ -640,20 +640,20 @@ export async function syncGithubOrganizationRepos(c: any, input: { sessionId: st } async function readOrganizationProfileRow(c: any) { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, PROFILE_ROW_ID)).get(); } async function requireOrganizationProfileRow(c: any) { const row = await readOrganizationProfileRow(c); if (!row) { - throw new Error(`Organization profile is not initialized for workspace ${c.state.workspaceId}`); + throw new Error(`Organization profile is not initialized for organization ${c.state.organizationId}`); } return row; } async function listOrganizationMembers(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select().from(organizationMembers).orderBy(organizationMembers.role, organizationMembers.name).all(); return rows.map((row) => ({ id: row.id, @@ -665,13 +665,13 @@ async function listOrganizationMembers(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select({ email: seatAssignments.email }).from(seatAssignments).orderBy(seatAssignments.email).all(); return rows.map((row) => row.email); } async function listOrganizationInvoices(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select().from(invoices).orderBy(desc(invoices.issuedAt), desc(invoices.createdAt)).all(); return rows.map((row) => ({ id: row.id, @@ -683,7 +683,7 @@ async function listOrganizationInvoices(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); return rows.map((row) => repoLabelFromRemote(row.remoteUrl)).sort((left, right) => left.localeCompare(right)); } @@ -710,8 +710,8 @@ 
async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number const invoiceRows = await listOrganizationInvoices(c); const state = { - id: c.state.workspaceId, - workspaceId: c.state.workspaceId, + id: c.state.organizationId, + organizationId: c.state.organizationId, kind: row.kind, githubLogin: row.githubLogin, githubInstallationId: row.githubInstallationId ?? null, @@ -720,8 +720,8 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number stripePriceId: row.stripePriceId ?? null, billingPlanId: row.billingPlanId, snapshot: { - id: c.state.workspaceId, - workspaceId: c.state.workspaceId, + id: c.state.organizationId, + organizationId: c.state.organizationId, kind: row.kind, settings: { displayName: row.displayName, @@ -759,7 +759,7 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number logger.info( { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, githubLogin: row.githubLogin, repoCount: repoCatalog.length, memberCount: members.length, @@ -774,7 +774,7 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number } async function applySubscriptionState( - workspace: any, + organization: any, subscription: { id: string; customerId: string; @@ -787,15 +787,15 @@ async function applySubscriptionState( }, fallbackPlanId: FoundryBillingPlanId, ): Promise { - await workspace.applyOrganizationStripeSubscription({ + await organization.applyOrganizationStripeSubscription({ subscription, fallbackPlanId, }); } -export const workspaceAppActions = { +export const organizationAppActions = { async authFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const clauses = [ ...(input.sessionId ? 
[{ field: "sessionId", value: input.sessionId }] : []), @@ -804,12 +804,12 @@ export const workspaceAppActions = { if (clauses.length === 0) { return null; } - const predicate = workspaceAuthWhere(authSessionIndex, clauses); + const predicate = organizationAuthWhere(authSessionIndex, clauses); return await c.db.select().from(authSessionIndex).where(predicate!).get(); }, async authUpsertSessionIndex(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -834,7 +834,7 @@ export const workspaceAppActions = { }, async authDeleteSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const clauses = [ ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), @@ -843,18 +843,18 @@ export const workspaceAppActions = { if (clauses.length === 0) { return; } - const predicate = workspaceAuthWhere(authSessionIndex, clauses); + const predicate = organizationAuthWhere(authSessionIndex, clauses); await c.db.delete(authSessionIndex).where(predicate!).run(); }, async authFindEmailIndex(c: any, input: { email: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); }, async authUpsertEmailIndex(c: any, input: { email: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -876,13 +876,13 @@ export const workspaceAppActions = { }, async authDeleteEmailIndex(c: any, input: { email: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); }, async authFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); if (input.id) { return 
await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); @@ -898,7 +898,7 @@ export const workspaceAppActions = { }, async authUpsertAccountIndex(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -924,7 +924,7 @@ export const workspaceAppActions = { }, async authDeleteAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); if (input.id) { await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); @@ -939,7 +939,7 @@ export const workspaceAppActions = { }, async authCreateVerification(c: any, input: { data: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); await c.db .insert(authVerification) @@ -953,22 +953,22 @@ export const workspaceAppActions = { }, async authFindOneVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; }, async authFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); let query = c.db.select().from(authVerification); if (predicate) { query = query.where(predicate); } if (input.sortBy?.field) { - const column = workspaceAuthColumn(authVerification, input.sortBy.field); + const column = organizationAuthColumn(authVerification, input.sortBy.field); query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); } if (typeof input.limit === "number") { @@ -981,9 +981,9 @@ export const workspaceAppActions = { }, async authUpdateVerification(c: any, input: { where: any[]; update: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return null; } @@ -996,9 +996,9 @@ export const workspaceAppActions = { }, async authUpdateManyVerification(c: any, input: { where: any[]; update: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return 0; } @@ -1012,9 +1012,9 @@ export const workspaceAppActions = { }, async authDeleteVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return; } @@ -1022,9 +1022,9 @@ export const workspaceAppActions = { }, async authDeleteManyVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return 0; } @@ -1034,9 +1034,9 @@ export const workspaceAppActions = { }, async authCountVerification(c: any, input: { where?: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); const row = predicate ? 
await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() : await c.db.select({ value: sqlCount() }).from(authVerification).get(); @@ -1051,7 +1051,7 @@ export const workspaceAppActions = { c: any, input: { organizationId: string; requireRepoScope?: boolean }, ): Promise<{ accessToken: string; scopes: string[] } | null> { - assertAppWorkspace(c); + assertAppOrganization(c); const auth = getBetterAuthService(); const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); @@ -1081,7 +1081,7 @@ export const workspaceAppActions = { }, async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); await getBetterAuthService().upsertUserProfile(session.authUserId, { starterRepoStatus: "skipped", @@ -1092,12 +1092,12 @@ export const workspaceAppActions = { }, async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - await workspace.starSandboxAgentRepo({ - workspaceId: input.organizationId, + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.starSandboxAgentRepo({ + organizationId: input.organizationId, }); await getBetterAuthService().upsertUserProfile(session.authUserId, { starterRepoStatus: "starred", @@ -1108,22 +1108,22 @@ export const workspaceAppActions = { }, async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, 
input.organizationId); await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.github.syncStatus !== "synced") { - if (organization.snapshot.github.syncStatus !== "syncing") { - await workspace.markOrganizationSyncStarted({ + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.github.syncStatus !== "synced") { + if (organizationState.snapshot.github.syncStatus !== "syncing") { + await organizationHandle.markOrganizationSyncStarted({ label: "Importing repository catalog...", }); - const self = selfWorkspace(c); + const self = selfOrganization(c); await self.send( - "workspace.command.syncGithubOrganizationRepos", + "organization.command.syncGithubOrganizationRepos", { sessionId: input.sessionId, organizationId: input.organizationId }, { wait: false, @@ -1140,11 +1140,11 @@ export const workspaceAppActions = { c: any, input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, ): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - await workspace.updateOrganizationShellProfile({ + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.updateOrganizationShellProfile({ displayName: input.displayName, slug: input.slug, primaryDomain: input.primaryDomain, @@ -1153,23 +1153,23 @@ export const workspaceAppActions = { }, async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + 
assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.github.syncStatus === "syncing") { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.github.syncStatus === "syncing") { return await buildAppSnapshot(c, input.sessionId); } - await workspace.markOrganizationSyncStarted({ + await organizationHandle.markOrganizationSyncStarted({ label: "Importing repository catalog...", }); - const self = selfWorkspace(c); + const self = selfOrganization(c); await self.send( - "workspace.command.syncGithubOrganizationRepos", + "organization.command.syncGithubOrganizationRepos", { sessionId: input.sessionId, organizationId: input.organizationId }, { wait: false, @@ -1180,32 +1180,32 @@ export const workspaceAppActions = { }, async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.kind !== "organization") { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.kind !== "organization") { return { - url: `${appShell.appUrl}/workspaces/${input.organizationId}`, + url: 
`${appShell.appUrl}/organizations/${input.organizationId}`, }; } return { - url: await appShell.github.buildInstallationUrl(organization.githubLogin, randomUUID()), + url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), }; }, async createAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); if (input.planId === "free") { - await workspace.applyOrganizationFreePlan({ clearSubscription: false }); + await organizationHandle.applyOrganizationFreePlan({ clearSubscription: false }); return { url: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }; @@ -1215,16 +1215,16 @@ export const workspaceAppActions = { throw new Error("Stripe is not configured"); } - let customerId = organization.stripeCustomerId; + let customerId = organizationState.stripeCustomerId; if (!customerId) { customerId = ( await appShell.stripe.createCustomer({ organizationId: input.organizationId, - displayName: organization.snapshot.settings.displayName, + displayName: organizationState.snapshot.settings.displayName, email: session.currentUserEmail, }) ).id; - await workspace.applyOrganizationStripeCustomer({ customerId }); + await organizationHandle.applyOrganizationStripeCustomer({ customerId }); await upsertStripeLookupEntries(c, input.organizationId, customerId, null); } @@ -1245,24 +1245,24 @@ export const workspaceAppActions = { }, async 
finalizeAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; checkoutSessionId: string }): Promise<{ redirectTo: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); const completion = await appShell.stripe.retrieveCheckoutCompletion(input.checkoutSessionId); if (completion.customerId) { - await workspace.applyOrganizationStripeCustomer({ customerId: completion.customerId }); + await organizationHandle.applyOrganizationStripeCustomer({ customerId: completion.customerId }); } await upsertStripeLookupEntries(c, input.organizationId, completion.customerId, completion.subscriptionId); if (completion.subscriptionId) { const subscription = await appShell.stripe.retrieveSubscription(completion.subscriptionId); - await applySubscriptionState(workspace, subscription, completion.planId ?? organization.billingPlanId); + await applySubscriptionState(organizationHandle, subscription, completion.planId ?? 
organizationState.billingPlanId); } if (completion.paymentMethodLabel) { - await workspace.setOrganizationBillingPaymentMethod({ + await organizationHandle.setOrganizationBillingPaymentMethod({ label: completion.paymentMethodLabel, }); } @@ -1273,73 +1273,73 @@ export const workspaceAppActions = { }, async createAppBillingPortalSession(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (!organization.stripeCustomerId) { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (!organizationState.stripeCustomerId) { throw new Error("Stripe customer is not available for this organization"); } const portal = await appShell.stripe.createPortalSession({ - customerId: organization.stripeCustomerId, + customerId: organizationState.stripeCustomerId, returnUrl: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }); return { url: portal.url }; }, async cancelAppScheduledRenewal(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await 
getOrganizationState(organizationHandle); - if (organization.stripeSubscriptionId && appShell.stripe.isConfigured()) { - const subscription = await appShell.stripe.updateSubscriptionCancellation(organization.stripeSubscriptionId, true); - await applySubscriptionState(workspace, subscription, organization.billingPlanId); - await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organization.stripeCustomerId, subscription.id); + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, true); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await workspace.setOrganizationBillingStatus({ status: "scheduled_cancel" }); + await organizationHandle.setOrganizationBillingStatus({ status: "scheduled_cancel" }); } return await buildAppSnapshot(c, input.sessionId); }, async resumeAppSubscription(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); - if (organization.stripeSubscriptionId && appShell.stripe.isConfigured()) { - const subscription = await appShell.stripe.updateSubscriptionCancellation(organization.stripeSubscriptionId, false); - await applySubscriptionState(workspace, 
subscription, organization.billingPlanId); - await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organization.stripeCustomerId, subscription.id); + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, false); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await workspace.setOrganizationBillingStatus({ status: "active" }); + await organizationHandle.setOrganizationBillingStatus({ status: "active" }); } return await buildAppSnapshot(c, input.sessionId); }, - async recordAppSeatUsage(c: any, input: { sessionId: string; workspaceId: string }): Promise { - assertAppWorkspace(c); + async recordAppSeatUsage(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.workspaceId); - const workspace = await getOrCreateWorkspace(c, input.workspaceId); - await workspace.recordOrganizationSeatUsage({ + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.recordOrganizationSeatUsage({ email: session.currentUserEmail, }); return await buildAppSnapshot(c, input.sessionId); }, async handleAppStripeWebhook(c: any, input: { payload: string; signatureHeader: string | null }): Promise<{ ok: true }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); const event = appShell.stripe.verifyWebhookEvent(input.payload, input.signatureHeader); @@ -1353,9 +1353,9 @@ export const workspaceAppActions = { typeof 
object.subscription === "string" ? object.subscription : null, )); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); + const organization = await getOrCreateOrganization(c, organizationId); if (typeof object.customer === "string") { - await workspace.applyOrganizationStripeCustomer({ customerId: object.customer }); + await organization.applyOrganizationStripeCustomer({ customerId: object.customer }); } await upsertStripeLookupEntries( c, @@ -1371,9 +1371,13 @@ export const workspaceAppActions = { const subscription = stripeWebhookSubscription(event); const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); - const organization = await getOrganizationState(workspace); - await applySubscriptionState(workspace, subscription, appShell.stripe.planIdForPriceId(subscription.priceId ?? "") ?? organization.billingPlanId); + const organizationHandle = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationState(organizationHandle); + await applySubscriptionState( + organizationHandle, + subscription, + appShell.stripe.planIdForPriceId(subscription.priceId ?? "") ?? 
organizationState.billingPlanId, + ); await upsertStripeLookupEntries(c, organizationId, subscription.customerId, subscription.id); } return { ok: true }; @@ -1383,8 +1387,8 @@ export const workspaceAppActions = { const subscription = stripeWebhookSubscription(event); const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.applyOrganizationFreePlan({ clearSubscription: true }); + const organization = await getOrCreateOrganization(c, organizationId); + await organization.applyOrganizationFreePlan({ clearSubscription: true }); } return { ok: true }; } @@ -1393,10 +1397,10 @@ export const workspaceAppActions = { const invoice = event.data.object as Record; const organizationId = await findOrganizationIdForStripeEvent(c, typeof invoice.customer === "string" ? invoice.customer : null, null); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); + const organization = await getOrCreateOrganization(c, organizationId); const rawAmount = typeof invoice.amount_paid === "number" ? invoice.amount_paid : invoice.amount_due; const amountUsd = Math.round((typeof rawAmount === "number" ? rawAmount : 0) / 100); - await workspace.upsertOrganizationInvoice({ + await organization.upsertOrganizationInvoice({ id: String(invoice.id), label: typeof invoice.number === "string" ? `Invoice ${invoice.number}` : "Stripe invoice", issuedAt: formatUnixDate(typeof invoice.created === "number" ? 
invoice.created : Math.floor(Date.now() / 1000)), @@ -1410,7 +1414,7 @@ export const workspaceAppActions = { }, async handleAppGithubWebhook(c: any, input: { payload: string; signatureHeader: string | null; eventHeader: string | null }): Promise<{ ok: true }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); const { event, body } = appShell.github.verifyWebhookEvent(input.payload, input.signatureHeader, input.eventHeader); @@ -1429,11 +1433,11 @@ export const workspaceAppActions = { } const kind: FoundryOrganization["kind"] = accountType === "User" ? "personal" : "organization"; - const organizationId = organizationWorkspaceId(kind, accountLogin); + const organizationId = organizationOrganizationId(kind, accountLogin); const receivedAt = Date.now(); - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.recordGithubWebhookReceipt({ - workspaceId: organizationId, + const organization = await getOrCreateOrganization(c, organizationId); + await organization.recordGithubWebhookReceipt({ + organizationId: organizationId, event, action: body.action ?? null, receivedAt, @@ -1556,6 +1560,13 @@ export const workspaceAppActions = { }, }); } + if ((event === "push" || event === "create" || event === "delete") && body.repository?.clone_url) { + const repoId = repoIdFromRemote(body.repository.clone_url); + const knownRepository = await githubData.getRepository({ repoId }); + if (knownRepository) { + await githubData.reloadRepository({ repoId }); + } + } } return { ok: true }; } @@ -1588,13 +1599,13 @@ export const workspaceAppActions = { appConfigured: boolean; }, ): Promise<{ organizationId: string }> { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); const existing = await readOrganizationProfileRow(c); const slug = existing?.slug ?? 
slugify(input.githubLogin); - const organizationId = organizationWorkspaceId(input.kind, input.githubLogin); - if (organizationId !== c.state.workspaceId) { - throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} github=${organizationId}`); + const organizationId = organizationOrganizationId(input.kind, input.githubLogin); + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); } const installationStatus = @@ -1698,17 +1709,17 @@ export const workspaceAppActions = { }, async getOrganizationShellState(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await buildOrganizationState(c); }, async getOrganizationShellStateIfInitialized(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await buildOrganizationStateIfInitialized(c); }, async updateOrganizationShellProfile(c: any, input: Pick): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const existing = await requireOrganizationProfileRow(c); await c.db .update(organizationProfile) @@ -1723,7 +1734,7 @@ export const workspaceAppActions = { }, async markOrganizationSyncStarted(c: any, input: { label: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1743,7 +1754,7 @@ export const workspaceAppActions = { lastSyncLabel: string; }, ): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); for (const repository of input.repositories) { const remoteUrl = repository.cloneUrl; @@ -1778,7 +1789,7 @@ export const workspaceAppActions = { }, async markOrganizationSyncFailed(c: any, input: { message: string; installationStatus: FoundryOrganization["github"]["installationStatus"] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db 
.update(organizationProfile) .set({ @@ -1792,7 +1803,7 @@ export const workspaceAppActions = { }, async applyOrganizationStripeCustomer(c: any, input: { customerId: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1819,7 +1830,7 @@ export const workspaceAppActions = { fallbackPlanId: FoundryBillingPlanId; }, ): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const { appShell } = getActorRuntimeContext(); const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? input.fallbackPlanId; await c.db @@ -1841,7 +1852,7 @@ export const workspaceAppActions = { }, async applyOrganizationFreePlan(c: any, input: { clearSubscription: boolean }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const patch: Record = { billingPlanId: "free", billingStatus: "active", @@ -1859,7 +1870,7 @@ export const workspaceAppActions = { }, async setOrganizationBillingPaymentMethod(c: any, input: { label: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1871,7 +1882,7 @@ export const workspaceAppActions = { }, async setOrganizationBillingStatus(c: any, input: { status: FoundryBillingState["status"] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1883,7 +1894,7 @@ export const workspaceAppActions = { }, async upsertOrganizationInvoice(c: any, input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .insert(invoices) .values({ @@ -1907,7 +1918,7 @@ export const workspaceAppActions = { }, async recordOrganizationSeatUsage(c: any, input: { email: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db 
.insert(seatAssignments) .values({ @@ -1919,7 +1930,7 @@ export const workspaceAppActions = { }, async applyGithubInstallationCreated(c: any, input: { installationId: number }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1932,7 +1943,7 @@ export const workspaceAppActions = { }, async applyGithubInstallationRemoved(c: any, _input: {}): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1947,7 +1958,7 @@ export const workspaceAppActions = { }, async applyGithubRepositoryChanges(c: any, input: { added: Array<{ fullName: string; private: boolean }>; removed: string[] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); for (const repo of input.added) { diff --git a/foundry/packages/backend/src/actors/project/db/db.ts b/foundry/packages/backend/src/actors/organization/db/db.ts similarity index 68% rename from foundry/packages/backend/src/actors/project/db/db.ts rename to foundry/packages/backend/src/actors/organization/db/db.ts index 49b5b72..f7eb392 100644 --- a/foundry/packages/backend/src/actors/project/db/db.ts +++ b/foundry/packages/backend/src/actors/organization/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const projectDb = db({ schema, migrations }); +export const organizationDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts new file mode 100644 index 0000000..eb43667 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/organization/db/drizzle", + schema: 
"./src/actors/organization/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql similarity index 94% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql rename to foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql index 7410e3b..09b77f9 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql @@ -69,12 +69,6 @@ CREATE TABLE `organization_profile` ( `updated_at` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE `provider_profiles` ( - `provider_id` text PRIMARY KEY NOT NULL, - `profile_json` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint CREATE TABLE `repos` ( `repo_id` text PRIMARY KEY NOT NULL, `remote_url` text NOT NULL, diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json similarity index 95% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json index 0ae9736..cdcc44c 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json @@ -457,37 +457,6 @@ "uniqueConstraints": {}, "checkConstraints": {} }, - "provider_profiles": { - "name": "provider_profiles", - "columns": { - "provider_id": { - "name": "provider_id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "profile_json": { - "name": "profile_json", - "type": "text", - "primaryKey": false, - "notNull": true, - 
"autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, "repos": { "name": "repos", "columns": { diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json similarity index 100% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json diff --git a/foundry/packages/backend/src/actors/workspace/db/migrations.ts b/foundry/packages/backend/src/actors/organization/db/migrations.ts similarity index 96% rename from foundry/packages/backend/src/actors/workspace/db/migrations.ts rename to foundry/packages/backend/src/actors/organization/db/migrations.ts index a86578d..b3e09f1 100644 --- a/foundry/packages/backend/src/actors/workspace/db/migrations.ts +++ b/foundry/packages/backend/src/actors/organization/db/migrations.ts @@ -22,6 +22,12 @@ const journal = { tag: "0002_task_summaries", breakpoints: true, }, + { + idx: 3, + when: 1773810001000, + tag: "0003_drop_provider_profiles", + breakpoints: true, + }, ], } as const; @@ -99,12 +105,6 @@ CREATE TABLE \`organization_profile\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE \`provider_profiles\` ( - \`provider_id\` text PRIMARY KEY NOT NULL, - \`profile_json\` text NOT NULL, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint CREATE TABLE \`repos\` ( \`repo_id\` text PRIMARY KEY NOT NULL, \`remote_url\` text NOT NULL, @@ -170,6 +170,8 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`pull_request_json\` text, \`sessions_summary_json\` text DEFAULT '[]' NOT NULL ); +`, + m0003: `DROP TABLE IF EXISTS \`provider_profiles\`; 
`, } as const, }; diff --git a/foundry/packages/backend/src/actors/workspace/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts similarity index 91% rename from foundry/packages/backend/src/actors/workspace/db/schema.ts rename to foundry/packages/backend/src/actors/organization/db/schema.ts index 6571b62..f1e069e 100644 --- a/foundry/packages/backend/src/actors/workspace/db/schema.ts +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -1,12 +1,6 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; -// SQLite is per workspace actor instance, so no workspaceId column needed. -export const providerProfiles = sqliteTable("provider_profiles", { - providerId: text("provider_id").notNull().primaryKey(), - // Structured by the provider profile snapshot returned by provider integrations. - profileJson: text("profile_json").notNull(), - updatedAt: integer("updated_at").notNull(), -}); +// SQLite is per organization actor instance, so no organizationId column needed. export const repos = sqliteTable("repos", { repoId: text("repo_id").notNull().primaryKey(), @@ -23,7 +17,7 @@ export const taskLookup = sqliteTable("task_lookup", { /** * Materialized sidebar projection maintained by task actors. * The source of truth still lives on each task actor; this table exists so - * workspace reads can stay local and avoid fan-out across child actors. + * organization reads can stay local and avoid fan-out across child actors. 
*/ export const taskSummaries = sqliteTable("task_summaries", { taskId: text("task_id").notNull().primaryKey(), diff --git a/foundry/packages/backend/src/actors/organization/index.ts b/foundry/packages/backend/src/actors/organization/index.ts new file mode 100644 index 0000000..1ea0196 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/index.ts @@ -0,0 +1,19 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import { organizationDb } from "./db/db.js"; +import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js"; + +export const organization = actor({ + db: organizationDb, + queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Organization", + icon: "compass", + actionTimeout: 5 * 60_000, + }, + createState: (_c, organizationId: string) => ({ + organizationId, + }), + actions: organizationActions, + run: workflow(runOrganizationWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/project-branch-sync/index.ts b/foundry/packages/backend/src/actors/project-branch-sync/index.ts deleted file mode 100644 index 1003822..0000000 --- a/foundry/packages/backend/src/actors/project-branch-sync/index.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import type { GitDriver } from "../../driver.js"; -import { getActorRuntimeContext } from "../context.js"; -import { getProject, selfProjectBranchSync } from "../handles.js"; -import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js"; -import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js"; -import { parentLookupFromStack } from "../project/stack-model.js"; -import { withRepoGitLock } from "../../services/repo-git-lock.js"; - -export interface ProjectBranchSyncInput { - workspaceId: string; - repoId: string; - repoPath: string; - 
intervalMs: number; -} - -interface SetIntervalCommand { - intervalMs: number; -} - -interface EnrichedBranchSnapshot { - branchName: string; - commitSha: string; - parentBranch: string | null; - trackedInStack: boolean; - diffStat: string | null; - hasUnpushed: boolean; - conflictsWithMain: boolean; -} - -interface ProjectBranchSyncState extends PollingControlState { - workspaceId: string; - repoId: string; - repoPath: string; -} - -const CONTROL = { - start: "project.branch_sync.control.start", - stop: "project.branch_sync.control.stop", - setInterval: "project.branch_sync.control.set_interval", - force: "project.branch_sync.control.force", -} as const; - -async function enrichBranches(workspaceId: string, repoId: string, repoPath: string, git: GitDriver): Promise { - return await withRepoGitLock(repoPath, async () => { - await git.fetch(repoPath); - const branches = await git.listRemoteBranches(repoPath); - const { driver } = getActorRuntimeContext(); - const stackEntries = await driver.stack.listStack(repoPath).catch(() => []); - const parentByBranch = parentLookupFromStack(stackEntries); - const enriched: EnrichedBranchSnapshot[] = []; - - const baseRef = await git.remoteDefaultBaseRef(repoPath); - const baseSha = await git.revParse(repoPath, baseRef).catch(() => ""); - - for (const branch of branches) { - let branchDiffStat: string | null = null; - let branchHasUnpushed = false; - let branchConflicts = false; - - try { - branchDiffStat = await git.diffStatForBranch(repoPath, branch.branchName); - } catch (error) { - logActorWarning("project-branch-sync", "diffStatForBranch failed", { - workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchDiffStat = null; - } - - try { - const headSha = await git.revParse(repoPath, `origin/${branch.branchName}`); - branchHasUnpushed = Boolean(baseSha && headSha && headSha !== baseSha); - } catch (error) { - logActorWarning("project-branch-sync", "revParse failed", { - 
workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchHasUnpushed = false; - } - - try { - branchConflicts = await git.conflictsWithMain(repoPath, branch.branchName); - } catch (error) { - logActorWarning("project-branch-sync", "conflictsWithMain failed", { - workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchConflicts = false; - } - - enriched.push({ - branchName: branch.branchName, - commitSha: branch.commitSha, - parentBranch: parentByBranch.get(branch.branchName) ?? null, - trackedInStack: parentByBranch.has(branch.branchName), - diffStat: branchDiffStat, - hasUnpushed: branchHasUnpushed, - conflictsWithMain: branchConflicts, - }); - } - - return enriched; - }); -} - -async function pollBranches(c: { state: ProjectBranchSyncState }): Promise { - const { driver } = getActorRuntimeContext(); - const enrichedItems = await enrichBranches(c.state.workspaceId, c.state.repoId, c.state.repoPath, driver.git); - const parent = getProject(c, c.state.workspaceId, c.state.repoId); - await parent.applyBranchSyncResult({ items: enrichedItems, at: Date.now() }); -} - -export const projectBranchSync = actor({ - queues: { - [CONTROL.start]: queue(), - [CONTROL.stop]: queue(), - [CONTROL.setInterval]: queue(), - [CONTROL.force]: queue(), - }, - options: { - name: "Project Branch Sync", - icon: "code-branch", - // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling. 
- noSleep: true, - }, - createState: (_c, input: ProjectBranchSyncInput): ProjectBranchSyncState => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - repoPath: input.repoPath, - intervalMs: input.intervalMs, - running: true, - }), - actions: { - async start(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 }); - }, - - async stop(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 }); - }, - - async setIntervalMs(c, payload: SetIntervalCommand): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 }); - }, - - async force(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.force, {}, { wait: true, timeout: 10_000 }); - }, - }, - run: workflow(async (ctx) => { - await runWorkflowPollingLoop(ctx, { - loopName: "project-branch-sync-loop", - control: CONTROL, - onPoll: async (loopCtx) => { - try { - await pollBranches(loopCtx); - } catch (error) { - logActorWarning("project-branch-sync", "poll failed", { - error: resolveErrorMessage(error), - stack: resolveErrorStack(error), - }); - } - }, - }); - }), -}); diff --git a/foundry/packages/backend/src/actors/project/actions.ts b/foundry/packages/backend/src/actors/project/actions.ts deleted file mode 100644 index 36355c6..0000000 --- a/foundry/packages/backend/src/actors/project/actions.ts +++ /dev/null @@ -1,1261 +0,0 @@ -// @ts-nocheck -import { randomUUID } from "node:crypto"; -import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; -import type { AgentType, TaskRecord, TaskSummary, ProviderId, RepoOverview, RepoStackAction, RepoStackActionResult } from "@sandbox-agent/foundry-shared"; -import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getTask, getOrCreateTask, getOrCreateHistory, 
getOrCreateProjectBranchSync, selfProject } from "../handles.js"; -import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; -import { foundryRepoClonePath } from "../../services/foundry-paths.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { withRepoGitLock } from "../../services/repo-git-lock.js"; -import { branches, taskIndex, repoActionJobs, repoMeta } from "./db/schema.js"; -import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js"; -import { normalizeBaseBranchName } from "../../integrations/git-spice/index.js"; -import { sortBranchesForOverview } from "./stack-model.js"; - -interface EnsureProjectCommand { - remoteUrl: string; -} - -interface EnsureProjectResult { - localPath: string; -} - -interface CreateTaskCommand { - task: string; - providerId: ProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; - onBranch: string | null; -} - -interface HydrateTaskIndexCommand {} - -interface ListReservedBranchesCommand {} - -interface RegisterTaskBranchCommand { - taskId: string; - branchName: string; - requireExistingRemote?: boolean; -} - -interface ListTaskSummariesCommand { - includeArchived?: boolean; -} - -interface GetTaskEnrichedCommand { - taskId: string; -} - -interface GetPullRequestForBranchCommand { - branchName: string; -} - -interface BranchSyncResult { - items: Array<{ - branchName: string; - commitSha: string; - parentBranch?: string | null; - trackedInStack?: boolean; - diffStat?: string | null; - hasUnpushed?: boolean; - conflictsWithMain?: boolean; - }>; - at: number; -} - -interface RepoOverviewCommand {} - -interface RunRepoStackActionCommand { - jobId?: string; - action: RepoStackAction; - branchName?: string; - parentBranch?: string; -} - -const PROJECT_QUEUE_NAMES = [ - 
"project.command.ensure", - "project.command.hydrateTaskIndex", - "project.command.createTask", - "project.command.registerTaskBranch", - "project.command.runRepoStackAction", - "project.command.applyBranchSyncResult", -] as const; - -type ProjectQueueName = (typeof PROJECT_QUEUE_NAMES)[number]; - -export { PROJECT_QUEUE_NAMES }; - -export function projectWorkflowQueueName(name: ProjectQueueName): ProjectQueueName { - return name; -} - -async function ensureLocalClone(c: any, remoteUrl: string): Promise { - const { config, driver } = getActorRuntimeContext(); - const localPath = foundryRepoClonePath(config, c.state.workspaceId, c.state.repoId); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - await driver.git.ensureCloned(remoteUrl, localPath, { githubToken: auth?.githubToken ?? null }); - c.state.localPath = localPath; - return localPath; -} - -async function ensureProjectSyncActors(c: any, localPath: string): Promise { - if (c.state.syncActorsStarted) { - return; - } - - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); - c.state.syncActorsStarted = true; - - void branchSync.start().catch((error: unknown) => { - logActorWarning("project.sync", "starting branch sync actor failed", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - error: resolveErrorMessage(error), - }); - }); -} - -async function ensureRepoActionJobsTable(c: any): Promise { - await c.db.execute(` - CREATE TABLE IF NOT EXISTS repo_action_jobs ( - job_id text PRIMARY KEY NOT NULL, - action text NOT NULL, - branch_name text, - parent_branch text, - status text NOT NULL, - message text NOT NULL, - created_at integer NOT NULL, - updated_at integer NOT NULL, - completed_at integer - ) - `); -} - -async function writeRepoActionJob( - c: any, - input: { - jobId: string; - action: RepoStackAction; - branchName: string | null; - parentBranch: string | null; - status: "queued" | "running" | "completed" | 
"error"; - message: string; - createdAt?: number; - completedAt?: number | null; - }, -): Promise { - await ensureRepoActionJobsTable(c); - const now = Date.now(); - await c.db - .insert(repoActionJobs) - .values({ - jobId: input.jobId, - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - status: input.status, - message: input.message, - createdAt: input.createdAt ?? now, - updatedAt: now, - completedAt: input.completedAt ?? null, - }) - .onConflictDoUpdate({ - target: repoActionJobs.jobId, - set: { - status: input.status, - message: input.message, - updatedAt: now, - completedAt: input.completedAt ?? null, - }, - }) - .run(); -} - -async function listRepoActionJobRows(c: any): Promise< - Array<{ - jobId: string; - action: RepoStackAction; - branchName: string | null; - parentBranch: string | null; - status: "queued" | "running" | "completed" | "error"; - message: string; - createdAt: number; - updatedAt: number; - completedAt: number | null; - }> -> { - await ensureRepoActionJobsTable(c); - const rows = await c.db.select().from(repoActionJobs).orderBy(desc(repoActionJobs.updatedAt)).limit(20).all(); - return rows.map((row: any) => ({ - jobId: row.jobId, - action: row.action, - branchName: row.branchName ?? null, - parentBranch: row.parentBranch ?? null, - status: row.status, - message: row.message, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - completedAt: row.completedAt ?? null, - })); -} - -async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { - try { - await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); - } catch { - // Best-effort cleanup only; preserve the original caller flow. 
- } -} - -function isStaleTaskReferenceError(error: unknown): boolean { - const message = resolveErrorMessage(error); - return isActorNotFoundError(error) || message.startsWith("Task not found:"); -} - -async function ensureTaskIndexHydrated(c: any): Promise { - if (c.state.taskIndexHydrated) { - return; - } - - const existing = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).limit(1).get(); - - if (existing) { - c.state.taskIndexHydrated = true; - return; - } - - // Migration path for old project actors that only tracked tasks in history. - try { - const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId); - const rows = await history.list({ limit: 5_000 }); - const seen = new Set(); - let skippedMissingTaskActors = 0; - - for (const row of rows) { - if (!row.taskId || seen.has(row.taskId)) { - continue; - } - seen.add(row.taskId); - - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - await h.get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - skippedMissingTaskActors += 1; - continue; - } - throw error; - } - - await c.db - .insert(taskIndex) - .values({ - taskId: row.taskId, - branchName: row.branchName, - createdAt: row.createdAt, - updatedAt: row.createdAt, - }) - .onConflictDoNothing() - .run(); - } - - if (skippedMissingTaskActors > 0) { - logActorWarning("project", "skipped missing tasks while hydrating index", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - skippedMissingTaskActors, - }); - } - } catch (error) { - logActorWarning("project", "task index hydration from history failed", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - error: resolveErrorMessage(error), - }); - } - - c.state.taskIndexHydrated = true; -} - -async function ensureProjectReady(c: any): Promise { - if (!c.state.remoteUrl) { - throw new Error("project remoteUrl is not initialized"); - } - if (!c.state.localPath) { - await ensureLocalClone(c, c.state.remoteUrl); 
- } - if (!c.state.localPath) { - throw new Error("project local repo is not initialized"); - } - await ensureProjectSyncActors(c, c.state.localPath); - return c.state.localPath; -} - -async function ensureProjectReadyForRead(c: any): Promise { - if (!c.state.remoteUrl) { - throw new Error("project remoteUrl is not initialized"); - } - - if (!c.state.localPath) { - const result = await projectActions.ensure(c, { remoteUrl: c.state.remoteUrl }); - c.state.localPath = result?.localPath ?? c.state.localPath; - } - - if (!c.state.localPath) { - throw new Error("project local repo is not initialized"); - } - - if (!c.state.syncActorsStarted) { - await ensureProjectSyncActors(c, c.state.localPath); - } - - return c.state.localPath; -} - -async function ensureTaskIndexHydratedForRead(c: any): Promise { - if (c.state.taskIndexHydrated) { - return; - } - await projectActions.hydrateTaskIndex(c, {}); -} - -async function forceProjectSync(c: any, localPath: string): Promise { - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); - await branchSync.force(); -} - -async function enrichTaskRecord(c: any, record: TaskRecord): Promise { - const branchName = record.branchName; - const br = - branchName != null - ? await c.db - .select({ - diffStat: branches.diffStat, - hasUnpushed: branches.hasUnpushed, - conflictsWithMain: branches.conflictsWithMain, - parentBranch: branches.parentBranch, - }) - .from(branches) - .where(eq(branches.branchName, branchName)) - .get() - : null; - - const pr = - branchName != null - ? await getGithubData(c, c.state.workspaceId) - .listPullRequestsForRepository({ repoId: c.state.repoId }) - .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) - .catch(() => null) - : null; - - return { - ...record, - diffStat: br?.diffStat ?? null, - hasUnpushed: br?.hasUnpushed != null ? String(br.hasUnpushed) : null, - conflictsWithMain: br?.conflictsWithMain != null ? 
String(br.conflictsWithMain) : null, - parentBranch: br?.parentBranch ?? null, - prUrl: pr?.url ?? null, - prAuthor: pr?.authorLogin ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - }; -} - -async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName, - createdAt: updatedAt || now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); -} - -async function ensureProjectMutation(c: any, cmd: EnsureProjectCommand): Promise { - c.state.remoteUrl = cmd.remoteUrl; - const localPath = await ensureLocalClone(c, cmd.remoteUrl); - - await c.db - .insert(repoMeta) - .values({ - id: 1, - remoteUrl: cmd.remoteUrl, - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: repoMeta.id, - set: { - remoteUrl: cmd.remoteUrl, - updatedAt: Date.now(), - }, - }) - .run(); - - return { localPath }; -} - -async function hydrateTaskIndexMutation(c: any, _cmd?: HydrateTaskIndexCommand): Promise { - await ensureTaskIndexHydrated(c); -} - -async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - const workspaceId = c.state.workspaceId; - const repoId = c.state.repoId; - const repoRemote = c.state.remoteUrl; - const onBranch = cmd.onBranch?.trim() || null; - const taskId = randomUUID(); - let initialBranchName: string | null = null; - let initialTitle: string | null = null; - - if (onBranch) { - initialBranchName = onBranch; - initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); - - await registerTaskBranchMutation(c, { - taskId, - branchName: onBranch, - requireExistingRemote: true, - }); - } else { - const localPath = await ensureProjectReady(c); - const { driver } = getActorRuntimeContext(); - - // Read locally cached remote-tracking refs — no network fetch. 
- // The branch sync actor keeps these reasonably fresh. If a rare naming - // collision occurs with a very recently created remote branch, it will - // be caught lazily on push/checkout. - const remoteBranches = (await driver.git.listLocalRemoteRefs(localPath)).map((branch: any) => branch.branchName); - - await ensureTaskIndexHydrated(c); - const reservedBranchRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - const reservedBranches = reservedBranchRows - .map((row: { branchName: string | null }) => row.branchName) - .filter((branchName): branchName is string => typeof branchName === "string" && branchName.length > 0); - - const resolved = resolveCreateFlowDecision({ - task: cmd.task, - explicitTitle: cmd.explicitTitle ?? undefined, - explicitBranchName: cmd.explicitBranchName ?? undefined, - localBranches: remoteBranches, - taskBranches: reservedBranches, - }); - - initialBranchName = resolved.branchName; - initialTitle = resolved.title; - - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName: resolved.branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoNothing() - .run(); - } - - let task: Awaited>; - try { - task = await getOrCreateTask(c, workspaceId, repoId, taskId, { - workspaceId, - repoId, - taskId, - repoRemote, - branchName: initialBranchName, - title: initialTitle, - task: cmd.task, - providerId: cmd.providerId, - agentType: cmd.agentType, - explicitTitle: null, - explicitBranchName: null, - initialPrompt: cmd.initialPrompt, - }); - } catch (error) { - if (initialBranchName) { - await c.db - .delete(taskIndex) - .where(eq(taskIndex.taskId, taskId)) - .run() - .catch(() => {}); - } - throw error; - } - - const created = await task.initialize({ providerId: cmd.providerId }); - - const history = await getOrCreateHistory(c, workspaceId, repoId); - await history.append({ - kind: "task.created", - taskId, - payload: { - repoId, 
- providerId: cmd.providerId, - }, - }); - - return created; -} - -async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const localPath = await ensureProjectReady(c); - - const branchName = cmd.branchName.trim(); - const requireExistingRemote = cmd.requireExistingRemote === true; - if (!branchName) { - throw new Error("branchName is required"); - } - - await ensureTaskIndexHydrated(c); - - const existingOwner = await c.db - .select({ taskId: taskIndex.taskId }) - .from(taskIndex) - .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) - .get(); - - if (existingOwner) { - let ownerMissing = false; - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, existingOwner.taskId); - await h.get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - ownerMissing = true; - await deleteStaleTaskIndexRow(c, existingOwner.taskId); - logActorWarning("project", "pruned stale task index row during branch registration", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: existingOwner.taskId, - branchName, - }); - } else { - throw error; - } - } - if (!ownerMissing) { - throw new Error(`branch is already assigned to a different task: ${branchName}`); - } - } - - const { driver } = getActorRuntimeContext(); - - let headSha = ""; - let trackedInStack = false; - let parentBranch: string | null = null; - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - - await withRepoGitLock(localPath, async () => { - await driver.git.fetch(localPath, { githubToken: auth?.githubToken ?? 
null }); - const baseRef = await driver.git.remoteDefaultBaseRef(localPath); - const normalizedBase = normalizeBaseBranchName(baseRef); - let branchAvailableInRepo = false; - - if (requireExistingRemote) { - try { - headSha = await driver.git.revParse(localPath, `origin/${branchName}`); - branchAvailableInRepo = true; - } catch { - throw new Error(`Remote branch not found: ${branchName}`); - } - } else { - try { - headSha = await driver.git.revParse(localPath, `origin/${branchName}`); - branchAvailableInRepo = true; - } catch { - headSha = await driver.git.revParse(localPath, baseRef); - } - } - - if (branchAvailableInRepo && (await driver.stack.available(localPath).catch(() => false))) { - let stackRows = await driver.stack.listStack(localPath).catch(() => []); - let stackRow = stackRows.find((entry) => entry.branchName === branchName); - - if (!stackRow) { - try { - await driver.stack.trackBranch(localPath, branchName, normalizedBase); - } catch (error) { - logActorWarning("project", "stack track failed while registering branch", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - branchName, - error: resolveErrorMessage(error), - }); - } - stackRows = await driver.stack.listStack(localPath).catch(() => []); - stackRow = stackRows.find((entry) => entry.branchName === branchName); - } - - trackedInStack = Boolean(stackRow); - parentBranch = stackRow?.parentBranch ?? null; - } - }); - - const now = Date.now(); - await c.db - .insert(branches) - .values({ - branchName, - commitSha: headSha, - parentBranch, - trackedInStack: trackedInStack ? 1 : 0, - firstSeenAt: now, - lastSeenAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: branches.branchName, - set: { - commitSha: headSha, - parentBranch, - trackedInStack: trackedInStack ? 
1 : 0, - lastSeenAt: now, - updatedAt: now, - }, - }) - .run(); - - await c.db - .insert(taskIndex) - .values({ - taskId: cmd.taskId, - branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); - - return { branchName, headSha }; -} - -async function runRepoStackActionMutation(c: any, cmd: RunRepoStackActionCommand): Promise { - const localPath = await ensureProjectReady(c); - await ensureTaskIndexHydrated(c); - - const { driver } = getActorRuntimeContext(); - const at = Date.now(); - const jobId = cmd.jobId ?? randomUUID(); - const action = cmd.action; - const branchName = cmd.branchName?.trim() || null; - const parentBranch = cmd.parentBranch?.trim() || null; - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "running", - message: `Running ${action}`, - createdAt: at, - }); - - if (!(await driver.stack.available(localPath).catch(() => false))) { - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "error", - message: "git-spice is not available for this repo", - createdAt: at, - completedAt: Date.now(), - }); - return { - jobId, - action, - executed: false, - status: "error", - message: "git-spice is not available for this repo", - at, - }; - } - - if ((action === "restack_subtree" || action === "rebase_branch" || action === "reparent_branch") && !branchName) { - throw new Error(`branchName is required for action: ${action}`); - } - if (action === "reparent_branch" && !parentBranch) { - throw new Error("parentBranch is required for action: reparent_branch"); - } - - await forceProjectSync(c, localPath); - - if (branchName) { - const row = await c.db.select({ branchName: branches.branchName }).from(branches).where(eq(branches.branchName, branchName)).get(); - if (!row) { - throw new Error(`Branch not found in repo snapshot: ${branchName}`); - } - } - - if (action === 
"reparent_branch") { - if (!parentBranch) { - throw new Error("parentBranch is required for action: reparent_branch"); - } - if (parentBranch === branchName) { - throw new Error("parentBranch must be different from branchName"); - } - const parentRow = await c.db.select({ branchName: branches.branchName }).from(branches).where(eq(branches.branchName, parentBranch)).get(); - if (!parentRow) { - throw new Error(`Parent branch not found in repo snapshot: ${parentBranch}`); - } - } - - try { - await withRepoGitLock(localPath, async () => { - if (action === "sync_repo") { - await driver.stack.syncRepo(localPath); - } else if (action === "restack_repo") { - await driver.stack.restackRepo(localPath); - } else if (action === "restack_subtree") { - await driver.stack.restackSubtree(localPath, branchName!); - } else if (action === "rebase_branch") { - await driver.stack.rebaseBranch(localPath, branchName!); - } else if (action === "reparent_branch") { - await driver.stack.reparentBranch(localPath, branchName!, parentBranch!); - } else { - throw new Error(`Unsupported repo stack action: ${action}`); - } - }); - - try { - const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId); - await history.append({ - kind: "repo.stack_action", - branchName: branchName ?? null, - payload: { - action, - branchName: branchName ?? null, - parentBranch: parentBranch ?? 
null, - jobId, - }, - }); - } catch (error) { - logActorWarning("project", "failed appending repo stack history event", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - action, - error: resolveErrorMessage(error), - }); - } - - await forceProjectSync(c, localPath); - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "completed", - message: `Completed ${action}`, - createdAt: at, - completedAt: Date.now(), - }); - } catch (error) { - const message = resolveErrorMessage(error); - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "error", - message, - createdAt: at, - completedAt: Date.now(), - }); - throw error; - } - - return { - jobId, - action, - executed: true, - status: "completed", - message: `Completed ${action}`, - at, - }; -} - -async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Promise { - const incoming = new Set(body.items.map((item) => item.branchName)); - const reservedRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - const reservedBranches = new Set( - reservedRows.map((row) => row.branchName).filter((branchName): branchName is string => typeof branchName === "string" && branchName.length > 0), - ); - - for (const item of body.items) { - const existing = await c.db - .select({ - firstSeenAt: branches.firstSeenAt, - }) - .from(branches) - .where(eq(branches.branchName, item.branchName)) - .get(); - - await c.db - .insert(branches) - .values({ - branchName: item.branchName, - commitSha: item.commitSha, - parentBranch: item.parentBranch ?? null, - trackedInStack: item.trackedInStack ? 1 : 0, - diffStat: item.diffStat ?? null, - hasUnpushed: item.hasUnpushed ? 1 : 0, - conflictsWithMain: item.conflictsWithMain ? 1 : 0, - firstSeenAt: existing?.firstSeenAt ?? 
body.at, - lastSeenAt: body.at, - updatedAt: body.at, - }) - .onConflictDoUpdate({ - target: branches.branchName, - set: { - commitSha: item.commitSha, - parentBranch: item.parentBranch ?? null, - trackedInStack: item.trackedInStack ? 1 : 0, - diffStat: item.diffStat ?? null, - hasUnpushed: item.hasUnpushed ? 1 : 0, - conflictsWithMain: item.conflictsWithMain ? 1 : 0, - firstSeenAt: existing?.firstSeenAt ?? body.at, - lastSeenAt: body.at, - updatedAt: body.at, - }, - }) - .run(); - } - - const existingRows = await c.db.select({ branchName: branches.branchName }).from(branches).all(); - - for (const row of existingRows) { - if (incoming.has(row.branchName) || reservedBranches.has(row.branchName)) { - continue; - } - await c.db.delete(branches).where(eq(branches.branchName, row.branchName)).run(); - } -} - -export async function runProjectWorkflow(ctx: any): Promise { - await ctx.loop("project-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-project-command", { - names: [...PROJECT_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "project.command.ensure") { - const result = await loopCtx.step({ - name: "project-ensure", - timeout: 5 * 60_000, - run: async () => ensureProjectMutation(loopCtx, msg.body as EnsureProjectCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.hydrateTaskIndex") { - await loopCtx.step("project-hydrate-task-index", async () => hydrateTaskIndexMutation(loopCtx, msg.body as HydrateTaskIndexCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.createTask") { - const result = await loopCtx.step({ - name: "project-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } 
- - if (msg.name === "project.command.registerTaskBranch") { - const result = await loopCtx.step({ - name: "project-register-task-branch", - timeout: 5 * 60_000, - run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.runRepoStackAction") { - const result = await loopCtx.step({ - name: "project-run-repo-stack-action", - timeout: 12 * 60_000, - run: async () => runRepoStackActionMutation(loopCtx, msg.body as RunRepoStackActionCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.applyBranchSyncResult") { - await loopCtx.step({ - name: "project-apply-branch-sync-result", - timeout: 60_000, - run: async () => applyBranchSyncResultMutation(loopCtx, msg.body as BranchSyncResult), - }); - await msg.complete({ ok: true }); - } - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("project", "project workflow command failed", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("project", "project workflow failed completing error response", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - queueName: msg.name, - error: resolveErrorMessage(completeError), - }); - }); - } - - return Loop.continue(undefined); - }); -} - -export const projectActions = { - async ensure(c: any, cmd: EnsureProjectCommand): Promise { - const self = selfProject(c); - return expectQueueResponse( - await self.send(projectWorkflowQueueName("project.command.ensure"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async createTask(c: any, cmd: CreateTaskCommand): Promise { - const self = selfProject(c); - return expectQueueResponse( - await 
self.send(projectWorkflowQueueName("project.command.createTask"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listReservedBranches(c: any, _cmd?: ListReservedBranchesCommand): Promise { - await ensureTaskIndexHydratedForRead(c); - - const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - - return rows.map((row) => row.branchName).filter((name): name is string => typeof name === "string" && name.trim().length > 0); - }, - - async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const self = selfProject(c); - return expectQueueResponse<{ branchName: string; headSha: string }>( - await self.send(projectWorkflowQueueName("project.command.registerTaskBranch"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async hydrateTaskIndex(c: any, cmd?: HydrateTaskIndexCommand): Promise { - const self = selfProject(c); - await self.send(projectWorkflowQueueName("project.command.hydrateTaskIndex"), cmd ?? {}, { - wait: true, - timeout: 10_000, - }); - }, - - async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { - const body = cmd ?? 
{}; - const records: TaskSummary[] = []; - - await ensureTaskIndexHydratedForRead(c); - - const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); - - for (const row of taskRows) { - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - const record = await h.get(); - - if (!body.includeArchived && record.status === "archived") { - continue; - } - - records.push({ - workspaceId: record.workspaceId, - repoId: record.repoId, - taskId: record.taskId, - branchName: record.branchName, - title: record.title, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - logActorWarning("project", "pruned stale task index row during summary listing", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - }); - continue; - } - logActorWarning("project", "failed loading task summary row", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - error: resolveErrorMessage(error), - }); - } - } - - records.sort((a, b) => b.updatedAt - a.updatedAt); - return records; - }, - - async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { - await ensureTaskIndexHydratedForRead(c); - - const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); - if (!row) { - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, cmd.taskId); - const record = await h.get(); - await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? 
Date.now()); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - } - - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, cmd.taskId); - const record = await h.get(); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, cmd.taskId); - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - }, - - async getRepoOverview(c: any, _cmd?: RepoOverviewCommand): Promise { - const localPath = await ensureProjectReadyForRead(c); - await ensureTaskIndexHydratedForRead(c); - - const { driver } = getActorRuntimeContext(); - const now = Date.now(); - const baseRef = await driver.git.remoteDefaultBaseRef(localPath).catch(() => null); - const stackAvailable = await driver.stack.available(localPath).catch(() => false); - - const branchRowsRaw = await c.db - .select({ - branchName: branches.branchName, - commitSha: branches.commitSha, - parentBranch: branches.parentBranch, - trackedInStack: branches.trackedInStack, - diffStat: branches.diffStat, - hasUnpushed: branches.hasUnpushed, - conflictsWithMain: branches.conflictsWithMain, - firstSeenAt: branches.firstSeenAt, - lastSeenAt: branches.lastSeenAt, - updatedAt: branches.updatedAt, - }) - .from(branches) - .all(); - - const taskRows = await c.db - .select({ - taskId: taskIndex.taskId, - branchName: taskIndex.branchName, - updatedAt: taskIndex.updatedAt, - }) - .from(taskIndex) - .all(); - - const taskMetaByBranch = new Map(); - - for (const row of taskRows) { - if (!row.branchName) { - continue; - } - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - const record = await h.get(); - taskMetaByBranch.set(row.branchName, { - taskId: row.taskId, - title: record.title ?? 
null, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - logActorWarning("project", "pruned stale task index row during repo overview", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: row.branchName, - }); - continue; - } - logActorWarning("project", "failed loading task while building repo overview", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: row.branchName, - error: resolveErrorMessage(error), - }); - } - } - - const githubData = getGithubData(c, c.state.workspaceId); - const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); - const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); - - const combinedRows = sortBranchesForOverview( - branchRowsRaw.map((row) => ({ - branchName: row.branchName, - parentBranch: row.parentBranch ?? null, - updatedAt: row.updatedAt, - })), - ); - - const detailByBranch = new Map(branchRowsRaw.map((row) => [row.branchName, row])); - - const branchRows = combinedRows.map((ordering) => { - const row = detailByBranch.get(ordering.branchName)!; - const taskMeta = taskMetaByBranch.get(row.branchName); - const pr = prByBranch.get(row.branchName); - return { - branchName: row.branchName, - commitSha: row.commitSha, - parentBranch: row.parentBranch ?? null, - trackedInStack: Boolean(row.trackedInStack), - diffStat: row.diffStat ?? null, - hasUnpushed: Boolean(row.hasUnpushed), - conflictsWithMain: Boolean(row.conflictsWithMain), - taskId: taskMeta?.taskId ?? null, - taskTitle: taskMeta?.title ?? null, - taskStatus: taskMeta?.status ?? null, - prNumber: pr?.number ?? null, - prState: pr?.state ?? null, - prUrl: pr?.url ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - firstSeenAt: row.firstSeenAt ?? 
null, - lastSeenAt: row.lastSeenAt ?? null, - updatedAt: Math.max(row.updatedAt, taskMeta?.updatedAt ?? 0), - }; - }); - - const latestBranchSync = await c.db.select({ updatedAt: branches.updatedAt }).from(branches).orderBy(desc(branches.updatedAt)).limit(1).get(); - const githubSummary = await githubData.getSummary().catch(() => null); - - return { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - remoteUrl: c.state.remoteUrl, - baseRef, - stackAvailable, - fetchedAt: now, - branchSyncAt: latestBranchSync?.updatedAt ?? null, - prSyncAt: githubSummary?.lastSyncAt ?? null, - branchSyncStatus: latestBranchSync ? "synced" : "pending", - prSyncStatus: githubSummary?.syncStatus ?? "pending", - repoActionJobs: await listRepoActionJobRows(c), - branches: branchRows, - }; - }, - - async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { - const branchName = cmd.branchName?.trim(); - if (!branchName) { - return null; - } - const githubData = getGithubData(c, c.state.workspaceId); - return await githubData.getPullRequestForBranch({ - repoId: c.state.repoId, - branchName, - }); - }, - - async runRepoStackAction(c: any, cmd: RunRepoStackActionCommand): Promise { - const self = selfProject(c); - const jobId = randomUUID(); - const at = Date.now(); - const action = cmd.action; - const branchName = cmd.branchName?.trim() || null; - const parentBranch = cmd.parentBranch?.trim() || null; - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "queued", - message: `Queued ${action}`, - createdAt: at, - }); - - await self.send( - projectWorkflowQueueName("project.command.runRepoStackAction"), - { - ...cmd, - jobId, - }, - { - wait: false, - }, - ); - - return { - jobId, - action, - executed: true, - status: "queued", - message: `Queued ${action}`, - at, - }; - }, - - async applyBranchSyncResult(c: any, body: BranchSyncResult): Promise { - const self = 
selfProject(c); - await self.send(projectWorkflowQueueName("project.command.applyBranchSyncResult"), body, { - wait: true, - timeout: 10_000, - }); - }, -}; diff --git a/foundry/packages/backend/src/actors/project/db/drizzle.config.ts b/foundry/packages/backend/src/actors/project/db/drizzle.config.ts deleted file mode 100644 index 5f53fc9..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/project/db/drizzle", - schema: "./src/actors/project/db/schema.ts", -}); diff --git a/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql deleted file mode 100644 index f4f23ff..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE TABLE `branches` ( - `branch_name` text PRIMARY KEY NOT NULL, - `commit_sha` text NOT NULL, - `parent_branch` text, - `tracked_in_stack` integer DEFAULT 0 NOT NULL, - `diff_stat` text, - `has_unpushed` integer DEFAULT 0 NOT NULL, - `conflicts_with_main` integer DEFAULT 0 NOT NULL, - `first_seen_at` integer, - `last_seen_at` integer, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `pr_cache` ( - `branch_name` text PRIMARY KEY NOT NULL, - `pr_number` integer NOT NULL, - `state` text NOT NULL, - `title` text NOT NULL, - `pr_url` text, - `pr_author` text, - `is_draft` integer DEFAULT 0 NOT NULL, - `ci_status` text, - `review_status` text, - `reviewer` text, - `fetched_at` integer, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `repo_meta` ( - `id` integer PRIMARY KEY NOT NULL, - `remote_url` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `task_index` ( - `task_id` text PRIMARY KEY NOT NULL, - `branch_name` text, 
- `created_at` integer NOT NULL, - `updated_at` integer NOT NULL -); diff --git a/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json deleted file mode 100644 index baf5913..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json +++ /dev/null @@ -1,265 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", - "prevId": "00000000-0000-0000-0000-000000000000", - "tables": { - "branches": { - "name": "branches", - "columns": { - "branch_name": { - "name": "branch_name", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "commit_sha": { - "name": "commit_sha", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "parent_branch": { - "name": "parent_branch", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "tracked_in_stack": { - "name": "tracked_in_stack", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "diff_stat": { - "name": "diff_stat", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "has_unpushed": { - "name": "has_unpushed", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "conflicts_with_main": { - "name": "conflicts_with_main", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "first_seen_at": { - "name": "first_seen_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "last_seen_at": { - "name": "last_seen_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - 
"primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "pr_cache": { - "name": "pr_cache", - "columns": { - "branch_name": { - "name": "branch_name", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "pr_number": { - "name": "pr_number", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "state": { - "name": "state", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "title": { - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "pr_url": { - "name": "pr_url", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "pr_author": { - "name": "pr_author", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "is_draft": { - "name": "is_draft", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "ci_status": { - "name": "ci_status", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "review_status": { - "name": "review_status", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "reviewer": { - "name": "reviewer", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "fetched_at": { - "name": "fetched_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "repo_meta": { - "name": 
"repo_meta", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "remote_url": { - "name": "remote_url", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "task_index": { - "name": "task_index", - "columns": { - "task_id": { - "name": "task_id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "branch_name": { - "name": "branch_name", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} diff --git a/foundry/packages/backend/src/actors/project/db/migrations.ts b/foundry/packages/backend/src/actors/project/db/migrations.ts deleted file mode 100644 index fd705b7..0000000 --- a/foundry/packages/backend/src/actors/project/db/migrations.ts +++ /dev/null @@ -1,46 +0,0 @@ -// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. -// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). -// Do not hand-edit this file. 
- -const journal = { - entries: [ - { - idx: 0, - when: 1773376221848, - tag: "0000_useful_la_nuit", - breakpoints: true, - }, - ], -} as const; - -export default { - journal, - migrations: { - m0000: `CREATE TABLE \`branches\` ( - \`branch_name\` text PRIMARY KEY NOT NULL, - \`commit_sha\` text NOT NULL, - \`parent_branch\` text, - \`tracked_in_stack\` integer DEFAULT 0 NOT NULL, - \`diff_stat\` text, - \`has_unpushed\` integer DEFAULT 0 NOT NULL, - \`conflicts_with_main\` integer DEFAULT 0 NOT NULL, - \`first_seen_at\` integer, - \`last_seen_at\` integer, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`repo_meta\` ( - \`id\` integer PRIMARY KEY NOT NULL, - \`remote_url\` text NOT NULL, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`task_index\` ( - \`task_id\` text PRIMARY KEY NOT NULL, - \`branch_name\` text, - \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL -); -`, - } as const, -}; diff --git a/foundry/packages/backend/src/actors/project/db/schema.ts b/foundry/packages/backend/src/actors/project/db/schema.ts deleted file mode 100644 index bb61d75..0000000 --- a/foundry/packages/backend/src/actors/project/db/schema.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; - -// SQLite is per project actor instance (workspaceId+repoId), so no workspaceId/repoId columns needed. 
- -export const branches = sqliteTable("branches", { - branchName: text("branch_name").notNull().primaryKey(), - commitSha: text("commit_sha").notNull(), - parentBranch: text("parent_branch"), - trackedInStack: integer("tracked_in_stack").notNull().default(0), - diffStat: text("diff_stat"), - hasUnpushed: integer("has_unpushed").notNull().default(0), - conflictsWithMain: integer("conflicts_with_main").notNull().default(0), - firstSeenAt: integer("first_seen_at"), - lastSeenAt: integer("last_seen_at"), - updatedAt: integer("updated_at").notNull(), -}); - -export const repoMeta = sqliteTable("repo_meta", { - id: integer("id").primaryKey(), - remoteUrl: text("remote_url").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const taskIndex = sqliteTable("task_index", { - taskId: text("task_id").notNull().primaryKey(), - branchName: text("branch_name"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const repoActionJobs = sqliteTable("repo_action_jobs", { - jobId: text("job_id").notNull().primaryKey(), - action: text("action").notNull(), - branchName: text("branch_name"), - parentBranch: text("parent_branch"), - status: text("status").notNull(), - message: text("message").notNull(), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - completedAt: integer("completed_at"), -}); diff --git a/foundry/packages/backend/src/actors/project/index.ts b/foundry/packages/backend/src/actors/project/index.ts deleted file mode 100644 index c5ba8a7..0000000 --- a/foundry/packages/backend/src/actors/project/index.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import { projectDb } from "./db/db.js"; -import { PROJECT_QUEUE_NAMES, projectActions, runProjectWorkflow } from "./actions.js"; - -export interface ProjectInput { - workspaceId: string; - repoId: string; - remoteUrl: string; -} - 
-export const project = actor({ - db: projectDb, - queues: Object.fromEntries(PROJECT_QUEUE_NAMES.map((name) => [name, queue()])), - options: { - name: "Project", - icon: "folder", - actionTimeout: 5 * 60_000, - }, - createState: (_c, input: ProjectInput) => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - remoteUrl: input.remoteUrl, - localPath: null as string | null, - syncActorsStarted: false, - taskIndexHydrated: false, - }), - actions: projectActions, - run: workflow(runProjectWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/project/stack-model.ts b/foundry/packages/backend/src/actors/project/stack-model.ts deleted file mode 100644 index 78c9888..0000000 --- a/foundry/packages/backend/src/actors/project/stack-model.ts +++ /dev/null @@ -1,69 +0,0 @@ -export interface StackEntry { - branchName: string; - parentBranch: string | null; -} - -export interface OrderedBranchRow { - branchName: string; - parentBranch: string | null; - updatedAt: number; -} - -export function normalizeParentBranch(branchName: string, parentBranch: string | null | undefined): string | null { - const parent = parentBranch?.trim() || null; - if (!parent || parent === branchName) { - return null; - } - return parent; -} - -export function parentLookupFromStack(entries: StackEntry[]): Map { - const lookup = new Map(); - for (const entry of entries) { - const branchName = entry.branchName.trim(); - if (!branchName) { - continue; - } - lookup.set(branchName, normalizeParentBranch(branchName, entry.parentBranch)); - } - return lookup; -} - -export function sortBranchesForOverview(rows: OrderedBranchRow[]): OrderedBranchRow[] { - const byName = new Map(rows.map((row) => [row.branchName, row])); - const depthMemo = new Map(); - const computing = new Set(); - - const depthFor = (branchName: string): number => { - const cached = depthMemo.get(branchName); - if (cached != null) { - return cached; - } - if (computing.has(branchName)) { - return 999; - } - - 
computing.add(branchName); - const row = byName.get(branchName); - const parent = row?.parentBranch; - let depth = 0; - if (parent && parent !== branchName && byName.has(parent)) { - depth = Math.min(998, depthFor(parent) + 1); - } - computing.delete(branchName); - depthMemo.set(branchName, depth); - return depth; - }; - - return [...rows].sort((a, b) => { - const da = depthFor(a.branchName); - const db = depthFor(b.branchName); - if (da !== db) { - return da - db; - } - if (a.updatedAt !== b.updatedAt) { - return b.updatedAt - a.updatedAt; - } - return a.branchName.localeCompare(b.branchName); - }); -} diff --git a/foundry/packages/backend/src/actors/repository/actions.ts b/foundry/packages/backend/src/actors/repository/actions.ts new file mode 100644 index 0000000..9ef8e75 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/actions.ts @@ -0,0 +1,557 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; +import { Loop } from "rivetkit/workflow"; +import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared"; +import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js"; +import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js"; +import { expectQueueResponse } from "../../services/queue.js"; +import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; +import { repoMeta, taskIndex } from "./db/schema.js"; + +interface CreateTaskCommand { + task: string; + sandboxProviderId: SandboxProviderId; + agentType: AgentType | null; + explicitTitle: string | null; + explicitBranchName: string | null; + initialPrompt: string | null; + onBranch: string | null; +} + +interface RegisterTaskBranchCommand { + taskId: string; + branchName: string; + requireExistingRemote?: boolean; +} + +interface ListTaskSummariesCommand { + 
includeArchived?: boolean; +} + +interface GetTaskEnrichedCommand { + taskId: string; +} + +interface GetPullRequestForBranchCommand { + branchName: string; +} + +const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const; + +type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number]; + +export { REPOSITORY_QUEUE_NAMES }; + +export function repositoryWorkflowQueueName(name: RepositoryQueueName): RepositoryQueueName { + return name; +} + +function isStaleTaskReferenceError(error: unknown): boolean { + const message = resolveErrorMessage(error); + return isActorNotFoundError(error) || message.startsWith("Task not found:"); +} + +async function persistRemoteUrl(c: any, remoteUrl: string): Promise { + c.state.remoteUrl = remoteUrl; + await c.db + .insert(repoMeta) + .values({ + id: 1, + remoteUrl, + updatedAt: Date.now(), + }) + .onConflictDoUpdate({ + target: repoMeta.id, + set: { + remoteUrl, + updatedAt: Date.now(), + }, + }) + .run(); +} + +async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { + try { + await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); + } catch { + // Best effort cleanup only. 
+ } +} + +async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + branchName, + createdAt: updatedAt || now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); +} + +async function listKnownTaskBranches(c: any): Promise { + const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); + return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); +} + +async function resolveGitHubRepository(c: any) { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null); +} + +async function listGitHubBranches(c: any): Promise> { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []); +} + +async function enrichTaskRecord(c: any, record: TaskRecord): Promise { + const branchName = record.branchName?.trim() || null; + if (!branchName) { + return record; + } + + const pr = + branchName != null + ? await getGithubData(c, c.state.organizationId) + .listPullRequestsForRepository({ repoId: c.state.repoId }) + .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) + .catch(() => null) + : null; + + return { + ...record, + prUrl: pr?.url ?? null, + prAuthor: pr?.authorLogin ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? null, + diffStat: record.diffStat ?? null, + hasUnpushed: record.hasUnpushed ?? null, + conflictsWithMain: record.conflictsWithMain ?? null, + parentBranch: record.parentBranch ?? 
null, + }; +} + +async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { + const organizationId = c.state.organizationId; + const repoId = c.state.repoId; + const repoRemote = c.state.remoteUrl; + const onBranch = cmd.onBranch?.trim() || null; + const taskId = randomUUID(); + let initialBranchName: string | null = null; + let initialTitle: string | null = null; + + await persistRemoteUrl(c, repoRemote); + + if (onBranch) { + initialBranchName = onBranch; + initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); + + await registerTaskBranchMutation(c, { + taskId, + branchName: onBranch, + requireExistingRemote: true, + }); + } else { + const reservedBranches = await listKnownTaskBranches(c); + const resolved = resolveCreateFlowDecision({ + task: cmd.task, + explicitTitle: cmd.explicitTitle ?? undefined, + explicitBranchName: cmd.explicitBranchName ?? undefined, + localBranches: [], + taskBranches: reservedBranches, + }); + + initialBranchName = resolved.branchName; + initialTitle = resolved.title; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + branchName: resolved.branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoNothing() + .run(); + } + + let taskHandle: Awaited>; + try { + taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { + organizationId, + repoId, + taskId, + repoRemote, + branchName: initialBranchName, + title: initialTitle, + task: cmd.task, + sandboxProviderId: cmd.sandboxProviderId, + agentType: cmd.agentType, + explicitTitle: null, + explicitBranchName: null, + initialPrompt: cmd.initialPrompt, + }); + } catch (error) { + if (initialBranchName) { + await deleteStaleTaskIndexRow(c, taskId); + } + throw error; + } + + const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId }); + + const history = await getOrCreateHistory(c, organizationId, repoId); + await history.append({ + kind: "task.created", + taskId, + 
payload: { + repoId, + sandboxProviderId: cmd.sandboxProviderId, + }, + }); + + return created; +} + +async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const branchName = cmd.branchName.trim(); + if (!branchName) { + throw new Error("branchName is required"); + } + + await persistRemoteUrl(c, c.state.remoteUrl); + + const existingOwner = await c.db + .select({ taskId: taskIndex.taskId }) + .from(taskIndex) + .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) + .get(); + + if (existingOwner) { + let ownerMissing = false; + try { + await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get(); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + ownerMissing = true; + await deleteStaleTaskIndexRow(c, existingOwner.taskId); + } else { + throw error; + } + } + if (!ownerMissing) { + throw new Error(`branch is already assigned to a different task: ${branchName}`); + } + } + + const branches = await listGitHubBranches(c); + const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; + if (cmd.requireExistingRemote && !branchMatch) { + throw new Error(`Remote branch not found: ${branchName}`); + } + + const repository = await resolveGitHubRepository(c); + const defaultBranch = repository?.defaultBranch ?? "main"; + const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? 
""; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId: cmd.taskId, + branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); + + return { branchName, headSha }; +} + +async function listTaskSummaries(c: any, includeArchived = false): Promise { + const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); + const records: TaskSummary[] = []; + + for (const row of taskRows) { + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); + if (!includeArchived && record.status === "archived") { + continue; + } + records.push({ + organizationId: record.organizationId, + repoId: record.repoId, + taskId: record.taskId, + branchName: record.branchName, + title: record.title, + status: record.status, + updatedAt: record.updatedAt, + }); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, row.taskId); + continue; + } + logActorWarning("repository", "failed loading task summary row", { + organizationId: c.state.organizationId, + repoId: c.state.repoId, + taskId: row.taskId, + error: resolveErrorMessage(error), + }); + } + } + + records.sort((a, b) => b.updatedAt - a.updatedAt); + return records; +} + +function sortOverviewBranches( + branches: Array<{ + branchName: string; + commitSha: string; + taskId: string | null; + taskTitle: string | null; + taskStatus: TaskRecord["status"] | null; + prNumber: number | null; + prState: string | null; + prUrl: string | null; + ciStatus: string | null; + reviewStatus: string | null; + reviewer: string | null; + updatedAt: number; + }>, + defaultBranch: string | null, +) { + return [...branches].sort((left, right) => { + if (defaultBranch) { + if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; + if (right.branchName 
=== defaultBranch && left.branchName !== defaultBranch) return 1; + } + if (Boolean(left.taskId) !== Boolean(right.taskId)) { + return left.taskId ? -1 : 1; + } + if (left.updatedAt !== right.updatedAt) { + return right.updatedAt - left.updatedAt; + } + return left.branchName.localeCompare(right.branchName); + }); +} + +export async function runRepositoryWorkflow(ctx: any): Promise { + await ctx.loop("repository-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-repository-command", { + names: [...REPOSITORY_QUEUE_NAMES], + completable: true, + }); + if (!msg) { + return Loop.continue(undefined); + } + + try { + if (msg.name === "repository.command.createTask") { + const result = await loopCtx.step({ + name: "repository-create-task", + timeout: 5 * 60_000, + run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } + + if (msg.name === "repository.command.registerTaskBranch") { + const result = await loopCtx.step({ + name: "repository-register-task-branch", + timeout: 60_000, + run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("repository", "repository workflow command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + +export const repositoryActions = { + async createTask(c: any, cmd: CreateTaskCommand): Promise { + const self = selfRepository(c); + return expectQueueResponse( + await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, { + wait: true, + timeout: 10_000, + }), + ); + }, + + async listReservedBranches(c: any): Promise { + return await listKnownTaskBranches(c); + }, + + async 
registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const self = selfRepository(c); + return expectQueueResponse<{ branchName: string; headSha: string }>( + await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, { + wait: true, + timeout: 10_000, + }), + ); + }, + + async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { + return await listTaskSummaries(c, cmd?.includeArchived === true); + }, + + async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { + const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); + if (!row) { + const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); + await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? Date.now()); + return await enrichTaskRecord(c, record); + } + + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); + return await enrichTaskRecord(c, record); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, cmd.taskId); + throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); + } + throw error; + } + }, + + async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const repository = await resolveGitHubRepository(c); + return { + defaultBranch: repository?.defaultBranch ?? null, + fullName: repository?.fullName ?? 
null, + remoteUrl: c.state.remoteUrl, + }; + }, + + async getRepoOverview(c: any): Promise { + await persistRemoteUrl(c, c.state.remoteUrl); + + const now = Date.now(); + const repository = await resolveGitHubRepository(c); + const githubBranches = await listGitHubBranches(c).catch(() => []); + const githubData = getGithubData(c, c.state.organizationId); + const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); + const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); + + const taskRows = await c.db + .select({ + taskId: taskIndex.taskId, + branchName: taskIndex.branchName, + updatedAt: taskIndex.updatedAt, + }) + .from(taskIndex) + .all(); + + const taskMetaByBranch = new Map(); + for (const row of taskRows) { + if (!row.branchName) { + continue; + } + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); + taskMetaByBranch.set(row.branchName, { + taskId: row.taskId, + title: record.title ?? null, + status: record.status, + updatedAt: record.updatedAt, + }); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, row.taskId); + continue; + } + } + } + + const branchMap = new Map(); + for (const branch of githubBranches) { + branchMap.set(branch.branchName, branch); + } + for (const branchName of taskMetaByBranch.keys()) { + if (!branchMap.has(branchName)) { + branchMap.set(branchName, { branchName, commitSha: "" }); + } + } + if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { + branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); + } + + const branches = sortOverviewBranches( + [...branchMap.values()].map((branch) => { + const taskMeta = taskMetaByBranch.get(branch.branchName); + const pr = prByBranch.get(branch.branchName); + return { + branchName: branch.branchName, + commitSha: branch.commitSha, + taskId: taskMeta?.taskId ?? 
null, + taskTitle: taskMeta?.title ?? null, + taskStatus: taskMeta?.status ?? null, + prNumber: pr?.number ?? null, + prState: pr?.state ?? null, + prUrl: pr?.url ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? null, + updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), + }; + }), + repository?.defaultBranch ?? null, + ); + + return { + organizationId: c.state.organizationId, + repoId: c.state.repoId, + remoteUrl: c.state.remoteUrl, + baseRef: repository?.defaultBranch ?? null, + fetchedAt: now, + branches, + }; + }, + + async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { + const branchName = cmd.branchName?.trim(); + if (!branchName) { + return null; + } + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getPullRequestForBranch({ + repoId: c.state.repoId, + branchName, + }); + }, +}; diff --git a/foundry/packages/backend/src/actors/workspace/db/db.ts b/foundry/packages/backend/src/actors/repository/db/db.ts similarity index 68% rename from foundry/packages/backend/src/actors/workspace/db/db.ts rename to foundry/packages/backend/src/actors/repository/db/db.ts index 1b7c080..79bed8e 100644 --- a/foundry/packages/backend/src/actors/workspace/db/db.ts +++ b/foundry/packages/backend/src/actors/repository/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const workspaceDb = db({ schema, migrations }); +export const repositoryDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts new file mode 100644 index 0000000..8b9a1b9 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from 
"rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/repository/db/drizzle", + schema: "./src/actors/repository/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql new file mode 100644 index 0000000..14bc071 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql @@ -0,0 +1,12 @@ +CREATE TABLE `repo_meta` ( + `id` integer PRIMARY KEY NOT NULL, + `remote_url` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_index` ( + `task_id` text PRIMARY KEY NOT NULL, + `branch_name` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..940b4e6 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json @@ -0,0 +1,87 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "repo_meta": { + "name": "repo_meta", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "remote_url": { + "name": "remote_url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_index": { + "name": "task_index", + "columns": { + "task_id": { + "name": "task_id", + "type": "text", + 
"primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "branch_name": { + "name": "branch_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/foundry/packages/backend/src/actors/project/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json similarity index 100% rename from foundry/packages/backend/src/actors/project/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json diff --git a/foundry/packages/backend/src/actors/repository/db/migrations.ts b/foundry/packages/backend/src/actors/repository/db/migrations.ts new file mode 100644 index 0000000..ebdb167 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/migrations.ts @@ -0,0 +1,43 @@ +// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. 
+ +const journal = { + entries: [ + { + idx: 0, + when: 1773376221848, + tag: "0000_useful_la_nuit", + breakpoints: true, + }, + { + idx: 1, + when: 1778900000000, + tag: "0001_remove_local_git_state", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`repo_meta\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`remote_url\` text NOT NULL, +\t\`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`task_index\` ( +\t\`task_id\` text PRIMARY KEY NOT NULL, +\t\`branch_name\` text, +\t\`created_at\` integer NOT NULL, +\t\`updated_at\` integer NOT NULL +); +`, + m0001: `DROP TABLE IF EXISTS \`branches\`; +--> statement-breakpoint +DROP TABLE IF EXISTS \`repo_action_jobs\`; +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/repository/db/schema.ts b/foundry/packages/backend/src/actors/repository/db/schema.ts new file mode 100644 index 0000000..ddb2f19 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/schema.ts @@ -0,0 +1,16 @@ +import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; + +// SQLite is per repository actor instance (organizationId+repoId). 
+ +export const repoMeta = sqliteTable("repo_meta", { + id: integer("id").primaryKey(), + remoteUrl: text("remote_url").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const taskIndex = sqliteTable("task_index", { + taskId: text("task_id").notNull().primaryKey(), + branchName: text("branch_name"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/repository/index.ts b/foundry/packages/backend/src/actors/repository/index.ts new file mode 100644 index 0000000..4253a90 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/index.ts @@ -0,0 +1,27 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import { repositoryDb } from "./db/db.js"; +import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js"; + +export interface RepositoryInput { + organizationId: string; + repoId: string; + remoteUrl: string; +} + +export const repository = actor({ + db: repositoryDb, + queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Repository", + icon: "folder", + actionTimeout: 5 * 60_000, + }, + createState: (_c, input: RepositoryInput) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + remoteUrl: input.remoteUrl, + }), + actions: repositoryActions, + run: workflow(runRepositoryWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts index e65c151..2e2087b 100644 --- a/foundry/packages/backend/src/actors/sandbox/index.ts +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -4,21 +4,21 @@ import { existsSync } from "node:fs"; import Dockerode from "dockerode"; import { SandboxAgent } from "sandbox-agent"; import { getActorRuntimeContext } from "../context.js"; -import { workspaceKey } from "../keys.js"; +import { 
organizationKey } from "../keys.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -const SANDBOX_REPO_CWD = "/home/sandbox/workspace/repo"; +const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo"; const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full"; const DEFAULT_LOCAL_SANDBOX_PORT = 2468; const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); -function parseTaskSandboxKey(key: readonly string[]): { workspaceId: string; taskId: string } { - if (key.length !== 4 || key[0] !== "ws" || key[2] !== "sandbox") { +function parseTaskSandboxKey(key: readonly string[]): { organizationId: string; taskId: string } { + if (key.length !== 4 || key[0] !== "org" || key[2] !== "sandbox") { throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`); } return { - workspaceId: key[1]!, + organizationId: key[1]!, taskId: key[3]!, }; } @@ -191,24 +191,24 @@ function sanitizeActorResult(value: unknown, seen = new WeakSet()): unkn const baseTaskSandbox = sandboxActor({ createProvider: async (c) => { const { config } = getActorRuntimeContext(); - const { workspaceId, taskId } = parseTaskSandboxKey(c.key); - const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const task = await workspace.getTask({ workspaceId, taskId }); - const providerId = resolveSandboxProviderId(config, task.providerId); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); - if (providerId === "e2b") { + if (sandboxProviderId === "e2b") { return e2b({ create: () => ({ - template: config.providers.e2b.template ?? 
"sandbox-agent-full-0.3.x", + template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), }), installAgents: ["claude", "codex"], }); } - return createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + return createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); }, }); @@ -236,23 +236,23 @@ async function providerForConnection(c: any): Promise { const providerFactory = baseTaskSandbox.config.actions as Record; void providerFactory; const { config } = getActorRuntimeContext(); - const { workspaceId, taskId } = parseTaskSandboxKey(c.key); - const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const task = await workspace.getTask({ workspaceId, taskId }); - const providerId = resolveSandboxProviderId(config, task.providerId); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); const provider = - providerId === "e2b" + sandboxProviderId === "e2b" ? e2b({ create: () => ({ - template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x", + template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), }), installAgents: ["claude", "codex"], }) - : createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + : createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? 
DEFAULT_LOCAL_SANDBOX_IMAGE); c.vars.provider = provider; return provider; @@ -360,31 +360,31 @@ export const taskSandbox = actor({ } }, - async providerState(c: any): Promise<{ providerId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { + async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { const { config } = getActorRuntimeContext(); const { taskId } = parseTaskSandboxKey(c.key); const at = Date.now(); - const providerId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); + const sandboxProviderId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); if (c.state.sandboxDestroyed) { - return { providerId, sandboxId: taskId, state: "destroyed", at }; + return { sandboxProviderId, sandboxId: taskId, state: "destroyed", at }; } if (!c.state.sandboxId) { - return { providerId, sandboxId: taskId, state: "pending", at }; + return { sandboxProviderId, sandboxId: taskId, state: "pending", at }; } try { const health = await baseActions.getHealth(c); return { - providerId, + sandboxProviderId, sandboxId: taskId, state: health.status === "ok" ? 
"running" : "degraded", at, }; } catch { return { - providerId, + sandboxProviderId, sandboxId: taskId, state: "error", at, diff --git a/foundry/packages/backend/src/actors/task/db/migrations.ts b/foundry/packages/backend/src/actors/task/db/migrations.ts index 4d4630b..dc3193e 100644 --- a/foundry/packages/backend/src/actors/task/db/migrations.ts +++ b/foundry/packages/backend/src/actors/task/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_charming_maestro", breakpoints: true, }, + { + idx: 1, + when: 1773810000000, + tag: "0001_sandbox_provider_columns", + breakpoints: true, + }, ], } as const; @@ -63,9 +69,13 @@ CREATE TABLE \`task_workbench_sessions\` ( \`created\` integer DEFAULT 1 NOT NULL, \`closed\` integer DEFAULT 0 NOT NULL, \`thinking_since_ms\` integer, - \`created_at\` integer NOT NULL, +\`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; +--> statement-breakpoint +ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts b/foundry/packages/backend/src/actors/task/db/schema.ts index 2b59f4b..0c1f6cd 100644 --- a/foundry/packages/backend/src/actors/task/db/schema.ts +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -9,7 +9,7 @@ export const task = sqliteTable( branchName: text("branch_name"), title: text("title"), task: text("task").notNull(), - providerId: text("provider_id").notNull(), + sandboxProviderId: text("sandbox_provider_id").notNull(), status: text("status").notNull(), agentType: text("agent_type").default("claude"), prSubmitted: integer("pr_submitted").default(0), @@ -39,7 +39,7 @@ export const taskRuntime = sqliteTable( export const taskSandboxes = sqliteTable("task_sandboxes", { sandboxId: text("sandbox_id").notNull().primaryKey(), - providerId: text("provider_id").notNull(), + 
sandboxProviderId: text("sandbox_provider_id").notNull(), sandboxActorId: text("sandbox_actor_id"), switchTarget: text("switch_target").notNull(), cwd: text("cwd"), diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index 968171c..f2b9e51 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -9,7 +9,7 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchUpdateDraftInput, - ProviderId, + SandboxProviderId, } from "@sandbox-agent/foundry-shared"; import { expectQueueResponse } from "../../services/queue.js"; import { selfTask } from "../handles.js"; @@ -37,15 +37,14 @@ import { import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js"; export interface TaskInput { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; repoRemote: string; - repoLocalPath?: string; branchName: string | null; title: string | null; task: string; - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; agentType: AgentType | null; explicitTitle: string | null; explicitBranchName: string | null; @@ -53,15 +52,15 @@ export interface TaskInput { } interface InitializeCommand { - providerId?: ProviderId; + sandboxProviderId?: SandboxProviderId; } interface TaskActionCommand { reason?: string; } -interface TaskTabCommand { - tabId: string; +interface TaskSessionCommand { + sessionId: string; } interface TaskStatusSyncCommand { @@ -123,15 +122,14 @@ export const task = actor({ actionTimeout: 5 * 60_000, }, createState: (_c, input: TaskInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, repoId: input.repoId, taskId: input.taskId, repoRemote: input.repoRemote, - repoLocalPath: input.repoLocalPath, branchName: input.branchName, title: input.title, task: input.task, - providerId: input.providerId, + sandboxProviderId: 
input.sandboxProviderId, agentType: input.agentType, explicitTitle: input.explicitTitle, explicitBranchName: input.explicitBranchName, @@ -257,7 +255,7 @@ export const task = actor({ }); }, - async createWorkbenchSession(c, input?: { model?: string }): Promise<{ tabId: string }> { + async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> { const self = selfTask(c); const result = await self.send( taskWorkflowQueueName("task.command.workbench.create_session"), @@ -267,7 +265,7 @@ export const task = actor({ timeout: 10_000, }, ); - return expectQueueResponse<{ tabId: string }>(result); + return expectQueueResponse<{ sessionId: string }>(result); }, /** @@ -287,7 +285,7 @@ export const task = actor({ const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.rename_session"), - { sessionId: input.tabId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, + { sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, { wait: true, timeout: 10_000, @@ -299,7 +297,7 @@ export const task = actor({ const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.set_session_unread"), - { sessionId: input.tabId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, + { sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, { wait: true, timeout: 10_000, @@ -312,13 +310,12 @@ export const task = actor({ await self.send( taskWorkflowQueueName("task.command.workbench.update_draft"), { - sessionId: input.tabId, + sessionId: input.sessionId, text: input.text, attachments: input.attachments, } satisfies TaskWorkbenchUpdateDraftCommand, { - wait: true, - timeout: 10_000, + wait: false, }, ); }, @@ -327,7 +324,7 @@ export const task = actor({ const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.change_model"), - { sessionId: input.tabId, model: input.model 
} satisfies TaskWorkbenchChangeModelCommand, + { sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand, { wait: true, timeout: 10_000, @@ -340,7 +337,7 @@ export const task = actor({ await self.send( taskWorkflowQueueName("task.command.workbench.send_message"), { - sessionId: input.tabId, + sessionId: input.sessionId, text: input.text, attachments: input.attachments, } satisfies TaskWorkbenchSendMessageCommand, @@ -350,9 +347,9 @@ export const task = actor({ ); }, - async stopWorkbenchSession(c, input: TaskTabCommand): Promise { + async stopWorkbenchSession(c, input: TaskSessionCommand): Promise { const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, { + await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { wait: false, }); }, @@ -365,9 +362,9 @@ export const task = actor({ }); }, - async closeWorkbenchSession(c, input: TaskTabCommand): Promise { + async closeWorkbenchSession(c, input: TaskSessionCommand): Promise { const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, { + await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { wait: false, }); }, diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workbench.ts index 9277152..d689b3a 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workbench.ts @@ -3,10 +3,11 @@ import { randomUUID } from "node:crypto"; import { basename, dirname } from "node:path"; import { asc, eq } from "drizzle-orm"; import { getActorRuntimeContext } from 
"../context.js"; -import { getOrCreateProject, getOrCreateTaskSandbox, getOrCreateWorkspace, getTaskSandbox, selfTask } from "../handles.js"; +import { getOrCreateRepository, getOrCreateTaskSandbox, getOrCreateOrganization, getTaskSandbox, selfTask } from "../handles.js"; import { SANDBOX_REPO_CWD } from "../sandbox/index.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { githubRepoFullNameFromRemote } from "../../services/repo.js"; import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js"; import { getCurrentRecord } from "./workflow/common.js"; @@ -172,8 +173,7 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean } const mapped = rows.map((row: any) => ({ ...row, id: row.sessionId, - sessionId: row.sandboxSessionId ?? null, - tabId: row.sessionId, + sessionId: row.sessionId, sandboxSessionId: row.sandboxSessionId ?? null, status: row.status ?? "ready", errorMessage: row.errorMessage ?? null, @@ -209,8 +209,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise { return { ...row, id: row.sessionId, - sessionId: row.sandboxSessionId ?? null, - tabId: row.sessionId, + sessionId: row.sessionId, sandboxSessionId: row.sandboxSessionId ?? null, status: row.status ?? "ready", errorMessage: row.errorMessage ?? 
null, @@ -227,7 +226,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise { async function ensureSessionMeta( c: any, params: { - tabId: string; + sessionId: string; sandboxSessionId?: string | null; model?: string; sessionName?: string; @@ -238,7 +237,7 @@ async function ensureSessionMeta( }, ): Promise { await ensureWorkbenchSessionTable(c); - const existing = await readSessionMeta(c, params.tabId); + const existing = await readSessionMeta(c, params.sessionId); if (existing) { return existing; } @@ -251,7 +250,7 @@ async function ensureSessionMeta( await c.db .insert(taskWorkbenchSessions) .values({ - sessionId: params.tabId, + sessionId: params.sessionId, sandboxSessionId: params.sandboxSessionId ?? null, sessionName, model, @@ -271,20 +270,20 @@ async function ensureSessionMeta( }) .run(); - return await readSessionMeta(c, params.tabId); + return await readSessionMeta(c, params.sessionId); } -async function updateSessionMeta(c: any, tabId: string, values: Record): Promise { - await ensureSessionMeta(c, { tabId }); +async function updateSessionMeta(c: any, sessionId: string, values: Record): Promise { + await ensureSessionMeta(c, { sessionId }); await c.db .update(taskWorkbenchSessions) .set({ ...values, updatedAt: Date.now(), }) - .where(eq(taskWorkbenchSessions.sessionId, tabId)) + .where(eq(taskWorkbenchSessions.sessionId, sessionId)) .run(); - return await readSessionMeta(c, tabId); + return await readSessionMeta(c, sessionId); } async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise { @@ -296,20 +295,20 @@ async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: strin return await readSessionMeta(c, row.sessionId); } -async function requireReadySessionMeta(c: any, tabId: string): Promise { - const meta = await readSessionMeta(c, tabId); +async function requireReadySessionMeta(c: any, sessionId: string): Promise { + const meta = await readSessionMeta(c, sessionId); if (!meta) { - throw 
new Error(`Unknown workbench tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { - throw new Error(meta.errorMessage ?? "This workbench tab is still preparing"); + throw new Error(meta.errorMessage ?? "This workbench session is still preparing"); } return meta; } -export function requireSendableSessionMeta(meta: any, tabId: string): any { +export function requireSendableSessionMeta(meta: any, sessionId: string): any { if (!meta) { - throw new Error(`Unknown workbench tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { throw new Error(`Session is not ready (status: ${meta.status}). Wait for session provisioning to complete.`); @@ -331,23 +330,23 @@ async function getTaskSandboxRuntime( ): Promise<{ sandbox: any; sandboxId: string; - providerId: string; + sandboxProviderId: string; switchTarget: string; cwd: string; }> { const { config } = getActorRuntimeContext(); const sandboxId = stableSandboxId(c); - const providerId = resolveSandboxProviderId(config, record.providerId ?? c.state.providerId ?? null); - const sandbox = await getOrCreateTaskSandbox(c, c.state.workspaceId, sandboxId, {}); + const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? c.state.sandboxProviderId ?? null); + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {}); const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; - const switchTarget = providerId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; + const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; const now = Date.now(); await c.db .insert(taskSandboxes) .values({ sandboxId, - providerId, + sandboxProviderId, sandboxActorId: typeof actorId === "string" ? 
actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, @@ -358,7 +357,7 @@ async function getTaskSandboxRuntime( .onConflictDoUpdate({ target: taskSandboxes.sandboxId, set: { - providerId, + sandboxProviderId, sandboxActorId: typeof actorId === "string" ? actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, @@ -381,7 +380,7 @@ async function getTaskSandboxRuntime( return { sandbox, sandboxId, - providerId, + sandboxProviderId, switchTarget, cwd: SANDBOX_REPO_CWD, }; @@ -392,17 +391,10 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise): Promise { - await updateSessionMeta(c, tabId, { +async function writeSessionTranscript(c: any, sessionId: string, transcript: Array): Promise { + await updateSessionMeta(c, sessionId, { transcriptJson: JSON.stringify(transcript), transcriptUpdatedAt: Date.now(), }); @@ -689,12 +681,12 @@ async function enqueueWorkbenchRefresh( await self.send(command, body, { wait: false }); } -async function enqueueWorkbenchEnsureSession(c: any, tabId: string): Promise { +async function enqueueWorkbenchEnsureSession(c: any, sessionId: string): Promise { const self = selfTask(c); await self.send( "task.command.workbench.ensure_session", { - tabId, + sessionId, }, { wait: false, @@ -742,8 +734,8 @@ async function readPullRequestSummary(c: any, branchName: string | null) { } try { - const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote); - return await project.getPullRequestForBranch({ branchName }); + const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); + return await repository.getPullRequestForBranch({ branchName }); } catch { return null; } @@ -754,7 +746,7 @@ export async function ensureWorkbenchSeeded(c: any): Promise { const record = await getCurrentRecord({ db: c.db, state: c.state }); if (record.activeSessionId) { await ensureSessionMeta(c, { - tabId: record.activeSessionId, + sessionId: record.activeSessionId, 
sandboxSessionId: record.activeSessionId, model: defaultModelForAgent(record.agentType), sessionName: "Session 1", @@ -783,7 +775,8 @@ function buildSessionSummary(record: any, meta: any): any { return { id: meta.id, - sessionId: derivedSandboxSessionId, + sessionId: meta.sessionId, + sandboxSessionId: derivedSandboxSessionId, sessionName: meta.sessionName, agent: agentKindForModel(meta.model), model: meta.model, @@ -798,9 +791,8 @@ function buildSessionSummary(record: any, meta: any): any { function buildSessionDetailFromMeta(record: any, meta: any): any { const summary = buildSessionSummary(record, meta); return { - sessionId: meta.tabId, - tabId: meta.tabId, - sandboxSessionId: summary.sessionId, + sessionId: meta.sessionId, + sandboxSessionId: summary.sandboxSessionId ?? null, sessionName: summary.sessionName, agent: summary.agent, model: summary.model, @@ -820,7 +812,7 @@ function buildSessionDetailFromMeta(record: any, meta: any): any { /** * Builds a WorkbenchTaskSummary from local task actor state. Task actors push - * this to the parent workspace actor so workspace sidebar reads stay local. + * this to the parent organization actor so organization sidebar reads stay local. */ export async function buildTaskSummary(c: any): Promise { const record = await ensureWorkbenchSeeded(c); @@ -866,7 +858,7 @@ export async function buildTaskDetail(c: any): Promise { fileTree: gitState.fileTree, minutesUsed: 0, sandboxes: (record.sandboxes ?? []).map((sandbox: any) => ({ - providerId: sandbox.providerId, + sandboxProviderId: sandbox.sandboxProviderId, sandboxId: sandbox.sandboxId, cwd: sandbox.cwd ?? null, })), @@ -875,13 +867,13 @@ export async function buildTaskDetail(c: any): Promise { } /** - * Builds a WorkbenchSessionDetail for a specific session tab. + * Builds a WorkbenchSessionDetail for a specific session. 
*/ -export async function buildSessionDetail(c: any, tabId: string): Promise { +export async function buildSessionDetail(c: any, sessionId: string): Promise { const record = await ensureWorkbenchSeeded(c); - const meta = await readSessionMeta(c, tabId); + const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { - throw new Error(`Unknown workbench session tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } if (!meta.sandboxSessionId) { @@ -891,7 +883,7 @@ export async function buildSessionDetail(c: any, tabId: string): Promise { try { const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { - await writeSessionTranscript(c, meta.tabId, transcript); + await writeSessionTranscript(c, meta.sessionId, transcript); return buildSessionDetailFromMeta(record, { ...meta, transcript, @@ -913,21 +905,21 @@ export async function getTaskDetail(c: any): Promise { return await buildTaskDetail(c); } -export async function getSessionDetail(c: any, tabId: string): Promise { - return await buildSessionDetail(c, tabId); +export async function getSessionDetail(c: any, sessionId: string): Promise { + return await buildSessionDetail(c, sessionId); } /** * Replaces the old notifyWorkbenchUpdated pattern. * * The task actor emits two kinds of updates: - * - Push summary state up to the parent workspace actor so the sidebar + * - Push summary state up to the parent organization actor so the sidebar * materialized projection stays current. * - Broadcast full detail/session payloads down to direct task subscribers. 
*/ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise { - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); c.broadcast("taskUpdated", { type: "taskDetailUpdated", detail: await buildTaskDetail(c), @@ -956,8 +948,8 @@ export async function refreshWorkbenchSessionTranscript(c: any, sessionId: strin } const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); - await writeSessionTranscript(c, meta.tabId, transcript); - await broadcastTaskUpdate(c, { sessionId: meta.tabId }); + await writeSessionTranscript(c, meta.sessionId, transcript); + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } export async function renameWorkbenchTask(c: any, value: string): Promise { @@ -1021,31 +1013,31 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise { - const tabId = `tab-${randomUUID()}`; +export async function createWorkbenchSession(c: any, model?: string): Promise<{ sessionId: string }> { + const sessionId = `session-${randomUUID()}`; const record = await ensureWorkbenchSeeded(c); await ensureSessionMeta(c, { - tabId, + sessionId, model: model ?? 
defaultModelForAgent(record.agentType), sandboxSessionId: null, status: pendingWorkbenchSessionStatus(record), created: false, }); - await broadcastTaskUpdate(c, { sessionId: tabId }); - await enqueueWorkbenchEnsureSession(c, tabId); - return { tabId }; + await broadcastTaskUpdate(c, { sessionId: sessionId }); + await enqueueWorkbenchEnsureSession(c, sessionId); + return { sessionId }; } -export async function ensureWorkbenchSession(c: any, tabId: string, model?: string): Promise { - const meta = await readSessionMeta(c, tabId); +export async function ensureWorkbenchSession(c: any, sessionId: string, model?: string): Promise { + const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; } @@ -1055,12 +1047,12 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { sessionId: meta.sandboxSessionId, }); - await broadcastTaskUpdate(c, { sessionId: tabId }); + await broadcastTaskUpdate(c, { sessionId: sessionId }); return; } - await updateSessionMeta(c, tabId, { - sandboxSessionId: meta.sandboxSessionId ?? tabId, + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, status: "pending_session_create", errorMessage: null, }); @@ -1069,7 +1061,7 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); await runtime.sandbox.createSession({ - id: meta.sandboxSessionId ?? tabId, + id: meta.sandboxSessionId ?? sessionId, agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)), model: model ?? meta.model ?? 
defaultModelForAgent(record.agentType), sessionInit: { @@ -1077,22 +1069,22 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri }, }); - await updateSessionMeta(c, tabId, { - sandboxSessionId: meta.sandboxSessionId ?? tabId, + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, status: "ready", errorMessage: null, }); await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId: meta.sandboxSessionId ?? tabId, + sessionId: meta.sandboxSessionId ?? sessionId, }); } catch (error) { - await updateSessionMeta(c, tabId, { + await updateSessionMeta(c, sessionId, { status: "error", errorMessage: error instanceof Error ? error.message : String(error), }); } - await broadcastTaskUpdate(c, { sessionId: tabId }); + await broadcastTaskUpdate(c, { sessionId: sessionId }); } export async function enqueuePendingWorkbenchSessions(c: any): Promise { @@ -1105,7 +1097,7 @@ export async function enqueuePendingWorkbenchSessions(c: any): Promise { await self.send( "task.command.workbench.ensure_session", { - tabId: row.tabId, + sessionId: row.sessionId, model: row.model, }, { @@ -1159,7 +1151,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str let shouldEnsure = nextMeta.status === "pending_provision" || nextMeta.status === "pending_session_create" || nextMeta.status === "error"; if (shouldRecreateSessionForModelChange(nextMeta)) { - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(nextMeta.sandboxSessionId); nextMeta = await updateSessionMeta(c, sessionId, { sandboxSessionId: null, @@ -1171,7 +1163,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str }); shouldEnsure = true; } else if (nextMeta.status === "ready" && nextMeta.sandboxSessionId) { - const sandbox = 
getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); if (typeof sandbox.rawSendSessionMethod === "function") { try { await sandbox.rawSendSessionMethod(nextMeta.sandboxSessionId, "session/set_config_option", { @@ -1245,7 +1237,7 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri export async function stopWorkbenchSession(c: any, sessionId: string): Promise { const meta = await requireReadySessionMeta(c, sessionId); - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); await updateSessionMeta(c, sessionId, { thinkingSinceMs: null, @@ -1255,7 +1247,7 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise { const record = await ensureWorkbenchSeeded(c); - const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { tabId: sessionId, sandboxSessionId: sessionId })); + const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? 
(await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId })); let changed = false; if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { @@ -1309,13 +1301,13 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat } if (changed) { + await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + sessionId, + }); if (status !== "running") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId, - }); await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); } - await broadcastTaskUpdate(c, { sessionId: meta.tabId }); + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } } @@ -1331,7 +1323,7 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise< return; } if (meta.sandboxSessionId) { - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); } await updateSessionMeta(c, sessionId, { @@ -1357,10 +1349,10 @@ export async function markWorkbenchUnread(c: any): Promise { if (!latest) { return; } - await updateSessionMeta(c, latest.tabId, { + await updateSessionMeta(c, latest.sessionId, { unread: 1, }); - await broadcastTaskUpdate(c, { sessionId: latest.tabId }); + await broadcastTaskUpdate(c, { sessionId: latest.sessionId }); } export async function publishWorkbenchPr(c: any): Promise { @@ -1368,17 +1360,17 @@ export async function publishWorkbenchPr(c: any): Promise { if (!record.branchName) { throw new Error("cannot publish PR without a branch"); } - let repoLocalPath = c.state.repoLocalPath; - if (!repoLocalPath) { - const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote); - const result = await project.ensure({ remoteUrl: c.state.repoRemote }); - 
repoLocalPath = result.localPath; - c.state.repoLocalPath = repoLocalPath; + const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); + const metadata = await repository.getRepositoryMetadata({}); + const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(c.state.repoRemote); + if (!repoFullName) { + throw new Error(`Unable to resolve GitHub repository for ${c.state.repoRemote}`); } const { driver } = getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - const created = await driver.github.createPr(repoLocalPath, record.branchName, record.title ?? c.state.task, undefined, { + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.createPr(repoFullName, record.branchName, record.title ?? c.state.task, undefined, { githubToken: auth?.githubToken ?? null, + baseBranch: metadata.defaultBranch ?? undefined, }); await c.db .update(taskTable) diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts index 5e55b6c..d03ade1 100644 --- a/foundry/packages/backend/src/actors/task/workflow/commands.ts +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -28,7 +28,7 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise 0) { target = connection.endpoint; @@ -78,9 +78,9 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { + void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => { logActorWarning("task.commands", "failed to release sandbox during archive", { - workspaceId: loopCtx.state.workspaceId, + organizationId: loopCtx.state.organizationId, repoId: loopCtx.state.repoId, taskId: loopCtx.state.taskId, sandboxId: record.activeSandboxId, @@ -106,7 +106,7 @@ export async function 
killDestroySandboxActivity(loopCtx: any): Promise { return; } - await getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy(); + await getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(); } export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { diff --git a/foundry/packages/backend/src/actors/task/workflow/common.ts b/foundry/packages/backend/src/actors/task/workflow/common.ts index 0dfc667..ae1e8dd 100644 --- a/foundry/packages/backend/src/actors/task/workflow/common.ts +++ b/foundry/packages/backend/src/actors/task/workflow/common.ts @@ -93,7 +93,7 @@ export async function getCurrentRecord(ctx: any): Promise { branchName: taskTable.branchName, title: taskTable.title, task: taskTable.task, - providerId: taskTable.providerId, + sandboxProviderId: taskTable.sandboxProviderId, status: taskTable.status, statusMessage: taskRuntime.statusMessage, activeSandboxId: taskRuntime.activeSandboxId, @@ -115,7 +115,7 @@ export async function getCurrentRecord(ctx: any): Promise { const sandboxes = await db .select({ sandboxId: taskSandboxes.sandboxId, - providerId: taskSandboxes.providerId, + sandboxProviderId: taskSandboxes.sandboxProviderId, sandboxActorId: taskSandboxes.sandboxActorId, switchTarget: taskSandboxes.switchTarget, cwd: taskSandboxes.cwd, @@ -126,21 +126,21 @@ export async function getCurrentRecord(ctx: any): Promise { .all(); return { - workspaceId: ctx.state.workspaceId, + organizationId: ctx.state.organizationId, repoId: ctx.state.repoId, repoRemote: ctx.state.repoRemote, taskId: ctx.state.taskId, branchName: row.branchName, title: row.title, task: row.task, - providerId: row.providerId, + sandboxProviderId: row.sandboxProviderId, status: row.status, statusMessage: row.statusMessage ?? null, activeSandboxId: row.activeSandboxId ?? null, activeSessionId: row.activeSessionId ?? 
null, sandboxes: sandboxes.map((sb) => ({ sandboxId: sb.sandboxId, - providerId: sb.providerId, + sandboxProviderId: sb.sandboxProviderId, sandboxActorId: sb.sandboxActorId ?? null, switchTarget: sb.switchTarget, cwd: sb.cwd ?? null, @@ -165,8 +165,8 @@ export async function getCurrentRecord(ctx: any): Promise { export async function appendHistory(ctx: any, kind: string, payload: Record): Promise { const client = ctx.client(); - const history = await client.history.getOrCreate(historyKey(ctx.state.workspaceId, ctx.state.repoId), { - createWithInput: { workspaceId: ctx.state.workspaceId, repoId: ctx.state.repoId }, + const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), { + createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId }, }); await history.append({ kind, diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index c14ab78..f6ffd10 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -155,7 +155,7 @@ const commandHandlers: Record = { await loopCtx.step({ name: "workbench-send-initial-message", timeout: 5 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, created.tabId, msg.body.text, []), + run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []), }); } catch (error) { logActorWarning("task.workflow", "create_session_and_send failed", { @@ -169,7 +169,7 @@ const commandHandlers: Record = { await loopCtx.step({ name: "workbench-ensure-session", timeout: 5 * 60_000, - run: async () => ensureWorkbenchSession(loopCtx, msg.body.tabId, msg.body?.model), + run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model), }); await msg.complete({ ok: true }); }, @@ -278,7 +278,16 @@ export async function runTaskWorkflow(ctx: any): Promise { } const handler = 
commandHandlers[msg.name as TaskQueueName]; if (handler) { - await handler(loopCtx, msg); + try { + await handler(loopCtx, msg); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("task.workflow", "task workflow command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } } return Loop.continue(undefined); }); diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts b/foundry/packages/backend/src/actors/task/workflow/init.ts index 9cfe3d3..8a9962d 100644 --- a/foundry/packages/backend/src/actors/task/workflow/init.ts +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -17,7 +17,7 @@ async function ensureTaskRuntimeCacheColumns(db: any): Promise { export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { const { config } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); const now = Date.now(); await ensureTaskRuntimeCacheColumns(loopCtx.db); @@ -29,7 +29,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< branchName: loopCtx.state.branchName, title: loopCtx.state.title, task: loopCtx.state.task, - providerId, + sandboxProviderId, status: "init_bootstrap_db", agentType: loopCtx.state.agentType ?? config.default_agent, createdAt: now, @@ -41,7 +41,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< branchName: loopCtx.state.branchName, title: loopCtx.state.title, task: loopCtx.state.task, - providerId, + sandboxProviderId, status: "init_bootstrap_db", agentType: loopCtx.state.agentType ?? 
config.default_agent, updatedAt: now, @@ -99,7 +99,7 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro }); } catch (error) { logActorWarning("task.init", "background provision command failed", { - workspaceId: loopCtx.state.workspaceId, + organizationId: loopCtx.state.organizationId, repoId: loopCtx.state.repoId, taskId: loopCtx.state.taskId, error: resolveErrorMessage(error), @@ -111,7 +111,7 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro export async function initCompleteActivity(loopCtx: any, body: any): Promise { const now = Date.now(); const { config } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); await setTaskState(loopCtx, "init_complete", "task initialized"); await loopCtx.db @@ -125,12 +125,12 @@ export async function initCompleteActivity(loopCtx: any, body: any): Promise [name, queue()])), - options: { - name: "Workspace", - icon: "compass", - actionTimeout: 5 * 60_000, - }, - createState: (_c, workspaceId: string) => ({ - workspaceId, - }), - actions: workspaceActions, - run: workflow(runWorkspaceWorkflow), -}); diff --git a/foundry/packages/backend/src/config/organization.ts b/foundry/packages/backend/src/config/organization.ts new file mode 100644 index 0000000..8b5c766 --- /dev/null +++ b/foundry/packages/backend/src/config/organization.ts @@ -0,0 +1,13 @@ +import type { AppConfig } from "@sandbox-agent/foundry-shared"; + +export function defaultOrganization(config: AppConfig): string { + const organizationId = config.organization.default.trim(); + return organizationId.length > 0 ? 
organizationId : "default"; +} + +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + if (flagOrganization && flagOrganization.trim().length > 0) { + return flagOrganization.trim(); + } + return defaultOrganization(config); +} diff --git a/foundry/packages/backend/src/config/workspace.ts b/foundry/packages/backend/src/config/workspace.ts deleted file mode 100644 index 2225200..0000000 --- a/foundry/packages/backend/src/config/workspace.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { AppConfig } from "@sandbox-agent/foundry-shared"; - -export function defaultWorkspace(config: AppConfig): string { - const ws = config.workspace.default.trim(); - return ws.length > 0 ? ws : "default"; -} - -export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string { - if (flagWorkspace && flagWorkspace.trim().length > 0) { - return flagWorkspace.trim(); - } - return defaultWorkspace(config); -} diff --git a/foundry/packages/backend/src/driver.ts b/foundry/packages/backend/src/driver.ts index 7152592..5c01035 100644 --- a/foundry/packages/backend/src/driver.ts +++ b/foundry/packages/backend/src/driver.ts @@ -1,67 +1,12 @@ -import type { BranchSnapshot } from "./integrations/git/index.js"; -import type { PullRequestSnapshot } from "./integrations/github/index.js"; -import { - validateRemote, - ensureCloned, - fetch, - listRemoteBranches, - listLocalRemoteRefs, - remoteDefaultBaseRef, - revParse, - ensureRemoteBranch, - diffStatForBranch, - conflictsWithMain, -} from "./integrations/git/index.js"; -import { - gitSpiceAvailable, - gitSpiceListStack, - gitSpiceRebaseBranch, - gitSpiceReparentBranch, - gitSpiceRestackRepo, - gitSpiceRestackSubtree, - gitSpiceSyncRepo, - gitSpiceTrackBranch, -} from "./integrations/git-spice/index.js"; -import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js"; - -export interface GitDriver { - validateRemote(remoteUrl: string, 
options?: { githubToken?: string | null }): Promise; - ensureCloned(remoteUrl: string, targetPath: string, options?: { githubToken?: string | null }): Promise; - fetch(repoPath: string, options?: { githubToken?: string | null }): Promise; - listRemoteBranches(repoPath: string, options?: { githubToken?: string | null }): Promise; - /** Read remote-tracking refs from the local clone without fetching. */ - listLocalRemoteRefs(repoPath: string): Promise; - remoteDefaultBaseRef(repoPath: string): Promise; - revParse(repoPath: string, ref: string): Promise; - ensureRemoteBranch(repoPath: string, branchName: string, options?: { githubToken?: string | null }): Promise; - diffStatForBranch(repoPath: string, branchName: string): Promise; - conflictsWithMain(repoPath: string, branchName: string): Promise; -} - -export interface StackBranchSnapshot { - branchName: string; - parentBranch: string | null; -} - -export interface StackDriver { - available(repoPath: string): Promise; - listStack(repoPath: string): Promise; - syncRepo(repoPath: string): Promise; - restackRepo(repoPath: string): Promise; - restackSubtree(repoPath: string, branchName: string): Promise; - rebaseBranch(repoPath: string, branchName: string): Promise; - reparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise; - trackBranch(repoPath: string, branchName: string, parentBranch: string): Promise; -} +import { createPr, starRepository } from "./integrations/github/index.js"; export interface GithubDriver { - listPullRequests(repoPath: string, options?: { githubToken?: string | null }): Promise; createPr( - repoPath: string, + repoFullName: string, headBranch: string, title: string, body?: string, - options?: { githubToken?: string | null }, + options?: { githubToken?: string | null; baseBranch?: string | null }, ): Promise<{ number: number; url: string }>; starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise; } @@ -71,38 +16,13 @@ export interface 
TmuxDriver { } export interface BackendDriver { - git: GitDriver; - stack: StackDriver; github: GithubDriver; tmux: TmuxDriver; } export function createDefaultDriver(): BackendDriver { return { - git: { - validateRemote, - ensureCloned, - fetch, - listRemoteBranches, - listLocalRemoteRefs, - remoteDefaultBaseRef, - revParse, - ensureRemoteBranch, - diffStatForBranch, - conflictsWithMain, - }, - stack: { - available: gitSpiceAvailable, - listStack: gitSpiceListStack, - syncRepo: gitSpiceSyncRepo, - restackRepo: gitSpiceRestackRepo, - restackSubtree: gitSpiceRestackSubtree, - rebaseBranch: gitSpiceRebaseBranch, - reparentBranch: gitSpiceReparentBranch, - trackBranch: gitSpiceTrackBranch, - }, github: { - listPullRequests, createPr, starRepository, }, diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts index fb75b94..3af36c3 100644 --- a/foundry/packages/backend/src/index.ts +++ b/foundry/packages/backend/src/index.ts @@ -3,14 +3,14 @@ import { cors } from "hono/cors"; import { randomUUID } from "node:crypto"; import { initActorRuntimeContext } from "./actors/context.js"; import { registry } from "./actors/index.js"; -import { workspaceKey } from "./actors/keys.js"; +import { organizationKey } from "./actors/keys.js"; import { loadConfig } from "./config/backend.js"; import { createBackends, createNotificationService } from "./notifications/index.js"; import { createDefaultDriver } from "./driver.js"; import { createClient } from "rivetkit/client"; import { initBetterAuthService } from "./services/better-auth.js"; import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; -import { APP_SHELL_WORKSPACE_ID } from "./actors/workspace/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js"; import { logger } from "./logging.js"; export interface BackendStartOptions { @@ -18,7 +18,7 @@ export interface BackendStartOptions { port?: number; } -interface AppWorkspaceLogContext 
{ +interface AppOrganizationLogContext { action?: string; cfConnectingIp?: string; cfRay?: string; @@ -68,8 +68,8 @@ export async function startBackend(options: BackendStartOptions = {}): Promise ({ + const requestHeaderContext = (c: any): AppOrganizationLogContext => ({ cfConnectingIp: c.req.header("cf-connecting-ip") ?? undefined, cfRay: c.req.header("cf-ray") ?? undefined, forwardedFor: c.req.header("x-forwarded-for") ?? undefined, @@ -164,27 +164,27 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { - if (cachedAppWorkspace) return cachedAppWorkspace; + const appOrganization = async (context: AppOrganizationLogContext = {}) => { + if (cachedAppOrganization) return cachedAppOrganization; const start = performance.now(); try { - const handle = await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), { - createWithInput: APP_SHELL_WORKSPACE_ID, + const handle = await actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, }); - cachedAppWorkspace = handle; + cachedAppOrganization = handle; logger.info( { ...context, cache: "miss", durationMs: Math.round((performance.now() - start) * 100) / 100, }, - "app_workspace_resolve", + "app_organization_resolve", ); return handle; } catch (error) { @@ -196,13 +196,13 @@ export async function startBackend(options: BackendStartOptions = {}): Promise ({ + const requestLogContext = (c: any, sessionId?: string): AppOrganizationLogContext => ({ ...requestHeaderContext(c), method: c.req.method, path: c.req.path, @@ -255,7 +255,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { const payload = await c.req.text(); - await (await appWorkspace(requestLogContext(c))).handleAppStripeWebhook({ + await (await appOrganization(requestLogContext(c))).handleAppStripeWebhook({ payload, signatureHeader: c.req.header("stripe-signature") ?? 
null, }); @@ -276,7 +276,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { const payload = await c.req.text(); - await (await appWorkspace(requestLogContext(c))).handleAppGithubWebhook({ + await (await appOrganization(requestLogContext(c))).handleAppGithubWebhook({ payload, signatureHeader: c.req.header("x-hub-signature-256") ?? null, eventHeader: c.req.header("x-github-event") ?? null, diff --git a/foundry/packages/backend/src/integrations/git-spice/index.ts b/foundry/packages/backend/src/integrations/git-spice/index.ts deleted file mode 100644 index 877c82a..0000000 --- a/foundry/packages/backend/src/integrations/git-spice/index.ts +++ /dev/null @@ -1,223 +0,0 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -const DEFAULT_TIMEOUT_MS = 2 * 60_000; - -interface SpiceCommand { - command: string; - prefix: string[]; -} - -export interface SpiceStackEntry { - branchName: string; - parentBranch: string | null; -} - -function spiceCommands(): SpiceCommand[] { - const explicit = process.env.HF_GIT_SPICE_BIN?.trim(); - const list: SpiceCommand[] = []; - if (explicit) { - list.push({ command: explicit, prefix: [] }); - } - list.push({ command: "git-spice", prefix: [] }); - list.push({ command: "git", prefix: ["spice"] }); - return list; -} - -function commandLabel(cmd: SpiceCommand): string { - return [cmd.command, ...cmd.prefix].join(" "); -} - -function looksMissing(error: unknown): boolean { - const detail = error instanceof Error ? 
error.message : String(error); - return detail.includes("ENOENT") || detail.includes("not a git command") || detail.includes("command not found"); -} - -async function tryRun(repoPath: string, cmd: SpiceCommand, args: string[]): Promise<{ stdout: string; stderr: string }> { - return await execFileAsync(cmd.command, [...cmd.prefix, ...args], { - cwd: repoPath, - timeout: DEFAULT_TIMEOUT_MS, - maxBuffer: 1024 * 1024 * 8, - env: { - ...process.env, - NO_COLOR: "1", - FORCE_COLOR: "0", - }, - }); -} - -async function pickCommand(repoPath: string): Promise { - for (const candidate of spiceCommands()) { - try { - await tryRun(repoPath, candidate, ["--help"]); - return candidate; - } catch (error) { - if (looksMissing(error)) { - continue; - } - } - } - return null; -} - -async function runSpice(repoPath: string, args: string[]): Promise<{ stdout: string; stderr: string }> { - const cmd = await pickCommand(repoPath); - if (!cmd) { - throw new Error("git-spice is not available (set HF_GIT_SPICE_BIN or install git-spice)"); - } - return await tryRun(repoPath, cmd, args); -} - -function parseLogJson(stdout: string): SpiceStackEntry[] { - const trimmed = stdout.trim(); - if (!trimmed) { - return []; - } - - const entries: SpiceStackEntry[] = []; - - // `git-spice log ... --json` prints one JSON object per line. - for (const line of trimmed.split("\n")) { - const raw = line.trim(); - if (!raw.startsWith("{")) { - continue; - } - try { - const value = JSON.parse(raw) as { - name?: string; - branch?: string; - parent?: string | null; - parentBranch?: string | null; - }; - const branchName = (value.name ?? value.branch ?? "").trim(); - if (!branchName) { - continue; - } - const parentRaw = value.parent ?? value.parentBranch ?? null; - const parentBranch = parentRaw ? 
parentRaw.trim() || null : null; - entries.push({ branchName, parentBranch }); - } catch { - continue; - } - } - - const seen = new Set(); - return entries.filter((entry) => { - if (seen.has(entry.branchName)) { - return false; - } - seen.add(entry.branchName); - return true; - }); -} - -async function runFallbacks(repoPath: string, commands: string[][], errorContext: string): Promise { - const failures: string[] = []; - for (const args of commands) { - try { - await runSpice(repoPath, args); - return; - } catch (error) { - failures.push(`${args.join(" ")} :: ${error instanceof Error ? error.message : String(error)}`); - } - } - throw new Error(`${errorContext}. attempts=${failures.join(" | ")}`); -} - -export async function gitSpiceAvailable(repoPath: string): Promise { - return (await pickCommand(repoPath)) !== null; -} - -export async function gitSpiceListStack(repoPath: string): Promise { - try { - const { stdout } = await runSpice(repoPath, ["log", "short", "--all", "--json", "--no-cr-status", "--no-prompt"]); - return parseLogJson(stdout); - } catch { - return []; - } -} - -export async function gitSpiceSyncRepo(repoPath: string): Promise { - await runFallbacks( - repoPath, - [ - ["repo", "sync", "--restack", "--no-prompt"], - ["repo", "sync", "--restack"], - ["repo", "sync"], - ], - "git-spice repo sync failed", - ); -} - -export async function gitSpiceRestackRepo(repoPath: string): Promise { - await runFallbacks( - repoPath, - [ - ["repo", "restack", "--no-prompt"], - ["repo", "restack"], - ], - "git-spice repo restack failed", - ); -} - -export async function gitSpiceRestackSubtree(repoPath: string, branchName: string): Promise { - await runFallbacks( - repoPath, - [ - ["upstack", "restack", "--branch", branchName, "--no-prompt"], - ["upstack", "restack", "--branch", branchName], - ["branch", "restack", "--branch", branchName, "--no-prompt"], - ["branch", "restack", "--branch", branchName], - ], - `git-spice restack subtree failed for ${branchName}`, - ); 
-} - -export async function gitSpiceRebaseBranch(repoPath: string, branchName: string): Promise { - await runFallbacks( - repoPath, - [ - ["branch", "restack", "--branch", branchName, "--no-prompt"], - ["branch", "restack", "--branch", branchName], - ], - `git-spice branch restack failed for ${branchName}`, - ); -} - -export async function gitSpiceReparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise { - await runFallbacks( - repoPath, - [ - ["upstack", "onto", "--branch", branchName, parentBranch, "--no-prompt"], - ["upstack", "onto", "--branch", branchName, parentBranch], - ["branch", "onto", "--branch", branchName, parentBranch, "--no-prompt"], - ["branch", "onto", "--branch", branchName, parentBranch], - ], - `git-spice reparent failed for ${branchName} -> ${parentBranch}`, - ); -} - -export async function gitSpiceTrackBranch(repoPath: string, branchName: string, parentBranch: string): Promise { - await runFallbacks( - repoPath, - [ - ["branch", "track", branchName, "--base", parentBranch, "--no-prompt"], - ["branch", "track", branchName, "--base", parentBranch], - ], - `git-spice track failed for ${branchName}`, - ); -} - -export function normalizeBaseBranchName(ref: string): string { - const trimmed = ref.trim(); - if (!trimmed) { - return "main"; - } - return trimmed.startsWith("origin/") ? trimmed.slice("origin/".length) : trimmed; -} - -export function describeSpiceCommandForLogs(repoPath: string): Promise { - return pickCommand(repoPath).then((cmd) => (cmd ? 
commandLabel(cmd) : null)); -} diff --git a/foundry/packages/backend/src/integrations/git/index.ts b/foundry/packages/backend/src/integrations/git/index.ts deleted file mode 100644 index 728239e..0000000 --- a/foundry/packages/backend/src/integrations/git/index.ts +++ /dev/null @@ -1,327 +0,0 @@ -import { execFile } from "node:child_process"; -import { chmodSync, existsSync, mkdirSync, mkdtempSync, writeFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { dirname, resolve } from "node:path"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -const DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS = 15_000; -const DEFAULT_GIT_FETCH_TIMEOUT_MS = 2 * 60_000; -const DEFAULT_GIT_CLONE_TIMEOUT_MS = 5 * 60_000; - -interface GitAuthOptions { - githubToken?: string | null; -} - -function resolveGithubToken(options?: GitAuthOptions): string | null { - const token = options?.githubToken ?? process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN ?? process.env.HF_GITHUB_TOKEN ?? process.env.HF_GH_TOKEN ?? null; - if (!token) return null; - const trimmed = token.trim(); - return trimmed.length > 0 ? trimmed : null; -} - -let cachedAskpassPath: string | null = null; -function ensureAskpassScript(): string { - if (cachedAskpassPath) { - return cachedAskpassPath; - } - - const dir = mkdtempSync(resolve(tmpdir(), "foundry-git-askpass-")); - const path = resolve(dir, "askpass.sh"); - - // Git invokes $GIT_ASKPASS with the prompt string as argv[1]. Provide both username and password. - // We avoid embedding the token in this file; it is read from env at runtime. - const content = [ - "#!/bin/sh", - 'prompt="$1"', - // Prefer GH_TOKEN/GITHUB_TOKEN but support HF_* aliases too. 
- 'token="${GH_TOKEN:-${GITHUB_TOKEN:-${HF_GITHUB_TOKEN:-${HF_GH_TOKEN:-}}}}"', - 'case "$prompt" in', - ' *Username*) echo "x-access-token" ;;', - ' *Password*) echo "$token" ;;', - ' *) echo "" ;;', - "esac", - "", - ].join("\n"); - - writeFileSync(path, content, "utf8"); - chmodSync(path, 0o700); - cachedAskpassPath = path; - return path; -} - -function gitEnv(options?: GitAuthOptions): Record { - const env: Record = { ...(process.env as Record) }; - env.GIT_TERMINAL_PROMPT = "0"; - - const token = resolveGithubToken(options); - if (token) { - env.GIT_ASKPASS = ensureAskpassScript(); - // Some tooling expects these vars; keep them aligned. - env.GITHUB_TOKEN = token; - env.GH_TOKEN = token; - } - - return env; -} - -async function configureGithubAuth(repoPath: string, options?: GitAuthOptions): Promise { - const token = resolveGithubToken(options); - if (!token) { - return; - } - - const authHeader = Buffer.from(`x-access-token:${token}`, "utf8").toString("base64"); - await execFileAsync("git", ["-C", repoPath, "config", "--local", "credential.helper", ""], { - env: gitEnv(options), - }); - await execFileAsync("git", ["-C", repoPath, "config", "--local", "http.https://github.com/.extraheader", `AUTHORIZATION: basic ${authHeader}`], { - env: gitEnv(options), - }); -} - -export interface BranchSnapshot { - branchName: string; - commitSha: string; -} - -export async function fetch(repoPath: string, options?: GitAuthOptions): Promise { - await execFileAsync("git", ["-C", repoPath, "fetch", "--prune", "--no-auto-gc"], { - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(options), - }); -} - -export async function revParse(repoPath: string, ref: string): Promise { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "rev-parse", ref], { env: gitEnv() }); - return stdout.trim(); -} - -export async function validateRemote(remoteUrl: string, options?: GitAuthOptions): Promise { - const remote = remoteUrl.trim(); - if (!remote) { - throw new 
Error("remoteUrl is required"); - } - try { - await execFileAsync("git", ["ls-remote", "--exit-code", remote, "HEAD"], { - // This command does not need repo context. Running from a neutral directory - // avoids inheriting broken worktree .git indirection inside dev containers. - cwd: tmpdir(), - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS, - env: gitEnv(options), - }); - } catch (error) { - const detail = error instanceof Error ? error.message : String(error); - throw new Error(`git remote validation failed: ${detail}`); - } -} - -function isGitRepo(path: string): boolean { - return existsSync(resolve(path, ".git")); -} - -export async function ensureCloned(remoteUrl: string, targetPath: string, options?: GitAuthOptions): Promise { - const remote = remoteUrl.trim(); - if (!remote) { - throw new Error("remoteUrl is required"); - } - - if (existsSync(targetPath)) { - if (!isGitRepo(targetPath)) { - throw new Error(`targetPath exists but is not a git repo: ${targetPath}`); - } - - // Keep origin aligned with the configured remote URL. 
- await execFileAsync("git", ["-C", targetPath, "remote", "set-url", "origin", remote], { - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(options), - }); - await configureGithubAuth(targetPath, options); - await fetch(targetPath, options); - return; - } - - mkdirSync(dirname(targetPath), { recursive: true }); - await execFileAsync("git", ["clone", remote, targetPath], { - maxBuffer: 1024 * 1024 * 8, - timeout: DEFAULT_GIT_CLONE_TIMEOUT_MS, - env: gitEnv(options), - }); - await configureGithubAuth(targetPath, options); - await fetch(targetPath, options); - await ensureLocalBaseBranch(targetPath); -} - -async function hasLocalBranches(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short)", "refs/heads"], { - env: gitEnv(), - }); - return stdout - .split("\n") - .map((line) => line.trim()) - .some(Boolean); - } catch { - return false; - } -} - -async function ensureLocalBaseBranch(repoPath: string): Promise { - if (await hasLocalBranches(repoPath)) { - return; - } - - const baseRef = await remoteDefaultBaseRef(repoPath); - const localBranch = baseRef.replace(/^origin\//, ""); - - await execFileAsync("git", ["-C", repoPath, "checkout", "-B", localBranch, baseRef], { - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(), - }); -} - -export async function remoteDefaultBaseRef(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "symbolic-ref", "refs/remotes/origin/HEAD"], { env: gitEnv() }); - const ref = stdout.trim(); // refs/remotes/origin/main - const match = ref.match(/^refs\/remotes\/(.+)$/); - if (match?.[1]) { - return match[1]; - } - } catch { - // fall through - } - - const candidates = ["origin/main", "origin/master", "main", "master"]; - for (const ref of candidates) { - try { - await execFileAsync("git", ["-C", repoPath, "rev-parse", "--verify", ref], { env: 
gitEnv() }); - return ref; - } catch { - continue; - } - } - return "origin/main"; -} - -/** - * Fetch from origin, then read remote-tracking refs. - * Use when you need guaranteed-fresh branch data and can tolerate network I/O. - */ -export async function listRemoteBranches(repoPath: string, options?: GitAuthOptions): Promise { - await fetch(repoPath, options); - return listLocalRemoteRefs(repoPath); -} - -/** - * Read remote-tracking refs (`refs/remotes/origin/*`) from the local clone - * without fetching. The data is only as fresh as the last fetch — use this - * when the branch sync actor keeps refs current and you want to avoid - * blocking on network I/O. - */ -export async function listLocalRemoteRefs(repoPath: string): Promise { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short) %(objectname)", "refs/remotes/origin"], { - maxBuffer: 1024 * 1024, - env: gitEnv(), - }); - - return stdout - .trim() - .split("\n") - .filter((line) => line.trim().length > 0) - .map((line) => { - const [refName, commitSha] = line.trim().split(/\s+/, 2); - const short = (refName ?? "").trim(); - const branchName = short.replace(/^origin\//, ""); - return { branchName, commitSha: commitSha ?? 
"" }; - }) - .filter((row) => row.branchName.length > 0 && row.branchName !== "HEAD" && row.branchName !== "origin" && row.commitSha.length > 0); -} - -async function remoteBranchExists(repoPath: string, branchName: string): Promise { - try { - await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`], { env: gitEnv() }); - return true; - } catch { - return false; - } -} - -export async function ensureRemoteBranch(repoPath: string, branchName: string, options?: GitAuthOptions): Promise { - await fetch(repoPath, options); - await ensureLocalBaseBranch(repoPath); - if (await remoteBranchExists(repoPath, branchName)) { - return; - } - - const baseRef = await remoteDefaultBaseRef(repoPath); - await execFileAsync("git", ["-C", repoPath, "push", "origin", `${baseRef}:refs/heads/${branchName}`], { - maxBuffer: 1024 * 1024 * 2, - env: gitEnv(options), - }); - await fetch(repoPath, options); -} - -export async function diffStatForBranch(repoPath: string, branchName: string): Promise { - try { - const baseRef = await remoteDefaultBaseRef(repoPath); - const headRef = `origin/${branchName}`; - const { stdout } = await execFileAsync("git", ["-C", repoPath, "diff", "--shortstat", `${baseRef}...${headRef}`], { - maxBuffer: 1024 * 1024, - env: gitEnv(), - }); - const trimmed = stdout.trim(); - if (!trimmed) { - return "+0/-0"; - } - const insertMatch = trimmed.match(/(\d+)\s+insertion/); - const deleteMatch = trimmed.match(/(\d+)\s+deletion/); - const insertions = insertMatch ? insertMatch[1] : "0"; - const deletions = deleteMatch ? deleteMatch[1] : "0"; - return `+${insertions}/-${deletions}`; - } catch { - return "+0/-0"; - } -} - -export async function conflictsWithMain(repoPath: string, branchName: string): Promise { - try { - const baseRef = await remoteDefaultBaseRef(repoPath); - const headRef = `origin/${branchName}`; - // Use merge-tree (git 2.38+) for a clean conflict check. 
- try { - await execFileAsync("git", ["-C", repoPath, "merge-tree", "--write-tree", "--no-messages", baseRef, headRef], { env: gitEnv() }); - // If merge-tree exits 0, no conflicts. Non-zero exit means conflicts. - return false; - } catch { - // merge-tree exits non-zero when there are conflicts - return true; - } - } catch { - return false; - } -} - -export async function getOriginOwner(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "remote", "get-url", "origin"], { env: gitEnv() }); - const url = stdout.trim(); - // Handle SSH: git@github.com:owner/repo.git - const sshMatch = url.match(/[:\/]([^\/]+)\/[^\/]+(?:\.git)?$/); - if (sshMatch) { - return sshMatch[1] ?? ""; - } - // Handle HTTPS: https://github.com/owner/repo.git - const httpsMatch = url.match(/\/\/[^\/]+\/([^\/]+)\//); - if (httpsMatch) { - return httpsMatch[1] ?? ""; - } - return ""; - } catch { - return ""; - } -} diff --git a/foundry/packages/backend/src/integrations/github/index.ts b/foundry/packages/backend/src/integrations/github/index.ts index 536c9db..87fc996 100644 --- a/foundry/packages/backend/src/integrations/github/index.ts +++ b/foundry/packages/backend/src/integrations/github/index.ts @@ -1,262 +1,80 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - interface GithubAuthOptions { githubToken?: string | null; + baseBranch?: string | null; } -function ghEnv(options?: GithubAuthOptions): Record { - const env: Record = { ...(process.env as Record) }; +function authHeaders(options?: GithubAuthOptions): HeadersInit { const token = options?.githubToken?.trim(); - if (token) { - env.GH_TOKEN = token; - env.GITHUB_TOKEN = token; + if (!token) { + throw new Error("GitHub token is required for this operation"); } - return env; -} - -export interface PullRequestSnapshot { - number: number; - headRefName: string; - state: string; - title: string; - url: 
string; - author: string; - isDraft: boolean; - ciStatus: string | null; - reviewStatus: string | null; - reviewer: string | null; -} - -interface GhPrListItem { - number: number; - headRefName: string; - state: string; - title: string; - url?: string; - author?: { login?: string }; - isDraft?: boolean; - statusCheckRollup?: Array<{ - state?: string; - status?: string; - conclusion?: string; - __typename?: string; - }>; - reviews?: Array<{ - state?: string; - author?: { login?: string }; - }>; -} - -function parseCiStatus(checks: GhPrListItem["statusCheckRollup"]): string | null { - if (!checks || checks.length === 0) return null; - - let total = 0; - let successes = 0; - let hasRunning = false; - - for (const check of checks) { - total++; - const conclusion = check.conclusion?.toUpperCase(); - const state = check.state?.toUpperCase(); - const status = check.status?.toUpperCase(); - - if (conclusion === "SUCCESS" || state === "SUCCESS") { - successes++; - } else if (status === "IN_PROGRESS" || status === "QUEUED" || status === "PENDING" || state === "PENDING") { - hasRunning = true; - } - } - - if (hasRunning && successes < total) { - return "running"; - } - - return `${successes}/${total}`; -} - -function parseReviewStatus(reviews: GhPrListItem["reviews"]): { status: string | null; reviewer: string | null } { - if (!reviews || reviews.length === 0) { - return { status: null, reviewer: null }; - } - - // Build a map of latest review per author - const latestByAuthor = new Map(); - for (const review of reviews) { - const login = review.author?.login ?? "unknown"; - const state = review.state?.toUpperCase() ?? 
""; - if (state === "COMMENTED") continue; // Skip comments, only track actionable reviews - latestByAuthor.set(login, { state, login }); - } - - // Check for CHANGES_REQUESTED first (takes priority), then APPROVED - for (const [, entry] of latestByAuthor) { - if (entry.state === "CHANGES_REQUESTED") { - return { status: "CHANGES_REQUESTED", reviewer: entry.login }; - } - } - - for (const [, entry] of latestByAuthor) { - if (entry.state === "APPROVED") { - return { status: "APPROVED", reviewer: entry.login }; - } - } - - // If there are reviews but none are APPROVED or CHANGES_REQUESTED - if (latestByAuthor.size > 0) { - const first = latestByAuthor.values().next().value; - return { status: "PENDING", reviewer: first?.login ?? null }; - } - - return { status: null, reviewer: null }; -} - -function snapshotFromGhItem(item: GhPrListItem): PullRequestSnapshot { - const { status: reviewStatus, reviewer } = parseReviewStatus(item.reviews); return { - number: item.number, - headRefName: item.headRefName, - state: item.state, - title: item.title, - url: item.url ?? "", - author: item.author?.login ?? "", - isDraft: item.isDraft ?? false, - ciStatus: parseCiStatus(item.statusCheckRollup), - reviewStatus, - reviewer, + Accept: "application/vnd.github+json", + Authorization: `Bearer ${token}`, + "X-GitHub-Api-Version": "2022-11-28", }; } -const PR_JSON_FIELDS = "number,headRefName,state,title,url,author,isDraft,statusCheckRollup,reviews"; - -export async function listPullRequests(repoPath: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "list", "--json", PR_JSON_FIELDS, "--limit", "200"], { - maxBuffer: 1024 * 1024 * 4, - cwd: repoPath, - env: ghEnv(options), - }); - - const parsed = JSON.parse(stdout) as GhPrListItem[]; - - return parsed.map((item) => { - // Handle fork PRs where headRefName may contain "owner:branch" - const headRefName = item.headRefName.includes(":") ? (item.headRefName.split(":").pop() ?? 
item.headRefName) : item.headRefName; - - return snapshotFromGhItem({ ...item, headRefName }); - }); - } catch { - return []; - } -} - -export async function getPrInfo(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", PR_JSON_FIELDS], { - maxBuffer: 1024 * 1024 * 4, - cwd: repoPath, - env: ghEnv(options), - }); - - const item = JSON.parse(stdout) as GhPrListItem; - return snapshotFromGhItem(item); - } catch { - return null; - } +async function githubRequest(path: string, init: RequestInit, options?: GithubAuthOptions): Promise { + return await fetch(`https://api.github.com${path}`, { + ...init, + headers: { + ...authHeaders(options), + ...(init.headers ?? {}), + }, + }); } export async function createPr( - repoPath: string, + repoFullName: string, headBranch: string, title: string, body?: string, options?: GithubAuthOptions, ): Promise<{ number: number; url: string }> { - const args = ["pr", "create", "--title", title, "--head", headBranch]; - if (body) { - args.push("--body", body); - } else { - args.push("--body", ""); + const baseBranch = options?.baseBranch?.trim() || "main"; + const response = await githubRequest( + `/repos/${repoFullName}/pulls`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + title, + head: headBranch, + base: baseBranch, + body: body ?? "", + }), + }, + options, + ); + + const payload = (await response.json()) as { number?: number; html_url?: string; message?: string }; + if (!response.ok || !payload.number || !payload.html_url) { + throw new Error(payload.message ?? 
`Failed to create pull request for ${repoFullName}`); } - const { stdout } = await execFileAsync("gh", args, { - maxBuffer: 1024 * 1024, - cwd: repoPath, - env: ghEnv(options), - }); - - // gh pr create outputs the PR URL on success - const url = stdout.trim(); - // Extract PR number from URL: https://github.com/owner/repo/pull/123 - const numberMatch = url.match(/\/pull\/(\d+)/); - const number = numberMatch ? parseInt(numberMatch[1]!, 10) : 0; - - return { number, url }; + return { + number: payload.number, + url: payload.html_url, + }; } export async function starRepository(repoFullName: string, options?: GithubAuthOptions): Promise { - try { - await execFileAsync("gh", ["api", "--method", "PUT", `user/starred/${repoFullName}`], { - maxBuffer: 1024 * 1024, - env: ghEnv(options), - }); - } catch (error) { - const message = - error instanceof Error ? error.message : `Failed to star GitHub repository ${repoFullName}. Ensure GitHub auth is configured for the backend.`; - throw new Error(message); - } -} - -export async function getAllowedMergeMethod(repoPath: string, options?: GithubAuthOptions): Promise<"squash" | "rebase" | "merge"> { - try { - // Get the repo owner/name from gh - const { stdout: repoJson } = await execFileAsync("gh", ["repo", "view", "--json", "owner,name"], { cwd: repoPath, env: ghEnv(options) }); - const repo = JSON.parse(repoJson) as { owner: { login: string }; name: string }; - const repoFullName = `${repo.owner.login}/${repo.name}`; - - const { stdout } = await execFileAsync("gh", ["api", `repos/${repoFullName}`, "--jq", ".allow_squash_merge, .allow_rebase_merge, .allow_merge_commit"], { - maxBuffer: 1024 * 1024, - cwd: repoPath, - env: ghEnv(options), - }); - - const lines = stdout.trim().split("\n"); - const allowSquash = lines[0]?.trim() === "true"; - const allowRebase = lines[1]?.trim() === "true"; - const allowMerge = lines[2]?.trim() === "true"; - - if (allowSquash) return "squash"; - if (allowRebase) return "rebase"; - if (allowMerge) 
return "merge"; - return "squash"; - } catch { - return "squash"; - } -} - -export async function mergePr(repoPath: string, prNumber: number, options?: GithubAuthOptions): Promise { - const method = await getAllowedMergeMethod(repoPath, options); - await execFileAsync("gh", ["pr", "merge", String(prNumber), `--${method}`, "--delete-branch"], { cwd: repoPath, env: ghEnv(options) }); -} - -export async function isPrMerged(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "state"], { cwd: repoPath, env: ghEnv(options) }); - const parsed = JSON.parse(stdout) as { state: string }; - return parsed.state.toUpperCase() === "MERGED"; - } catch { - return false; - } -} - -export async function getPrTitle(repoPath: string, branchName: string): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "title"], { cwd: repoPath }); - const parsed = JSON.parse(stdout) as { title: string }; - return parsed.title; - } catch { - return null; + const response = await githubRequest( + `/user/starred/${repoFullName}`, + { + method: "PUT", + headers: { + "Content-Length": "0", + }, + }, + options, + ); + + if (!response.ok) { + const payload = (await response.json().catch(() => null)) as { message?: string } | null; + throw new Error(payload?.message ?? 
`Failed to star GitHub repository ${repoFullName}`); } } diff --git a/foundry/packages/backend/src/integrations/graphite/index.ts b/foundry/packages/backend/src/integrations/graphite/index.ts deleted file mode 100644 index 4c708b0..0000000 --- a/foundry/packages/backend/src/integrations/graphite/index.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -export async function graphiteAvailable(repoPath: string): Promise { - try { - await execFileAsync("gt", ["trunk"], { cwd: repoPath }); - return true; - } catch { - return false; - } -} - -export async function graphiteGet(repoPath: string, branchName: string): Promise { - try { - await execFileAsync("gt", ["get", branchName], { cwd: repoPath }); - return true; - } catch { - return false; - } -} - -export async function graphiteCreateBranch(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["create", branchName], { cwd: repoPath }); -} - -export async function graphiteCheckout(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["checkout", branchName], { cwd: repoPath }); -} - -export async function graphiteSubmit(repoPath: string): Promise { - await execFileAsync("gt", ["submit", "--no-edit"], { cwd: repoPath }); -} - -export async function graphiteMergeBranch(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["merge", branchName], { cwd: repoPath }); -} - -export async function graphiteAbandon(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["abandon", branchName], { cwd: repoPath }); -} - -export interface GraphiteStackEntry { - branchName: string; - parentBranch: string | null; -} - -export async function graphiteGetStack(repoPath: string): Promise { - try { - // Try JSON output first - const { stdout } = await execFileAsync("gt", ["log", "--json"], { - cwd: repoPath, - maxBuffer: 1024 * 
1024, - }); - - const parsed = JSON.parse(stdout) as Array<{ - branch?: string; - name?: string; - parent?: string; - parentBranch?: string; - }>; - - return parsed.map((entry) => ({ - branchName: entry.branch ?? entry.name ?? "", - parentBranch: entry.parent ?? entry.parentBranch ?? null, - })); - } catch { - // Fall back to text parsing of `gt log` - try { - const { stdout } = await execFileAsync("gt", ["log"], { - cwd: repoPath, - maxBuffer: 1024 * 1024, - }); - - const entries: GraphiteStackEntry[] = []; - const lines = stdout.split("\n").filter((l) => l.trim().length > 0); - - // Parse indented tree output: each line has tree chars (|, /, \, -, etc.) - // followed by branch names. Build parent-child from indentation level. - const branchStack: string[] = []; - - for (const line of lines) { - // Strip ANSI color codes - const clean = line.replace(/\x1b\[[0-9;]*m/g, ""); - // Extract branch name: skip tree characters and whitespace - const branchMatch = clean.match(/[│├└─|/\\*\s]*(?:◉|○|●)?\s*(.+)/); - if (!branchMatch) continue; - - const branchName = branchMatch[1]!.trim(); - if (!branchName || branchName.startsWith("(") || branchName === "") continue; - - // Determine indentation level by counting leading whitespace/tree chars - const indent = clean.search(/[a-zA-Z0-9]/); - const level = Math.max(0, Math.floor(indent / 2)); - - // Trim stack to current level - while (branchStack.length > level) { - branchStack.pop(); - } - - const parentBranch = branchStack.length > 0 ? (branchStack[branchStack.length - 1] ?? 
null) : null; - - entries.push({ branchName, parentBranch }); - branchStack.push(branchName); - } - - return entries; - } catch { - return []; - } - } -} - -export async function graphiteGetParent(repoPath: string, branchName: string): Promise { - try { - // Try `gt get ` to see parent info - const { stdout } = await execFileAsync("gt", ["get", branchName], { - cwd: repoPath, - maxBuffer: 1024 * 1024, - }); - - // Parse output for parent branch reference - const parentMatch = stdout.match(/parent:\s*(\S+)/i); - if (parentMatch) { - return parentMatch[1] ?? null; - } - } catch { - // Fall through to stack-based lookup - } - - // Fall back to stack info - try { - const stack = await graphiteGetStack(repoPath); - const entry = stack.find((e) => e.branchName === branchName); - return entry?.parentBranch ?? null; - } catch { - return null; - } -} diff --git a/foundry/packages/backend/src/sandbox-config.ts b/foundry/packages/backend/src/sandbox-config.ts index 4fa388f..9d85f51 100644 --- a/foundry/packages/backend/src/sandbox-config.ts +++ b/foundry/packages/backend/src/sandbox-config.ts @@ -1,10 +1,10 @@ -import type { AppConfig, ProviderId } from "@sandbox-agent/foundry-shared"; +import type { AppConfig, SandboxProviderId } from "@sandbox-agent/foundry-shared"; function hasE2BApiKey(config: AppConfig): boolean { - return Boolean(config.providers.e2b.apiKey?.trim()); + return Boolean(config.sandboxProviders.e2b.apiKey?.trim()); } -function forcedSandboxProviderId(): ProviderId | null { +function forcedSandboxProviderId(): SandboxProviderId | null { const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? 
null; if (raw === "local" || raw === "e2b") { return raw; @@ -12,7 +12,7 @@ function forcedSandboxProviderId(): ProviderId | null { return null; } -export function defaultSandboxProviderId(config: AppConfig): ProviderId { +export function defaultSandboxProviderId(config: AppConfig): SandboxProviderId { const forced = forcedSandboxProviderId(); if (forced === "local") { return "local"; @@ -26,11 +26,11 @@ export function defaultSandboxProviderId(config: AppConfig): ProviderId { return hasE2BApiKey(config) ? "e2b" : "local"; } -export function availableSandboxProviderIds(config: AppConfig): ProviderId[] { +export function availableSandboxProviderIds(config: AppConfig): SandboxProviderId[] { return hasE2BApiKey(config) ? ["e2b", "local"] : ["local"]; } -export function resolveSandboxProviderId(config: AppConfig, requested?: ProviderId | null): ProviderId { +export function resolveSandboxProviderId(config: AppConfig, requested?: SandboxProviderId | null): SandboxProviderId { if (requested === "e2b" && !hasE2BApiKey(config)) { throw new Error("E2B provider is not configured. 
Set E2B_API_KEY before selecting the e2b provider."); } diff --git a/foundry/packages/backend/src/services/app-github.ts b/foundry/packages/backend/src/services/app-github.ts index 065c382..6cb6db3 100644 --- a/foundry/packages/backend/src/services/app-github.ts +++ b/foundry/packages/backend/src/services/app-github.ts @@ -38,6 +38,12 @@ export interface GitHubRepositoryRecord { fullName: string; cloneUrl: string; private: boolean; + defaultBranch: string; +} + +export interface GitHubBranchRecord { + name: string; + commitSha: string; } export interface GitHubMemberRecord { @@ -341,12 +347,14 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>("/user/repos?per_page=100&affiliation=owner,collaborator,organization_member&sort=updated", accessToken); return repositories.map((repository) => ({ fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, })); } @@ -356,12 +364,14 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>("/installation/repositories?per_page=100", accessToken); return repositories.map((repository) => ({ fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, })); } @@ -371,11 +381,13 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>(`/repos/${fullName}`, accessToken); return { fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, }; } catch (error) { if (error instanceof GitHubAppError && error.status === 404) { @@ -390,6 +402,15 @@ export class GitHubAppClient { return await this.getUserRepository(accessToken, fullName); } + async listUserRepositoryBranches(accessToken: string, fullName: string): Promise { + 
return await this.listRepositoryBranches(accessToken, fullName); + } + + async listInstallationRepositoryBranches(installationId: number, fullName: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listRepositoryBranches(accessToken, fullName); + } + async listOrganizationMembers(accessToken: string, organizationLogin: string): Promise { const members = await this.paginate<{ id: number; @@ -687,6 +708,20 @@ export class GitHubAppClient { nextUrl: parseNextLink(response.headers.get("link")), }; } + + private async listRepositoryBranches(accessToken: string, fullName: string): Promise { + const branches = await this.paginate<{ + name: string; + commit?: { sha?: string | null } | null; + }>(`/repos/${fullName}/branches?per_page=100`, accessToken); + + return branches + .map((branch) => ({ + name: branch.name?.trim() ?? "", + commitSha: branch.commit?.sha?.trim() ?? "", + })) + .filter((branch) => branch.name.length > 0 && branch.commitSha.length > 0); + } } function parseNextLink(linkHeader: string | null): string | null { diff --git a/foundry/packages/backend/src/services/better-auth.ts b/foundry/packages/backend/src/services/better-auth.ts index 325ea59..4509402 100644 --- a/foundry/packages/backend/src/services/better-auth.ts +++ b/foundry/packages/backend/src/services/better-auth.ts @@ -1,7 +1,7 @@ import { betterAuth } from "better-auth"; import { createAdapterFactory } from "better-auth/adapters"; -import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js"; -import { authUserKey, workspaceKey } from "../actors/keys.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; +import { authUserKey, organizationKey } from "../actors/keys.js"; import { logger } from "../logging.js"; const AUTH_BASE_PATH = "/v1/auth"; @@ -43,7 +43,7 @@ async function callAuthEndpoint(auth: any, url: string, init?: RequestInit): Pro return await auth.handler(new Request(url, 
init)); } -function resolveRouteUserId(workspace: any, resolved: any): string | null { +function resolveRouteUserId(organization: any, resolved: any): string | null { if (!resolved) { return null; } @@ -75,11 +75,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } // getOrCreate is intentional here: the adapter runs during Better Auth callbacks - // which can fire before any explicit create path. The app workspace and auth user + // which can fire before any explicit create path. The app organization and auth user // actors must exist by the time the adapter needs them. - const appWorkspace = () => - actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), { - createWithInput: APP_SHELL_WORKSPACE_ID, + const appOrganization = () => + actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, }); // getOrCreate is intentional: Better Auth creates user records during OAuth @@ -109,9 +109,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } const email = direct("email"); if (typeof email === "string" && email.length > 0) { - const workspace = await appWorkspace(); - const resolved = await workspace.authFindEmailIndex({ email: email.toLowerCase() }); - return resolveRouteUserId(workspace, resolved); + const organization = await appOrganization(); + const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() }); + return resolveRouteUserId(organization, resolved); } return null; } @@ -124,12 +124,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const sessionId = direct("id") ?? data?.id; const sessionToken = direct("token") ?? 
data?.token; if (typeof sessionId === "string" || typeof sessionToken === "string") { - const workspace = await appWorkspace(); - const resolved = await workspace.authFindSessionIndex({ + const organization = await appOrganization(); + const resolved = await organization.authFindSessionIndex({ ...(typeof sessionId === "string" ? { sessionId } : {}), ...(typeof sessionToken === "string" ? { sessionToken } : {}), }); - return resolveRouteUserId(workspace, resolved); + return resolveRouteUserId(organization, resolved); } return null; } @@ -142,14 +142,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const accountRecordId = direct("id") ?? data?.id; const providerId = direct("providerId") ?? data?.providerId; const accountId = direct("accountId") ?? data?.accountId; - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (typeof accountRecordId === "string" && accountRecordId.length > 0) { - const resolved = await workspace.authFindAccountIndex({ id: accountRecordId }); - return resolveRouteUserId(workspace, resolved); + const resolved = await organization.authFindAccountIndex({ id: accountRecordId }); + return resolveRouteUserId(organization, resolved); } if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) { - const resolved = await workspace.authFindAccountIndex({ providerId, accountId }); - return resolveRouteUserId(workspace, resolved); + const resolved = await organization.authFindAccountIndex({ providerId, accountId }); + return resolveRouteUserId(organization, resolved); } return null; } @@ -157,9 +157,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; }; - const ensureWorkspaceVerification = async (method: string, payload: Record) => { - const workspace = await appWorkspace(); - return await workspace[method](payload); + const ensureOrganizationVerification = async (method: 
string, payload: Record) => { + const organization = await appOrganization(); + return await organization[method](payload); }; return { @@ -170,7 +170,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin create: async ({ model, data }) => { const transformed = await transformInput(data, model, "create", true); if (model === "verification") { - return await ensureWorkspaceVerification("authCreateVerification", { data: transformed }); + return await ensureOrganizationVerification("authCreateVerification", { data: transformed }); } const userId = await resolveUserIdForQuery(model, undefined, transformed); @@ -180,17 +180,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const userActor = await getAuthUser(userId); const created = await userActor.createAuthRecord({ model, data: transformed }); - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) { - await workspace.authUpsertEmailIndex({ + await organization.authUpsertEmailIndex({ email: transformed.email.toLowerCase(), userId, }); } if (model === "session") { - await workspace.authUpsertSessionIndex({ + await organization.authUpsertSessionIndex({ sessionId: String(created.id), sessionToken: String(created.token), userId, @@ -198,7 +198,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account") { - await workspace.authUpsertAccountIndex({ + await organization.authUpsertAccountIndex({ id: String(created.id), providerId: String(created.providerId), accountId: String(created.accountId), @@ -212,7 +212,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findOne: async ({ model, where, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findOne" }); if (model === "verification") { - return await 
ensureWorkspaceVerification("authFindOneVerification", { where: transformedWhere, join }); + return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -228,7 +228,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findMany: async ({ model, where, limit, sortBy, offset, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findMany" }); if (model === "verification") { - return await ensureWorkspaceVerification("authFindManyVerification", { + return await ensureOrganizationVerification("authFindManyVerification", { where: transformedWhere, limit, sortBy, @@ -240,11 +240,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (model === "session") { const tokenClause = transformedWhere?.find((entry: any) => entry.field === "token" && entry.operator === "in"); if (tokenClause && Array.isArray(tokenClause.value)) { - const workspace = await appWorkspace(); + const organization = await appOrganization(); const resolved = await Promise.all( (tokenClause.value as string[]).map(async (sessionToken: string) => ({ sessionToken, - route: await workspace.authFindSessionIndex({ sessionToken }), + route: await organization.authFindSessionIndex({ sessionToken }), })), ); const byUser = new Map(); @@ -284,7 +284,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "update" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureWorkspaceVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); } 
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -302,19 +302,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) : null; const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate }); - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (model === "user" && updated) { if (before?.email && before.email !== updated.email) { - await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); } if (updated.email) { - await workspace.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); + await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); } } if (model === "session" && updated) { - await workspace.authUpsertSessionIndex({ + await organization.authUpsertSessionIndex({ sessionId: String(updated.id), sessionToken: String(updated.token), userId, @@ -322,7 +322,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account" && updated) { - await workspace.authUpsertAccountIndex({ + await organization.authUpsertAccountIndex({ id: String(updated.id), providerId: String(updated.providerId), accountId: String(updated.accountId), @@ -337,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "updateMany" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureWorkspaceVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("authUpdateManyVerification", { 
where: transformedWhere, update: transformedUpdate }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -352,7 +352,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin delete: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "delete" }); if (model === "verification") { - await ensureWorkspaceVerification("authDeleteVerification", { where: transformedWhere }); + await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere }); return; } @@ -362,19 +362,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } const userActor = await getAuthUser(userId); - const workspace = await appWorkspace(); + const organization = await appOrganization(); const before = await userActor.findOneAuthRecord({ model, where: transformedWhere }); await userActor.deleteAuthRecord({ model, where: transformedWhere }); if (model === "session" && before) { - await workspace.authDeleteSessionIndex({ + await organization.authDeleteSessionIndex({ sessionId: before.id, sessionToken: before.token, }); } if (model === "account" && before) { - await workspace.authDeleteAccountIndex({ + await organization.authDeleteAccountIndex({ id: before.id, providerId: before.providerId, accountId: before.accountId, @@ -382,14 +382,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "user" && before?.email) { - await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); } }, deleteMany: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" }); if (model === "verification") { - return await ensureWorkspaceVerification("authDeleteManyVerification", { where: transformedWhere }); + return await 
ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere }); } if (model === "session") { @@ -398,11 +398,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } const userActor = await getAuthUser(userId); - const workspace = await appWorkspace(); + const organization = await appOrganization(); const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 }); const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); for (const session of sessions) { - await workspace.authDeleteSessionIndex({ + await organization.authDeleteSessionIndex({ sessionId: session.id, sessionToken: session.token, }); @@ -423,7 +423,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin count: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "count" }); if (model === "verification") { - return await ensureWorkspaceVerification("authCountVerification", { where: transformedWhere }); + return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -476,8 +476,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin }, async getAuthState(sessionId: string) { - const workspace = await appWorkspace(); - const route = await workspace.authFindSessionIndex({ sessionId }); + const organization = await appOrganization(); + const route = await organization.authFindSessionIndex({ sessionId }); if (!route?.userId) { return null; } diff --git a/foundry/packages/backend/src/services/foundry-paths.ts b/foundry/packages/backend/src/services/foundry-paths.ts deleted file mode 100644 index d56c38d..0000000 --- a/foundry/packages/backend/src/services/foundry-paths.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { AppConfig } from 
"@sandbox-agent/foundry-shared"; -import { homedir } from "node:os"; -import { dirname, join, resolve } from "node:path"; - -function expandPath(input: string): string { - if (input.startsWith("~/")) { - return `${homedir()}/${input.slice(2)}`; - } - return input; -} - -export function foundryDataDir(config: AppConfig): string { - // Keep data collocated with the backend DB by default. - const dbPath = expandPath(config.backend.dbPath); - return resolve(dirname(dbPath)); -} - -export function foundryRepoClonePath(config: AppConfig, workspaceId: string, repoId: string): string { - return resolve(join(foundryDataDir(config), "repos", workspaceId, repoId)); -} diff --git a/foundry/packages/backend/src/services/github-auth.ts b/foundry/packages/backend/src/services/github-auth.ts index 8249927..ebbbce9 100644 --- a/foundry/packages/backend/src/services/github-auth.ts +++ b/foundry/packages/backend/src/services/github-auth.ts @@ -1,20 +1,20 @@ -import { getOrCreateWorkspace } from "../actors/handles.js"; -import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js"; +import { getOrCreateOrganization } from "../actors/handles.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; export interface ResolvedGithubAuth { githubToken: string; scopes: string[]; } -export async function resolveWorkspaceGithubAuth(c: any, workspaceId: string): Promise { - if (!workspaceId || workspaceId === APP_SHELL_WORKSPACE_ID) { +export async function resolveOrganizationGithubAuth(c: any, organizationId: string): Promise { + if (!organizationId || organizationId === APP_SHELL_ORGANIZATION_ID) { return null; } try { - const appWorkspace = await getOrCreateWorkspace(c, APP_SHELL_WORKSPACE_ID); - const resolved = await appWorkspace.resolveAppGithubToken({ - organizationId: workspaceId, + const appOrganization = await getOrCreateOrganization(c, APP_SHELL_ORGANIZATION_ID); + const resolved = await appOrganization.resolveAppGithubToken({ + 
organizationId: organizationId, requireRepoScope: true, }); if (!resolved?.accessToken) { diff --git a/foundry/packages/backend/src/services/repo-git-lock.ts b/foundry/packages/backend/src/services/repo-git-lock.ts deleted file mode 100644 index 971b95c..0000000 --- a/foundry/packages/backend/src/services/repo-git-lock.ts +++ /dev/null @@ -1,45 +0,0 @@ -interface RepoLockState { - locked: boolean; - waiters: Array<() => void>; -} - -const repoLocks = new Map(); - -async function acquireRepoLock(repoPath: string): Promise<() => void> { - let state = repoLocks.get(repoPath); - if (!state) { - state = { locked: false, waiters: [] }; - repoLocks.set(repoPath, state); - } - - if (!state.locked) { - state.locked = true; - return () => releaseRepoLock(repoPath, state); - } - - await new Promise((resolve) => { - state!.waiters.push(resolve); - }); - - return () => releaseRepoLock(repoPath, state!); -} - -function releaseRepoLock(repoPath: string, state: RepoLockState): void { - const next = state.waiters.shift(); - if (next) { - next(); - return; - } - - state.locked = false; - repoLocks.delete(repoPath); -} - -export async function withRepoGitLock(repoPath: string, fn: () => Promise): Promise { - const release = await acquireRepoLock(repoPath); - try { - return await fn(); - } finally { - release(); - } -} diff --git a/foundry/packages/backend/src/services/repo.ts b/foundry/packages/backend/src/services/repo.ts index 910f4e8..fb673cc 100644 --- a/foundry/packages/backend/src/services/repo.ts +++ b/foundry/packages/backend/src/services/repo.ts @@ -82,3 +82,30 @@ export function repoLabelFromRemote(remoteUrl: string): string { return basename(trimmed.replace(/\.git$/i, "")); } + +export function githubRepoFullNameFromRemote(remoteUrl: string): string | null { + const normalized = normalizeRemoteUrl(remoteUrl); + if (!normalized) { + return null; + } + + try { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, "").toLowerCase(); + if 
(hostname !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + const owner = parts[0]?.trim(); + const repo = (parts[1] ?? "").replace(/\.git$/i, "").trim(); + if (!owner || !repo) { + return null; + } + return `${owner}/${repo}`; + } catch { + return null; + } +} diff --git a/foundry/packages/backend/test/git-spice.test.ts b/foundry/packages/backend/test/git-spice.test.ts deleted file mode 100644 index d0b0455..0000000 --- a/foundry/packages/backend/test/git-spice.test.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { chmodSync, mkdtempSync, writeFileSync, readFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { join } from "node:path"; -import { describe, expect, it } from "vitest"; -import { gitSpiceAvailable, gitSpiceListStack, gitSpiceRestackSubtree } from "../src/integrations/git-spice/index.js"; - -function makeTempDir(prefix: string): string { - return mkdtempSync(join(tmpdir(), prefix)); -} - -function writeScript(path: string, body: string): void { - writeFileSync(path, body, "utf8"); - chmodSync(path, 0o755); -} - -async function withEnv(updates: Record, fn: () => Promise): Promise { - const previous = new Map(); - for (const [key, value] of Object.entries(updates)) { - previous.set(key, process.env[key]); - if (value == null) { - delete process.env[key]; - } else { - process.env[key] = value; - } - } - - try { - return await fn(); - } finally { - for (const [key, value] of previous) { - if (value == null) { - delete process.env[key]; - } else { - process.env[key] = value; - } - } - } -} - -describe("git-spice integration", () => { - it("parses stack rows from mixed/malformed json output", async () => { - const repoPath = makeTempDir("hf-git-spice-parse-"); - const scriptPath = join(repoPath, "fake-git-spice.sh"); - writeScript( - scriptPath, - [ - "#!/bin/sh", - 'if [ \"$1\" = \"--help\" ]; then', - " exit 0", - "fi", - 'if [ 
\"$1\" = \"log\" ]; then', - " echo 'noise line'", - ' echo \'{"branch":"feature/a","parent":"main"}\'', - " echo '{bad json'", - ' echo \'{"name":"feature/b","parentBranch":"feature/a"}\'', - ' echo \'{"name":"feature/a","parent":"main"}\'', - " exit 0", - "fi", - "exit 1", - ].join("\n"), - ); - - await withEnv({ HF_GIT_SPICE_BIN: scriptPath }, async () => { - const rows = await gitSpiceListStack(repoPath); - expect(rows).toEqual([ - { branchName: "feature/a", parentBranch: "main" }, - { branchName: "feature/b", parentBranch: "feature/a" }, - ]); - }); - }); - - it("falls back across versioned subtree restack command variants", async () => { - const repoPath = makeTempDir("hf-git-spice-fallback-"); - const scriptPath = join(repoPath, "fake-git-spice.sh"); - const logPath = join(repoPath, "calls.log"); - writeScript( - scriptPath, - [ - "#!/bin/sh", - 'echo \"$*\" >> \"$SPICE_LOG_PATH\"', - 'if [ \"$1\" = \"--help\" ]; then', - " exit 0", - "fi", - 'if [ \"$1\" = \"upstack\" ] && [ \"$2\" = \"restack\" ]; then', - " exit 1", - "fi", - 'if [ \"$1\" = \"branch\" ] && [ \"$2\" = \"restack\" ] && [ \"$5\" = \"--no-prompt\" ]; then', - " exit 0", - "fi", - "exit 1", - ].join("\n"), - ); - - await withEnv( - { - HF_GIT_SPICE_BIN: scriptPath, - SPICE_LOG_PATH: logPath, - }, - async () => { - await gitSpiceRestackSubtree(repoPath, "feature/a"); - }, - ); - - const lines = readFileSync(logPath, "utf8") - .trim() - .split("\n") - .filter((line) => line.trim().length > 0); - - expect(lines).toContain("upstack restack --branch feature/a --no-prompt"); - expect(lines).toContain("upstack restack --branch feature/a"); - expect(lines).toContain("branch restack --branch feature/a --no-prompt"); - expect(lines).not.toContain("branch restack --branch feature/a"); - }); - - it("reports unavailable when explicit binary and PATH are missing", async () => { - const repoPath = makeTempDir("hf-git-spice-missing-"); - - await withEnv( - { - HF_GIT_SPICE_BIN: 
"/non-existent/hf-git-spice-binary", - PATH: "/non-existent/bin", - }, - async () => { - const available = await gitSpiceAvailable(repoPath); - expect(available).toBe(false); - }, - ); - }); -}); diff --git a/foundry/packages/backend/test/git-validate-remote.test.ts b/foundry/packages/backend/test/git-validate-remote.test.ts deleted file mode 100644 index 47849a2..0000000 --- a/foundry/packages/backend/test/git-validate-remote.test.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { afterEach, beforeEach, describe, expect, test } from "vitest"; -import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { join, resolve } from "node:path"; -import { promisify } from "node:util"; -import { execFile } from "node:child_process"; -import { validateRemote } from "../src/integrations/git/index.js"; - -const execFileAsync = promisify(execFile); - -describe("validateRemote", () => { - const originalCwd = process.cwd(); - - beforeEach(() => { - process.chdir(originalCwd); - }); - - afterEach(() => { - process.chdir(originalCwd); - }); - - test("ignores broken worktree gitdir in current directory", async () => { - const sandboxDir = mkdtempSync(join(tmpdir(), "validate-remote-cwd-")); - const brokenRepoDir = resolve(sandboxDir, "broken-worktree"); - const remoteRepoDir = resolve(sandboxDir, "remote"); - - mkdirSync(brokenRepoDir, { recursive: true }); - writeFileSync(resolve(brokenRepoDir, ".git"), "gitdir: /definitely/missing/worktree\n", "utf8"); - await execFileAsync("git", ["init", remoteRepoDir]); - await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.name", "Foundry Test"]); - await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.email", "test@example.com"]); - writeFileSync(resolve(remoteRepoDir, "README.md"), "# test\n", "utf8"); - await execFileAsync("git", ["-C", remoteRepoDir, "add", "README.md"]); - await execFileAsync("git", ["-C", remoteRepoDir, "commit", "-m", "init"]); - - 
process.chdir(brokenRepoDir); - - await expect(validateRemote(remoteRepoDir)).resolves.toBeUndefined(); - }); -}); diff --git a/foundry/packages/backend/test/helpers/test-context.ts b/foundry/packages/backend/test/helpers/test-context.ts index 31b1965..be169a8 100644 --- a/foundry/packages/backend/test/helpers/test-context.ts +++ b/foundry/packages/backend/test/helpers/test-context.ts @@ -9,7 +9,7 @@ export function createTestConfig(overrides?: Partial): AppConfig { return ConfigSchema.parse({ auto_submit: true, notify: ["terminal" as const], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -19,7 +19,7 @@ export function createTestConfig(overrides?: Partial): AppConfig { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/backend/test/helpers/test-driver.ts b/foundry/packages/backend/test/helpers/test-driver.ts index c370d87..39975e5 100644 --- a/foundry/packages/backend/test/helpers/test-driver.ts +++ b/foundry/packages/backend/test/helpers/test-driver.ts @@ -1,48 +1,15 @@ -import type { BackendDriver, GitDriver, GithubDriver, StackDriver, TmuxDriver } from "../../src/driver.js"; +import type { BackendDriver, GithubDriver, TmuxDriver } from "../../src/driver.js"; export function createTestDriver(overrides?: Partial): BackendDriver { return { - git: overrides?.git ?? createTestGitDriver(), - stack: overrides?.stack ?? createTestStackDriver(), github: overrides?.github ?? createTestGithubDriver(), tmux: overrides?.tmux ?? 
createTestTmuxDriver(), }; } -export function createTestGitDriver(overrides?: Partial): GitDriver { - return { - validateRemote: async () => {}, - ensureCloned: async () => {}, - fetch: async () => {}, - listRemoteBranches: async () => [], - listLocalRemoteRefs: async () => [], - remoteDefaultBaseRef: async () => "origin/main", - revParse: async () => "abc1234567890", - ensureRemoteBranch: async () => {}, - diffStatForBranch: async () => "+0/-0", - conflictsWithMain: async () => false, - ...overrides, - }; -} - -export function createTestStackDriver(overrides?: Partial): StackDriver { - return { - available: async () => false, - listStack: async () => [], - syncRepo: async () => {}, - restackRepo: async () => {}, - restackSubtree: async () => {}, - rebaseBranch: async () => {}, - reparentBranch: async () => {}, - trackBranch: async () => {}, - ...overrides, - }; -} - export function createTestGithubDriver(overrides?: Partial): GithubDriver { return { - listPullRequests: async () => [], - createPr: async (_repoPath, _headBranch, _title) => ({ + createPr: async (_repoFullName, _headBranch, _title) => ({ number: 1, url: `https://github.com/test/repo/pull/1`, }), diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index 28bf1dc..ac5f3c8 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,20 +1,19 @@ import { describe, expect, it } from "vitest"; -import { githubDataKey, historyKey, projectBranchSyncKey, projectKey, taskKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js"; +import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; describe("actor keys", () => { - it("prefixes every key with workspace namespace", () => { + it("prefixes every key with organization namespace", () => { const keys = [ - workspaceKey("default"), - projectKey("default", "repo"), + organizationKey("default"), + 
repositoryKey("default", "repo"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), historyKey("default", "repo"), githubDataKey("default"), - projectBranchSyncKey("default", "repo"), ]; for (const key of keys) { - expect(key[0]).toBe("ws"); + expect(key[0]).toBe("org"); expect(key[1]).toBe("default"); } }); diff --git a/foundry/packages/backend/test/workspace-isolation.test.ts b/foundry/packages/backend/test/organization-isolation.test.ts similarity index 63% rename from foundry/packages/backend/test/workspace-isolation.test.ts rename to foundry/packages/backend/test/organization-isolation.test.ts index fa004c7..fcd1950 100644 --- a/foundry/packages/backend/test/workspace-isolation.test.ts +++ b/foundry/packages/backend/test/organization-isolation.test.ts @@ -6,8 +6,9 @@ import { execFileSync } from "node:child_process"; import { setTimeout as delay } from "node:timers/promises"; import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; -import { workspaceKey } from "../src/actors/keys.js"; +import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; +import { repoIdFromRemote } from "../src/services/repo.js"; import { createTestDriver } from "./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; @@ -24,59 +25,60 @@ function createRepo(): { repoPath: string } { return { repoPath }; } -async function waitForWorkspaceRows(ws: any, workspaceId: string, expectedCount: number) { +async function waitForOrganizationRows(ws: any, organizationId: string, expectedCount: number) { for (let attempt = 0; attempt < 40; attempt += 1) { - const rows = await ws.listTasks({ workspaceId }); + const rows = await ws.listTasks({ organizationId }); if (rows.length >= expectedCount) { return rows; } await delay(50); } - return ws.listTasks({ workspaceId }); + return ws.listTasks({ organizationId }); } -describe("workspace isolation", () => { - 
it.skipIf(!runActorIntegration)("keeps task lists isolated by workspace", async (t) => { +describe("organization isolation", () => { + it.skipIf(!runActorIntegration)("keeps task lists isolated by organization", async (t) => { const testDriver = createTestDriver(); createTestRuntimeContext(testDriver); const { client } = await setupTest(t, registry); - const wsA = await client.workspace.getOrCreate(workspaceKey("alpha"), { + const wsA = await client.organization.getOrCreate(organizationKey("alpha"), { createWithInput: "alpha", }); - const wsB = await client.workspace.getOrCreate(workspaceKey("beta"), { + const wsB = await client.organization.getOrCreate(organizationKey("beta"), { createWithInput: "beta", }); const { repoPath } = createRepo(); - const repoA = await wsA.addRepo({ workspaceId: "alpha", remoteUrl: repoPath }); - const repoB = await wsB.addRepo({ workspaceId: "beta", remoteUrl: repoPath }); + const repoId = repoIdFromRemote(repoPath); + await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); + await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); await wsA.createTask({ - workspaceId: "alpha", - repoId: repoA.repoId, + organizationId: "alpha", + repoId, task: "task A", - providerId: "local", + sandboxProviderId: "local", explicitBranchName: "feature/a", explicitTitle: "A", }); await wsB.createTask({ - workspaceId: "beta", - repoId: repoB.repoId, + organizationId: "beta", + repoId, task: "task B", - providerId: "local", + sandboxProviderId: "local", explicitBranchName: "feature/b", explicitTitle: "B", }); - const aRows = await waitForWorkspaceRows(wsA, "alpha", 1); - const bRows = await waitForWorkspaceRows(wsB, "beta", 1); + const aRows = await waitForOrganizationRows(wsA, "alpha", 1); + const bRows = await waitForOrganizationRows(wsB, "beta", 1); expect(aRows.length).toBe(1); expect(bRows.length).toBe(1); - expect(aRows[0]?.workspaceId).toBe("alpha"); - expect(bRows[0]?.workspaceId).toBe("beta"); + 
expect(aRows[0]?.organizationId).toBe("alpha"); + expect(bRows[0]?.organizationId).toBe("beta"); expect(aRows[0]?.taskId).not.toBe(bRows[0]?.taskId); }); }); diff --git a/foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts similarity index 80% rename from foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts rename to foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts index 8eabb99..b3a2410 100644 --- a/foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts +++ b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts @@ -1,14 +1,14 @@ // @ts-nocheck import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; -import { workspaceKey } from "../src/actors/keys.js"; +import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; import { createTestDriver } from "./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1"; -describe("workspace star sandbox agent repo", () => { +describe("organization star sandbox agent repo", () => { it.skipIf(!runActorIntegration)("stars the sandbox agent repo through the github driver", async (t) => { const calls: string[] = []; const testDriver = createTestDriver({ @@ -26,11 +26,11 @@ describe("workspace star sandbox agent repo", () => { createTestRuntimeContext(testDriver); const { client } = await setupTest(t, registry); - const ws = await client.workspace.getOrCreate(workspaceKey("alpha"), { + const ws = await client.organization.getOrCreate(organizationKey("alpha"), { createWithInput: "alpha", }); - const result = await ws.starSandboxAgentRepo({ workspaceId: "alpha" }); + const result = await ws.starSandboxAgentRepo({ organizationId: "alpha" }); 
expect(calls).toEqual(["rivet-dev/sandbox-agent"]); expect(result.repo).toBe("rivet-dev/sandbox-agent"); diff --git a/foundry/packages/backend/test/sandbox-config.test.ts b/foundry/packages/backend/test/sandbox-config.test.ts index 0b53f03..354f794 100644 --- a/foundry/packages/backend/test/sandbox-config.test.ts +++ b/foundry/packages/backend/test/sandbox-config.test.ts @@ -6,7 +6,7 @@ function makeConfig(overrides?: Partial): AppConfig { return ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -16,7 +16,7 @@ function makeConfig(overrides?: Partial): AppConfig { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, @@ -33,7 +33,7 @@ describe("sandbox config", () => { it("prefers e2b when an api key is configured", () => { const config = makeConfig({ - providers: { + sandboxProviders: { local: {}, e2b: { apiKey: "test-token" }, }, diff --git a/foundry/packages/backend/test/stack-model.test.ts b/foundry/packages/backend/test/stack-model.test.ts deleted file mode 100644 index ca0a79f..0000000 --- a/foundry/packages/backend/test/stack-model.test.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { normalizeParentBranch, parentLookupFromStack, sortBranchesForOverview } from "../src/actors/project/stack-model.js"; - -describe("stack-model", () => { - it("normalizes self-parent references to null", () => { - expect(normalizeParentBranch("feature/a", "feature/a")).toBeNull(); - expect(normalizeParentBranch("feature/a", "main")).toBe("main"); - expect(normalizeParentBranch("feature/a", null)).toBeNull(); - }); - - it("builds parent lookup with sanitized entries", () => { - const lookup = parentLookupFromStack([ - { branchName: "feature/a", parentBranch: "main" }, - { branchName: "feature/b", parentBranch: "feature/b" }, - { branchName: " ", 
parentBranch: "main" }, - ]); - - expect(lookup.get("feature/a")).toBe("main"); - expect(lookup.get("feature/b")).toBeNull(); - expect(lookup.has(" ")).toBe(false); - }); - - it("orders branches by graph depth and handles cycles safely", () => { - const rows = sortBranchesForOverview([ - { branchName: "feature/b", parentBranch: "feature/a", updatedAt: 200 }, - { branchName: "feature/a", parentBranch: "main", updatedAt: 100 }, - { branchName: "main", parentBranch: null, updatedAt: 50 }, - { branchName: "cycle-a", parentBranch: "cycle-b", updatedAt: 300 }, - { branchName: "cycle-b", parentBranch: "cycle-a", updatedAt: 250 }, - ]); - - expect(rows.map((row) => row.branchName)).toEqual(["main", "feature/a", "feature/b", "cycle-a", "cycle-b"]); - }); -}); diff --git a/foundry/packages/backend/test/workbench-unread.test.ts b/foundry/packages/backend/test/workbench-unread.test.ts index 4972c64..fc94e97 100644 --- a/foundry/packages/backend/test/workbench-unread.test.ts +++ b/foundry/packages/backend/test/workbench-unread.test.ts @@ -59,8 +59,8 @@ describe("workbench model changes", () => { }); describe("workbench send readiness", () => { - it("rejects unknown tabs", () => { - expect(() => requireSendableSessionMeta(null, "tab-1")).toThrow("Unknown workbench tab: tab-1"); + it("rejects unknown sessions", () => { + expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1"); }); it("rejects pending sessions", () => { @@ -70,7 +70,7 @@ describe("workbench send readiness", () => { status: "pending_session_create", sandboxSessionId: null, }, - "tab-2", + "session-2", ), ).toThrow("Session is not ready (status: pending_session_create). 
Wait for session provisioning to complete."); }); @@ -81,6 +81,6 @@ describe("workbench send readiness", () => { sandboxSessionId: "session-1", }; - expect(requireSendableSessionMeta(meta, "tab-3")).toBe(meta); + expect(requireSendableSessionMeta(meta, "session-3")).toBe(meta); }); }); diff --git a/foundry/packages/cli/src/index.ts b/foundry/packages/cli/src/index.ts index 4043f32..fdf5a19 100644 --- a/foundry/packages/cli/src/index.ts +++ b/foundry/packages/cli/src/index.ts @@ -8,7 +8,7 @@ import { ensureBackendRunning, getBackendStatus, parseBackendPort, stopBackend } import { writeStderr, writeStdout } from "./io.js"; import { openEditorForTask } from "./task-editor.js"; import { spawnCreateTmuxWindow } from "./tmux.js"; -import { loadConfig, resolveWorkspace, saveConfig } from "./workspace/config.js"; +import { loadConfig, resolveOrganization, saveConfig } from "./organization/config.js"; async function ensureBunRuntime(): Promise { if (typeof (globalThis as { Bun?: unknown }).Bun !== "undefined") { @@ -41,9 +41,9 @@ async function ensureBunRuntime(): Promise { throw new Error("hf requires Bun runtime. 
Set HF_BUN or install Bun at ~/.bun/bin/bun."); } -async function runTuiCommand(config: ReturnType, workspaceId: string): Promise { +async function runTuiCommand(config: ReturnType, organizationId: string): Promise { const mod = await import("./tui.js"); - await mod.runTui(config, workspaceId); + await mod.runTui(config, organizationId); } function readOption(args: string[], flag: string): string | undefined { @@ -87,6 +87,92 @@ function positionals(args: string[]): string[] { return out; } +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. + } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +async function resolveImportedRepo( + client: ReturnType, + organizationId: string, + repoSelector: string, +): Promise>[number]> { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing required --repo "); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. Create it in GitHub first, then sync repos in Foundry before running hf create.`, + ); + } + + return match; +} + function printUsage(): void { writeStdout(` Usage: @@ -94,22 +180,22 @@ Usage: hf backend stop [--host HOST] [--port PORT] hf backend status hf backend inspect - hf status [--workspace WS] [--json] - hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json] - hf workspace use - hf tui [--workspace WS] + hf status [--organization ORG] [--json] + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] + hf organization use + hf tui [--organization ORG] - hf create [task] [--workspace WS] --repo [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH] - hf list [--workspace WS] [--format table|json] [--full] - hf switch [task-id | -] [--workspace WS] - hf attach [--workspace WS] - hf merge [--workspace WS] - hf archive [--workspace WS] - hf push [--workspace WS] - hf sync [--workspace WS] - hf kill [--workspace WS] [--delete-branch] [--abandon] - hf 
prune [--workspace WS] [--dry-run] [--yes] - hf statusline [--workspace WS] [--format table|claude-code] + hf create [task] [--organization ORG] --repo [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH] + hf list [--organization ORG] [--format table|json] [--full] + hf switch [task-id | -] [--organization ORG] + hf attach [--organization ORG] + hf merge [--organization ORG] + hf archive [--organization ORG] + hf push [--organization ORG] + hf sync [--organization ORG] + hf kill [--organization ORG] [--delete-branch] [--abandon] + hf prune [--organization ORG] [--dry-run] [--yes] + hf statusline [--organization ORG] [--format table|claude-code] hf db path hf db nuke @@ -123,19 +209,19 @@ Tips: function printStatusUsage(): void { writeStdout(` Usage: - hf status [--workspace WS] [--json] + hf status [--organization ORG] [--json] Text Output: - workspace= + organization= backend running= pid= version= tasks total= status queued= running= idle= archived= killed= error= - providers = ... - providers - + sandboxProviders = ... + sandboxProviders - JSON Output: { - "workspaceId": "default", + "organizationId": "default", "backend": { ...backend status object... 
}, "tasks": { "total": 4, @@ -149,7 +235,7 @@ JSON Output: function printHistoryUsage(): void { writeStdout(` Usage: - hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json] + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] Text Output: \t\t\t @@ -164,18 +250,23 @@ JSON Output: [ { "id": "...", - "workspaceId": "default", + "organizationId": "default", "kind": "task.created", "taskId": "...", "repoId": "...", "branchName": "feature/foo", - "payloadJson": "{\\"providerId\\":\\"local\\"}", + "payloadJson": "{\\"sandboxProviderId\\":\\"local\\"}", "createdAt": 1770607522229 } ] `); } +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); +} + async function handleBackend(args: string[]): Promise { const sub = args[0] ?? "start"; const config = loadConfig(); @@ -232,38 +323,38 @@ async function handleBackend(args: string[]): Promise { throw new Error(`Unknown backend subcommand: ${sub}`); } -async function handleWorkspace(args: string[]): Promise { +async function handleOrganization(args: string[]): Promise { const sub = args[0]; if (sub !== "use") { - throw new Error("Usage: hf workspace use "); + throw new Error("Usage: hf organization use "); } const name = args[1]; if (!name) { - throw new Error("Missing workspace name"); + throw new Error("Missing organization name"); } const config = loadConfig(); - config.workspace.default = name; + config.organization.default = name; saveConfig(config); const client = createBackendClientFromConfig(config); try { - await client.useWorkspace(name); + await client.useOrganization(name); } catch { // Backend may not be running yet. Config is already updated. 
} - writeStdout(`workspace=${name}`); + writeStdout(`organization=${name}`); } async function handleList(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const format = readOption(args, "--format") ?? "table"; const full = hasFlag(args, "--full"); const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); if (format === "json") { writeStdout(JSON.stringify(rows, null, 2)); @@ -277,10 +368,10 @@ async function handleList(args: string[]): Promise { for (const row of rows) { const age = formatRelativeAge(row.updatedAt); - let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.providerId}\t${age}`; + let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.sandboxProviderId}\t${age}`; if (full) { - const task = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task; - line += `\t${row.title}\t${task}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? "-"}`; + const preview = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task; + line += `\t${row.title}\t${preview}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? 
"-"}`; } writeStdout(line); } @@ -292,9 +383,9 @@ async function handlePush(args: string[]): Promise { throw new Error("Missing task id for push"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "push"); + await client.runAction(organizationId, taskId, "push"); writeStdout("ok"); } @@ -304,9 +395,9 @@ async function handleSync(args: string[]): Promise { throw new Error("Missing task id for sync"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "sync"); + await client.runAction(organizationId, taskId, "sync"); writeStdout("ok"); } @@ -316,7 +407,7 @@ async function handleKill(args: string[]): Promise { throw new Error("Missing task id for kill"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const deleteBranch = hasFlag(args, "--delete-branch"); const abandon = hasFlag(args, "--abandon"); @@ -328,17 +419,17 @@ async function handleKill(args: string[]): Promise { } const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "kill"); + await client.runAction(organizationId, taskId, "kill"); writeStdout("ok"); } async function handlePrune(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const dryRun = 
hasFlag(args, "--dry-run"); const yes = hasFlag(args, "--yes"); const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const prunable = rows.filter((r) => r.status === "archived" || r.status === "killed"); if (prunable.length === 0) { @@ -366,10 +457,10 @@ async function handlePrune(args: string[]): Promise { async function handleStatusline(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const format = readOption(args, "--format") ?? "table"; const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const summary = summarizeTasks(rows); const running = summary.byStatus.running; const idle = summary.byStatus.idle; @@ -402,7 +493,7 @@ async function handleDb(args: string[]): Promise { async function waitForTaskReady( client: ReturnType, - workspaceId: string, + organizationId: string, taskId: string, timeoutMs: number, ): Promise { @@ -410,7 +501,7 @@ async function waitForTaskReady( let delayMs = 250; for (;;) { - const record = await client.getTask(workspaceId, taskId); + const record = await client.getTask(organizationId, taskId); const hasName = Boolean(record.branchName && record.title); const hasSandbox = Boolean(record.activeSandboxId); @@ -432,11 +523,11 @@ async function waitForTaskReady( async function handleCreate(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); - const repoRemote = readOption(args, "--repo"); - if (!repoRemote) { - throw new Error("Missing required --repo "); + 
const repoSelector = readOption(args, "--repo"); + if (!repoSelector) { + throw new Error("Missing required --repo "); } const explicitBranchName = readOption(args, "--name") ?? readOption(args, "--branch"); const explicitTitle = readOption(args, "--title"); @@ -446,15 +537,15 @@ async function handleCreate(args: string[]): Promise { const onBranch = readOption(args, "--on"); const taskFromArgs = positionals(args).join(" ").trim(); - const task = taskFromArgs || openEditorForTask(); + const taskPrompt = taskFromArgs || openEditorForTask(); const client = createBackendClientFromConfig(config); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await resolveImportedRepo(client, organizationId, repoSelector); const payload = CreateTaskInputSchema.parse({ - workspaceId, + organizationId, repoId: repo.repoId, - task, + task: taskPrompt, explicitTitle: explicitTitle || undefined, explicitBranchName: explicitBranchName || undefined, agentType, @@ -462,30 +553,30 @@ async function handleCreate(args: string[]): Promise { }); const created = await client.createTask(payload); - const task = await waitForTaskReady(client, workspaceId, created.taskId, 180_000); - const switched = await client.switchTask(workspaceId, task.taskId); - const attached = await client.attachTask(workspaceId, task.taskId); + const createdTask = await waitForTaskReady(client, organizationId, created.taskId, 180_000); + const switched = await client.switchTask(organizationId, createdTask.taskId); + const attached = await client.attachTask(organizationId, createdTask.taskId); - writeStdout(`Branch: ${task.branchName ?? "-"}`); - writeStdout(`Task: ${task.taskId}`); - writeStdout(`Provider: ${task.providerId}`); + writeStdout(`Branch: ${createdTask.branchName ?? "-"}`); + writeStdout(`Task: ${createdTask.taskId}`); + writeStdout(`Provider: ${createdTask.sandboxProviderId}`); writeStdout(`Session: ${attached.sessionId ?? 
"none"}`); writeStdout(`Target: ${switched.switchTarget || attached.target}`); - writeStdout(`Title: ${task.title ?? "-"}`); + writeStdout(`Title: ${createdTask.title ?? "-"}`); const tmuxResult = spawnCreateTmuxWindow({ - branchName: task.branchName ?? task.taskId, + branchName: createdTask.branchName ?? createdTask.taskId, targetPath: switched.switchTarget || attached.target, sessionId: attached.sessionId, }); if (tmuxResult.created) { - writeStdout(`Window: created (${task.branchName})`); + writeStdout(`Window: created (${createdTask.branchName})`); return; } writeStdout(""); - writeStdout(`Run: hf switch ${task.taskId}`); + writeStdout(`Run: hf switch ${createdTask.taskId}`); if ((switched.switchTarget || attached.target).startsWith("/")) { writeStdout(`cd ${switched.switchTarget || attached.target}`); } @@ -493,8 +584,8 @@ async function handleCreate(args: string[]): Promise { async function handleTui(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); - await runTuiCommand(config, workspaceId); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + await runTuiCommand(config, organizationId); } async function handleStatus(args: string[]): Promise { @@ -504,17 +595,17 @@ async function handleStatus(args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); const backendStatus = await getBackendStatus(config.backend.host, config.backend.port); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const summary = summarizeTasks(rows); if (hasFlag(args, "--json")) { writeStdout( JSON.stringify( { - workspaceId, + organizationId, backend: backendStatus, tasks: { 
total: summary.total, @@ -529,7 +620,7 @@ async function handleStatus(args: string[]): Promise { return; } - writeStdout(`workspace=${workspaceId}`); + writeStdout(`organization=${organizationId}`); writeStdout(`backend running=${backendStatus.running} pid=${backendStatus.pid ?? "unknown"} version=${backendStatus.version ?? "unknown"}`); writeStdout(`tasks total=${summary.total}`); writeStdout( @@ -538,7 +629,7 @@ async function handleStatus(args: string[]): Promise { const providerSummary = Object.entries(summary.byProvider) .map(([provider, count]) => `${provider}=${count}`) .join(" "); - writeStdout(`providers ${providerSummary || "-"}`); + writeStdout(`sandboxProviders ${providerSummary || "-"}`); } async function handleHistory(args: string[]): Promise { @@ -548,13 +639,13 @@ async function handleHistory(args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const limit = parseIntOption(readOption(args, "--limit"), 20, "limit"); const branch = readOption(args, "--branch"); const taskId = readOption(args, "--task"); const client = createBackendClientFromConfig(config); const rows = await client.listHistory({ - workspaceId, + organizationId, limit, branch: branch || undefined, taskId: taskId || undefined, @@ -593,11 +684,11 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); if (cmd === "switch" && taskId === "-") { - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const active = rows.filter((r) => { const group = groupTaskStatus(r.status); return group 
=== "running" || group === "idle" || group === "queued"; @@ -611,19 +702,19 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise { } if (cmd === "switch") { - const result = await client.switchTask(workspaceId, taskId); + const result = await client.switchTask(organizationId, taskId); writeStdout(`cd ${result.switchTarget}`); return; } if (cmd === "attach") { - const result = await client.attachTask(workspaceId, taskId); + const result = await client.attachTask(organizationId, taskId); writeStdout(`target=${result.target} session=${result.sessionId ?? "none"}`); return; } if (cmd === "merge" || cmd === "archive") { - await client.runAction(workspaceId, taskId, cmd); + await client.runAction(organizationId, taskId, cmd); writeStdout("ok"); return; } @@ -656,8 +747,8 @@ async function main(): Promise { return; } - if (cmd === "workspace") { - await handleWorkspace(rest); + if (cmd === "organization") { + await handleOrganization(rest); return; } diff --git a/foundry/packages/cli/src/workspace/config.ts b/foundry/packages/cli/src/organization/config.ts similarity index 71% rename from foundry/packages/cli/src/workspace/config.ts rename to foundry/packages/cli/src/organization/config.ts index 5b05dc4..cfaebfe 100644 --- a/foundry/packages/cli/src/workspace/config.ts +++ b/foundry/packages/cli/src/organization/config.ts @@ -2,7 +2,7 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { dirname } from "node:path"; import { homedir } from "node:os"; import * as toml from "@iarna/toml"; -import { ConfigSchema, resolveWorkspaceId, type AppConfig } from "@sandbox-agent/foundry-shared"; +import { ConfigSchema, resolveOrganizationId, type AppConfig } from "@sandbox-agent/foundry-shared"; export const CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`; @@ -20,6 +20,6 @@ export function saveConfig(config: AppConfig, path = CONFIG_PATH): void { writeFileSync(path, toml.stringify(config), "utf8"); } -export function 
resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string { - return resolveWorkspaceId(flagWorkspace, config); +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + return resolveOrganizationId(flagOrganization, config); } diff --git a/foundry/packages/cli/src/theme.ts b/foundry/packages/cli/src/theme.ts index 5315a44..633c079 100644 --- a/foundry/packages/cli/src/theme.ts +++ b/foundry/packages/cli/src/theme.ts @@ -588,7 +588,7 @@ function pointer(obj: JsonObject, parts: string[]): unknown { function opencodeConfigPaths(baseDir: string): string[] { const paths: string[] = []; - const rootish = opencodeProjectConfigPaths(baseDir); + const rootish = opencodeRepositoryConfigPaths(baseDir); paths.push(...rootish); const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), ".config"); @@ -611,12 +611,12 @@ function opencodeThemeDirs(configDir: string | undefined, baseDir: string): stri dirs.push(join(xdgConfig, "opencode", "themes")); dirs.push(join(homedir(), ".opencode", "themes")); - dirs.push(...opencodeProjectThemeDirs(baseDir)); + dirs.push(...opencodeRepositoryThemeDirs(baseDir)); return dirs; } -function opencodeProjectConfigPaths(baseDir: string): string[] { +function opencodeRepositoryConfigPaths(baseDir: string): string[] { const dirs = ancestorDirs(baseDir); const out: string[] = []; for (const dir of dirs) { @@ -628,7 +628,7 @@ function opencodeProjectConfigPaths(baseDir: string): string[] { return out; } -function opencodeProjectThemeDirs(baseDir: string): string[] { +function opencodeRepositoryThemeDirs(baseDir: string): string[] { const dirs = ancestorDirs(baseDir); const out: string[] = []; for (const dir of dirs) { diff --git a/foundry/packages/cli/src/tui.ts b/foundry/packages/cli/src/tui.ts index d561565..c3aba9e 100644 --- a/foundry/packages/cli/src/tui.ts +++ b/foundry/packages/cli/src/tui.ts @@ -56,6 +56,11 @@ interface RenderOptions { height?: number; } +async 
function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); +} + function pad(input: string, width: number): string { if (width <= 0) { return ""; @@ -183,7 +188,7 @@ function helpLines(width: number): string[] { export function formatRows( rows: TaskRecord[], selected: number, - workspaceId: string, + organizationId: string, status: string, searchQuery = "", showHelp = false, @@ -212,7 +217,7 @@ export function formatRows( return `${marker}${pad(display.name, branchWidth)} ${pad(display.diff, COLUMN_WIDTHS.diff)} ${pad(display.agent, COLUMN_WIDTHS.agent)} ${pad(display.pr, COLUMN_WIDTHS.pr)} ${pad(display.author, COLUMN_WIDTHS.author)} ${pad(display.ci, COLUMN_WIDTHS.ci)} ${pad(display.review, COLUMN_WIDTHS.review)} ${pad(display.age, COLUMN_WIDTHS.age)}`; }); - const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `workspace:${workspaceId}`, status], `v${CLI_BUILD_ID}`), totalWidth); + const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `organization:${organizationId}`, status], `v${CLI_BUILD_ID}`), totalWidth); const contentHeight = totalHeight - 1; const lines = [...header, ...body].map((line) => fitLine(line, totalWidth)); @@ -309,7 +314,7 @@ function buildStyledContent(content: string, theme: TuiTheme, api: StyledTextApi return new api.StyledText(chunks); } -export async function runTui(config: AppConfig, workspaceId: string): Promise { +export async function runTui(config: AppConfig, organizationId: string): Promise { const core = (await import("@opentui/core")) as OpenTuiLike; const createCliRenderer = core.createCliRenderer; const TextRenderable = core.TextRenderable; @@ -359,7 +364,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { try { - const result = await client.switchTask(workspaceId, row.taskId); + 
const result = await client.switchTask(organizationId, row.taskId); close(`cd ${result.switchTarget}`); } catch (err) { busy = false; @@ -538,7 +543,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { try { - const result = await client.attachTask(workspaceId, row.taskId); + const result = await client.attachTask(organizationId, row.taskId); close(`target=${result.target} session=${result.sessionId ?? "none"}`); } catch (err) { busy = false; @@ -554,7 +559,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise client.runAction(workspaceId, row.taskId, "archive"), `archived ${row.taskId}`); + void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`); return; } @@ -563,7 +568,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise client.runAction(workspaceId, row.taskId, "sync"), `synced ${row.taskId}`); + void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`); return; } @@ -575,8 +580,8 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { - await client.runAction(workspaceId, row.taskId, "merge"); - await client.runAction(workspaceId, row.taskId, "archive"); + await client.runAction(organizationId, row.taskId, "merge"); + await client.runAction(organizationId, row.taskId, "archive"); }, `merged+archived ${row.taskId}`, ); diff --git a/foundry/packages/cli/test/backend-manager.test.ts b/foundry/packages/cli/test/backend-manager.test.ts index 53529ab..a6089c5 100644 --- a/foundry/packages/cli/test/backend-manager.test.ts +++ b/foundry/packages/cli/test/backend-manager.test.ts @@ -37,7 +37,7 @@ function healthyMetadataResponse(): { ok: boolean; json: () => Promise json: async () => ({ runtime: "rivetkit", actorNames: { - workspace: {}, + organization: {}, }, }), }; @@ -58,7 +58,7 
@@ describe("backend manager", () => { const config: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -68,7 +68,7 @@ describe("backend manager", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/cli/test/workspace-config.test.ts b/foundry/packages/cli/test/organization-config.test.ts similarity index 59% rename from foundry/packages/cli/test/workspace-config.test.ts rename to foundry/packages/cli/test/organization-config.test.ts index 94145be..5053ec2 100644 --- a/foundry/packages/cli/test/workspace-config.test.ts +++ b/foundry/packages/cli/test/organization-config.test.ts @@ -1,13 +1,13 @@ import { describe, expect, it } from "vitest"; import { ConfigSchema } from "@sandbox-agent/foundry-shared"; -import { resolveWorkspace } from "../src/workspace/config.js"; +import { resolveOrganization } from "../src/organization/config.js"; -describe("cli workspace resolution", () => { - it("uses default workspace when no flag", () => { +describe("cli organization resolution", () => { + it("uses default organization when no flag", () => { const config = ConfigSchema.parse({ auto_submit: true as const, notify: ["terminal" as const], - workspace: { default: "team" }, + organization: { default: "team" }, backend: { host: "127.0.0.1", port: 7741, @@ -17,13 +17,13 @@ describe("cli workspace resolution", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, }); - expect(resolveWorkspace(undefined, config)).toBe("team"); - expect(resolveWorkspace("alpha", config)).toBe("alpha"); + expect(resolveOrganization(undefined, config)).toBe("team"); + expect(resolveOrganization("alpha", config)).toBe("alpha"); }); }); diff --git a/foundry/packages/cli/test/theme.test.ts 
b/foundry/packages/cli/test/theme.test.ts index a492a63..2a0d7e3 100644 --- a/foundry/packages/cli/test/theme.test.ts +++ b/foundry/packages/cli/test/theme.test.ts @@ -21,7 +21,7 @@ describe("resolveTuiTheme", () => { const baseConfig: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -31,7 +31,7 @@ describe("resolveTuiTheme", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts index fe80182..9ba0feb 100644 --- a/foundry/packages/cli/test/tui-format.test.ts +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -4,14 +4,14 @@ import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client"; import { formatRows } from "../src/tui.js"; const sample: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "local", + sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -19,7 +19,7 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, diff --git a/foundry/packages/client/src/app-client.ts b/foundry/packages/client/src/app-client.ts index 1fb95d2..16968cf 100644 --- a/foundry/packages/client/src/app-client.ts +++ b/foundry/packages/client/src/app-client.ts @@ -24,7 +24,7 @@ export interface FoundryAppClient { cancelScheduledRenewal(organizationId: string): Promise; resumeSubscription(organizationId: string): Promise; reconnectGithub(organizationId: string): Promise; - 
recordSeatUsage(workspaceId: string): Promise; + recordSeatUsage(organizationId: string): Promise; } export interface CreateFoundryAppClientOptions { diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index ccb0657..14e5661 100644 --- a/foundry/packages/client/src/backend-client.ts +++ b/foundry/packages/client/src/backend-client.ts @@ -1,7 +1,6 @@ import { createClient } from "rivetkit/client"; import type { AgentType, - AddRepoInput, AppConfig, FoundryAppSnapshot, FoundryBillingPlanId, @@ -21,20 +20,18 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, TaskEvent, WorkbenchTaskDetail, WorkbenchTaskSummary, WorkbenchSessionDetail, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, HistoryEvent, HistoryQueryInput, - ProviderId, + SandboxProviderId, RepoOverview, - RepoStackActionInput, - RepoStackActionResult, RepoRecord, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult, @@ -43,7 +40,7 @@ import type { } from "@sandbox-agent/foundry-shared"; import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import { createMockBackendClient } from "./mock/backend-client.js"; -import { taskKey, taskSandboxKey, workspaceKey } from "./keys.js"; +import { taskKey, taskSandboxKey, organizationKey } from "./keys.js"; export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill"; @@ -75,41 +72,39 @@ export interface ActorConn { dispose(): Promise; } -interface WorkspaceHandle { +interface OrganizationHandle { connect(): ActorConn; - addRepo(input: AddRepoInput): Promise; - listRepos(input: { workspaceId: string }): Promise; + listRepos(input: { organizationId: string }): Promise; createTask(input: CreateTaskInput): Promise; - listTasks(input: { 
workspaceId: string; repoId?: string }): Promise; - getRepoOverview(input: { workspaceId: string; repoId: string }): Promise; - runRepoStackAction(input: RepoStackActionInput): Promise; + listTasks(input: { organizationId: string; repoId?: string }): Promise; + getRepoOverview(input: { organizationId: string; repoId: string }): Promise; history(input: HistoryQueryInput): Promise; switchTask(taskId: string): Promise; - getTask(input: { workspaceId: string; taskId: string }): Promise; - attachTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; - pushTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - syncTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - mergeTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - archiveTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - killTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - useWorkspace(input: { workspaceId: string }): Promise<{ workspaceId: string }>; + getTask(input: { organizationId: string; taskId: string }): Promise; + attachTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; + pushTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + syncTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + mergeTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + archiveTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + killTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>; starSandboxAgentRepo(input: StarSandboxAgentRepoInput): Promise; - getWorkspaceSummary(input: { workspaceId: 
string }): Promise; + getOrganizationSummary(input: { organizationId: string }): Promise; applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise; removeTaskSummary(input: { taskId: string }): Promise; - reconcileWorkbenchState(input: { workspaceId: string }): Promise; + reconcileWorkbenchState(input: { organizationId: string }): Promise; createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise; markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise; renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise; renameWorkbenchBranch(input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }>; + createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; renameWorkbenchSession(input: TaskWorkbenchRenameSessionInput): Promise; setWorkbenchSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; updateWorkbenchDraft(input: TaskWorkbenchUpdateDraftInput): Promise; changeWorkbenchModel(input: TaskWorkbenchChangeModelInput): Promise; sendWorkbenchMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(input: TaskWorkbenchTabInput): Promise; - closeWorkbenchSession(input: TaskWorkbenchTabInput): Promise; + stopWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; + closeWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise; revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise; reloadGithubOrganization(): Promise; @@ -118,7 +113,7 @@ interface WorkspaceHandle { reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise; } -interface AppWorkspaceHandle { +interface AppOrganizationHandle { connect(): ActorConn; getAppSnapshot(input: { sessionId: string }): Promise; skipAppStarterRepo(input: { sessionId: string }): Promise; @@ -131,7 +126,7 @@ interface 
AppWorkspaceHandle { createAppBillingPortalSession(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; cancelAppScheduledRenewal(input: { sessionId: string; organizationId: string }): Promise; resumeAppSubscription(input: { sessionId: string; organizationId: string }): Promise; - recordAppSeatUsage(input: { sessionId: string; workspaceId: string }): Promise; + recordAppSeatUsage(input: { sessionId: string; organizationId: string }): Promise; } interface TaskHandle { @@ -162,12 +157,12 @@ interface TaskSandboxHandle { rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; destroySession(sessionId: string): Promise; sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; - providerState(): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>; + providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; } interface RivetClient { - workspace: { - getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): WorkspaceHandle; + organization: { + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): OrganizationHandle; }; task: { get(key?: string | string[]): TaskHandle; @@ -182,15 +177,15 @@ interface RivetClient { export interface BackendClientOptions { endpoint: string; - defaultWorkspaceId?: string; + defaultOrganizationId?: string; mode?: "remote" | "mock"; } export interface BackendClient { getAppSnapshot(): Promise; - connectWorkspace(workspaceId: string): Promise; - connectTask(workspaceId: string, repoId: string, taskId: string): Promise; - connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise; + connectOrganization(organizationId: string): Promise; + connectTask(organizationId: string, repoId: string, taskId: string): Promise; + connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; 
subscribeApp(listener: () => void): () => void; signInWithGithub(): Promise; signOutApp(): Promise; @@ -204,109 +199,112 @@ export interface BackendClient { openAppBillingPortal(organizationId: string): Promise; cancelAppScheduledRenewal(organizationId: string): Promise; resumeAppSubscription(organizationId: string): Promise; - recordAppSeatUsage(workspaceId: string): Promise; - addRepo(workspaceId: string, remoteUrl: string): Promise; - listRepos(workspaceId: string): Promise; + recordAppSeatUsage(organizationId: string): Promise; + listRepos(organizationId: string): Promise; createTask(input: CreateTaskInput): Promise; - listTasks(workspaceId: string, repoId?: string): Promise; - getRepoOverview(workspaceId: string, repoId: string): Promise; - runRepoStackAction(input: RepoStackActionInput): Promise; - getTask(workspaceId: string, taskId: string): Promise; + listTasks(organizationId: string, repoId?: string): Promise; + getRepoOverview(organizationId: string, repoId: string): Promise; + getTask(organizationId: string, taskId: string): Promise; listHistory(input: HistoryQueryInput): Promise; - switchTask(workspaceId: string, taskId: string): Promise; - attachTask(workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; - runAction(workspaceId: string, taskId: string, action: TaskAction): Promise; + switchTask(organizationId: string, taskId: string): Promise; + attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; + runAction(organizationId: string, taskId: string, action: TaskAction): Promise; createSandboxSession(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; prompt: string; cwd?: string; agent?: AgentType | "opencode"; }): Promise<{ id: string; status: "running" | "idle" | "error" }>; listSandboxSessions( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + 
sandboxProviderId: SandboxProviderId, sandboxId: string, input?: { cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>; listSandboxSessionEvents( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input: { sessionId: string; cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; - createSandboxProcess(input: { workspaceId: string; providerId: ProviderId; sandboxId: string; request: ProcessCreateRequest }): Promise; - listSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }>; + createSandboxProcess(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + request: ProcessCreateRequest; + }): Promise; + listSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }>; getSandboxProcessLogs( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, ): Promise; stopSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise; killSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise; - deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise; - subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void; + deleteSandboxProcess(organizationId: string, 
sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise; + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void; sendSandboxPrompt(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; sessionId: string; prompt: string; notification?: boolean; }): Promise; sandboxSessionStatus( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, sessionId: string, ): Promise<{ id: string; status: "running" | "idle" | "error" }>; sandboxProviderState( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>; - getSandboxAgentConnection(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; - getWorkspaceSummary(workspaceId: string): Promise; - getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise; - getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise; - getWorkbench(workspaceId: string): Promise; - subscribeWorkbench(workspaceId: string, listener: () => void): () => void; - createWorkbenchTask(workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(workspaceId: string, input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(workspaceId: string, input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(workspaceId: string, input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }>; - renameWorkbenchSession(workspaceId: string, input: 
TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise; - closeWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise; - publishWorkbenchPr(workspaceId: string, input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(workspaceId: string, input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(workspaceId: string): Promise; - reloadGithubPullRequests(workspaceId: string): Promise; - reloadGithubRepository(workspaceId: string, repoId: string): Promise; - reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise; + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; + getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; + getOrganizationSummary(organizationId: string): Promise; + getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; + getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; + getWorkbench(organizationId: string): Promise; + subscribeWorkbench(organizationId: string, listener: () => void): () => void; + createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise; + markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise; + renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise; + renameWorkbenchBranch(organizationId: string, input: 
TaskWorkbenchRenameInput): Promise; + createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; + renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise; + setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; + updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise; + changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise; + sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise; + stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; + closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; + publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise; + revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise; + reloadGithubOrganization(organizationId: string): Promise; + reloadGithubPullRequests(organizationId: string): Promise; + reloadGithubRepository(organizationId: string, repoId: string): Promise; + reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise; health(): Promise<{ ok: true }>; - useWorkspace(workspaceId: string): Promise<{ workspaceId: string }>; - starSandboxAgentRepo(workspaceId: string): Promise; + useOrganization(organizationId: string): Promise<{ organizationId: string }>; + starSandboxAgentRepo(organizationId: string): Promise; } export function rivetEndpoint(config: AppConfig): string { @@ -316,10 +314,49 @@ export function rivetEndpoint(config: AppConfig): string { export function createBackendClientFromConfig(config: AppConfig): BackendClient { return createBackendClient({ endpoint: rivetEndpoint(config), - defaultWorkspaceId: config.workspace.default, + defaultOrganizationId: config.organization.default, }); } 
+export interface BackendHealthCheckOptions { + endpoint: string; + timeoutMs?: number; +} + +export interface BackendMetadata { + clientEndpoint: string; + appEndpoint: string; + rivetEndpoint: string; +} + +export async function checkBackendHealth(options: BackendHealthCheckOptions): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), options.timeoutMs ?? 1_500); + + try { + const response = await fetch(normalizeLegacyBackendEndpoint(options.endpoint), { + method: "GET", + signal: controller.signal, + }); + return response.status < 500; + } catch { + return false; + } finally { + clearTimeout(timeout); + } +} + +export async function readBackendMetadata(options: BackendHealthCheckOptions): Promise { + const endpoints = deriveBackendEndpoints(options.endpoint); + const clientEndpoint = endpoints.rivetEndpoint.replace(/\/v1\/rivet\/?$/, ""); + + return { + clientEndpoint, + appEndpoint: endpoints.appEndpoint, + rivetEndpoint: endpoints.rivetEndpoint, + }; +} + function stripTrailingSlash(value: string): string { return value.replace(/\/$/, ""); } @@ -366,7 +403,7 @@ function signedOutAppSnapshot(): FoundryAppSnapshot { export function createBackendClient(options: BackendClientOptions): BackendClient { if (options.mode === "mock") { - return createMockBackendClient(options.defaultWorkspaceId); + return createMockBackendClient(options.defaultOrganizationId); } const endpoints = deriveBackendEndpoints(options.endpoint); @@ -424,20 +461,20 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return typeof sessionId === "string" && sessionId.length > 0 ? 
sessionId : null; }; - const workspace = async (workspaceId: string): Promise => - client.workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const organization = async (organizationId: string): Promise => + client.organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const appWorkspace = async (): Promise => - client.workspace.getOrCreate(workspaceKey("app"), { + const appOrganization = async (): Promise => + client.organization.getOrCreate(organizationKey("app"), { createWithInput: "app", - }) as unknown as AppWorkspaceHandle; + }) as unknown as AppOrganizationHandle; - const task = async (workspaceId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(workspaceId, repoId, taskId)); + const task = async (organizationId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(organizationId, repoId, taskId)); - const sandboxByKey = async (workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise => { - return (client as any).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); + const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise => { + return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); }; function isActorNotFoundError(error: unknown): boolean { @@ -445,21 +482,25 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return message.includes("Actor not found"); } - const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { - const ws = await workspace(workspaceId); - const rows = await ws.listTasks({ workspaceId }); + const sandboxByActorIdFromTask = async ( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise => { + const ws = await organization(organizationId); + const rows = await ws.listTasks({ 
organizationId }); const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt); for (const row of candidates) { try { - const detail = await ws.getTask({ workspaceId, taskId: row.taskId }); - if (detail.providerId !== providerId) { + const detail = await ws.getTask({ organizationId, taskId: row.taskId }); + if (detail.sandboxProviderId !== sandboxProviderId) { continue; } const sandbox = detail.sandboxes.find( (sb) => sb.sandboxId === sandboxId && - sb.providerId === providerId && + sb.sandboxProviderId === sandboxProviderId && typeof (sb as any).sandboxActorId === "string" && (sb as any).sandboxActorId.length > 0, ) as { sandboxActorId?: string } | undefined; @@ -479,19 +520,19 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }; const withSandboxHandle = async ( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, run: (handle: TaskSandboxHandle) => Promise, ): Promise => { - const handle = await sandboxByKey(workspaceId, providerId, sandboxId); + const handle = await sandboxByKey(organizationId, sandboxProviderId, sandboxId); try { return await run(handle); } catch (error) { if (!isActorNotFoundError(error)) { throw error; } - const fallback = await sandboxByActorIdFromTask(workspaceId, providerId, sandboxId); + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); if (!fallback) { throw error; } @@ -499,22 +540,22 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const connectWorkspace = async (workspaceId: string): Promise => { - return (await workspace(workspaceId)).connect() as ActorConn; + const connectOrganization = async (organizationId: string): Promise => { + return (await organization(organizationId)).connect() as ActorConn; }; - const connectTask = async (workspaceId: string, repoId: string, taskIdValue: string): Promise => { - return (await 
task(workspaceId, repoId, taskIdValue)).connect() as ActorConn; + const connectTask = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).connect() as ActorConn; }; - const connectSandbox = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { + const connectSandbox = async (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise => { try { - return (await sandboxByKey(workspaceId, providerId, sandboxId)).connect() as ActorConn; + return (await sandboxByKey(organizationId, sandboxProviderId, sandboxId)).connect() as ActorConn; } catch (error) { if (!isActorNotFoundError(error)) { throw error; } - const fallback = await sandboxByActorIdFromTask(workspaceId, providerId, sandboxId); + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); if (!fallback) { throw error; } @@ -522,14 +563,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const getWorkbenchCompat = async (workspaceId: string): Promise => { - const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId }); + const getWorkbenchCompat = async (organizationId: string): Promise => { + const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId }); const tasks = ( await Promise.all( summary.taskSummaries.map(async (taskSummary) => { let detail; try { - detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); + detail = await (await task(organizationId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); } catch (error) { if (isActorNotFoundError(error)) { return null; @@ -539,7 +580,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const sessionDetails = await Promise.all( detail.sessionsSummary.map(async (session) => { try { - const full = 
await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); + const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); return [session.id, full] as const; } catch (error) { if (isActorNotFoundError(error)) { @@ -559,7 +600,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien updatedAtMs: detail.updatedAtMs, branch: detail.branch, pullRequest: detail.pullRequest, - tabs: detail.sessionsSummary.map((session) => { + sessions: detail.sessionsSummary.map((session) => { const full = sessionDetailsById.get(session.id); return { id: session.id, @@ -584,7 +625,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien ) ).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null); - const projects = summary.repos + const repositories = summary.repos .map((repo) => ({ id: repo.id, label: repo.label, @@ -594,31 +635,31 @@ export function createBackendClient(options: BackendClientOptions): BackendClien .filter((repo) => repo.tasks.length > 0); return { - workspaceId, + organizationId, repos: summary.repos.map((repo) => ({ id: repo.id, label: repo.label })), - projects, + repositories, tasks: tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs), }; }; - const subscribeWorkbench = (workspaceId: string, listener: () => void): (() => void) => { - let entry = workbenchSubscriptions.get(workspaceId); + const subscribeWorkbench = (organizationId: string, listener: () => void): (() => void) => { + let entry = workbenchSubscriptions.get(organizationId); if (!entry) { entry = { listeners: new Set(), disposeConnPromise: null, }; - workbenchSubscriptions.set(workspaceId, entry); + workbenchSubscriptions.set(organizationId, entry); } entry.listeners.add(listener); if (!entry.disposeConnPromise) { entry.disposeConnPromise = (async () => { - const handle = await workspace(workspaceId); + const 
handle = await organization(organizationId); const conn = (handle as any).connect(); const unsubscribeEvent = conn.on("workbenchUpdated", () => { - const current = workbenchSubscriptions.get(workspaceId); + const current = workbenchSubscriptions.get(organizationId); if (!current) { return; } @@ -636,7 +677,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } return () => { - const current = workbenchSubscriptions.get(workspaceId); + const current = workbenchSubscriptions.get(organizationId); if (!current) { return; } @@ -645,17 +686,18 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return; } - workbenchSubscriptions.delete(workspaceId); + workbenchSubscriptions.delete(organizationId); void current.disposeConnPromise?.then(async (disposeConn) => { await disposeConn?.(); }); }; }; - const sandboxProcessSubscriptionKey = (workspaceId: string, providerId: ProviderId, sandboxId: string): string => `${workspaceId}:${providerId}:${sandboxId}`; + const sandboxProcessSubscriptionKey = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): string => + `${organizationId}:${sandboxProviderId}:${sandboxId}`; - const subscribeSandboxProcesses = (workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): (() => void) => { - const key = sandboxProcessSubscriptionKey(workspaceId, providerId, sandboxId); + const subscribeSandboxProcesses = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): (() => void) => { + const key = sandboxProcessSubscriptionKey(organizationId, sandboxProviderId, sandboxId); let entry = sandboxProcessSubscriptions.get(key); if (!entry) { entry = { @@ -669,7 +711,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!entry.disposeConnPromise) { entry.disposeConnPromise = (async () => { - const conn = await connectSandbox(workspaceId, providerId, 
sandboxId); + const conn = await connectSandbox(organizationId, sandboxProviderId, sandboxId); const unsubscribeEvent = conn.on("processesUpdated", () => { const current = sandboxProcessSubscriptions.get(key); if (!current) { @@ -710,7 +752,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!appSubscriptions.disposeConnPromise) { appSubscriptions.disposeConnPromise = (async () => { - const handle = await appWorkspace(); + const handle = await appOrganization(); const conn = (handle as any).connect(); const unsubscribeEvent = conn.on("appUpdated", () => { for (const currentListener of [...appSubscriptions.listeners]) { @@ -745,19 +787,19 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { return signedOutAppSnapshot(); } - return await (await appWorkspace()).getAppSnapshot({ sessionId }); + return await (await appOrganization()).getAppSnapshot({ sessionId }); }, - async connectWorkspace(workspaceId: string): Promise { - return await connectWorkspace(workspaceId); + async connectOrganization(organizationId: string): Promise { + return await connectOrganization(organizationId); }, - async connectTask(workspaceId: string, repoId: string, taskIdValue: string): Promise { - return await connectTask(workspaceId, repoId, taskIdValue); + async connectTask(organizationId: string, repoId: string, taskIdValue: string): Promise { + return await connectTask(organizationId, repoId, taskIdValue); }, - async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise { - return await connectSandbox(workspaceId, providerId, sandboxId); + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await connectSandbox(organizationId, sandboxProviderId, sandboxId); }, subscribeApp(listener: () => void): () => void { @@ -788,7 +830,7 @@ export function createBackendClient(options: BackendClientOptions): 
BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).skipAppStarterRepo({ sessionId }); + return await (await appOrganization()).skipAppStarterRepo({ sessionId }); }, async starAppStarterRepo(organizationId: string): Promise { @@ -796,7 +838,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).starAppStarterRepo({ sessionId, organizationId }); + return await (await appOrganization()).starAppStarterRepo({ sessionId, organizationId }); }, async selectAppOrganization(organizationId: string): Promise { @@ -804,7 +846,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).selectAppOrganization({ sessionId, organizationId }); + return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId }); }, async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { @@ -812,7 +854,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).updateAppOrganizationProfile({ + return await (await appOrganization()).updateAppOrganizationProfile({ sessionId, organizationId: input.organizationId, displayName: input.displayName, @@ -826,7 +868,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).triggerAppRepoImport({ sessionId, organizationId }); + return await (await appOrganization()).triggerAppRepoImport({ sessionId, organizationId }); }, async reconnectAppGithub(organizationId: string): Promise { @@ -834,7 +876,7 @@ export function 
createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).beginAppGithubInstall({ sessionId, organizationId }); + const response = await (await appOrganization()).beginAppGithubInstall({ sessionId, organizationId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -845,7 +887,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).createAppCheckoutSession({ sessionId, organizationId, planId }); + const response = await (await appOrganization()).createAppCheckoutSession({ sessionId, organizationId, planId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -856,7 +898,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).createAppBillingPortalSession({ sessionId, organizationId }); + const response = await (await appOrganization()).createAppBillingPortalSession({ sessionId, organizationId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -867,7 +909,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).cancelAppScheduledRenewal({ sessionId, organizationId }); + return await (await appOrganization()).cancelAppScheduledRenewal({ sessionId, organizationId }); }, async resumeAppSubscription(organizationId: string): Promise { @@ -875,117 +917,109 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await 
appWorkspace()).resumeAppSubscription({ sessionId, organizationId }); + return await (await appOrganization()).resumeAppSubscription({ sessionId, organizationId }); }, - async recordAppSeatUsage(workspaceId: string): Promise { + async recordAppSeatUsage(organizationId: string): Promise { const sessionId = await getSessionId(); if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).recordAppSeatUsage({ sessionId, workspaceId }); + return await (await appOrganization()).recordAppSeatUsage({ sessionId, organizationId }); }, - async addRepo(workspaceId: string, remoteUrl: string): Promise { - return (await workspace(workspaceId)).addRepo({ workspaceId, remoteUrl }); - }, - - async listRepos(workspaceId: string): Promise { - return (await workspace(workspaceId)).listRepos({ workspaceId }); + async listRepos(organizationId: string): Promise { + return (await organization(organizationId)).listRepos({ organizationId }); }, async createTask(input: CreateTaskInput): Promise { - return (await workspace(input.workspaceId)).createTask(input); + return (await organization(input.organizationId)).createTask(input); }, - async starSandboxAgentRepo(workspaceId: string): Promise { - return (await workspace(workspaceId)).starSandboxAgentRepo({ workspaceId }); + async starSandboxAgentRepo(organizationId: string): Promise { + return (await organization(organizationId)).starSandboxAgentRepo({ organizationId }); }, - async listTasks(workspaceId: string, repoId?: string): Promise { - return (await workspace(workspaceId)).listTasks({ workspaceId, repoId }); + async listTasks(organizationId: string, repoId?: string): Promise { + return (await organization(organizationId)).listTasks({ organizationId, repoId }); }, - async getRepoOverview(workspaceId: string, repoId: string): Promise { - return (await workspace(workspaceId)).getRepoOverview({ workspaceId, repoId }); + async getRepoOverview(organizationId: string, repoId: string): Promise { + return 
(await organization(organizationId)).getRepoOverview({ organizationId, repoId }); }, - async runRepoStackAction(input: RepoStackActionInput): Promise { - return (await workspace(input.workspaceId)).runRepoStackAction(input); - }, - - async getTask(workspaceId: string, taskId: string): Promise { - return (await workspace(workspaceId)).getTask({ - workspaceId, + async getTask(organizationId: string, taskId: string): Promise { + return (await organization(organizationId)).getTask({ + organizationId, taskId, }); }, async listHistory(input: HistoryQueryInput): Promise { - return (await workspace(input.workspaceId)).history(input); + return (await organization(input.organizationId)).history(input); }, - async switchTask(workspaceId: string, taskId: string): Promise { - return (await workspace(workspaceId)).switchTask(taskId); + async switchTask(organizationId: string, taskId: string): Promise { + return (await organization(organizationId)).switchTask(taskId); }, - async attachTask(workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { - return (await workspace(workspaceId)).attachTask({ - workspaceId, + async attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + return (await organization(organizationId)).attachTask({ + organizationId, taskId, reason: "cli.attach", }); }, - async runAction(workspaceId: string, taskId: string, action: TaskAction): Promise { + async runAction(organizationId: string, taskId: string, action: TaskAction): Promise { if (action === "push") { - await (await workspace(workspaceId)).pushTask({ - workspaceId, + await (await organization(organizationId)).pushTask({ + organizationId, taskId, reason: "cli.push", }); return; } if (action === "sync") { - await (await workspace(workspaceId)).syncTask({ - workspaceId, + await (await organization(organizationId)).syncTask({ + organizationId, taskId, reason: "cli.sync", }); return; } if (action === "merge") { - 
await (await workspace(workspaceId)).mergeTask({ - workspaceId, + await (await organization(organizationId)).mergeTask({ + organizationId, taskId, reason: "cli.merge", }); return; } if (action === "archive") { - await (await workspace(workspaceId)).archiveTask({ - workspaceId, + await (await organization(organizationId)).archiveTask({ + organizationId, taskId, reason: "cli.archive", }); return; } - await (await workspace(workspaceId)).killTask({ - workspaceId, + await (await organization(organizationId)).killTask({ + organizationId, taskId, reason: "cli.kill", }); }, async createSandboxSession(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; prompt: string; cwd?: string; agent?: AgentType | "opencode"; }): Promise<{ id: string; status: "running" | "idle" | "error" }> { - const created = await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + const created = await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.createSession({ agent: input.agent ?? 
"claude", sessionInit: { @@ -994,7 +1028,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }), ); if (input.prompt.trim().length > 0) { - await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.rawSendSessionMethod(created.id, "session/prompt", { prompt: [{ type: "text", text: input.prompt }], }), @@ -1007,83 +1041,87 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async listSandboxSessions( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input?: { cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listSessions(input ?? {})); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listSessions(input ?? 
{})); }, async listSandboxSessionEvents( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input: { sessionId: string; cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getEvents(input)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getEvents(input)); }, async createSandboxProcess(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; request: ProcessCreateRequest; }): Promise { - return await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => handle.createProcess(input.request)); + return await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.createProcess(input.request)); }, - async listSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listProcesses()); + async listSandboxProcesses( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ processes: SandboxProcessRecord[] }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listProcesses()); }, async getSandboxProcessLogs( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); + return await 
withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); }, async stopSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess(processId, query)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.stopProcess(processId, query)); }, async killSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess(processId, query)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.killProcess(processId, query)); }, - async deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise { - await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess(processId)); + async deleteSandboxProcess(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise { + await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.deleteProcess(processId)); }, - subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void { - return subscribeSandboxProcesses(workspaceId, providerId, sandboxId, listener); + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { + return 
subscribeSandboxProcesses(organizationId, sandboxProviderId, sandboxId, listener); }, async sendSandboxPrompt(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; sessionId: string; prompt: string; notification?: boolean; }): Promise { - await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.rawSendSessionMethod(input.sessionId, "session/prompt", { prompt: [{ type: "text", text: input.prompt }], }), @@ -1091,8 +1129,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async sandboxSessionStatus( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, sessionId: string, ): Promise<{ id: string; status: "running" | "idle" | "error" }> { @@ -1103,123 +1141,127 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async sandboxProviderState( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.providerState()); + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.providerState()); }, - async getSandboxAgentConnection(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.sandboxAgentConnection()); + async 
getSandboxAgentConnection( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ endpoint: string; token?: string }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection()); }, - async getWorkspaceSummary(workspaceId: string): Promise { - return (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId }); + async getOrganizationSummary(organizationId: string): Promise { + return (await organization(organizationId)).getOrganizationSummary({ organizationId }); }, - async getTaskDetail(workspaceId: string, repoId: string, taskIdValue: string): Promise { - return (await task(workspaceId, repoId, taskIdValue)).getTaskDetail(); + async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise { + return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(); }, - async getSessionDetail(workspaceId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { - return (await task(workspaceId, repoId, taskIdValue)).getSessionDetail({ sessionId }); + async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { + return (await task(organizationId, repoId, taskIdValue)).getSessionDetail({ sessionId }); }, - async getWorkbench(workspaceId: string): Promise { - return await getWorkbenchCompat(workspaceId); + async getWorkbench(organizationId: string): Promise { + return await getWorkbenchCompat(organizationId); }, - subscribeWorkbench(workspaceId: string, listener: () => void): () => void { - return subscribeWorkbench(workspaceId, listener); + subscribeWorkbench(organizationId: string, listener: () => void): () => void { + return subscribeWorkbench(organizationId, listener); }, - async createWorkbenchTask(workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise { - return (await workspace(workspaceId)).createWorkbenchTask(input); + 
async createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { + return (await organization(organizationId)).createWorkbenchTask(input); }, - async markWorkbenchUnread(workspaceId: string, input: TaskWorkbenchSelectInput): Promise { - await (await workspace(workspaceId)).markWorkbenchUnread(input); + async markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise { + await (await organization(organizationId)).markWorkbenchUnread(input); }, - async renameWorkbenchTask(workspaceId: string, input: TaskWorkbenchRenameInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchTask(input); + async renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise { + await (await organization(organizationId)).renameWorkbenchTask(input); }, - async renameWorkbenchBranch(workspaceId: string, input: TaskWorkbenchRenameInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchBranch(input); + async renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise { + await (await organization(organizationId)).renameWorkbenchBranch(input); }, - async createWorkbenchSession(workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { - return await (await workspace(workspaceId)).createWorkbenchSession(input); + async createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { + return await (await organization(organizationId)).createWorkbenchSession(input); }, - async renameWorkbenchSession(workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchSession(input); + async renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { + await (await organization(organizationId)).renameWorkbenchSession(input); }, - async 
setWorkbenchSessionUnread(workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - await (await workspace(workspaceId)).setWorkbenchSessionUnread(input); + async setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { + await (await organization(organizationId)).setWorkbenchSessionUnread(input); }, - async updateWorkbenchDraft(workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await (await workspace(workspaceId)).updateWorkbenchDraft(input); + async updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { + await (await organization(organizationId)).updateWorkbenchDraft(input); }, - async changeWorkbenchModel(workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise { - await (await workspace(workspaceId)).changeWorkbenchModel(input); + async changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { + await (await organization(organizationId)).changeWorkbenchModel(input); }, - async sendWorkbenchMessage(workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise { - await (await workspace(workspaceId)).sendWorkbenchMessage(input); + async sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { + await (await organization(organizationId)).sendWorkbenchMessage(input); }, - async stopWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise { - await (await workspace(workspaceId)).stopWorkbenchSession(input); + async stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { + await (await organization(organizationId)).stopWorkbenchSession(input); }, - async closeWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise { - await (await workspace(workspaceId)).closeWorkbenchSession(input); + async closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { 
+ await (await organization(organizationId)).closeWorkbenchSession(input); }, - async publishWorkbenchPr(workspaceId: string, input: TaskWorkbenchSelectInput): Promise { - await (await workspace(workspaceId)).publishWorkbenchPr(input); + async publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise { + await (await organization(organizationId)).publishWorkbenchPr(input); }, - async revertWorkbenchFile(workspaceId: string, input: TaskWorkbenchDiffInput): Promise { - await (await workspace(workspaceId)).revertWorkbenchFile(input); + async revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise { + await (await organization(organizationId)).revertWorkbenchFile(input); }, - async reloadGithubOrganization(workspaceId: string): Promise { - await (await workspace(workspaceId)).reloadGithubOrganization(); + async reloadGithubOrganization(organizationId: string): Promise { + await (await organization(organizationId)).reloadGithubOrganization(); }, - async reloadGithubPullRequests(workspaceId: string): Promise { - await (await workspace(workspaceId)).reloadGithubPullRequests(); + async reloadGithubPullRequests(organizationId: string): Promise { + await (await organization(organizationId)).reloadGithubPullRequests(); }, - async reloadGithubRepository(workspaceId: string, repoId: string): Promise { - await (await workspace(workspaceId)).reloadGithubRepository({ repoId }); + async reloadGithubRepository(organizationId: string, repoId: string): Promise { + await (await organization(organizationId)).reloadGithubRepository({ repoId }); }, - async reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise { - await (await workspace(workspaceId)).reloadGithubPullRequest({ repoId, prNumber }); + async reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise { + await (await organization(organizationId)).reloadGithubPullRequest({ repoId, prNumber }); }, async 
health(): Promise<{ ok: true }> { - const workspaceId = options.defaultWorkspaceId; - if (!workspaceId) { - throw new Error("Backend client default workspace is required for health checks"); + const organizationId = options.defaultOrganizationId; + if (!organizationId) { + throw new Error("Backend client default organization is required for health checks"); } - await (await workspace(workspaceId)).useWorkspace({ - workspaceId, + await (await organization(organizationId)).useOrganization({ + organizationId, }); return { ok: true }; }, - async useWorkspace(workspaceId: string): Promise<{ workspaceId: string }> { - return (await workspace(workspaceId)).useWorkspace({ workspaceId }); + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return (await organization(organizationId)).useOrganization({ organizationId }); }, }; } diff --git a/foundry/packages/client/src/index.ts b/foundry/packages/client/src/index.ts index 7605986..87909a9 100644 --- a/foundry/packages/client/src/index.ts +++ b/foundry/packages/client/src/index.ts @@ -1,10 +1,10 @@ export * from "./app-client.js"; export * from "./backend-client.js"; -export * from "./interest/manager.js"; -export * from "./interest/mock-manager.js"; -export * from "./interest/remote-manager.js"; -export * from "./interest/topics.js"; -export * from "./interest/use-interest.js"; +export * from "./subscription/manager.js"; +export * from "./subscription/mock-manager.js"; +export * from "./subscription/remote-manager.js"; +export * from "./subscription/topics.js"; +export * from "./subscription/use-subscription.js"; export * from "./keys.js"; export * from "./mock-app.js"; export * from "./view-model.js"; diff --git a/foundry/packages/client/src/interest/mock-manager.ts b/foundry/packages/client/src/interest/mock-manager.ts deleted file mode 100644 index f1c065e..0000000 --- a/foundry/packages/client/src/interest/mock-manager.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { createMockBackendClient 
} from "../mock/backend-client.js"; -import { RemoteInterestManager } from "./remote-manager.js"; - -/** - * Mock implementation shares the same interest-manager harness as the remote - * path, but uses the in-memory mock backend that synthesizes actor events. - */ -export class MockInterestManager extends RemoteInterestManager { - constructor() { - super(createMockBackendClient()); - } -} diff --git a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts index 54fdcc1..314f16a 100644 --- a/foundry/packages/client/src/keys.ts +++ b/foundry/packages/client/src/keys.ts @@ -1,29 +1,21 @@ export type ActorKey = string[]; -export function workspaceKey(workspaceId: string): ActorKey { - return ["ws", workspaceId]; +export function organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; } -export function projectKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId]; +export function repositoryKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId]; } -export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "task", taskId]; +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "task", taskId]; } -export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "sandbox", sandboxId]; +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "history"]; -} - -export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "pr-sync"]; 
-} - -export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "branch-sync"]; +export function historyKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "history"]; } diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 1cec853..0fa6fc7 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -67,7 +67,7 @@ export interface MockFoundryOrganizationSettings { export interface MockFoundryOrganization { id: string; - workspaceId: string; + organizationId: string; kind: MockOrganizationKind; settings: MockFoundryOrganizationSettings; github: MockFoundryGithubState; @@ -118,7 +118,7 @@ export interface MockFoundryAppClient { cancelScheduledRenewal(organizationId: string): Promise; resumeSubscription(organizationId: string): Promise; reconnectGithub(organizationId: string): Promise; - recordSeatUsage(workspaceId: string): void; + recordSeatUsage(organizationId: string): void; } const STORAGE_KEY = "sandbox-agent-foundry:mock-app:v1"; @@ -173,7 +173,7 @@ function buildRivetOrganization(): MockFoundryOrganization { return { id: "rivet", - workspaceId: "rivet", + organizationId: "rivet", kind: "organization", settings: { displayName: rivetDevFixture.name ?? 
rivetDevFixture.login, @@ -254,7 +254,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { organizations: [ { id: "personal-nathan", - workspaceId: "personal-nathan", + organizationId: "personal-nathan", kind: "personal", settings: { displayName: "Nathan", @@ -290,7 +290,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { }, { id: "acme", - workspaceId: "acme", + organizationId: "acme", kind: "organization", settings: { displayName: "Acme", @@ -335,7 +335,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { buildRivetOrganization(), { id: "personal-jamie", - workspaceId: "personal-jamie", + organizationId: "personal-jamie", kind: "personal", settings: { displayName: "Jamie", @@ -659,8 +659,8 @@ class MockFoundryAppStore implements MockFoundryAppClient { })); } - recordSeatUsage(workspaceId: string): void { - const org = this.snapshot.organizations.find((candidate) => candidate.workspaceId === workspaceId); + recordSeatUsage(organizationId: string): void { + const org = this.snapshot.organizations.find((candidate) => candidate.organizationId === organizationId); const currentUser = currentMockUser(this.snapshot); if (!org || !currentUser) { return; diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts index b87c8c4..011192d 100644 --- a/foundry/packages/client/src/mock/backend-client.ts +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -1,5 +1,4 @@ import type { - AddRepoInput, AppEvent, CreateTaskInput, FoundryAppSnapshot, @@ -17,21 +16,19 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, TaskEvent, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, HistoryEvent, HistoryQueryInput, - ProviderId, + 
SandboxProviderId, RepoOverview, RepoRecord, - RepoStackActionInput, - RepoStackActionResult, StarSandboxAgentRepoResult, SwitchResult, } from "@sandbox-agent/foundry-shared"; @@ -91,7 +88,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco return status; } -export function createMockBackendClient(defaultWorkspaceId = "default"): BackendClient { +export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient { const workbench = getSharedMockWorkbenchClient(); const listenersBySandboxId = new Map void>>(); const processesBySandboxId = new Map(); @@ -176,9 +173,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend updatedAtMs: task.updatedAtMs, branch: task.branch, pullRequest: task.pullRequest, - sessionsSummary: task.tabs.map((tab) => ({ + sessionsSummary: task.sessions.map((tab) => ({ id: tab.id, sessionId: tab.sessionId, + sandboxSessionId: tab.sandboxSessionId ?? tab.sessionId, sessionName: tab.sessionName, agent: tab.agent, model: tab.model, @@ -192,10 +190,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({ ...buildTaskSummary(task), task: task.title, - agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude", + agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"), statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready", - activeSessionId: task.tabs[0]?.sessionId ?? null, + activeSessionId: task.sessions[0]?.sessionId ?? null, diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", prUrl: task.pullRequest ? 
`https://example.test/pr/${task.pullRequest.number}` : null, reviewStatus: null, @@ -205,7 +203,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend minutesUsed: task.minutesUsed, sandboxes: [ { - providerId: "local", + sandboxProviderId: "local", sandboxId: task.id, cwd: mockCwd(task.repoName, task.id), }, @@ -213,15 +211,14 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend activeSandboxId: task.id, }); - const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], tabId: string): WorkbenchSessionDetail => { - const tab = task.tabs.find((candidate) => candidate.id === tabId); + const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`Unknown mock tab ${tabId} for task ${task.id}`); + throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`); } return { sessionId: tab.id, - tabId: tab.id, - sandboxSessionId: tab.sessionId, + sandboxSessionId: tab.sandboxSessionId ?? 
tab.sessionId, sessionName: tab.sessionName, agent: tab.agent, model: tab.model, @@ -234,11 +231,11 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }; }; - const buildWorkspaceSummary = (): WorkspaceSummarySnapshot => { + const buildOrganizationSummary = (): OrganizationSummarySnapshot => { const snapshot = workbench.getSnapshot(); const taskSummaries = snapshot.tasks.map(buildTaskSummary); return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repos: snapshot.repos.map((repo) => { const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id); return { @@ -253,39 +250,40 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }; }; - const workspaceScope = (workspaceId: string): string => `workspace:${workspaceId}`; - const taskScope = (workspaceId: string, repoId: string, taskId: string): string => `task:${workspaceId}:${repoId}:${taskId}`; - const sandboxScope = (workspaceId: string, providerId: string, sandboxId: string): string => `sandbox:${workspaceId}:${providerId}:${sandboxId}`; + const organizationScope = (organizationId: string): string => `organization:${organizationId}`; + const taskScope = (organizationId: string, repoId: string, taskId: string): string => `task:${organizationId}:${repoId}:${taskId}`; + const sandboxScope = (organizationId: string, sandboxProviderId: string, sandboxId: string): string => + `sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`; - const emitWorkspaceSnapshot = (): void => { - const summary = buildWorkspaceSummary(); + const emitOrganizationSnapshot = (): void => { + const summary = buildOrganizationSummary(); const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? 
null; if (latestTask) { - emitConnectionEvent(workspaceScope(defaultWorkspaceId), "workspaceUpdated", { + emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { type: "taskSummaryUpdated", taskSummary: latestTask, - } satisfies WorkspaceEvent); + } satisfies OrganizationEvent); } }; const emitTaskUpdate = (taskId: string): void => { const task = requireTask(taskId); - emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "taskUpdated", { + emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", { type: "taskDetailUpdated", detail: buildTaskDetail(task), } satisfies TaskEvent); }; - const emitSessionUpdate = (taskId: string, tabId: string): void => { + const emitSessionUpdate = (taskId: string, sessionId: string): void => { const task = requireTask(taskId); - emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "sessionUpdated", { + emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "sessionUpdated", { type: "sessionUpdated", - session: buildSessionDetail(task, tabId), + session: buildSessionDetail(task, sessionId), } satisfies SessionEvent); }; const emitSandboxProcessesUpdate = (sandboxId: string): void => { - emitConnectionEvent(sandboxScope(defaultWorkspaceId, "local", sandboxId), "processesUpdated", { + emitConnectionEvent(sandboxScope(defaultOrganizationId, "local", sandboxId), "processesUpdated", { type: "processesUpdated", processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), } satisfies SandboxProcessesEvent); @@ -296,22 +294,22 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend const cwd = mockCwd(task.repoName, task.id); const archived = task.status === "archived"; return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: task.repoId, repoRemote: mockRepoRemote(task.repoName), taskId: task.id, branchName: task.branch, title: 
task.title, task: task.title, - providerId: "local", + sandboxProviderId: "local", status: toTaskStatus(archived ? "archived" : "running", archived), statusMessage: archived ? "archived" : "mock sandbox ready", activeSandboxId: task.id, - activeSessionId: task.tabs[0]?.sessionId ?? null, + activeSessionId: task.sessions[0]?.sessionId ?? null, sandboxes: [ { sandboxId: task.id, - providerId: "local", + sandboxProviderId: "local", sandboxActorId: "mock-sandbox", switchTarget: `mock://${task.id}`, cwd, @@ -319,7 +317,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend updatedAt: task.updatedAtMs, }, ], - agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude", + agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", prSubmitted: Boolean(task.pullRequest), diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, @@ -366,16 +364,16 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return unsupportedAppSnapshot(); }, - async connectWorkspace(workspaceId: string): Promise { - return createConn(workspaceScope(workspaceId)); + async connectOrganization(organizationId: string): Promise { + return createConn(organizationScope(organizationId)); }, - async connectTask(workspaceId: string, repoId: string, taskId: string): Promise { - return createConn(taskScope(workspaceId, repoId, taskId)); + async connectTask(organizationId: string, repoId: string, taskId: string): Promise { + return createConn(taskScope(organizationId, repoId, taskId)); }, - async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise { - return createConn(sandboxScope(workspaceId, providerId, sandboxId)); + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return 
createConn(sandboxScope(organizationId, sandboxProviderId, sandboxId)); }, subscribeApp(): () => void { @@ -434,13 +432,9 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return unsupportedAppSnapshot(); }, - async addRepo(_workspaceId: string, _remoteUrl: string): Promise { - notSupported("addRepo"); - }, - - async listRepos(_workspaceId: string): Promise { + async listRepos(_organizationId: string): Promise { return workbench.getSnapshot().repos.map((repo) => ({ - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: repo.id, remoteUrl: mockRepoRemote(repo.label), createdAt: nowMs(), @@ -452,12 +446,12 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend notSupported("createTask"); }, - async listTasks(_workspaceId: string, repoId?: string): Promise { + async listTasks(_organizationId: string, repoId?: string): Promise { return workbench .getSnapshot() .tasks.filter((task) => !repoId || task.repoId === repoId) .map((task) => ({ - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: task.repoId, taskId: task.id, branchName: task.branch, @@ -467,15 +461,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend })); }, - async getRepoOverview(_workspaceId: string, _repoId: string): Promise { + async getRepoOverview(_organizationId: string, _repoId: string): Promise { notSupported("getRepoOverview"); }, - - async runRepoStackAction(_input: RepoStackActionInput): Promise { - notSupported("runRepoStackAction"); - }, - - async getTask(_workspaceId: string, taskId: string): Promise { + async getTask(_organizationId: string, taskId: string): Promise { return buildTaskRecord(taskId); }, @@ -483,23 +472,23 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return []; }, - async switchTask(_workspaceId: string, taskId: string): Promise { + async switchTask(_organizationId: string, taskId: 
string): Promise { return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, taskId, - providerId: "local", + sandboxProviderId: "local", switchTarget: `mock://${taskId}`, }; }, - async attachTask(_workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return { target: `mock://${taskId}`, - sessionId: requireTask(taskId).tabs[0]?.sessionId ?? null, + sessionId: requireTask(taskId).sessions[0]?.sessionId ?? null, }; }, - async runAction(_workspaceId: string, _taskId: string): Promise { + async runAction(_organizationId: string, _taskId: string): Promise { notSupported("runAction"); }, @@ -516,8 +505,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async createSandboxProcess(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; request: ProcessCreateRequest; }): Promise { @@ -529,15 +518,15 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return cloneProcess(created); }, - async listSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { + async listSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { return { processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), }; }, async getSandboxProcessLogs( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, @@ -564,8 +553,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async stopSandboxProcess( - _workspaceId: string, - 
_providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, _query?: ProcessSignalQuery, @@ -583,8 +572,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async killSandboxProcess( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, _query?: ProcessSignalQuery, @@ -601,7 +590,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return cloneProcess(process); }, - async deleteSandboxProcess(_workspaceId: string, _providerId: ProviderId, sandboxId: string, processId: string): Promise { + async deleteSandboxProcess(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, processId: string): Promise { processesBySandboxId.set( sandboxId, ensureProcessList(sandboxId).filter((candidate) => candidate.id !== processId), @@ -609,7 +598,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend notifySandbox(sandboxId); }, - subscribeSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string, listener: () => void): () => void { + subscribeSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { let listeners = listenersBySandboxId.get(sandboxId); if (!listeners) { listeners = new Set(); @@ -637,26 +626,26 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async sandboxProviderState( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> { - return { providerId: "local", sandboxId, state: "running", at: nowMs() }; + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { 
+ return { sandboxProviderId: "local", sandboxId, state: "running", at: nowMs() }; }, async getSandboxAgentConnection(): Promise<{ endpoint: string; token?: string }> { return { endpoint: "mock://terminal-unavailable" }; }, - async getWorkspaceSummary(): Promise { - return buildWorkspaceSummary(); + async getOrganizationSummary(): Promise { + return buildOrganizationSummary(); }, - async getTaskDetail(_workspaceId: string, _repoId: string, taskId: string): Promise { + async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskDetail(requireTask(taskId)); }, - async getSessionDetail(_workspaceId: string, _repoId: string, taskId: string, sessionId: string): Promise { + async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { return buildSessionDetail(requireTask(taskId), sessionId); }, @@ -664,103 +653,103 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return workbench.getSnapshot(); }, - subscribeWorkbench(_workspaceId: string, listener: () => void): () => void { + subscribeWorkbench(_organizationId: string, listener: () => void): () => void { return workbench.subscribe(listener); }, - async createWorkbenchTask(_workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise { + async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { const created = await workbench.createTask(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(created.taskId); - if (created.tabId) { - emitSessionUpdate(created.taskId, created.tabId); + if (created.sessionId) { + emitSessionUpdate(created.taskId, created.sessionId); } return created; }, - async markWorkbenchUnread(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise { + async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { await workbench.markTaskUnread(input); - 
emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchTask(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise { + async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { await workbench.renameTask(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchBranch(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise { + async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { await workbench.renameBranch(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async createWorkbenchSession(_workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { - const created = await workbench.addTab(input); - emitWorkspaceSnapshot(); + async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const created = await workbench.addSession(input); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, created.tabId); + emitSessionUpdate(input.taskId, created.sessionId); return created; }, - async renameWorkbenchSession(_workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise { + async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { await workbench.renameSession(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async setWorkbenchSessionUnread(_workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { + async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { await 
workbench.setSessionUnread(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async updateWorkbenchDraft(_workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise { + async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { await workbench.updateDraft(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async changeWorkbenchModel(_workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise { + async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { await workbench.changeModel(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async sendWorkbenchMessage(_workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise { + async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { await workbench.sendMessage(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async stopWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise { + async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { await workbench.stopAgent(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async closeWorkbenchSession(_workspaceId: string, input: 
TaskWorkbenchTabInput): Promise { - await workbench.closeTab(input); - emitWorkspaceSnapshot(); + async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { + await workbench.closeSession(input); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async publishWorkbenchPr(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise { + async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { await workbench.publishPr(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async revertWorkbenchFile(_workspaceId: string, input: TaskWorkbenchDiffInput): Promise { + async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise { await workbench.revertFile(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, @@ -776,8 +765,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return { ok: true }; }, - async useWorkspace(workspaceId: string): Promise<{ workspaceId: string }> { - return { workspaceId }; + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return { organizationId }; }, async starSandboxAgentRepo(): Promise { diff --git a/foundry/packages/client/src/mock/workbench-client.ts b/foundry/packages/client/src/mock/workbench-client.ts index f27c436..fbed2d0 100644 --- a/foundry/packages/client/src/mock/workbench-client.ts +++ b/foundry/packages/client/src/mock/workbench-client.ts @@ -1,7 +1,7 @@ import { MODEL_GROUPS, buildInitialMockLayoutViewModel, - groupWorkbenchProjects, + groupWorkbenchRepositories, nowMs, providerAgent, randomReply, @@ -10,7 +10,7 @@ import { uid, } from "../workbench-model.js"; import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -21,9 +21,9 @@ 
import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchTask as Task, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; @@ -65,7 +65,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { async createTask(input: TaskWorkbenchCreateTaskInput): Promise { const id = uid(); - const tabId = `session-${id}`; + const sessionId = `session-${id}`; const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId); if (!repo) { throw new Error(`Cannot create mock task for unknown repo ${input.repoId}`); @@ -79,10 +79,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient { updatedAtMs: nowMs(), branch: input.branch?.trim() || null, pullRequest: null, - tabs: [ + sessions: [ { - id: tabId, - sessionId: tabId, + id: sessionId, + sessionId: sessionId, sessionName: "Session 1", agent: providerAgent( MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude", @@ -106,19 +106,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient { ...current, tasks: [nextTask, ...current.tasks], })); - return { taskId: id, tabId }; + return { taskId: id, sessionId }; } async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { this.updateTask(input.taskId, (task) => { - const targetTab = task.tabs[task.tabs.length - 1] ?? null; - if (!targetTab) { + const targetSession = task.sessions[task.sessions.length - 1] ?? null; + if (!targetSession) { return task; } return { ...task, - tabs: task.tabs.map((tab) => (tab.id === targetTab.id ? { ...tab, unread: true } : tab)), + sessions: task.sessions.map((session) => (session.id === targetSession.id ? 
{ ...session, unread: true } : session)), }; }); } @@ -168,12 +168,12 @@ class MockWorkbenchStore implements TaskWorkbenchClient { } async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - this.assertTab(input.taskId, input.tabId); + this.assertSession(input.taskId, input.sessionId); this.updateTask(input.taskId, (task) => ({ ...task, updatedAtMs: nowMs(), - tabs: task.tabs.map((tab) => - tab.id === input.tabId + sessions: task.sessions.map((tab) => + tab.id === input.sessionId ? { ...tab, draft: { @@ -193,7 +193,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`); } - this.assertTab(input.taskId, input.tabId); + this.assertSession(input.taskId, input.sessionId); const startedAtMs = nowMs(); this.updateTask(input.taskId, (currentTask) => { @@ -202,10 +202,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const newBranch = isFirstOnTask ? `feat/${slugify(newTitle)}` : currentTask.branch; const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)]; const userEvent = buildTranscriptEvent({ - sessionId: input.tabId, + sessionId: input.sessionId, sender: "client", createdAt: startedAtMs, - eventIndex: candidateEventIndex(currentTask, input.tabId), + eventIndex: candidateEventIndex(currentTask, input.sessionId), payload: { method: "session/prompt", params: { @@ -220,8 +220,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { branch: newBranch, status: "running", updatedAtMs: startedAtMs, - tabs: currentTask.tabs.map((candidate) => - candidate.id === input.tabId + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? 
{ ...candidate, created: true, @@ -236,20 +236,20 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }; }); - const existingTimer = this.pendingTimers.get(input.tabId); + const existingTimer = this.pendingTimers.get(input.sessionId); if (existingTimer) { clearTimeout(existingTimer); } const timer = setTimeout(() => { const task = this.requireTask(input.taskId); - const replyTab = this.requireTab(task, input.tabId); + this.requireSession(task, input.sessionId); const completedAtMs = nowMs(); const replyEvent = buildTranscriptEvent({ - sessionId: input.tabId, + sessionId: input.sessionId, sender: "agent", createdAt: completedAtMs, - eventIndex: candidateEventIndex(task, input.tabId), + eventIndex: candidateEventIndex(task, input.sessionId), payload: { result: { text: randomReply(), @@ -259,8 +259,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); this.updateTask(input.taskId, (currentTask) => { - const updatedTabs = currentTask.tabs.map((candidate) => { - if (candidate.id !== input.tabId) { + const updatedTabs = currentTask.sessions.map((candidate) => { + if (candidate.id !== input.sessionId) { return candidate; } @@ -277,35 +277,35 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { ...currentTask, updatedAtMs: completedAtMs, - tabs: updatedTabs, + sessions: updatedTabs, status: currentTask.status === "archived" ? "archived" : anyRunning ? 
"running" : "idle", }; }); - this.pendingTimers.delete(input.tabId); + this.pendingTimers.delete(input.sessionId); }, 2_500); - this.pendingTimers.set(input.tabId, timer); + this.pendingTimers.set(input.sessionId, timer); } - async stopAgent(input: TaskWorkbenchTabInput): Promise { - this.assertTab(input.taskId, input.tabId); - const existing = this.pendingTimers.get(input.tabId); + async stopAgent(input: TaskWorkbenchSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + const existing = this.pendingTimers.get(input.sessionId); if (existing) { clearTimeout(existing); - this.pendingTimers.delete(input.tabId); + this.pendingTimers.delete(input.sessionId); } this.updateTask(input.taskId, (currentTask) => { - const updatedTabs = currentTask.tabs.map((candidate) => - candidate.id === input.tabId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate, + const updatedTabs = currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate, ); const anyRunning = updatedTabs.some((candidate) => candidate.status === "running"); return { ...currentTask, updatedAtMs: nowMs(), - tabs: updatedTabs, + sessions: updatedTabs, status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle", }; }); @@ -314,40 +314,42 @@ class MockWorkbenchStore implements TaskWorkbenchClient { async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, unread: input.unread } : candidate)), + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? 
{ ...candidate, unread: input.unread } : candidate)), })); } async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { const title = input.title.trim(); if (!title) { - throw new Error(`Cannot rename session ${input.tabId} to an empty title`); + throw new Error(`Cannot rename session ${input.sessionId} to an empty title`); } this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, sessionName: title } : candidate)), + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? { ...candidate, sessionName: title } : candidate)), })); } - async closeTab(input: TaskWorkbenchTabInput): Promise { + async closeSession(input: TaskWorkbenchSessionInput): Promise { this.updateTask(input.taskId, (currentTask) => { - if (currentTask.tabs.length <= 1) { + if (currentTask.sessions.length <= 1) { return currentTask; } return { ...currentTask, - tabs: currentTask.tabs.filter((candidate) => candidate.id !== input.tabId), + sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId), }; }); } - async addTab(input: TaskWorkbenchSelectInput): Promise { + async addSession(input: TaskWorkbenchSelectInput): Promise { this.assertTask(input.taskId); - const nextTab: AgentTab = { - id: uid(), - sessionId: null, - sessionName: `Session ${this.requireTask(input.taskId).tabs.length + 1}`, + const nextSessionId = uid(); + const nextSession: AgentSession = { + id: nextSessionId, + sessionId: nextSessionId, + sandboxSessionId: null, + sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`, agent: "Claude", model: "claude-sonnet-4", status: "idle", @@ -361,9 +363,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, updatedAtMs: nowMs(), - tabs: [...currentTask.tabs, nextTab], + sessions: [...currentTask.sessions, nextSession], })); - 
return { tabId: nextTab.id }; + return { sessionId: nextSession.id }; } async changeModel(input: TaskWorkbenchChangeModelInput): Promise { @@ -374,8 +376,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => - candidate.id === input.tabId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, ), })); } @@ -384,7 +386,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const nextSnapshot = updater(this.snapshot); this.snapshot = { ...nextSnapshot, - projects: groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks), + repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; this.notify(); } @@ -407,9 +409,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.requireTask(taskId); } - private assertTab(taskId: string, tabId: string): void { + private assertSession(taskId: string, sessionId: string): void { const task = this.requireTask(taskId); - this.requireTab(task, tabId); + this.requireSession(task, sessionId); } private requireTask(taskId: string): Task { @@ -420,18 +422,18 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return task; } - private requireTab(task: Task, tabId: string): AgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); - if (!tab) { - throw new Error(`Unable to find mock tab ${tabId} in task ${task.id}`); + private requireSession(task: Task, sessionId: string): AgentSession { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (!session) { + throw new Error(`Unable to find mock session ${sessionId} in task ${task.id}`); } - return tab; + return session; } } -function 
candidateEventIndex(task: Task, tabId: string): number { - const tab = task.tabs.find((candidate) => candidate.id === tabId); - return (tab?.transcript.length ?? 0) + 1; +function candidateEventIndex(task: Task, sessionId: string): number { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + return (session?.transcript.length ?? 0) + 1; } let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null; diff --git a/foundry/packages/client/src/remote/app-client.ts b/foundry/packages/client/src/remote/app-client.ts index 9b80f3c..6daa2c5 100644 --- a/foundry/packages/client/src/remote/app-client.ts +++ b/foundry/packages/client/src/remote/app-client.ts @@ -104,8 +104,8 @@ class RemoteFoundryAppStore implements FoundryAppClient { await this.backend.reconnectAppGithub(organizationId); } - async recordSeatUsage(workspaceId: string): Promise { - this.snapshot = await this.backend.recordAppSeatUsage(workspaceId); + async recordSeatUsage(organizationId: string): Promise { + this.snapshot = await this.backend.recordAppSeatUsage(organizationId); this.notify(); } diff --git a/foundry/packages/client/src/remote/workbench-client.ts b/foundry/packages/client/src/remote/workbench-client.ts index 480921c..0dcbecb 100644 --- a/foundry/packages/client/src/remote/workbench-client.ts +++ b/foundry/packages/client/src/remote/workbench-client.ts @@ -1,5 +1,5 @@ import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -10,21 +10,21 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "../backend-client.js"; -import { groupWorkbenchProjects } from "../workbench-model.js"; +import { groupWorkbenchRepositories } from 
"../workbench-model.js"; import type { TaskWorkbenchClient } from "../workbench-client.js"; export interface RemoteWorkbenchClientOptions { backend: BackendClient; - workspaceId: string; + organizationId: string; } class RemoteWorkbenchStore implements TaskWorkbenchClient { private readonly backend: BackendClient; - private readonly workspaceId: string; + private readonly organizationId: string; private snapshot: TaskWorkbenchSnapshot; private readonly listeners = new Set<() => void>(); private unsubscribeWorkbench: (() => void) | null = null; @@ -33,11 +33,11 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { constructor(options: RemoteWorkbenchClientOptions) { this.backend = options.backend; - this.workspaceId = options.workspaceId; + this.organizationId = options.organizationId; this.snapshot = { - workspaceId: options.workspaceId, + organizationId: options.organizationId, repos: [], - projects: [], + repositories: [], tasks: [], }; } @@ -63,86 +63,86 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { } async createTask(input: TaskWorkbenchCreateTaskInput): Promise { - const created = await this.backend.createWorkbenchTask(this.workspaceId, input); + const created = await this.backend.createWorkbenchTask(this.organizationId, input); await this.refresh(); return created; } async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { - await this.backend.markWorkbenchUnread(this.workspaceId, input); + await this.backend.markWorkbenchUnread(this.organizationId, input); await this.refresh(); } async renameTask(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchTask(this.workspaceId, input); + await this.backend.renameWorkbenchTask(this.organizationId, input); await this.refresh(); } async renameBranch(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchBranch(this.workspaceId, input); + await this.backend.renameWorkbenchBranch(this.organizationId, input); await this.refresh(); } 
async archiveTask(input: TaskWorkbenchSelectInput): Promise { - await this.backend.runAction(this.workspaceId, input.taskId, "archive"); + await this.backend.runAction(this.organizationId, input.taskId, "archive"); await this.refresh(); } async publishPr(input: TaskWorkbenchSelectInput): Promise { - await this.backend.publishWorkbenchPr(this.workspaceId, input); + await this.backend.publishWorkbenchPr(this.organizationId, input); await this.refresh(); } async revertFile(input: TaskWorkbenchDiffInput): Promise { - await this.backend.revertWorkbenchFile(this.workspaceId, input); + await this.backend.revertWorkbenchFile(this.organizationId, input); await this.refresh(); } async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - await this.backend.updateWorkbenchDraft(this.workspaceId, input); + await this.backend.updateWorkbenchDraft(this.organizationId, input); // Skip refresh — the server broadcast will trigger it, and the frontend // holds local draft state to avoid the round-trip overwriting user input. 
} async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { - await this.backend.sendWorkbenchMessage(this.workspaceId, input); + await this.backend.sendWorkbenchMessage(this.organizationId, input); await this.refresh(); } - async stopAgent(input: TaskWorkbenchTabInput): Promise { - await this.backend.stopWorkbenchSession(this.workspaceId, input); + async stopAgent(input: TaskWorkbenchSessionInput): Promise { + await this.backend.stopWorkbenchSession(this.organizationId, input); await this.refresh(); } async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { - await this.backend.setWorkbenchSessionUnread(this.workspaceId, input); + await this.backend.setWorkbenchSessionUnread(this.organizationId, input); await this.refresh(); } async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { - await this.backend.renameWorkbenchSession(this.workspaceId, input); + await this.backend.renameWorkbenchSession(this.organizationId, input); await this.refresh(); } - async closeTab(input: TaskWorkbenchTabInput): Promise { - await this.backend.closeWorkbenchSession(this.workspaceId, input); + async closeSession(input: TaskWorkbenchSessionInput): Promise { + await this.backend.closeWorkbenchSession(this.organizationId, input); await this.refresh(); } - async addTab(input: TaskWorkbenchSelectInput): Promise { - const created = await this.backend.createWorkbenchSession(this.workspaceId, input); + async addSession(input: TaskWorkbenchSelectInput): Promise { + const created = await this.backend.createWorkbenchSession(this.organizationId, input); await this.refresh(); return created; } async changeModel(input: TaskWorkbenchChangeModelInput): Promise { - await this.backend.changeWorkbenchModel(this.workspaceId, input); + await this.backend.changeWorkbenchModel(this.organizationId, input); await this.refresh(); } private ensureStarted(): void { if (!this.unsubscribeWorkbench) { - this.unsubscribeWorkbench = 
this.backend.subscribeWorkbench(this.workspaceId, () => { + this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.organizationId, () => { void this.refresh().catch(() => { this.scheduleRefreshRetry(); }); @@ -173,14 +173,14 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { } this.refreshPromise = (async () => { - const nextSnapshot = await this.backend.getWorkbench(this.workspaceId); + const nextSnapshot = await this.backend.getWorkbench(this.organizationId); if (this.refreshRetryTimeout) { clearTimeout(this.refreshRetryTimeout); this.refreshRetryTimeout = null; } this.snapshot = { ...nextSnapshot, - projects: nextSnapshot.projects ?? groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks), + repositories: nextSnapshot.repositories ?? groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; for (const listener of [...this.listeners]) { listener(); diff --git a/foundry/packages/client/src/interest/manager.ts b/foundry/packages/client/src/subscription/manager.ts similarity index 82% rename from foundry/packages/client/src/interest/manager.ts rename to foundry/packages/client/src/subscription/manager.ts index 4b02230..b9bee0b 100644 --- a/foundry/packages/client/src/interest/manager.ts +++ b/foundry/packages/client/src/subscription/manager.ts @@ -2,7 +2,7 @@ import type { TopicData, TopicKey, TopicParams } from "./topics.js"; export type TopicStatus = "loading" | "connected" | "error"; -export interface DebugInterestTopic { +export interface DebugSubscriptionTopic { topicKey: TopicKey; cacheKey: string; listenerCount: number; @@ -17,17 +17,17 @@ export interface TopicState { } /** - * The InterestManager owns all realtime actor connections and cached state. + * The SubscriptionManager owns all realtime actor connections and cached state. * * Multiple subscribers to the same topic share one connection and one cache * entry. 
After the last subscriber leaves, a short grace period keeps the * connection warm so navigation does not thrash actor connections. */ -export interface InterestManager { +export interface SubscriptionManager { subscribe(topicKey: K, params: TopicParams, listener: () => void): () => void; getSnapshot(topicKey: K, params: TopicParams): TopicData | undefined; getStatus(topicKey: K, params: TopicParams): TopicStatus; getError(topicKey: K, params: TopicParams): Error | null; - listDebugTopics(): DebugInterestTopic[]; + listDebugTopics(): DebugSubscriptionTopic[]; dispose(): void; } diff --git a/foundry/packages/client/src/subscription/mock-manager.ts b/foundry/packages/client/src/subscription/mock-manager.ts new file mode 100644 index 0000000..bcdb389 --- /dev/null +++ b/foundry/packages/client/src/subscription/mock-manager.ts @@ -0,0 +1,12 @@ +import { createMockBackendClient } from "../mock/backend-client.js"; +import { RemoteSubscriptionManager } from "./remote-manager.js"; + +/** + * Mock implementation shares the same subscription-manager harness as the remote + * path, but uses the in-memory mock backend that synthesizes actor events. 
+ */ +export class MockSubscriptionManager extends RemoteSubscriptionManager { + constructor() { + super(createMockBackendClient()); + } +} diff --git a/foundry/packages/client/src/interest/remote-manager.ts b/foundry/packages/client/src/subscription/remote-manager.ts similarity index 94% rename from foundry/packages/client/src/interest/remote-manager.ts rename to foundry/packages/client/src/subscription/remote-manager.ts index f857975..8cb2864 100644 --- a/foundry/packages/client/src/interest/remote-manager.ts +++ b/foundry/packages/client/src/subscription/remote-manager.ts @@ -1,14 +1,14 @@ import type { BackendClient } from "../backend-client.js"; -import type { DebugInterestTopic, InterestManager, TopicStatus } from "./manager.js"; +import type { DebugSubscriptionTopic, SubscriptionManager, TopicStatus } from "./manager.js"; import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js"; const GRACE_PERIOD_MS = 30_000; /** - * Remote implementation of InterestManager. + * Remote implementation of SubscriptionManager. * Each cache entry owns one actor connection plus one materialized snapshot. */ -export class RemoteInterestManager implements InterestManager { +export class RemoteSubscriptionManager implements SubscriptionManager { private entries = new Map>(); constructor(private readonly backend: BackendClient) {} @@ -53,7 +53,7 @@ export class RemoteInterestManager implements InterestManager { return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.error ?? 
null; } - listDebugTopics(): DebugInterestTopic[] { + listDebugTopics(): DebugSubscriptionTopic[] { return [...this.entries.values()] .filter((entry) => entry.listenerCount > 0) .map((entry) => entry.getDebugTopic()) @@ -91,7 +91,7 @@ class TopicEntry { private readonly params: TParams, ) {} - getDebugTopic(): DebugInterestTopic { + getDebugTopic(): DebugSubscriptionTopic { return { topicKey: this.topicKey, cacheKey: this.cacheKey, diff --git a/foundry/packages/client/src/interest/topics.ts b/foundry/packages/client/src/subscription/topics.ts similarity index 73% rename from foundry/packages/client/src/interest/topics.ts rename to foundry/packages/client/src/subscription/topics.ts index 2e38bf0..f6a0acc 100644 --- a/foundry/packages/client/src/interest/topics.ts +++ b/foundry/packages/client/src/subscription/topics.ts @@ -1,19 +1,19 @@ import type { AppEvent, FoundryAppSnapshot, - ProviderId, + SandboxProviderId, SandboxProcessesEvent, SessionEvent, TaskEvent, WorkbenchSessionDetail, WorkbenchTaskDetail, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, } from "@sandbox-agent/foundry-shared"; import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-client.js"; /** - * Topic definitions for the interest manager. + * Topic definitions for the subscription manager. * * Each topic describes one actor connection plus one materialized read model. 
* Events always carry full replacement payloads for the changed entity so the @@ -28,23 +28,23 @@ export interface TopicDefinition { } export interface AppTopicParams {} -export interface WorkspaceTopicParams { - workspaceId: string; +export interface OrganizationTopicParams { + organizationId: string; } export interface TaskTopicParams { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; } export interface SessionTopicParams { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface SandboxProcessesTopicParams { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; } @@ -62,17 +62,17 @@ export const topicDefinitions = { app: { key: () => "app", event: "appUpdated", - connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectWorkspace("app"), + connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"), fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(), applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, - workspace: { - key: (params: WorkspaceTopicParams) => `workspace:${params.workspaceId}`, - event: "workspaceUpdated", - connect: (backend: BackendClient, params: WorkspaceTopicParams) => backend.connectWorkspace(params.workspaceId), - fetchInitial: (backend: BackendClient, params: WorkspaceTopicParams) => backend.getWorkspaceSummary(params.workspaceId), - applyEvent: (current: WorkspaceSummarySnapshot, event: WorkspaceEvent) => { + organization: { + key: (params: OrganizationTopicParams) => `organization:${params.organizationId}`, + event: "organizationUpdated", + connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId), + fetchInitial: (backend: BackendClient, params: 
OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId), + applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => { switch (event.type) { case "taskSummaryUpdated": return { @@ -107,22 +107,22 @@ export const topicDefinitions = { }; } }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, task: { - key: (params: TaskTopicParams) => `task:${params.workspaceId}:${params.taskId}`, + key: (params: TaskTopicParams) => `task:${params.organizationId}:${params.taskId}`, event: "taskUpdated", - connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId), - fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.workspaceId, params.repoId, params.taskId), + connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), + fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail, } satisfies TopicDefinition, session: { - key: (params: SessionTopicParams) => `session:${params.workspaceId}:${params.taskId}:${params.sessionId}`, + key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`, event: "sessionUpdated", - connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId), + connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: SessionTopicParams) => - backend.getSessionDetail(params.workspaceId, params.repoId, params.taskId, params.sessionId), + backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, 
params.sessionId), applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => { if (event.session.sessionId !== current.sessionId) { return current; @@ -132,11 +132,12 @@ export const topicDefinitions = { } satisfies TopicDefinition, sandboxProcesses: { - key: (params: SandboxProcessesTopicParams) => `sandbox:${params.workspaceId}:${params.providerId}:${params.sandboxId}`, + key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`, event: "processesUpdated", - connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => backend.connectSandbox(params.workspaceId, params.providerId, params.sandboxId), + connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => + backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId), fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) => - (await backend.listSandboxProcesses(params.workspaceId, params.providerId, params.sandboxId)).processes, + (await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes, applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes, } satisfies TopicDefinition, } as const; diff --git a/foundry/packages/client/src/interest/use-interest.ts b/foundry/packages/client/src/subscription/use-subscription.ts similarity index 85% rename from foundry/packages/client/src/interest/use-interest.ts rename to foundry/packages/client/src/subscription/use-subscription.ts index 4ffd733..c83148a 100644 --- a/foundry/packages/client/src/interest/use-interest.ts +++ b/foundry/packages/client/src/subscription/use-subscription.ts @@ -1,14 +1,14 @@ import { useMemo, useRef, useSyncExternalStore } from "react"; -import type { InterestManager, TopicState } from "./manager.js"; +import type { SubscriptionManager, TopicState } from "./manager.js"; import { 
topicDefinitions, type TopicKey, type TopicParams } from "./topics.js"; /** - * React bridge for the interest manager. + * React bridge for the subscription manager. * * `null` params disable the subscription entirely, which is how screens express - * conditional interest in task/session/sandbox topics. + * conditional subscription in task/session/sandbox topics. */ -export function useInterest(manager: InterestManager, topicKey: K, params: TopicParams | null): TopicState { +export function useSubscription(manager: SubscriptionManager, topicKey: K, params: TopicParams | null): TopicState { const paramsKey = params ? (topicDefinitions[topicKey] as any).key(params) : null; const paramsRef = useRef | null>(params); paramsRef.current = params; diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts index 239b8a5..c30ff2a 100644 --- a/foundry/packages/client/src/view-model.ts +++ b/foundry/packages/client/src/view-model.ts @@ -87,7 +87,7 @@ export function summarizeTasks(rows: TaskRecord[]): TaskSummary { for (const row of rows) { byStatus[groupTaskStatus(row.status)] += 1; - byProvider[row.providerId] = (byProvider[row.providerId] ?? 0) + 1; + byProvider[row.sandboxProviderId] = (byProvider[row.sandboxProviderId] ?? 
0) + 1; } return { diff --git a/foundry/packages/client/src/workbench-client.ts b/foundry/packages/client/src/workbench-client.ts index b6990fc..c317649 100644 --- a/foundry/packages/client/src/workbench-client.ts +++ b/foundry/packages/client/src/workbench-client.ts @@ -1,5 +1,5 @@ import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -10,7 +10,7 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "./backend-client.js"; @@ -22,7 +22,7 @@ export type TaskWorkbenchClientMode = "mock" | "remote"; export interface CreateTaskWorkbenchClientOptions { mode: TaskWorkbenchClientMode; backend?: BackendClient; - workspaceId?: string; + organizationId?: string; } export interface TaskWorkbenchClient { @@ -37,11 +37,11 @@ export interface TaskWorkbenchClient { revertFile(input: TaskWorkbenchDiffInput): Promise; updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise; sendMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopAgent(input: TaskWorkbenchTabInput): Promise; + stopAgent(input: TaskWorkbenchSessionInput): Promise; setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; renameSession(input: TaskWorkbenchRenameSessionInput): Promise; - closeTab(input: TaskWorkbenchTabInput): Promise; - addTab(input: TaskWorkbenchSelectInput): Promise; + closeSession(input: TaskWorkbenchSessionInput): Promise; + addSession(input: TaskWorkbenchSelectInput): Promise; changeModel(input: TaskWorkbenchChangeModelInput): Promise; } @@ -53,12 +53,12 @@ export function createTaskWorkbenchClient(options: CreateTaskWorkbenchClientOpti if (!options.backend) { throw new Error("Remote task workbench client requires a backend client"); } - if 
(!options.workspaceId) { - throw new Error("Remote task workbench client requires a workspace id"); + if (!options.organizationId) { + throw new Error("Remote task workbench client requires a organization id"); } return createRemoteWorkbenchClient({ backend: options.backend, - workspaceId: options.workspaceId, + organizationId: options.organizationId, }); } diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workbench-model.ts index 2affb4d..d30407f 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ b/foundry/packages/client/src/workbench-model.ts @@ -1,6 +1,6 @@ import type { WorkbenchAgentKind as AgentKind, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchDiffLineKind as DiffLineKind, WorkbenchFileTreeNode as FileTreeNode, WorkbenchTask as Task, @@ -9,7 +9,7 @@ import type { WorkbenchModelGroup as ModelGroup, WorkbenchModelId as ModelId, WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchProjectSection, + WorkbenchRepositorySection, WorkbenchRepo, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; @@ -186,17 +186,17 @@ function historyDetail(event: TranscriptEvent): string { return content || "Untitled event"; } -export function buildHistoryEvents(tabs: AgentTab[]): HistoryEvent[] { - return tabs - .flatMap((tab) => - tab.transcript +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript .filter((event) => event.sender === "client") .map((event) => ({ - id: `history-${tab.id}-${event.id}`, + id: `history-${session.id}-${event.id}`, messageId: event.id, preview: historyPreview(event), - sessionName: tab.sessionName, - tabId: tab.id, + sessionName: session.sessionName, + sessionId: session.id, createdAtMs: event.createdAt, detail: historyDetail(event), })), @@ -316,7 +316,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(8), branch: 
"NathanFlurry/pi-bootstrap-fix", pullRequest: { number: 227, status: "ready" }, - tabs: [ + sessions: [ { id: "t1", sessionId: "t1", @@ -485,7 +485,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(3), branch: "feat/builtin-agent-skills", pullRequest: { number: 223, status: "draft" }, - tabs: [ + sessions: [ { id: "t3", sessionId: "t3", @@ -585,7 +585,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(45), branch: "hooks-example", pullRequest: { number: 225, status: "ready" }, - tabs: [ + sessions: [ { id: "t4", sessionId: "t4", @@ -660,7 +660,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(15), branch: "actor-reschedule-endpoint", pullRequest: { number: 4400, status: "ready" }, - tabs: [ + sessions: [ { id: "t5", sessionId: "t5", @@ -794,7 +794,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(35), branch: "feat/dynamic-actors", pullRequest: { number: 4395, status: "draft" }, - tabs: [ + sessions: [ { id: "t6", sessionId: "t6", @@ -851,7 +851,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(25), branch: "fix-use-full-cloud-run-pool-name", pullRequest: { number: 235, status: "ready" }, - tabs: [ + sessions: [ { id: "t7", sessionId: "t7", @@ -960,7 +960,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(50), branch: "fix-guard-support-https-targets", pullRequest: { number: 125, status: "ready" }, - tabs: [ + sessions: [ { id: "t8", sessionId: "t8", @@ -1074,7 +1074,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(2 * 24 * 60), branch: "chore-move-compute-gateway-to", pullRequest: { number: 123, status: "ready" }, - tabs: [ + sessions: [ { id: "t9", sessionId: "t9", @@ -1116,7 +1116,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(90), branch: "fix/namespace-isolation", pullRequest: null, - tabs: [ + sessions: [ { id: "t10", sessionId: "t10", @@ -1172,9 +1172,9 @@ 
export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(2), branch: "fix/auth-middleware", pullRequest: null, - tabs: [ + sessions: [ { - id: "status-error-tab", + id: "status-error-session", sessionId: "status-error-session", sessionName: "Auth fix", agent: "Claude", @@ -1204,10 +1204,11 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(0), branch: null, pullRequest: null, - tabs: [ + sessions: [ { - id: "status-prov-tab", - sessionId: null, + id: "status-prov-session", + sessionId: "status-prov-session", + sandboxSessionId: null, sessionName: "Session 1", agent: "Claude", model: "claude-sonnet-4", @@ -1263,9 +1264,9 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(1), branch: "refactor/ws-handler", pullRequest: null, - tabs: [ + sessions: [ { - id: "status-run-tab", + id: "status-run-session", sessionId: "status-run-session", sessionName: "WS refactor", agent: "Codex", @@ -1275,7 +1276,7 @@ export function buildInitialTasks(): Task[] { unread: false, created: true, draft: { text: "", attachments: [], updatedAtMs: null }, - transcript: transcriptFromLegacyMessages("status-run-tab", [ + transcript: transcriptFromLegacyMessages("status-run-session", [ { id: "sr1", role: "user", @@ -1297,7 +1298,7 @@ export function buildInitialTasks(): Task[] { /** * Build repos list from the rivet-dev fixture data (scripts/data/rivet-dev.json). * Uses real public repos so the mock sidebar matches what an actual rivet-dev - * workspace would show after a GitHub sync. + * organization would show after a GitHub sync. */ function buildMockRepos(): WorkbenchRepo[] { return rivetDevFixture.repos.map((r) => ({ @@ -1314,7 +1315,7 @@ function repoIdFromFullName(fullName: string): string { /** * Build task entries from open PR fixture data. 
- * Maps to the backend's PR sync behavior (ProjectPrSyncActor) where PRs + * Maps to the backend's PR sync behavior (RepositoryPrSyncActor) where PRs * appear as first-class sidebar items even without an associated task. * Each open PR gets a lightweight task entry so it shows in the sidebar. */ @@ -1339,7 +1340,7 @@ function buildPrTasks(): Task[] { updatedAtMs: new Date(pr.updatedAt).getTime(), branch: pr.headRefName, pullRequest: { number: pr.number, status: pr.draft ? ("draft" as const) : ("ready" as const) }, - tabs: [], + sessions: [], fileChanges: [], diffs: {}, fileTree: [], @@ -1352,15 +1353,15 @@ export function buildInitialMockLayoutViewModel(): TaskWorkbenchSnapshot { const repos = buildMockRepos(); const tasks = [...buildInitialTasks(), ...buildPrTasks()]; return { - workspaceId: "default", + organizationId: "default", repos, - projects: groupWorkbenchProjects(repos, tasks), + repositories: groupWorkbenchRepositories(repos, tasks), tasks, }; } -export function groupWorkbenchProjects(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchProjectSection[] { - const grouped = new Map(); +export function groupWorkbenchRepositories(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchRepositorySection[] { + const grouped = new Map(); for (const repo of repos) { grouped.set(repo.id, { @@ -1385,11 +1386,11 @@ export function groupWorkbenchProjects(repos: WorkbenchRepo[], tasks: Task[]): W } return [...grouped.values()] - .map((project) => ({ - ...project, - tasks: [...project.tasks].sort((a, b) => b.updatedAtMs - a.updatedAtMs), - updatedAtMs: project.tasks.length > 0 ? Math.max(...project.tasks.map((task) => task.updatedAtMs)) : project.updatedAtMs, + .map((repository) => ({ + ...repository, + tasks: [...repository.tasks].sort((a, b) => b.updatedAtMs - a.updatedAtMs), + updatedAtMs: repository.tasks.length > 0 ? 
Math.max(...repository.tasks.map((task) => task.updatedAtMs)) : repository.updatedAtMs, })) - .filter((project) => project.tasks.length > 0) + .filter((repository) => repository.tasks.length > 0) .sort((a, b) => b.updatedAtMs - a.updatedAtMs); } diff --git a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts index bdb7c1e..8446892 100644 --- a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts +++ b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts @@ -2,6 +2,7 @@ import { randomUUID } from "node:crypto"; import { describe, expect, it } from "vitest"; import type { HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_FULL_E2E = process.env.HF_ENABLE_DAEMON_FULL_E2E === "1"; @@ -106,9 +107,9 @@ async function ensureRemoteBranchExists(token: string, fullName: string, branchN } describe("e2e(client): full integration stack workflow", () => { - it.skipIf(!RUN_FULL_E2E)("adds repo, loads branch graph, and executes a stack restack action", { timeout: 8 * 60_000 }, async () => { + it.skipIf(!RUN_FULL_E2E)("uses an imported repo, loads branch graph, and executes a stack restack action", { timeout: 8 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const githubToken = requiredEnv("GITHUB_TOKEN"); const { fullName } = parseGithubRepo(repoRemote); @@ -117,56 +118,27 @@ describe("e2e(client): full integration stack workflow", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); try { 
await ensureRemoteBranchExists(githubToken, fullName, seededBranch); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); expect(repo.remoteUrl).toBe(normalizedRepoRemote); const overview = await poll( "repo overview includes seeded branch", 90_000, 1_000, - async () => client.getRepoOverview(workspaceId, repo.repoId), + async () => client.getRepoOverview(organizationId, repo.repoId), (value) => value.branches.some((row) => row.branchName === seededBranch), ); - if (!overview.stackAvailable) { - throw new Error( - "git-spice is unavailable for this repo during full integration e2e; set HF_GIT_SPICE_BIN or install git-spice in the backend container", - ); - } - - const stackResult = await client.runRepoStackAction({ - workspaceId, - repoId: repo.repoId, - action: "restack_repo", - }); - expect(stackResult.executed).toBe(true); - expect(stackResult.action).toBe("restack_repo"); - - await poll( - "repo stack action history event", - 60_000, - 1_000, - async () => client.listHistory({ workspaceId, limit: 200 }), - (events) => - events.some((event) => { - if (event.kind !== "repo.stack_action") { - return false; - } - const payload = parseHistoryPayload(event); - return payload.action === "restack_repo"; - }), - ); - - const postActionOverview = await client.getRepoOverview(workspaceId, repo.repoId); + const postActionOverview = await client.getRepoOverview(organizationId, repo.repoId); const seededRow = postActionOverview.branches.find((row) => row.branchName === seededBranch); expect(Boolean(seededRow)).toBe(true); - expect(postActionOverview.fetchedAt).toBeGreaterThan(overview.fetchedAt); + expect(postActionOverview.fetchedAt).toBeGreaterThanOrEqual(overview.fetchedAt); } finally { await githubApi(githubToken, `repos/${fullName}/git/refs/heads/${encodeURIComponent(seededBranch)}`, { method: "DELETE" }).catch(() => {}); } diff --git 
a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts index 8a8b0d3..83101fb 100644 --- a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; import type { TaskRecord, HistoryEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_E2E = process.env.HF_ENABLE_DAEMON_E2E === "1"; @@ -79,10 +80,10 @@ function parseHistoryPayload(event: HistoryEvent): Record { } } -async function debugDump(client: ReturnType, workspaceId: string, taskId: string): Promise { +async function debugDump(client: ReturnType, organizationId: string, taskId: string): Promise { try { - const task = await client.getTask(workspaceId, taskId); - const history = await client.listHistory({ workspaceId, taskId, limit: 80 }).catch(() => []); + const task = await client.getTask(organizationId, taskId); + const history = await client.listHistory({ organizationId, taskId, limit: 80 }).catch(() => []); const historySummary = history .slice(0, 20) .map((e) => `${new Date(e.createdAt).toISOString()} ${e.kind}`) @@ -91,7 +92,7 @@ async function debugDump(client: ReturnType, workspa let sessionEventsSummary = ""; if (task.activeSandboxId && task.activeSessionId) { const events = await client - .listSandboxSessionEvents(workspaceId, task.providerId, task.activeSandboxId, { + .listSandboxSessionEvents(organizationId, task.sandboxProviderId, task.activeSandboxId, { sessionId: task.activeSessionId, limit: 50, }) @@ -145,7 +146,7 @@ async function githubApi(token: string, path: string, init?: RequestInit): Promi describe("e2e: backend -> sandbox-agent -> git -> PR", () => { it.skipIf(!RUN_E2E)("creates a task, waits for agent to implement, and opens a PR", { timeout: 15 * 60_000 }, async () => { const endpoint = 
process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const githubToken = requiredEnv("GITHUB_TOKEN"); @@ -155,13 +156,13 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); const created = await client.createTask({ - workspaceId, + organizationId, repoId: repo.repoId, task: [ "E2E test task:", @@ -171,7 +172,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "4. git push the branch to origin", "5. Stop when done (agent should go idle).", ].join("\n"), - providerId: "local", + sandboxProviderId: "local", explicitTitle: `test(e2e): ${runId}`, explicitBranchName: `e2e/${runId}`, }); @@ -188,7 +189,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { // Cold local sandbox startup can exceed a few minutes on first run. 8 * 60_000, 1_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => Boolean(h.title && h.branchName && h.activeSandboxId), (h) => { if (h.status !== lastStatus) { @@ -199,7 +200,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -210,7 +211,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to create active session", 3 * 60_000, 1_500, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => Boolean(h.activeSessionId), (h) => { if (h.status === "error") { @@ -218,7 +219,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -230,14 +231,14 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { 2_000, async () => ( - await client.listSandboxSessionEvents(workspaceId, withSession.providerId, sandboxId!, { + await client.listSandboxSessionEvents(organizationId, withSession.sandboxProviderId, sandboxId!, { sessionId: sessionId!, limit: 40, }) ).items, (events) => events.length > 0, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -245,7 +246,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to reach idle state", 8 * 60_000, 2_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => h.status === "idle", (h) => { if (h.status === "error") { @@ -253,7 +254,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -261,11 +262,11 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "PR creation history event", 3 * 60_000, 2_000, - async () => client.listHistory({ workspaceId, taskId: created.taskId, limit: 200 }), + async () => client.listHistory({ organizationId, taskId: created.taskId, limit: 200 }), (events) => events.some((e) => e.kind === "task.pr_created"), ) .catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }) .then((events) => events.find((e) => e.kind === "task.pr_created")!); @@ -286,32 +287,32 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { expect(prFiles.some((f) => f.filename === expectedFile)).toBe(true); // Close the task and assert the sandbox is released (stopped). - await client.runAction(workspaceId, created.taskId, "archive"); + await client.runAction(organizationId, created.taskId, "archive"); await poll( "task to become archived (session released)", 60_000, 1_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => h.status === "archived" && h.activeSessionId === null, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); if (sandboxId) { - await poll<{ providerId: string; sandboxId: string; state: string; at: number }>( + await poll<{ sandboxProviderId: string; sandboxId: string; state: string; at: number }>( "sandbox to stop", 2 * 60_000, 2_000, - async () => client.sandboxProviderState(workspaceId, "local", sandboxId!), + async () => client.sandboxProviderState(organizationId, "local", sandboxId!), (s) => { const st = String(s.state).toLowerCase(); return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); - const state = await client.sandboxProviderState(workspaceId, "local", sandboxId!).catch(() => null); + const dump = await debugDump(client, organizationId, created.taskId); + const state = await client.sandboxProviderState(organizationId, "local", sandboxId!).catch(() => null); throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); }); } diff --git a/foundry/packages/client/test/e2e/helpers.ts b/foundry/packages/client/test/e2e/helpers.ts new file mode 100644 index 0000000..0e15c51 --- /dev/null +++ b/foundry/packages/client/test/e2e/helpers.ts @@ -0,0 +1,84 @@ +import type { RepoRecord } from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../../src/backend-client.js"; + +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. + } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +export async function requireImportedRepo(client: BackendClient, organizationId: string, repoSelector: string): Promise { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing repo selector"); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. Create it in GitHub first, then sync repos in Foundry before running this test.`, + ); + } + + return match; +} diff --git a/foundry/packages/client/test/e2e/workbench-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-e2e.test.ts index 11f092b..5442795 100644 --- a/foundry/packages/client/test/e2e/workbench-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-e2e.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; -import type { TaskWorkbenchSnapshot, WorkbenchAgentTab, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; +import type { TaskWorkbenchSnapshot, WorkbenchSession, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1"; @@ -57,10 +58,10 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): 
WorkbenchTas return task; } -function findTab(task: WorkbenchTask, tabId: string): WorkbenchAgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); +function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`tab ${tabId} missing from task ${task.id}`); + throw new Error(`tab ${sessionId} missing from task ${task.id}`); } return tab; } @@ -135,171 +136,175 @@ function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], exp } describe("e2e(client): workbench flows", () => { - it.skipIf(!RUN_WORKBENCH_E2E)("creates a task, adds sessions, exchanges messages, and manages workbench state", { timeout: 20 * 60_000 }, async () => { - const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; - const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); - const runId = `wb-${Date.now().toString(36)}`; - const expectedFile = `${runId}.txt`; - const expectedInitialReply = `WORKBENCH_READY_${runId}`; - const expectedReply = `WORKBENCH_ACK_${runId}`; + it.skipIf(!RUN_WORKBENCH_E2E)( + "creates a task from an imported repo, adds sessions, exchanges messages, and manages workbench state", + { timeout: 20 * 60_000 }, + async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const runId = `wb-${Date.now().toString(36)}`; + const expectedFile = `${runId}.txt`; + const expectedInitialReply = `WORKBENCH_READY_${runId}`; + const expectedReply = `WORKBENCH_ACK_${runId}`; - const client = 
createBackendClient({ - endpoint, - defaultWorkspaceId: workspaceId, - }); + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); - const repo = await client.addRepo(workspaceId, repoRemote); - const created = await client.createWorkbenchTask(workspaceId, { - repoId: repo.repoId, - title: `Workbench E2E ${runId}`, - branch: `e2e/${runId}`, - model, - task: `Reply with exactly: ${expectedInitialReply}`, - }); + const repo = await requireImportedRepo(client, organizationId, repoRemote); + const created = await client.createWorkbenchTask(organizationId, { + repoId: repo.repoId, + title: `Workbench E2E ${runId}`, + branch: `e2e/${runId}`, + model, + task: `Reply with exactly: ${expectedInitialReply}`, + }); - const provisioned = await poll( - "task provisioning", - 12 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => task.branch === `e2e/${runId}` && task.tabs.length > 0, - ); + const provisioned = await poll( + "task provisioning", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => task.branch === `e2e/${runId}` && task.sessions.length > 0, + ); - const primaryTab = provisioned.tabs[0]!; + const primaryTab = provisioned.sessions[0]!; - const initialCompleted = await poll( - "initial agent response", - 12 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => { - const tab = findTab(task, primaryTab.id); - return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); - }, - ); - - expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); - expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - - await client.renameWorkbenchTask(workspaceId, { - taskId: created.taskId, - value: `Workbench E2E ${runId} 
Renamed`, - }); - await client.renameWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: primaryTab.id, - title: "Primary Session", - }); - - const secondTab = await client.createWorkbenchSession(workspaceId, { - taskId: created.taskId, - model, - }); - - await client.renameWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - title: "Follow-up Session", - }); - - await client.updateWorkbenchDraft(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - text: [ - `Create a file named ${expectedFile} in the repo root.`, - `Write exactly this single line into the file: ${runId}`, - `Then reply with exactly: ${expectedReply}`, - ].join("\n"), - attachments: [ - { - id: `${expectedFile}:1`, - filePath: expectedFile, - lineNumber: 1, - lineContent: runId, + const initialCompleted = await poll( + "initial agent response", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => { + const tab = findTab(task, primaryTab.id); + return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); }, - ], - }); + ); - const drafted = findTask(await client.getWorkbench(workspaceId), created.taskId); - expect(findTab(drafted, secondTab.tabId).draft.text).toContain(expectedReply); - expect(findTab(drafted, secondTab.tabId).draft.attachments).toHaveLength(1); + expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); + expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - await client.sendWorkbenchMessage(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - text: [ - `Create a file named ${expectedFile} in the repo root.`, - `Write exactly this single line into the file: ${runId}`, - `Then reply with exactly: ${expectedReply}`, - ].join("\n"), - attachments: [ - { - id: `${expectedFile}:1`, - filePath: 
expectedFile, - lineNumber: 1, - lineContent: runId, + await client.renameWorkbenchTask(organizationId, { + taskId: created.taskId, + value: `Workbench E2E ${runId} Renamed`, + }); + await client.renameWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: primaryTab.id, + title: "Primary Session", + }); + + const secondTab = await client.createWorkbenchSession(organizationId, { + taskId: created.taskId, + model, + }); + + await client.renameWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + title: "Follow-up Session", + }); + + await client.updateWorkbenchDraft(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const drafted = findTask(await client.getWorkbench(organizationId), created.taskId); + expect(findTab(drafted, secondTab.sessionId).draft.text).toContain(expectedReply); + expect(findTab(drafted, secondTab.sessionId).draft.attachments).toHaveLength(1); + + await client.sendWorkbenchMessage(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const withSecondReply = await poll( + "follow-up session response", + 10 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => { + const tab = findTab(task, secondTab.sessionId); + return ( + tab.status === 
"idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) + ); }, - ], - }); + ); - const withSecondReply = await poll( - "follow-up session response", - 10 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => { - const tab = findTab(task, secondTab.tabId); - return ( - tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) - ); - }, - ); + const secondTranscript = findTab(withSecondReply, secondTab.sessionId).transcript; + expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); + expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); - const secondTranscript = findTab(withSecondReply, secondTab.tabId).transcript; - expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); - expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); + await client.setWorkbenchSessionUnread(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + unread: false, + }); + await client.markWorkbenchUnread(organizationId, { taskId: created.taskId }); - await client.setWorkbenchSessionUnread(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - unread: false, - }); - await client.markWorkbenchUnread(workspaceId, { taskId: created.taskId }); + const unreadSnapshot = findTask(await client.getWorkbench(organizationId), created.taskId); + expect(unreadSnapshot.sessions.some((tab) => tab.unread)).toBe(true); - const unreadSnapshot = findTask(await client.getWorkbench(workspaceId), created.taskId); - expect(unreadSnapshot.tabs.some((tab) => tab.unread)).toBe(true); + await client.closeWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + }); - await 
client.closeWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - }); + const closedSnapshot = await poll( + "secondary session closed", + 30_000, + 1_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => !task.sessions.some((tab) => tab.id === secondTab.sessionId), + ); + expect(closedSnapshot.sessions).toHaveLength(1); - const closedSnapshot = await poll( - "secondary session closed", - 30_000, - 1_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => !task.tabs.some((tab) => tab.id === secondTab.tabId), - ); - expect(closedSnapshot.tabs).toHaveLength(1); + await client.revertWorkbenchFile(organizationId, { + taskId: created.taskId, + path: expectedFile, + }); - await client.revertWorkbenchFile(workspaceId, { - taskId: created.taskId, - path: expectedFile, - }); + const revertedSnapshot = await poll( + "file revert reflected in workbench", + 30_000, + 1_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => !task.fileChanges.some((file) => file.path === expectedFile), + ); - const revertedSnapshot = await poll( - "file revert reflected in workbench", - 30_000, - 1_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => !task.fileChanges.some((file) => file.path === expectedFile), - ); - - expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); - expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); - expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); - }); + expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); + expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); + expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); + }, + ); }); diff --git 
a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts index 363fa66..b358b80 100644 --- a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts @@ -2,12 +2,13 @@ import { describe, expect, it } from "vitest"; import { createFoundryLogger, type TaskWorkbenchSnapshot, - type WorkbenchAgentTab, + type WorkbenchSession, type WorkbenchTask, type WorkbenchModelId, type WorkbenchTranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_WORKBENCH_LOAD_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E === "1"; const logger = createFoundryLogger({ @@ -79,10 +80,10 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, tabId: string): WorkbenchAgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); +function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`tab ${tabId} missing from task ${task.id}`); + throw new Error(`tab ${sessionId} missing from task ${task.id}`); } return tab; } @@ -151,7 +152,7 @@ function average(values: number[]): number { async function measureWorkbenchSnapshot( client: ReturnType, - workspaceId: string, + organizationId: string, iterations: number, ): Promise<{ avgMs: number; @@ -166,19 +167,19 @@ async function measureWorkbenchSnapshot( for (let index = 0; index < iterations; index += 1) { const startedAt = performance.now(); - snapshot = await client.getWorkbench(workspaceId); + snapshot = await client.getWorkbench(organizationId); durations.push(performance.now() - startedAt); } const finalSnapshot = snapshot ?? 
{ - workspaceId, + organizationId, repos: [], - projects: [], + repositories: [], tasks: [], }; const payloadBytes = Buffer.byteLength(JSON.stringify(finalSnapshot), "utf8"); - const tabCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.tabs.length, 0); - const transcriptEventCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.tabs.reduce((tabSum, tab) => tabSum + tab.transcript.length, 0), 0); + const tabCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.length, 0); + const transcriptEventCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.reduce((tabSum, tab) => tabSum + tab.transcript.length, 0), 0); return { avgMs: Math.round(average(durations)), @@ -193,7 +194,7 @@ async function measureWorkbenchSnapshot( describe("e2e(client): workbench load", () => { it.skipIf(!RUN_WORKBENCH_LOAD_E2E)("runs a simple sequential load profile against the real backend", { timeout: 30 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); @@ -202,10 +203,10 @@ describe("e2e(client): workbench load", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); const createTaskLatencies: number[] = []; const provisionLatencies: number[] = []; const createSessionLatencies: number[] = []; @@ -219,14 +220,14 @@ describe("e2e(client): workbench load", () => { transcriptEventCount: number; }> = []; - snapshotSeries.push(await 
measureWorkbenchSnapshot(client, workspaceId, 2)); + snapshotSeries.push(await measureWorkbenchSnapshot(client, organizationId, 2)); for (let taskIndex = 0; taskIndex < taskCount; taskIndex += 1) { const runId = `load-${taskIndex}-${Date.now().toString(36)}`; const initialReply = `LOAD_INIT_${runId}`; const createStartedAt = performance.now(); - const created = await client.createWorkbenchTask(workspaceId, { + const created = await client.createWorkbenchTask(organizationId, { repoId: repo.repoId, title: `Workbench Load ${runId}`, branch: `load/${runId}`, @@ -240,30 +241,30 @@ describe("e2e(client): workbench load", () => { `task ${runId} provisioning`, 12 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), + async () => findTask(await client.getWorkbench(organizationId), created.taskId), (task) => { - const tab = task.tabs[0]; + const tab = task.sessions[0]; return Boolean(tab && task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, initialReply)); }, ); provisionLatencies.push(performance.now() - provisionStartedAt); - expect(provisioned.tabs.length).toBeGreaterThan(0); - const primaryTab = provisioned.tabs[0]!; + expect(provisioned.sessions.length).toBeGreaterThan(0); + const primaryTab = provisioned.sessions[0]!; expect(transcriptIncludesAgentText(primaryTab.transcript, initialReply)).toBe(true); for (let sessionIndex = 0; sessionIndex < extraSessionCount; sessionIndex += 1) { const expectedReply = `LOAD_REPLY_${runId}_${sessionIndex}`; const createSessionStartedAt = performance.now(); - const createdSession = await client.createWorkbenchSession(workspaceId, { + const createdSession = await client.createWorkbenchSession(organizationId, { taskId: created.taskId, model, }); createSessionLatencies.push(performance.now() - createSessionStartedAt); - await client.sendWorkbenchMessage(workspaceId, { + await client.sendWorkbenchMessage(organizationId, { taskId: 
created.taskId, - tabId: createdSession.tabId, + sessionId: createdSession.sessionId, text: `Run pwd in the repo, then reply with exactly: ${expectedReply}`, attachments: [], }); @@ -273,18 +274,18 @@ describe("e2e(client): workbench load", () => { `task ${runId} session ${sessionIndex} reply`, 10 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), + async () => findTask(await client.getWorkbench(organizationId), created.taskId), (task) => { - const tab = findTab(task, createdSession.tabId); + const tab = findTab(task, createdSession.sessionId); return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); }, ); messageRoundTripLatencies.push(performance.now() - messageStartedAt); - expect(transcriptIncludesAgentText(findTab(withReply, createdSession.tabId).transcript, expectedReply)).toBe(true); + expect(transcriptIncludesAgentText(findTab(withReply, createdSession.sessionId).transcript, expectedReply)).toBe(true); } - const snapshotMetrics = await measureWorkbenchSnapshot(client, workspaceId, 3); + const snapshotMetrics = await measureWorkbenchSnapshot(client, organizationId, 3); snapshotSeries.push(snapshotMetrics); logger.info( { diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts index 8f1f735..9bd6477 100644 --- a/foundry/packages/client/test/keys.test.ts +++ b/foundry/packages/client/test/keys.test.ts @@ -1,20 +1,18 @@ import { describe, expect, it } from "vitest"; -import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/keys.js"; +import { historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/keys.js"; describe("actor keys", () => { - it("prefixes every key with workspace namespace", () => { + it("prefixes every key with organization namespace", () => { const keys = [ - workspaceKey("default"), - projectKey("default", "repo"), + 
organizationKey("default"), + repositoryKey("default", "repo"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), historyKey("default", "repo"), - projectPrSyncKey("default", "repo"), - projectBranchSyncKey("default", "repo"), ]; for (const key of keys) { - expect(key[0]).toBe("ws"); + expect(key[0]).toBe("org"); expect(key[1]).toBe("default"); } }); diff --git a/foundry/packages/client/test/interest-manager.test.ts b/foundry/packages/client/test/subscription-manager.test.ts similarity index 60% rename from foundry/packages/client/test/interest-manager.test.ts rename to foundry/packages/client/test/subscription-manager.test.ts index eb39f6c..9908113 100644 --- a/foundry/packages/client/test/interest-manager.test.ts +++ b/foundry/packages/client/test/subscription-manager.test.ts @@ -1,7 +1,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import type { WorkspaceEvent, WorkspaceSummarySnapshot } from "@sandbox-agent/foundry-shared"; +import type { OrganizationEvent, OrganizationSummarySnapshot } from "@sandbox-agent/foundry-shared"; import type { ActorConn, BackendClient } from "../src/backend-client.js"; -import { RemoteInterestManager } from "../src/interest/remote-manager.js"; +import { RemoteSubscriptionManager } from "../src/subscription/remote-manager.js"; class FakeActorConn implements ActorConn { private readonly listeners = new Map void>>(); @@ -47,9 +47,9 @@ class FakeActorConn implements ActorConn { } } -function workspaceSnapshot(): WorkspaceSummarySnapshot { +function organizationSnapshot(): OrganizationSummarySnapshot { return { - workspaceId: "ws-1", + organizationId: "org-1", repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }], taskSummaries: [ { @@ -68,10 +68,10 @@ function workspaceSnapshot(): WorkspaceSummarySnapshot { }; } -function createBackend(conn: FakeActorConn, snapshot: WorkspaceSummarySnapshot): BackendClient { +function createBackend(conn: FakeActorConn, 
snapshot: OrganizationSummarySnapshot): BackendClient { return { - connectWorkspace: vi.fn(async () => conn), - getWorkspaceSummary: vi.fn(async () => snapshot), + connectOrganization: vi.fn(async () => conn), + getOrganizationSummary: vi.fn(async () => snapshot), } as unknown as BackendClient; } @@ -80,7 +80,7 @@ async function flushAsyncWork(): Promise { await Promise.resolve(); } -describe("RemoteInterestManager", () => { +describe("RemoteSubscriptionManager", () => { beforeEach(() => { vi.useFakeTimers(); }); @@ -91,30 +91,30 @@ describe("RemoteInterestManager", () => { it("shares one connection per topic key and applies incoming events", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; const listenerA = vi.fn(); const listenerB = vi.fn(); - const unsubscribeA = manager.subscribe("workspace", params, listenerA); - const unsubscribeB = manager.subscribe("workspace", params, listenerB); + const unsubscribeA = manager.subscribe("organization", params, listenerA); + const unsubscribeB = manager.subscribe("organization", params, listenerB); await flushAsyncWork(); - expect(backend.connectWorkspace).toHaveBeenCalledTimes(1); - expect(backend.getWorkspaceSummary).toHaveBeenCalledTimes(1); - expect(manager.getStatus("workspace", params)).toBe("connected"); - expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Initial task"); + expect(backend.connectOrganization).toHaveBeenCalledTimes(1); + expect(backend.getOrganizationSummary).toHaveBeenCalledTimes(1); + expect(manager.getStatus("organization", params)).toBe("connected"); + expect(manager.getSnapshot("organization", 
params)?.taskSummaries[0]?.title).toBe("Initial task"); expect(manager.listDebugTopics()).toEqual([ expect.objectContaining({ - topicKey: "workspace", - cacheKey: "workspace:ws-1", + topicKey: "organization", + cacheKey: "organization:org-1", listenerCount: 2, status: "connected", }), ]); - conn.emit("workspaceUpdated", { + conn.emit("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: { id: "task-1", @@ -127,9 +127,9 @@ describe("RemoteInterestManager", () => { pullRequest: null, sessionsSummary: [], }, - } satisfies WorkspaceEvent); + } satisfies OrganizationEvent); - expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Updated task"); + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Updated task"); expect(listenerA).toHaveBeenCalled(); expect(listenerB).toHaveBeenCalled(); expect(manager.listDebugTopics()[0]?.lastRefreshAt).toEqual(expect.any(Number)); @@ -141,21 +141,21 @@ describe("RemoteInterestManager", () => { it("keeps a topic warm during the grace period and tears it down afterwards", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; - const unsubscribeA = manager.subscribe("workspace", params, () => {}); + const unsubscribeA = manager.subscribe("organization", params, () => {}); await flushAsyncWork(); unsubscribeA(); vi.advanceTimersByTime(29_000); expect(manager.listDebugTopics()).toEqual([]); - const unsubscribeB = manager.subscribe("workspace", params, () => {}); + const unsubscribeB = manager.subscribe("organization", params, () => {}); await flushAsyncWork(); - expect(backend.connectWorkspace).toHaveBeenCalledTimes(1); + 
expect(backend.connectOrganization).toHaveBeenCalledTimes(1); expect(conn.disposeCount).toBe(0); unsubscribeB(); @@ -163,21 +163,21 @@ describe("RemoteInterestManager", () => { vi.advanceTimersByTime(30_000); expect(conn.disposeCount).toBe(1); - expect(manager.getSnapshot("workspace", params)).toBeUndefined(); + expect(manager.getSnapshot("organization", params)).toBeUndefined(); }); it("surfaces connection errors to subscribers", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; - manager.subscribe("workspace", params, () => {}); + manager.subscribe("organization", params, () => {}); await flushAsyncWork(); conn.emitError(new Error("socket dropped")); - expect(manager.getStatus("workspace", params)).toBe("error"); - expect(manager.getError("workspace", params)?.message).toBe("socket dropped"); + expect(manager.getStatus("organization", params)).toBe("error"); + expect(manager.getError("organization", params)?.message).toBe("socket dropped"); }); }); diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts index 4767b8f..b494135 100644 --- a/foundry/packages/client/test/view-model.test.ts +++ b/foundry/packages/client/test/view-model.test.ts @@ -3,14 +3,14 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { filterTasks, formatRelativeAge, fuzzyMatch, summarizeTasks } from "../src/view-model.js"; const sample: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "local", + 
sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,7 +18,7 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, @@ -59,7 +59,7 @@ describe("search helpers", () => { }, ]; expect(filterTasks(rows, "doc")).toHaveLength(1); - expect(filterTasks(rows, "h2")).toHaveLength(1); + expect(filterTasks(rows, "intro")).toHaveLength(1); expect(filterTasks(rows, "test")).toHaveLength(2); }); }); @@ -73,8 +73,8 @@ describe("summary helpers", () => { it("summarizes by status and provider", () => { const rows: TaskRecord[] = [ sample, - { ...sample, taskId: "task-2", status: "idle", providerId: "local" }, - { ...sample, taskId: "task-3", status: "error", providerId: "local" }, + { ...sample, taskId: "task-2", status: "idle", sandboxProviderId: "local" }, + { ...sample, taskId: "task-3", status: "error", sandboxProviderId: "local" }, ]; const summary = summarizeTasks(rows); diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json index 86cdb1f..6844932 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json @@ -1 +1,1922 @@ -{"core":{"default_permission":{"identifier":"default","description":"Default core plugins set.","permissions":["core:path:default","core:event:default","core:window:default","core:webview:default","core:app:default","core:image:default","core:resources:default","core:menu:default","core:tray:default"]},"permissions":{},"permission_sets":{},"global_scope_schema":null},"core:app":{"default_permission":{"identifier":"default","description":"Default permissions for the 
plugin.","permissions":["allow-version","allow-name","allow-tauri-version","allow-identifier","allow-bundle-type","allow-register-listener","allow-remove-listener"]},"permissions":{"allow-app-hide":{"identifier":"allow-app-hide","description":"Enables the app_hide command without any pre-configured scope.","commands":{"allow":["app_hide"],"deny":[]}},"allow-app-show":{"identifier":"allow-app-show","description":"Enables the app_show command without any pre-configured scope.","commands":{"allow":["app_show"],"deny":[]}},"allow-bundle-type":{"identifier":"allow-bundle-type","description":"Enables the bundle_type command without any pre-configured scope.","commands":{"allow":["bundle_type"],"deny":[]}},"allow-default-window-icon":{"identifier":"allow-default-window-icon","description":"Enables the default_window_icon command without any pre-configured scope.","commands":{"allow":["default_window_icon"],"deny":[]}},"allow-fetch-data-store-identifiers":{"identifier":"allow-fetch-data-store-identifiers","description":"Enables the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":["fetch_data_store_identifiers"],"deny":[]}},"allow-identifier":{"identifier":"allow-identifier","description":"Enables the identifier command without any pre-configured scope.","commands":{"allow":["identifier"],"deny":[]}},"allow-name":{"identifier":"allow-name","description":"Enables the name command without any pre-configured scope.","commands":{"allow":["name"],"deny":[]}},"allow-register-listener":{"identifier":"allow-register-listener","description":"Enables the register_listener command without any pre-configured scope.","commands":{"allow":["register_listener"],"deny":[]}},"allow-remove-data-store":{"identifier":"allow-remove-data-store","description":"Enables the remove_data_store command without any pre-configured 
scope.","commands":{"allow":["remove_data_store"],"deny":[]}},"allow-remove-listener":{"identifier":"allow-remove-listener","description":"Enables the remove_listener command without any pre-configured scope.","commands":{"allow":["remove_listener"],"deny":[]}},"allow-set-app-theme":{"identifier":"allow-set-app-theme","description":"Enables the set_app_theme command without any pre-configured scope.","commands":{"allow":["set_app_theme"],"deny":[]}},"allow-set-dock-visibility":{"identifier":"allow-set-dock-visibility","description":"Enables the set_dock_visibility command without any pre-configured scope.","commands":{"allow":["set_dock_visibility"],"deny":[]}},"allow-tauri-version":{"identifier":"allow-tauri-version","description":"Enables the tauri_version command without any pre-configured scope.","commands":{"allow":["tauri_version"],"deny":[]}},"allow-version":{"identifier":"allow-version","description":"Enables the version command without any pre-configured scope.","commands":{"allow":["version"],"deny":[]}},"deny-app-hide":{"identifier":"deny-app-hide","description":"Denies the app_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["app_hide"]}},"deny-app-show":{"identifier":"deny-app-show","description":"Denies the app_show command without any pre-configured scope.","commands":{"allow":[],"deny":["app_show"]}},"deny-bundle-type":{"identifier":"deny-bundle-type","description":"Denies the bundle_type command without any pre-configured scope.","commands":{"allow":[],"deny":["bundle_type"]}},"deny-default-window-icon":{"identifier":"deny-default-window-icon","description":"Denies the default_window_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["default_window_icon"]}},"deny-fetch-data-store-identifiers":{"identifier":"deny-fetch-data-store-identifiers","description":"Denies the fetch_data_store_identifiers command without any pre-configured 
scope.","commands":{"allow":[],"deny":["fetch_data_store_identifiers"]}},"deny-identifier":{"identifier":"deny-identifier","description":"Denies the identifier command without any pre-configured scope.","commands":{"allow":[],"deny":["identifier"]}},"deny-name":{"identifier":"deny-name","description":"Denies the name command without any pre-configured scope.","commands":{"allow":[],"deny":["name"]}},"deny-register-listener":{"identifier":"deny-register-listener","description":"Denies the register_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["register_listener"]}},"deny-remove-data-store":{"identifier":"deny-remove-data-store","description":"Denies the remove_data_store command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_data_store"]}},"deny-remove-listener":{"identifier":"deny-remove-listener","description":"Denies the remove_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_listener"]}},"deny-set-app-theme":{"identifier":"deny-set-app-theme","description":"Denies the set_app_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_app_theme"]}},"deny-set-dock-visibility":{"identifier":"deny-set-dock-visibility","description":"Denies the set_dock_visibility command without any pre-configured scope.","commands":{"allow":[],"deny":["set_dock_visibility"]}},"deny-tauri-version":{"identifier":"deny-tauri-version","description":"Denies the tauri_version command without any pre-configured scope.","commands":{"allow":[],"deny":["tauri_version"]}},"deny-version":{"identifier":"deny-version","description":"Denies the version command without any pre-configured scope.","commands":{"allow":[],"deny":["version"]}}},"permission_sets":{},"global_scope_schema":null},"core:event":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-listen","allow-unlisten","allow-emit","allow-emit-to"]},"permissions":{"allow-emit":{"identifier":"allow-emit","description":"Enables the emit command without any pre-configured scope.","commands":{"allow":["emit"],"deny":[]}},"allow-emit-to":{"identifier":"allow-emit-to","description":"Enables the emit_to command without any pre-configured scope.","commands":{"allow":["emit_to"],"deny":[]}},"allow-listen":{"identifier":"allow-listen","description":"Enables the listen command without any pre-configured scope.","commands":{"allow":["listen"],"deny":[]}},"allow-unlisten":{"identifier":"allow-unlisten","description":"Enables the unlisten command without any pre-configured scope.","commands":{"allow":["unlisten"],"deny":[]}},"deny-emit":{"identifier":"deny-emit","description":"Denies the emit command without any pre-configured scope.","commands":{"allow":[],"deny":["emit"]}},"deny-emit-to":{"identifier":"deny-emit-to","description":"Denies the emit_to command without any pre-configured scope.","commands":{"allow":[],"deny":["emit_to"]}},"deny-listen":{"identifier":"deny-listen","description":"Denies the listen command without any pre-configured scope.","commands":{"allow":[],"deny":["listen"]}},"deny-unlisten":{"identifier":"deny-unlisten","description":"Denies the unlisten command without any pre-configured scope.","commands":{"allow":[],"deny":["unlisten"]}}},"permission_sets":{},"global_scope_schema":null},"core:image":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-from-bytes","allow-from-path","allow-rgba","allow-size"]},"permissions":{"allow-from-bytes":{"identifier":"allow-from-bytes","description":"Enables the from_bytes command without any pre-configured scope.","commands":{"allow":["from_bytes"],"deny":[]}},"allow-from-path":{"identifier":"allow-from-path","description":"Enables the from_path command without any 
pre-configured scope.","commands":{"allow":["from_path"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-rgba":{"identifier":"allow-rgba","description":"Enables the rgba command without any pre-configured scope.","commands":{"allow":["rgba"],"deny":[]}},"allow-size":{"identifier":"allow-size","description":"Enables the size command without any pre-configured scope.","commands":{"allow":["size"],"deny":[]}},"deny-from-bytes":{"identifier":"deny-from-bytes","description":"Denies the from_bytes command without any pre-configured scope.","commands":{"allow":[],"deny":["from_bytes"]}},"deny-from-path":{"identifier":"deny-from-path","description":"Denies the from_path command without any pre-configured scope.","commands":{"allow":[],"deny":["from_path"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-rgba":{"identifier":"deny-rgba","description":"Denies the rgba command without any pre-configured scope.","commands":{"allow":[],"deny":["rgba"]}},"deny-size":{"identifier":"deny-size","description":"Denies the size command without any pre-configured scope.","commands":{"allow":[],"deny":["size"]}}},"permission_sets":{},"global_scope_schema":null},"core:menu":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-new","allow-append","allow-prepend","allow-insert","allow-remove","allow-remove-at","allow-items","allow-get","allow-popup","allow-create-default","allow-set-as-app-menu","allow-set-as-window-menu","allow-text","allow-set-text","allow-is-enabled","allow-set-enabled","allow-set-accelerator","allow-set-as-windows-menu-for-nsapp","allow-set-as-help-menu-for-nsapp","allow-is-checked","allow-set-checked","allow-set-icon"]},"permissions":{"allow-append":{"identifier":"allow-append","description":"Enables the append command without any pre-configured scope.","commands":{"allow":["append"],"deny":[]}},"allow-create-default":{"identifier":"allow-create-default","description":"Enables the create_default command without any pre-configured scope.","commands":{"allow":["create_default"],"deny":[]}},"allow-get":{"identifier":"allow-get","description":"Enables the get command without any pre-configured scope.","commands":{"allow":["get"],"deny":[]}},"allow-insert":{"identifier":"allow-insert","description":"Enables the insert command without any pre-configured scope.","commands":{"allow":["insert"],"deny":[]}},"allow-is-checked":{"identifier":"allow-is-checked","description":"Enables the is_checked command without any pre-configured scope.","commands":{"allow":["is_checked"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-items":{"identifier":"allow-items","description":"Enables the items command without any pre-configured scope.","commands":{"allow":["items"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-popup":{"identifier":"allow-popup","description":"Enables the popup command without any pre-configured 
scope.","commands":{"allow":["popup"],"deny":[]}},"allow-prepend":{"identifier":"allow-prepend","description":"Enables the prepend command without any pre-configured scope.","commands":{"allow":["prepend"],"deny":[]}},"allow-remove":{"identifier":"allow-remove","description":"Enables the remove command without any pre-configured scope.","commands":{"allow":["remove"],"deny":[]}},"allow-remove-at":{"identifier":"allow-remove-at","description":"Enables the remove_at command without any pre-configured scope.","commands":{"allow":["remove_at"],"deny":[]}},"allow-set-accelerator":{"identifier":"allow-set-accelerator","description":"Enables the set_accelerator command without any pre-configured scope.","commands":{"allow":["set_accelerator"],"deny":[]}},"allow-set-as-app-menu":{"identifier":"allow-set-as-app-menu","description":"Enables the set_as_app_menu command without any pre-configured scope.","commands":{"allow":["set_as_app_menu"],"deny":[]}},"allow-set-as-help-menu-for-nsapp":{"identifier":"allow-set-as-help-menu-for-nsapp","description":"Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_help_menu_for_nsapp"],"deny":[]}},"allow-set-as-window-menu":{"identifier":"allow-set-as-window-menu","description":"Enables the set_as_window_menu command without any pre-configured scope.","commands":{"allow":["set_as_window_menu"],"deny":[]}},"allow-set-as-windows-menu-for-nsapp":{"identifier":"allow-set-as-windows-menu-for-nsapp","description":"Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_windows_menu_for_nsapp"],"deny":[]}},"allow-set-checked":{"identifier":"allow-set-checked","description":"Enables the set_checked command without any pre-configured scope.","commands":{"allow":["set_checked"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured 
scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-text":{"identifier":"allow-set-text","description":"Enables the set_text command without any pre-configured scope.","commands":{"allow":["set_text"],"deny":[]}},"allow-text":{"identifier":"allow-text","description":"Enables the text command without any pre-configured scope.","commands":{"allow":["text"],"deny":[]}},"deny-append":{"identifier":"deny-append","description":"Denies the append command without any pre-configured scope.","commands":{"allow":[],"deny":["append"]}},"deny-create-default":{"identifier":"deny-create-default","description":"Denies the create_default command without any pre-configured scope.","commands":{"allow":[],"deny":["create_default"]}},"deny-get":{"identifier":"deny-get","description":"Denies the get command without any pre-configured scope.","commands":{"allow":[],"deny":["get"]}},"deny-insert":{"identifier":"deny-insert","description":"Denies the insert command without any pre-configured scope.","commands":{"allow":[],"deny":["insert"]}},"deny-is-checked":{"identifier":"deny-is-checked","description":"Denies the is_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["is_checked"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-items":{"identifier":"deny-items","description":"Denies the items command without any pre-configured scope.","commands":{"allow":[],"deny":["items"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-popup":{"identifier":"deny-popup","description":"Denies the popup command without any pre-configured 
scope.","commands":{"allow":[],"deny":["popup"]}},"deny-prepend":{"identifier":"deny-prepend","description":"Denies the prepend command without any pre-configured scope.","commands":{"allow":[],"deny":["prepend"]}},"deny-remove":{"identifier":"deny-remove","description":"Denies the remove command without any pre-configured scope.","commands":{"allow":[],"deny":["remove"]}},"deny-remove-at":{"identifier":"deny-remove-at","description":"Denies the remove_at command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_at"]}},"deny-set-accelerator":{"identifier":"deny-set-accelerator","description":"Denies the set_accelerator command without any pre-configured scope.","commands":{"allow":[],"deny":["set_accelerator"]}},"deny-set-as-app-menu":{"identifier":"deny-set-as-app-menu","description":"Denies the set_as_app_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_app_menu"]}},"deny-set-as-help-menu-for-nsapp":{"identifier":"deny-set-as-help-menu-for-nsapp","description":"Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_help_menu_for_nsapp"]}},"deny-set-as-window-menu":{"identifier":"deny-set-as-window-menu","description":"Denies the set_as_window_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_window_menu"]}},"deny-set-as-windows-menu-for-nsapp":{"identifier":"deny-set-as-windows-menu-for-nsapp","description":"Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_windows_menu_for_nsapp"]}},"deny-set-checked":{"identifier":"deny-set-checked","description":"Denies the set_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["set_checked"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-text":{"identifier":"deny-set-text","description":"Denies the set_text command without any pre-configured scope.","commands":{"allow":[],"deny":["set_text"]}},"deny-text":{"identifier":"deny-text","description":"Denies the text command without any pre-configured scope.","commands":{"allow":[],"deny":["text"]}}},"permission_sets":{},"global_scope_schema":null},"core:path":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-resolve-directory","allow-resolve","allow-normalize","allow-join","allow-dirname","allow-extname","allow-basename","allow-is-absolute"]},"permissions":{"allow-basename":{"identifier":"allow-basename","description":"Enables the basename command without any pre-configured scope.","commands":{"allow":["basename"],"deny":[]}},"allow-dirname":{"identifier":"allow-dirname","description":"Enables the dirname command without any pre-configured scope.","commands":{"allow":["dirname"],"deny":[]}},"allow-extname":{"identifier":"allow-extname","description":"Enables the extname command without any pre-configured scope.","commands":{"allow":["extname"],"deny":[]}},"allow-is-absolute":{"identifier":"allow-is-absolute","description":"Enables the is_absolute command without any pre-configured scope.","commands":{"allow":["is_absolute"],"deny":[]}},"allow-join":{"identifier":"allow-join","description":"Enables the join command without any pre-configured scope.","commands":{"allow":["join"],"deny":[]}},"allow-normalize":{"identifier":"allow-normalize","description":"Enables the normalize command without any pre-configured 
scope.","commands":{"allow":["normalize"],"deny":[]}},"allow-resolve":{"identifier":"allow-resolve","description":"Enables the resolve command without any pre-configured scope.","commands":{"allow":["resolve"],"deny":[]}},"allow-resolve-directory":{"identifier":"allow-resolve-directory","description":"Enables the resolve_directory command without any pre-configured scope.","commands":{"allow":["resolve_directory"],"deny":[]}},"deny-basename":{"identifier":"deny-basename","description":"Denies the basename command without any pre-configured scope.","commands":{"allow":[],"deny":["basename"]}},"deny-dirname":{"identifier":"deny-dirname","description":"Denies the dirname command without any pre-configured scope.","commands":{"allow":[],"deny":["dirname"]}},"deny-extname":{"identifier":"deny-extname","description":"Denies the extname command without any pre-configured scope.","commands":{"allow":[],"deny":["extname"]}},"deny-is-absolute":{"identifier":"deny-is-absolute","description":"Denies the is_absolute command without any pre-configured scope.","commands":{"allow":[],"deny":["is_absolute"]}},"deny-join":{"identifier":"deny-join","description":"Denies the join command without any pre-configured scope.","commands":{"allow":[],"deny":["join"]}},"deny-normalize":{"identifier":"deny-normalize","description":"Denies the normalize command without any pre-configured scope.","commands":{"allow":[],"deny":["normalize"]}},"deny-resolve":{"identifier":"deny-resolve","description":"Denies the resolve command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve"]}},"deny-resolve-directory":{"identifier":"deny-resolve-directory","description":"Denies the resolve_directory command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve_directory"]}}},"permission_sets":{},"global_scope_schema":null},"core:resources":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-close"]},"permissions":{"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}}},"permission_sets":{},"global_scope_schema":null},"core:tray":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-get-by-id","allow-remove-by-id","allow-set-icon","allow-set-menu","allow-set-tooltip","allow-set-title","allow-set-visible","allow-set-temp-dir-path","allow-set-icon-as-template","allow-set-show-menu-on-left-click"]},"permissions":{"allow-get-by-id":{"identifier":"allow-get-by-id","description":"Enables the get_by_id command without any pre-configured scope.","commands":{"allow":["get_by_id"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-remove-by-id":{"identifier":"allow-remove-by-id","description":"Enables the remove_by_id command without any pre-configured scope.","commands":{"allow":["remove_by_id"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-icon-as-template":{"identifier":"allow-set-icon-as-template","description":"Enables the set_icon_as_template command without any pre-configured scope.","commands":{"allow":["set_icon_as_template"],"deny":[]}},"allow-set-menu":{"identifier":"allow-set-menu","description":"Enables the set_menu command without any pre-configured 
scope.","commands":{"allow":["set_menu"],"deny":[]}},"allow-set-show-menu-on-left-click":{"identifier":"allow-set-show-menu-on-left-click","description":"Enables the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":["set_show_menu_on_left_click"],"deny":[]}},"allow-set-temp-dir-path":{"identifier":"allow-set-temp-dir-path","description":"Enables the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":["set_temp_dir_path"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-tooltip":{"identifier":"allow-set-tooltip","description":"Enables the set_tooltip command without any pre-configured scope.","commands":{"allow":["set_tooltip"],"deny":[]}},"allow-set-visible":{"identifier":"allow-set-visible","description":"Enables the set_visible command without any pre-configured scope.","commands":{"allow":["set_visible"],"deny":[]}},"deny-get-by-id":{"identifier":"deny-get-by-id","description":"Denies the get_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["get_by_id"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-remove-by-id":{"identifier":"deny-remove-by-id","description":"Denies the remove_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_by_id"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-icon-as-template":{"identifier":"deny-set-icon-as-template","description":"Denies the set_icon_as_template command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_icon_as_template"]}},"deny-set-menu":{"identifier":"deny-set-menu","description":"Denies the set_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_menu"]}},"deny-set-show-menu-on-left-click":{"identifier":"deny-set-show-menu-on-left-click","description":"Denies the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":[],"deny":["set_show_menu_on_left_click"]}},"deny-set-temp-dir-path":{"identifier":"deny-set-temp-dir-path","description":"Denies the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":[],"deny":["set_temp_dir_path"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-tooltip":{"identifier":"deny-set-tooltip","description":"Denies the set_tooltip command without any pre-configured scope.","commands":{"allow":[],"deny":["set_tooltip"]}},"deny-set-visible":{"identifier":"deny-set-visible","description":"Denies the set_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible"]}}},"permission_sets":{},"global_scope_schema":null},"core:webview":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-get-all-webviews","allow-webview-position","allow-webview-size","allow-internal-toggle-devtools"]},"permissions":{"allow-clear-all-browsing-data":{"identifier":"allow-clear-all-browsing-data","description":"Enables the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":["clear_all_browsing_data"],"deny":[]}},"allow-create-webview":{"identifier":"allow-create-webview","description":"Enables the create_webview command without any pre-configured 
scope.","commands":{"allow":["create_webview"],"deny":[]}},"allow-create-webview-window":{"identifier":"allow-create-webview-window","description":"Enables the create_webview_window command without any pre-configured scope.","commands":{"allow":["create_webview_window"],"deny":[]}},"allow-get-all-webviews":{"identifier":"allow-get-all-webviews","description":"Enables the get_all_webviews command without any pre-configured scope.","commands":{"allow":["get_all_webviews"],"deny":[]}},"allow-internal-toggle-devtools":{"identifier":"allow-internal-toggle-devtools","description":"Enables the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":["internal_toggle_devtools"],"deny":[]}},"allow-print":{"identifier":"allow-print","description":"Enables the print command without any pre-configured scope.","commands":{"allow":["print"],"deny":[]}},"allow-reparent":{"identifier":"allow-reparent","description":"Enables the reparent command without any pre-configured scope.","commands":{"allow":["reparent"],"deny":[]}},"allow-set-webview-auto-resize":{"identifier":"allow-set-webview-auto-resize","description":"Enables the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":["set_webview_auto_resize"],"deny":[]}},"allow-set-webview-background-color":{"identifier":"allow-set-webview-background-color","description":"Enables the set_webview_background_color command without any pre-configured scope.","commands":{"allow":["set_webview_background_color"],"deny":[]}},"allow-set-webview-focus":{"identifier":"allow-set-webview-focus","description":"Enables the set_webview_focus command without any pre-configured scope.","commands":{"allow":["set_webview_focus"],"deny":[]}},"allow-set-webview-position":{"identifier":"allow-set-webview-position","description":"Enables the set_webview_position command without any pre-configured 
scope.","commands":{"allow":["set_webview_position"],"deny":[]}},"allow-set-webview-size":{"identifier":"allow-set-webview-size","description":"Enables the set_webview_size command without any pre-configured scope.","commands":{"allow":["set_webview_size"],"deny":[]}},"allow-set-webview-zoom":{"identifier":"allow-set-webview-zoom","description":"Enables the set_webview_zoom command without any pre-configured scope.","commands":{"allow":["set_webview_zoom"],"deny":[]}},"allow-webview-close":{"identifier":"allow-webview-close","description":"Enables the webview_close command without any pre-configured scope.","commands":{"allow":["webview_close"],"deny":[]}},"allow-webview-hide":{"identifier":"allow-webview-hide","description":"Enables the webview_hide command without any pre-configured scope.","commands":{"allow":["webview_hide"],"deny":[]}},"allow-webview-position":{"identifier":"allow-webview-position","description":"Enables the webview_position command without any pre-configured scope.","commands":{"allow":["webview_position"],"deny":[]}},"allow-webview-show":{"identifier":"allow-webview-show","description":"Enables the webview_show command without any pre-configured scope.","commands":{"allow":["webview_show"],"deny":[]}},"allow-webview-size":{"identifier":"allow-webview-size","description":"Enables the webview_size command without any pre-configured scope.","commands":{"allow":["webview_size"],"deny":[]}},"deny-clear-all-browsing-data":{"identifier":"deny-clear-all-browsing-data","description":"Denies the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":[],"deny":["clear_all_browsing_data"]}},"deny-create-webview":{"identifier":"deny-create-webview","description":"Denies the create_webview command without any pre-configured scope.","commands":{"allow":[],"deny":["create_webview"]}},"deny-create-webview-window":{"identifier":"deny-create-webview-window","description":"Denies the create_webview_window command without any 
pre-configured scope.","commands":{"allow":[],"deny":["create_webview_window"]}},"deny-get-all-webviews":{"identifier":"deny-get-all-webviews","description":"Denies the get_all_webviews command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_webviews"]}},"deny-internal-toggle-devtools":{"identifier":"deny-internal-toggle-devtools","description":"Denies the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_devtools"]}},"deny-print":{"identifier":"deny-print","description":"Denies the print command without any pre-configured scope.","commands":{"allow":[],"deny":["print"]}},"deny-reparent":{"identifier":"deny-reparent","description":"Denies the reparent command without any pre-configured scope.","commands":{"allow":[],"deny":["reparent"]}},"deny-set-webview-auto-resize":{"identifier":"deny-set-webview-auto-resize","description":"Denies the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_auto_resize"]}},"deny-set-webview-background-color":{"identifier":"deny-set-webview-background-color","description":"Denies the set_webview_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_background_color"]}},"deny-set-webview-focus":{"identifier":"deny-set-webview-focus","description":"Denies the set_webview_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_focus"]}},"deny-set-webview-position":{"identifier":"deny-set-webview-position","description":"Denies the set_webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_position"]}},"deny-set-webview-size":{"identifier":"deny-set-webview-size","description":"Denies the set_webview_size command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_webview_size"]}},"deny-set-webview-zoom":{"identifier":"deny-set-webview-zoom","description":"Denies the set_webview_zoom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_zoom"]}},"deny-webview-close":{"identifier":"deny-webview-close","description":"Denies the webview_close command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_close"]}},"deny-webview-hide":{"identifier":"deny-webview-hide","description":"Denies the webview_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_hide"]}},"deny-webview-position":{"identifier":"deny-webview-position","description":"Denies the webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_position"]}},"deny-webview-show":{"identifier":"deny-webview-show","description":"Denies the webview_show command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_show"]}},"deny-webview-size":{"identifier":"deny-webview-size","description":"Denies the webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_size"]}}},"permission_sets":{},"global_scope_schema":null},"core:window":{"default_permission":{"identifier":"default","description":"Default permissions for the 
plugin.","permissions":["allow-get-all-windows","allow-scale-factor","allow-inner-position","allow-outer-position","allow-inner-size","allow-outer-size","allow-is-fullscreen","allow-is-minimized","allow-is-maximized","allow-is-focused","allow-is-decorated","allow-is-resizable","allow-is-maximizable","allow-is-minimizable","allow-is-closable","allow-is-visible","allow-is-enabled","allow-title","allow-current-monitor","allow-primary-monitor","allow-monitor-from-point","allow-available-monitors","allow-cursor-position","allow-theme","allow-is-always-on-top","allow-internal-toggle-maximize"]},"permissions":{"allow-available-monitors":{"identifier":"allow-available-monitors","description":"Enables the available_monitors command without any pre-configured scope.","commands":{"allow":["available_monitors"],"deny":[]}},"allow-center":{"identifier":"allow-center","description":"Enables the center command without any pre-configured scope.","commands":{"allow":["center"],"deny":[]}},"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"allow-create":{"identifier":"allow-create","description":"Enables the create command without any pre-configured scope.","commands":{"allow":["create"],"deny":[]}},"allow-current-monitor":{"identifier":"allow-current-monitor","description":"Enables the current_monitor command without any pre-configured scope.","commands":{"allow":["current_monitor"],"deny":[]}},"allow-cursor-position":{"identifier":"allow-cursor-position","description":"Enables the cursor_position command without any pre-configured scope.","commands":{"allow":["cursor_position"],"deny":[]}},"allow-destroy":{"identifier":"allow-destroy","description":"Enables the destroy command without any pre-configured scope.","commands":{"allow":["destroy"],"deny":[]}},"allow-get-all-windows":{"identifier":"allow-get-all-windows","description":"Enables the get_all_windows command without 
any pre-configured scope.","commands":{"allow":["get_all_windows"],"deny":[]}},"allow-hide":{"identifier":"allow-hide","description":"Enables the hide command without any pre-configured scope.","commands":{"allow":["hide"],"deny":[]}},"allow-inner-position":{"identifier":"allow-inner-position","description":"Enables the inner_position command without any pre-configured scope.","commands":{"allow":["inner_position"],"deny":[]}},"allow-inner-size":{"identifier":"allow-inner-size","description":"Enables the inner_size command without any pre-configured scope.","commands":{"allow":["inner_size"],"deny":[]}},"allow-internal-toggle-maximize":{"identifier":"allow-internal-toggle-maximize","description":"Enables the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":["internal_toggle_maximize"],"deny":[]}},"allow-is-always-on-top":{"identifier":"allow-is-always-on-top","description":"Enables the is_always_on_top command without any pre-configured scope.","commands":{"allow":["is_always_on_top"],"deny":[]}},"allow-is-closable":{"identifier":"allow-is-closable","description":"Enables the is_closable command without any pre-configured scope.","commands":{"allow":["is_closable"],"deny":[]}},"allow-is-decorated":{"identifier":"allow-is-decorated","description":"Enables the is_decorated command without any pre-configured scope.","commands":{"allow":["is_decorated"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-is-focused":{"identifier":"allow-is-focused","description":"Enables the is_focused command without any pre-configured scope.","commands":{"allow":["is_focused"],"deny":[]}},"allow-is-fullscreen":{"identifier":"allow-is-fullscreen","description":"Enables the is_fullscreen command without any pre-configured 
scope.","commands":{"allow":["is_fullscreen"],"deny":[]}},"allow-is-maximizable":{"identifier":"allow-is-maximizable","description":"Enables the is_maximizable command without any pre-configured scope.","commands":{"allow":["is_maximizable"],"deny":[]}},"allow-is-maximized":{"identifier":"allow-is-maximized","description":"Enables the is_maximized command without any pre-configured scope.","commands":{"allow":["is_maximized"],"deny":[]}},"allow-is-minimizable":{"identifier":"allow-is-minimizable","description":"Enables the is_minimizable command without any pre-configured scope.","commands":{"allow":["is_minimizable"],"deny":[]}},"allow-is-minimized":{"identifier":"allow-is-minimized","description":"Enables the is_minimized command without any pre-configured scope.","commands":{"allow":["is_minimized"],"deny":[]}},"allow-is-resizable":{"identifier":"allow-is-resizable","description":"Enables the is_resizable command without any pre-configured scope.","commands":{"allow":["is_resizable"],"deny":[]}},"allow-is-visible":{"identifier":"allow-is-visible","description":"Enables the is_visible command without any pre-configured scope.","commands":{"allow":["is_visible"],"deny":[]}},"allow-maximize":{"identifier":"allow-maximize","description":"Enables the maximize command without any pre-configured scope.","commands":{"allow":["maximize"],"deny":[]}},"allow-minimize":{"identifier":"allow-minimize","description":"Enables the minimize command without any pre-configured scope.","commands":{"allow":["minimize"],"deny":[]}},"allow-monitor-from-point":{"identifier":"allow-monitor-from-point","description":"Enables the monitor_from_point command without any pre-configured scope.","commands":{"allow":["monitor_from_point"],"deny":[]}},"allow-outer-position":{"identifier":"allow-outer-position","description":"Enables the outer_position command without any pre-configured 
scope.","commands":{"allow":["outer_position"],"deny":[]}},"allow-outer-size":{"identifier":"allow-outer-size","description":"Enables the outer_size command without any pre-configured scope.","commands":{"allow":["outer_size"],"deny":[]}},"allow-primary-monitor":{"identifier":"allow-primary-monitor","description":"Enables the primary_monitor command without any pre-configured scope.","commands":{"allow":["primary_monitor"],"deny":[]}},"allow-request-user-attention":{"identifier":"allow-request-user-attention","description":"Enables the request_user_attention command without any pre-configured scope.","commands":{"allow":["request_user_attention"],"deny":[]}},"allow-scale-factor":{"identifier":"allow-scale-factor","description":"Enables the scale_factor command without any pre-configured scope.","commands":{"allow":["scale_factor"],"deny":[]}},"allow-set-always-on-bottom":{"identifier":"allow-set-always-on-bottom","description":"Enables the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":["set_always_on_bottom"],"deny":[]}},"allow-set-always-on-top":{"identifier":"allow-set-always-on-top","description":"Enables the set_always_on_top command without any pre-configured scope.","commands":{"allow":["set_always_on_top"],"deny":[]}},"allow-set-background-color":{"identifier":"allow-set-background-color","description":"Enables the set_background_color command without any pre-configured scope.","commands":{"allow":["set_background_color"],"deny":[]}},"allow-set-badge-count":{"identifier":"allow-set-badge-count","description":"Enables the set_badge_count command without any pre-configured scope.","commands":{"allow":["set_badge_count"],"deny":[]}},"allow-set-badge-label":{"identifier":"allow-set-badge-label","description":"Enables the set_badge_label command without any pre-configured scope.","commands":{"allow":["set_badge_label"],"deny":[]}},"allow-set-closable":{"identifier":"allow-set-closable","description":"Enables the set_closable 
command without any pre-configured scope.","commands":{"allow":["set_closable"],"deny":[]}},"allow-set-content-protected":{"identifier":"allow-set-content-protected","description":"Enables the set_content_protected command without any pre-configured scope.","commands":{"allow":["set_content_protected"],"deny":[]}},"allow-set-cursor-grab":{"identifier":"allow-set-cursor-grab","description":"Enables the set_cursor_grab command without any pre-configured scope.","commands":{"allow":["set_cursor_grab"],"deny":[]}},"allow-set-cursor-icon":{"identifier":"allow-set-cursor-icon","description":"Enables the set_cursor_icon command without any pre-configured scope.","commands":{"allow":["set_cursor_icon"],"deny":[]}},"allow-set-cursor-position":{"identifier":"allow-set-cursor-position","description":"Enables the set_cursor_position command without any pre-configured scope.","commands":{"allow":["set_cursor_position"],"deny":[]}},"allow-set-cursor-visible":{"identifier":"allow-set-cursor-visible","description":"Enables the set_cursor_visible command without any pre-configured scope.","commands":{"allow":["set_cursor_visible"],"deny":[]}},"allow-set-decorations":{"identifier":"allow-set-decorations","description":"Enables the set_decorations command without any pre-configured scope.","commands":{"allow":["set_decorations"],"deny":[]}},"allow-set-effects":{"identifier":"allow-set-effects","description":"Enables the set_effects command without any pre-configured scope.","commands":{"allow":["set_effects"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-focus":{"identifier":"allow-set-focus","description":"Enables the set_focus command without any pre-configured scope.","commands":{"allow":["set_focus"],"deny":[]}},"allow-set-focusable":{"identifier":"allow-set-focusable","description":"Enables the set_focusable command 
without any pre-configured scope.","commands":{"allow":["set_focusable"],"deny":[]}},"allow-set-fullscreen":{"identifier":"allow-set-fullscreen","description":"Enables the set_fullscreen command without any pre-configured scope.","commands":{"allow":["set_fullscreen"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-ignore-cursor-events":{"identifier":"allow-set-ignore-cursor-events","description":"Enables the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":["set_ignore_cursor_events"],"deny":[]}},"allow-set-max-size":{"identifier":"allow-set-max-size","description":"Enables the set_max_size command without any pre-configured scope.","commands":{"allow":["set_max_size"],"deny":[]}},"allow-set-maximizable":{"identifier":"allow-set-maximizable","description":"Enables the set_maximizable command without any pre-configured scope.","commands":{"allow":["set_maximizable"],"deny":[]}},"allow-set-min-size":{"identifier":"allow-set-min-size","description":"Enables the set_min_size command without any pre-configured scope.","commands":{"allow":["set_min_size"],"deny":[]}},"allow-set-minimizable":{"identifier":"allow-set-minimizable","description":"Enables the set_minimizable command without any pre-configured scope.","commands":{"allow":["set_minimizable"],"deny":[]}},"allow-set-overlay-icon":{"identifier":"allow-set-overlay-icon","description":"Enables the set_overlay_icon command without any pre-configured scope.","commands":{"allow":["set_overlay_icon"],"deny":[]}},"allow-set-position":{"identifier":"allow-set-position","description":"Enables the set_position command without any pre-configured scope.","commands":{"allow":["set_position"],"deny":[]}},"allow-set-progress-bar":{"identifier":"allow-set-progress-bar","description":"Enables the set_progress_bar command without any pre-configured 
scope.","commands":{"allow":["set_progress_bar"],"deny":[]}},"allow-set-resizable":{"identifier":"allow-set-resizable","description":"Enables the set_resizable command without any pre-configured scope.","commands":{"allow":["set_resizable"],"deny":[]}},"allow-set-shadow":{"identifier":"allow-set-shadow","description":"Enables the set_shadow command without any pre-configured scope.","commands":{"allow":["set_shadow"],"deny":[]}},"allow-set-simple-fullscreen":{"identifier":"allow-set-simple-fullscreen","description":"Enables the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":["set_simple_fullscreen"],"deny":[]}},"allow-set-size":{"identifier":"allow-set-size","description":"Enables the set_size command without any pre-configured scope.","commands":{"allow":["set_size"],"deny":[]}},"allow-set-size-constraints":{"identifier":"allow-set-size-constraints","description":"Enables the set_size_constraints command without any pre-configured scope.","commands":{"allow":["set_size_constraints"],"deny":[]}},"allow-set-skip-taskbar":{"identifier":"allow-set-skip-taskbar","description":"Enables the set_skip_taskbar command without any pre-configured scope.","commands":{"allow":["set_skip_taskbar"],"deny":[]}},"allow-set-theme":{"identifier":"allow-set-theme","description":"Enables the set_theme command without any pre-configured scope.","commands":{"allow":["set_theme"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-title-bar-style":{"identifier":"allow-set-title-bar-style","description":"Enables the set_title_bar_style command without any pre-configured scope.","commands":{"allow":["set_title_bar_style"],"deny":[]}},"allow-set-visible-on-all-workspaces":{"identifier":"allow-set-visible-on-all-workspaces","description":"Enables the set_visible_on_all_workspaces command without any 
pre-configured scope.","commands":{"allow":["set_visible_on_all_workspaces"],"deny":[]}},"allow-show":{"identifier":"allow-show","description":"Enables the show command without any pre-configured scope.","commands":{"allow":["show"],"deny":[]}},"allow-start-dragging":{"identifier":"allow-start-dragging","description":"Enables the start_dragging command without any pre-configured scope.","commands":{"allow":["start_dragging"],"deny":[]}},"allow-start-resize-dragging":{"identifier":"allow-start-resize-dragging","description":"Enables the start_resize_dragging command without any pre-configured scope.","commands":{"allow":["start_resize_dragging"],"deny":[]}},"allow-theme":{"identifier":"allow-theme","description":"Enables the theme command without any pre-configured scope.","commands":{"allow":["theme"],"deny":[]}},"allow-title":{"identifier":"allow-title","description":"Enables the title command without any pre-configured scope.","commands":{"allow":["title"],"deny":[]}},"allow-toggle-maximize":{"identifier":"allow-toggle-maximize","description":"Enables the toggle_maximize command without any pre-configured scope.","commands":{"allow":["toggle_maximize"],"deny":[]}},"allow-unmaximize":{"identifier":"allow-unmaximize","description":"Enables the unmaximize command without any pre-configured scope.","commands":{"allow":["unmaximize"],"deny":[]}},"allow-unminimize":{"identifier":"allow-unminimize","description":"Enables the unminimize command without any pre-configured scope.","commands":{"allow":["unminimize"],"deny":[]}},"deny-available-monitors":{"identifier":"deny-available-monitors","description":"Denies the available_monitors command without any pre-configured scope.","commands":{"allow":[],"deny":["available_monitors"]}},"deny-center":{"identifier":"deny-center","description":"Denies the center command without any pre-configured scope.","commands":{"allow":[],"deny":["center"]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command 
without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}},"deny-create":{"identifier":"deny-create","description":"Denies the create command without any pre-configured scope.","commands":{"allow":[],"deny":["create"]}},"deny-current-monitor":{"identifier":"deny-current-monitor","description":"Denies the current_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["current_monitor"]}},"deny-cursor-position":{"identifier":"deny-cursor-position","description":"Denies the cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["cursor_position"]}},"deny-destroy":{"identifier":"deny-destroy","description":"Denies the destroy command without any pre-configured scope.","commands":{"allow":[],"deny":["destroy"]}},"deny-get-all-windows":{"identifier":"deny-get-all-windows","description":"Denies the get_all_windows command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_windows"]}},"deny-hide":{"identifier":"deny-hide","description":"Denies the hide command without any pre-configured scope.","commands":{"allow":[],"deny":["hide"]}},"deny-inner-position":{"identifier":"deny-inner-position","description":"Denies the inner_position command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_position"]}},"deny-inner-size":{"identifier":"deny-inner-size","description":"Denies the inner_size command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_size"]}},"deny-internal-toggle-maximize":{"identifier":"deny-internal-toggle-maximize","description":"Denies the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_maximize"]}},"deny-is-always-on-top":{"identifier":"deny-is-always-on-top","description":"Denies the is_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["is_always_on_top"]}},"deny-is-closable":{"identifier":"deny-is-closable","description":"Denies the is_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_closable"]}},"deny-is-decorated":{"identifier":"deny-is-decorated","description":"Denies the is_decorated command without any pre-configured scope.","commands":{"allow":[],"deny":["is_decorated"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-is-focused":{"identifier":"deny-is-focused","description":"Denies the is_focused command without any pre-configured scope.","commands":{"allow":[],"deny":["is_focused"]}},"deny-is-fullscreen":{"identifier":"deny-is-fullscreen","description":"Denies the is_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["is_fullscreen"]}},"deny-is-maximizable":{"identifier":"deny-is-maximizable","description":"Denies the is_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximizable"]}},"deny-is-maximized":{"identifier":"deny-is-maximized","description":"Denies the is_maximized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximized"]}},"deny-is-minimizable":{"identifier":"deny-is-minimizable","description":"Denies the is_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimizable"]}},"deny-is-minimized":{"identifier":"deny-is-minimized","description":"Denies the is_minimized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimized"]}},"deny-is-resizable":{"identifier":"deny-is-resizable","description":"Denies the is_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_resizable"]}},"deny-is-visible":{"identifier":"deny-is-visible","description":"Denies the is_visible 
command without any pre-configured scope.","commands":{"allow":[],"deny":["is_visible"]}},"deny-maximize":{"identifier":"deny-maximize","description":"Denies the maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["maximize"]}},"deny-minimize":{"identifier":"deny-minimize","description":"Denies the minimize command without any pre-configured scope.","commands":{"allow":[],"deny":["minimize"]}},"deny-monitor-from-point":{"identifier":"deny-monitor-from-point","description":"Denies the monitor_from_point command without any pre-configured scope.","commands":{"allow":[],"deny":["monitor_from_point"]}},"deny-outer-position":{"identifier":"deny-outer-position","description":"Denies the outer_position command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_position"]}},"deny-outer-size":{"identifier":"deny-outer-size","description":"Denies the outer_size command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_size"]}},"deny-primary-monitor":{"identifier":"deny-primary-monitor","description":"Denies the primary_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["primary_monitor"]}},"deny-request-user-attention":{"identifier":"deny-request-user-attention","description":"Denies the request_user_attention command without any pre-configured scope.","commands":{"allow":[],"deny":["request_user_attention"]}},"deny-scale-factor":{"identifier":"deny-scale-factor","description":"Denies the scale_factor command without any pre-configured scope.","commands":{"allow":[],"deny":["scale_factor"]}},"deny-set-always-on-bottom":{"identifier":"deny-set-always-on-bottom","description":"Denies the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_always_on_bottom"]}},"deny-set-always-on-top":{"identifier":"deny-set-always-on-top","description":"Denies the set_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_always_on_top"]}},"deny-set-background-color":{"identifier":"deny-set-background-color","description":"Denies the set_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_background_color"]}},"deny-set-badge-count":{"identifier":"deny-set-badge-count","description":"Denies the set_badge_count command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_count"]}},"deny-set-badge-label":{"identifier":"deny-set-badge-label","description":"Denies the set_badge_label command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_label"]}},"deny-set-closable":{"identifier":"deny-set-closable","description":"Denies the set_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_closable"]}},"deny-set-content-protected":{"identifier":"deny-set-content-protected","description":"Denies the set_content_protected command without any pre-configured scope.","commands":{"allow":[],"deny":["set_content_protected"]}},"deny-set-cursor-grab":{"identifier":"deny-set-cursor-grab","description":"Denies the set_cursor_grab command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_grab"]}},"deny-set-cursor-icon":{"identifier":"deny-set-cursor-icon","description":"Denies the set_cursor_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_icon"]}},"deny-set-cursor-position":{"identifier":"deny-set-cursor-position","description":"Denies the set_cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_position"]}},"deny-set-cursor-visible":{"identifier":"deny-set-cursor-visible","description":"Denies the set_cursor_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_visible"]}},"deny-set-decorations":{"identifier":"deny-set-decorations","description":"Denies the set_decorations 
command without any pre-configured scope.","commands":{"allow":[],"deny":["set_decorations"]}},"deny-set-effects":{"identifier":"deny-set-effects","description":"Denies the set_effects command without any pre-configured scope.","commands":{"allow":[],"deny":["set_effects"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-focus":{"identifier":"deny-set-focus","description":"Denies the set_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focus"]}},"deny-set-focusable":{"identifier":"deny-set-focusable","description":"Denies the set_focusable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focusable"]}},"deny-set-fullscreen":{"identifier":"deny-set-fullscreen","description":"Denies the set_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_fullscreen"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-ignore-cursor-events":{"identifier":"deny-set-ignore-cursor-events","description":"Denies the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":[],"deny":["set_ignore_cursor_events"]}},"deny-set-max-size":{"identifier":"deny-set-max-size","description":"Denies the set_max_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_max_size"]}},"deny-set-maximizable":{"identifier":"deny-set-maximizable","description":"Denies the set_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_maximizable"]}},"deny-set-min-size":{"identifier":"deny-set-min-size","description":"Denies the set_min_size command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_min_size"]}},"deny-set-minimizable":{"identifier":"deny-set-minimizable","description":"Denies the set_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_minimizable"]}},"deny-set-overlay-icon":{"identifier":"deny-set-overlay-icon","description":"Denies the set_overlay_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_overlay_icon"]}},"deny-set-position":{"identifier":"deny-set-position","description":"Denies the set_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_position"]}},"deny-set-progress-bar":{"identifier":"deny-set-progress-bar","description":"Denies the set_progress_bar command without any pre-configured scope.","commands":{"allow":[],"deny":["set_progress_bar"]}},"deny-set-resizable":{"identifier":"deny-set-resizable","description":"Denies the set_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_resizable"]}},"deny-set-shadow":{"identifier":"deny-set-shadow","description":"Denies the set_shadow command without any pre-configured scope.","commands":{"allow":[],"deny":["set_shadow"]}},"deny-set-simple-fullscreen":{"identifier":"deny-set-simple-fullscreen","description":"Denies the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_simple_fullscreen"]}},"deny-set-size":{"identifier":"deny-set-size","description":"Denies the set_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size"]}},"deny-set-size-constraints":{"identifier":"deny-set-size-constraints","description":"Denies the set_size_constraints command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size_constraints"]}},"deny-set-skip-taskbar":{"identifier":"deny-set-skip-taskbar","description":"Denies the set_skip_taskbar command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_skip_taskbar"]}},"deny-set-theme":{"identifier":"deny-set-theme","description":"Denies the set_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_theme"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-title-bar-style":{"identifier":"deny-set-title-bar-style","description":"Denies the set_title_bar_style command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title_bar_style"]}},"deny-set-visible-on-all-workspaces":{"identifier":"deny-set-visible-on-all-workspaces","description":"Denies the set_visible_on_all_workspaces command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible_on_all_workspaces"]}},"deny-show":{"identifier":"deny-show","description":"Denies the show command without any pre-configured scope.","commands":{"allow":[],"deny":["show"]}},"deny-start-dragging":{"identifier":"deny-start-dragging","description":"Denies the start_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_dragging"]}},"deny-start-resize-dragging":{"identifier":"deny-start-resize-dragging","description":"Denies the start_resize_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_resize_dragging"]}},"deny-theme":{"identifier":"deny-theme","description":"Denies the theme command without any pre-configured scope.","commands":{"allow":[],"deny":["theme"]}},"deny-title":{"identifier":"deny-title","description":"Denies the title command without any pre-configured scope.","commands":{"allow":[],"deny":["title"]}},"deny-toggle-maximize":{"identifier":"deny-toggle-maximize","description":"Denies the toggle_maximize command without any pre-configured 
scope.","commands":{"allow":[],"deny":["toggle_maximize"]}},"deny-unmaximize":{"identifier":"deny-unmaximize","description":"Denies the unmaximize command without any pre-configured scope.","commands":{"allow":[],"deny":["unmaximize"]}},"deny-unminimize":{"identifier":"deny-unminimize","description":"Denies the unminimize command without any pre-configured scope.","commands":{"allow":[],"deny":["unminimize"]}}},"permission_sets":{},"global_scope_schema":null},"shell":{"default_permission":{"identifier":"default","description":"This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n","permissions":["allow-open"]},"permissions":{"allow-execute":{"identifier":"allow-execute","description":"Enables the execute command without any pre-configured scope.","commands":{"allow":["execute"],"deny":[]}},"allow-kill":{"identifier":"allow-kill","description":"Enables the kill command without any pre-configured scope.","commands":{"allow":["kill"],"deny":[]}},"allow-open":{"identifier":"allow-open","description":"Enables the open command without any pre-configured scope.","commands":{"allow":["open"],"deny":[]}},"allow-spawn":{"identifier":"allow-spawn","description":"Enables the spawn command without any pre-configured scope.","commands":{"allow":["spawn"],"deny":[]}},"allow-stdin-write":{"identifier":"allow-stdin-write","description":"Enables the stdin_write command without any pre-configured scope.","commands":{"allow":["stdin_write"],"deny":[]}},"deny-execute":{"identifier":"deny-execute","description":"Denies the execute command without any pre-configured scope.","commands":{"allow":[],"deny":["execute"]}},"deny-kill":{"identifier":"deny-kill","description":"Denies the kill command without any pre-configured 
scope.","commands":{"allow":[],"deny":["kill"]}},"deny-open":{"identifier":"deny-open","description":"Denies the open command without any pre-configured scope.","commands":{"allow":[],"deny":["open"]}},"deny-spawn":{"identifier":"deny-spawn","description":"Denies the spawn command without any pre-configured scope.","commands":{"allow":[],"deny":["spawn"]}},"deny-stdin-write":{"identifier":"deny-stdin-write","description":"Denies the stdin_write command without any pre-configured scope.","commands":{"allow":[],"deny":["stdin_write"]}}},"permission_sets":{},"global_scope_schema":{"$schema":"http://json-schema.org/draft-07/schema#","anyOf":[{"additionalProperties":false,"properties":{"args":{"allOf":[{"$ref":"#/definitions/ShellScopeEntryAllowedArgs"}],"description":"The allowed arguments for the command execution."},"cmd":{"description":"The command name. It can start with a variable that resolves to a system base directory. The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.","type":"string"},"name":{"description":"The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.","type":"string"}},"required":["cmd","name"],"type":"object"},{"additionalProperties":false,"properties":{"args":{"allOf":[{"$ref":"#/definitions/ShellScopeEntryAllowedArgs"}],"description":"The allowed arguments for the command execution."},"name":{"description":"The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.","type":"string"},"sidecar":{"description":"If this command is a sidecar 
command.","type":"boolean"}},"required":["name","sidecar"],"type":"object"}],"definitions":{"ShellScopeEntryAllowedArg":{"anyOf":[{"description":"A non-configurable argument that is passed to the command in the order it was specified.","type":"string"},{"additionalProperties":false,"description":"A variable that is set while calling the command from the webview API.","properties":{"raw":{"default":false,"description":"Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.","type":"boolean"},"validator":{"description":"[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ","type":"string"}},"required":["validator"],"type":"object"}],"description":"A command argument allowed to be executed by the webview API."},"ShellScopeEntryAllowedArgs":{"anyOf":[{"description":"Use a simple boolean to allow all or disable all arguments to this command configuration.","type":"boolean"},{"description":"A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.","items":{"$ref":"#/definitions/ShellScopeEntryAllowedArg"},"type":"array"}],"description":"A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. 
A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration."}},"description":"Shell scope entry.","title":"ShellScopeEntry"}}} \ No newline at end of file +{ + "core": { + "default_permission": { + "identifier": "default", + "description": "Default core plugins set.", + "permissions": [ + "core:path:default", + "core:event:default", + "core:window:default", + "core:webview:default", + "core:app:default", + "core:image:default", + "core:resources:default", + "core:menu:default", + "core:tray:default" + ] + }, + "permissions": {}, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:app": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-version", + "allow-name", + "allow-tauri-version", + "allow-identifier", + "allow-bundle-type", + "allow-register-listener", + "allow-remove-listener" + ] + }, + "permissions": { + "allow-app-hide": { + "identifier": "allow-app-hide", + "description": "Enables the app_hide command without any pre-configured scope.", + "commands": { "allow": ["app_hide"], "deny": [] } + }, + "allow-app-show": { + "identifier": "allow-app-show", + "description": "Enables the app_show command without any pre-configured scope.", + "commands": { "allow": ["app_show"], "deny": [] } + }, + "allow-bundle-type": { + "identifier": "allow-bundle-type", + "description": "Enables the bundle_type command without any pre-configured scope.", + "commands": { "allow": ["bundle_type"], "deny": [] } + }, + "allow-default-window-icon": { + "identifier": "allow-default-window-icon", + "description": "Enables the default_window_icon command without any pre-configured scope.", + "commands": { "allow": ["default_window_icon"], "deny": [] } + }, + "allow-fetch-data-store-identifiers": { + "identifier": "allow-fetch-data-store-identifiers", + "description": "Enables the 
fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": ["fetch_data_store_identifiers"], "deny": [] } + }, + "allow-identifier": { + "identifier": "allow-identifier", + "description": "Enables the identifier command without any pre-configured scope.", + "commands": { "allow": ["identifier"], "deny": [] } + }, + "allow-name": { + "identifier": "allow-name", + "description": "Enables the name command without any pre-configured scope.", + "commands": { "allow": ["name"], "deny": [] } + }, + "allow-register-listener": { + "identifier": "allow-register-listener", + "description": "Enables the register_listener command without any pre-configured scope.", + "commands": { "allow": ["register_listener"], "deny": [] } + }, + "allow-remove-data-store": { + "identifier": "allow-remove-data-store", + "description": "Enables the remove_data_store command without any pre-configured scope.", + "commands": { "allow": ["remove_data_store"], "deny": [] } + }, + "allow-remove-listener": { + "identifier": "allow-remove-listener", + "description": "Enables the remove_listener command without any pre-configured scope.", + "commands": { "allow": ["remove_listener"], "deny": [] } + }, + "allow-set-app-theme": { + "identifier": "allow-set-app-theme", + "description": "Enables the set_app_theme command without any pre-configured scope.", + "commands": { "allow": ["set_app_theme"], "deny": [] } + }, + "allow-set-dock-visibility": { + "identifier": "allow-set-dock-visibility", + "description": "Enables the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": ["set_dock_visibility"], "deny": [] } + }, + "allow-tauri-version": { + "identifier": "allow-tauri-version", + "description": "Enables the tauri_version command without any pre-configured scope.", + "commands": { "allow": ["tauri_version"], "deny": [] } + }, + "allow-version": { + "identifier": "allow-version", + "description": "Enables the version command 
without any pre-configured scope.", + "commands": { "allow": ["version"], "deny": [] } + }, + "deny-app-hide": { + "identifier": "deny-app-hide", + "description": "Denies the app_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_hide"] } + }, + "deny-app-show": { + "identifier": "deny-app-show", + "description": "Denies the app_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_show"] } + }, + "deny-bundle-type": { + "identifier": "deny-bundle-type", + "description": "Denies the bundle_type command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["bundle_type"] } + }, + "deny-default-window-icon": { + "identifier": "deny-default-window-icon", + "description": "Denies the default_window_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["default_window_icon"] } + }, + "deny-fetch-data-store-identifiers": { + "identifier": "deny-fetch-data-store-identifiers", + "description": "Denies the fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["fetch_data_store_identifiers"] } + }, + "deny-identifier": { + "identifier": "deny-identifier", + "description": "Denies the identifier command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["identifier"] } + }, + "deny-name": { + "identifier": "deny-name", + "description": "Denies the name command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["name"] } + }, + "deny-register-listener": { + "identifier": "deny-register-listener", + "description": "Denies the register_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["register_listener"] } + }, + "deny-remove-data-store": { + "identifier": "deny-remove-data-store", + "description": "Denies the remove_data_store command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["remove_data_store"] } + }, + "deny-remove-listener": { + "identifier": "deny-remove-listener", + "description": "Denies the remove_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_listener"] } + }, + "deny-set-app-theme": { + "identifier": "deny-set-app-theme", + "description": "Denies the set_app_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_app_theme"] } + }, + "deny-set-dock-visibility": { + "identifier": "deny-set-dock-visibility", + "description": "Denies the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_dock_visibility"] } + }, + "deny-tauri-version": { + "identifier": "deny-tauri-version", + "description": "Denies the tauri_version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["tauri_version"] } + }, + "deny-version": { + "identifier": "deny-version", + "description": "Denies the version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["version"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:event": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-listen", "allow-unlisten", "allow-emit", "allow-emit-to"] + }, + "permissions": { + "allow-emit": { + "identifier": "allow-emit", + "description": "Enables the emit command without any pre-configured scope.", + "commands": { "allow": ["emit"], "deny": [] } + }, + "allow-emit-to": { + "identifier": "allow-emit-to", + "description": "Enables the emit_to command without any pre-configured scope.", + "commands": { "allow": ["emit_to"], "deny": [] } + }, + "allow-listen": { + "identifier": "allow-listen", + "description": "Enables the listen command without any pre-configured scope.", + "commands": { "allow": ["listen"], "deny": [] } + }, + 
"allow-unlisten": { + "identifier": "allow-unlisten", + "description": "Enables the unlisten command without any pre-configured scope.", + "commands": { "allow": ["unlisten"], "deny": [] } + }, + "deny-emit": { + "identifier": "deny-emit", + "description": "Denies the emit command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["emit"] } + }, + "deny-emit-to": { + "identifier": "deny-emit-to", + "description": "Denies the emit_to command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["emit_to"] } + }, + "deny-listen": { + "identifier": "deny-listen", + "description": "Denies the listen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["listen"] } + }, + "deny-unlisten": { + "identifier": "deny-unlisten", + "description": "Denies the unlisten command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unlisten"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:image": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-new", "allow-from-bytes", "allow-from-path", "allow-rgba", "allow-size"] + }, + "permissions": { + "allow-from-bytes": { + "identifier": "allow-from-bytes", + "description": "Enables the from_bytes command without any pre-configured scope.", + "commands": { "allow": ["from_bytes"], "deny": [] } + }, + "allow-from-path": { + "identifier": "allow-from-path", + "description": "Enables the from_path command without any pre-configured scope.", + "commands": { "allow": ["from_path"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-rgba": { + "identifier": "allow-rgba", + "description": "Enables the rgba command without any pre-configured scope.", + "commands": 
{ "allow": ["rgba"], "deny": [] } + }, + "allow-size": { + "identifier": "allow-size", + "description": "Enables the size command without any pre-configured scope.", + "commands": { "allow": ["size"], "deny": [] } + }, + "deny-from-bytes": { + "identifier": "deny-from-bytes", + "description": "Denies the from_bytes command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_bytes"] } + }, + "deny-from-path": { + "identifier": "deny-from-path", + "description": "Denies the from_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_path"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-rgba": { + "identifier": "deny-rgba", + "description": "Denies the rgba command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["rgba"] } + }, + "deny-size": { + "identifier": "deny-size", + "description": "Denies the size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:menu": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-append", + "allow-prepend", + "allow-insert", + "allow-remove", + "allow-remove-at", + "allow-items", + "allow-get", + "allow-popup", + "allow-create-default", + "allow-set-as-app-menu", + "allow-set-as-window-menu", + "allow-text", + "allow-set-text", + "allow-is-enabled", + "allow-set-enabled", + "allow-set-accelerator", + "allow-set-as-windows-menu-for-nsapp", + "allow-set-as-help-menu-for-nsapp", + "allow-is-checked", + "allow-set-checked", + "allow-set-icon" + ] + }, + "permissions": { + "allow-append": { + "identifier": "allow-append", + "description": "Enables the append command 
without any pre-configured scope.", + "commands": { "allow": ["append"], "deny": [] } + }, + "allow-create-default": { + "identifier": "allow-create-default", + "description": "Enables the create_default command without any pre-configured scope.", + "commands": { "allow": ["create_default"], "deny": [] } + }, + "allow-get": { + "identifier": "allow-get", + "description": "Enables the get command without any pre-configured scope.", + "commands": { "allow": ["get"], "deny": [] } + }, + "allow-insert": { + "identifier": "allow-insert", + "description": "Enables the insert command without any pre-configured scope.", + "commands": { "allow": ["insert"], "deny": [] } + }, + "allow-is-checked": { + "identifier": "allow-is-checked", + "description": "Enables the is_checked command without any pre-configured scope.", + "commands": { "allow": ["is_checked"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-items": { + "identifier": "allow-items", + "description": "Enables the items command without any pre-configured scope.", + "commands": { "allow": ["items"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-popup": { + "identifier": "allow-popup", + "description": "Enables the popup command without any pre-configured scope.", + "commands": { "allow": ["popup"], "deny": [] } + }, + "allow-prepend": { + "identifier": "allow-prepend", + "description": "Enables the prepend command without any pre-configured scope.", + "commands": { "allow": ["prepend"], "deny": [] } + }, + "allow-remove": { + "identifier": "allow-remove", + "description": "Enables the remove command without any pre-configured scope.", + "commands": { "allow": ["remove"], "deny": [] 
} + }, + "allow-remove-at": { + "identifier": "allow-remove-at", + "description": "Enables the remove_at command without any pre-configured scope.", + "commands": { "allow": ["remove_at"], "deny": [] } + }, + "allow-set-accelerator": { + "identifier": "allow-set-accelerator", + "description": "Enables the set_accelerator command without any pre-configured scope.", + "commands": { "allow": ["set_accelerator"], "deny": [] } + }, + "allow-set-as-app-menu": { + "identifier": "allow-set-as-app-menu", + "description": "Enables the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_app_menu"], "deny": [] } + }, + "allow-set-as-help-menu-for-nsapp": { + "identifier": "allow-set-as-help-menu-for-nsapp", + "description": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_help_menu_for_nsapp"], "deny": [] } + }, + "allow-set-as-window-menu": { + "identifier": "allow-set-as-window-menu", + "description": "Enables the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_window_menu"], "deny": [] } + }, + "allow-set-as-windows-menu-for-nsapp": { + "identifier": "allow-set-as-windows-menu-for-nsapp", + "description": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_windows_menu_for_nsapp"], "deny": [] } + }, + "allow-set-checked": { + "identifier": "allow-set-checked", + "description": "Enables the set_checked command without any pre-configured scope.", + "commands": { "allow": ["set_checked"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + 
"commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-text": { + "identifier": "allow-set-text", + "description": "Enables the set_text command without any pre-configured scope.", + "commands": { "allow": ["set_text"], "deny": [] } + }, + "allow-text": { + "identifier": "allow-text", + "description": "Enables the text command without any pre-configured scope.", + "commands": { "allow": ["text"], "deny": [] } + }, + "deny-append": { + "identifier": "deny-append", + "description": "Denies the append command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["append"] } + }, + "deny-create-default": { + "identifier": "deny-create-default", + "description": "Denies the create_default command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_default"] } + }, + "deny-get": { + "identifier": "deny-get", + "description": "Denies the get command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get"] } + }, + "deny-insert": { + "identifier": "deny-insert", + "description": "Denies the insert command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["insert"] } + }, + "deny-is-checked": { + "identifier": "deny-is-checked", + "description": "Denies the is_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_checked"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-items": { + "identifier": "deny-items", + "description": "Denies the items command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["items"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-popup": { + "identifier": "deny-popup", + 
"description": "Denies the popup command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["popup"] } + }, + "deny-prepend": { + "identifier": "deny-prepend", + "description": "Denies the prepend command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["prepend"] } + }, + "deny-remove": { + "identifier": "deny-remove", + "description": "Denies the remove command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove"] } + }, + "deny-remove-at": { + "identifier": "deny-remove-at", + "description": "Denies the remove_at command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_at"] } + }, + "deny-set-accelerator": { + "identifier": "deny-set-accelerator", + "description": "Denies the set_accelerator command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_accelerator"] } + }, + "deny-set-as-app-menu": { + "identifier": "deny-set-as-app-menu", + "description": "Denies the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_app_menu"] } + }, + "deny-set-as-help-menu-for-nsapp": { + "identifier": "deny-set-as-help-menu-for-nsapp", + "description": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_help_menu_for_nsapp"] } + }, + "deny-set-as-window-menu": { + "identifier": "deny-set-as-window-menu", + "description": "Denies the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_window_menu"] } + }, + "deny-set-as-windows-menu-for-nsapp": { + "identifier": "deny-set-as-windows-menu-for-nsapp", + "description": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_windows_menu_for_nsapp"] } + }, + "deny-set-checked": { + "identifier": "deny-set-checked", + "description": 
"Denies the set_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_checked"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-text": { + "identifier": "deny-set-text", + "description": "Denies the set_text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_text"] } + }, + "deny-text": { + "identifier": "deny-text", + "description": "Denies the text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["text"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:path": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-resolve-directory", + "allow-resolve", + "allow-normalize", + "allow-join", + "allow-dirname", + "allow-extname", + "allow-basename", + "allow-is-absolute" + ] + }, + "permissions": { + "allow-basename": { + "identifier": "allow-basename", + "description": "Enables the basename command without any pre-configured scope.", + "commands": { "allow": ["basename"], "deny": [] } + }, + "allow-dirname": { + "identifier": "allow-dirname", + "description": "Enables the dirname command without any pre-configured scope.", + "commands": { "allow": ["dirname"], "deny": [] } + }, + "allow-extname": { + "identifier": "allow-extname", + "description": "Enables the extname command without any pre-configured scope.", + "commands": { "allow": ["extname"], "deny": [] } + }, + "allow-is-absolute": { + "identifier": "allow-is-absolute", + "description": "Enables the 
is_absolute command without any pre-configured scope.", + "commands": { "allow": ["is_absolute"], "deny": [] } + }, + "allow-join": { + "identifier": "allow-join", + "description": "Enables the join command without any pre-configured scope.", + "commands": { "allow": ["join"], "deny": [] } + }, + "allow-normalize": { + "identifier": "allow-normalize", + "description": "Enables the normalize command without any pre-configured scope.", + "commands": { "allow": ["normalize"], "deny": [] } + }, + "allow-resolve": { + "identifier": "allow-resolve", + "description": "Enables the resolve command without any pre-configured scope.", + "commands": { "allow": ["resolve"], "deny": [] } + }, + "allow-resolve-directory": { + "identifier": "allow-resolve-directory", + "description": "Enables the resolve_directory command without any pre-configured scope.", + "commands": { "allow": ["resolve_directory"], "deny": [] } + }, + "deny-basename": { + "identifier": "deny-basename", + "description": "Denies the basename command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["basename"] } + }, + "deny-dirname": { + "identifier": "deny-dirname", + "description": "Denies the dirname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["dirname"] } + }, + "deny-extname": { + "identifier": "deny-extname", + "description": "Denies the extname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["extname"] } + }, + "deny-is-absolute": { + "identifier": "deny-is-absolute", + "description": "Denies the is_absolute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_absolute"] } + }, + "deny-join": { + "identifier": "deny-join", + "description": "Denies the join command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["join"] } + }, + "deny-normalize": { + "identifier": "deny-normalize", + "description": "Denies the normalize command without any 
pre-configured scope.", + "commands": { "allow": [], "deny": ["normalize"] } + }, + "deny-resolve": { + "identifier": "deny-resolve", + "description": "Denies the resolve command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve"] } + }, + "deny-resolve-directory": { + "identifier": "deny-resolve-directory", + "description": "Denies the resolve_directory command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve_directory"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:resources": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-close"] + }, + "permissions": { + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:tray": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-get-by-id", + "allow-remove-by-id", + "allow-set-icon", + "allow-set-menu", + "allow-set-tooltip", + "allow-set-title", + "allow-set-visible", + "allow-set-temp-dir-path", + "allow-set-icon-as-template", + "allow-set-show-menu-on-left-click" + ] + }, + "permissions": { + "allow-get-by-id": { + "identifier": "allow-get-by-id", + "description": "Enables the get_by_id command without any pre-configured scope.", + "commands": { "allow": ["get_by_id"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", 
+ "commands": { "allow": ["new"], "deny": [] } + }, + "allow-remove-by-id": { + "identifier": "allow-remove-by-id", + "description": "Enables the remove_by_id command without any pre-configured scope.", + "commands": { "allow": ["remove_by_id"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-icon-as-template": { + "identifier": "allow-set-icon-as-template", + "description": "Enables the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": ["set_icon_as_template"], "deny": [] } + }, + "allow-set-menu": { + "identifier": "allow-set-menu", + "description": "Enables the set_menu command without any pre-configured scope.", + "commands": { "allow": ["set_menu"], "deny": [] } + }, + "allow-set-show-menu-on-left-click": { + "identifier": "allow-set-show-menu-on-left-click", + "description": "Enables the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": ["set_show_menu_on_left_click"], "deny": [] } + }, + "allow-set-temp-dir-path": { + "identifier": "allow-set-temp-dir-path", + "description": "Enables the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": ["set_temp_dir_path"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-tooltip": { + "identifier": "allow-set-tooltip", + "description": "Enables the set_tooltip command without any pre-configured scope.", + "commands": { "allow": ["set_tooltip"], "deny": [] } + }, + "allow-set-visible": { + "identifier": "allow-set-visible", + "description": "Enables the set_visible command without any pre-configured scope.", + "commands": { "allow": 
["set_visible"], "deny": [] } + }, + "deny-get-by-id": { + "identifier": "deny-get-by-id", + "description": "Denies the get_by_id command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_by_id"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-remove-by-id": { + "identifier": "deny-remove-by-id", + "description": "Denies the remove_by_id command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_by_id"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-icon-as-template": { + "identifier": "deny-set-icon-as-template", + "description": "Denies the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon_as_template"] } + }, + "deny-set-menu": { + "identifier": "deny-set-menu", + "description": "Denies the set_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_menu"] } + }, + "deny-set-show-menu-on-left-click": { + "identifier": "deny-set-show-menu-on-left-click", + "description": "Denies the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_show_menu_on_left_click"] } + }, + "deny-set-temp-dir-path": { + "identifier": "deny-set-temp-dir-path", + "description": "Denies the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_temp_dir_path"] } + }, + "deny-set-title": { + "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-tooltip": { + "identifier": "deny-set-tooltip", + 
"description": "Denies the set_tooltip command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_tooltip"] } + }, + "deny-set-visible": { + "identifier": "deny-set-visible", + "description": "Denies the set_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:webview": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": ["allow-get-all-webviews", "allow-webview-position", "allow-webview-size", "allow-internal-toggle-devtools"] + }, + "permissions": { + "allow-clear-all-browsing-data": { + "identifier": "allow-clear-all-browsing-data", + "description": "Enables the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": ["clear_all_browsing_data"], "deny": [] } + }, + "allow-create-webview": { + "identifier": "allow-create-webview", + "description": "Enables the create_webview command without any pre-configured scope.", + "commands": { "allow": ["create_webview"], "deny": [] } + }, + "allow-create-webview-window": { + "identifier": "allow-create-webview-window", + "description": "Enables the create_webview_window command without any pre-configured scope.", + "commands": { "allow": ["create_webview_window"], "deny": [] } + }, + "allow-get-all-webviews": { + "identifier": "allow-get-all-webviews", + "description": "Enables the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": ["get_all_webviews"], "deny": [] } + }, + "allow-internal-toggle-devtools": { + "identifier": "allow-internal-toggle-devtools", + "description": "Enables the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_devtools"], "deny": [] } + }, + "allow-print": { + "identifier": "allow-print", + "description": "Enables the print command 
without any pre-configured scope.", + "commands": { "allow": ["print"], "deny": [] } + }, + "allow-reparent": { + "identifier": "allow-reparent", + "description": "Enables the reparent command without any pre-configured scope.", + "commands": { "allow": ["reparent"], "deny": [] } + }, + "allow-set-webview-auto-resize": { + "identifier": "allow-set-webview-auto-resize", + "description": "Enables the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": ["set_webview_auto_resize"], "deny": [] } + }, + "allow-set-webview-background-color": { + "identifier": "allow-set-webview-background-color", + "description": "Enables the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_webview_background_color"], "deny": [] } + }, + "allow-set-webview-focus": { + "identifier": "allow-set-webview-focus", + "description": "Enables the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": ["set_webview_focus"], "deny": [] } + }, + "allow-set-webview-position": { + "identifier": "allow-set-webview-position", + "description": "Enables the set_webview_position command without any pre-configured scope.", + "commands": { "allow": ["set_webview_position"], "deny": [] } + }, + "allow-set-webview-size": { + "identifier": "allow-set-webview-size", + "description": "Enables the set_webview_size command without any pre-configured scope.", + "commands": { "allow": ["set_webview_size"], "deny": [] } + }, + "allow-set-webview-zoom": { + "identifier": "allow-set-webview-zoom", + "description": "Enables the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": ["set_webview_zoom"], "deny": [] } + }, + "allow-webview-close": { + "identifier": "allow-webview-close", + "description": "Enables the webview_close command without any pre-configured scope.", + "commands": { "allow": ["webview_close"], "deny": [] } + }, + "allow-webview-hide": { + 
"identifier": "allow-webview-hide", + "description": "Enables the webview_hide command without any pre-configured scope.", + "commands": { "allow": ["webview_hide"], "deny": [] } + }, + "allow-webview-position": { + "identifier": "allow-webview-position", + "description": "Enables the webview_position command without any pre-configured scope.", + "commands": { "allow": ["webview_position"], "deny": [] } + }, + "allow-webview-show": { + "identifier": "allow-webview-show", + "description": "Enables the webview_show command without any pre-configured scope.", + "commands": { "allow": ["webview_show"], "deny": [] } + }, + "allow-webview-size": { + "identifier": "allow-webview-size", + "description": "Enables the webview_size command without any pre-configured scope.", + "commands": { "allow": ["webview_size"], "deny": [] } + }, + "deny-clear-all-browsing-data": { + "identifier": "deny-clear-all-browsing-data", + "description": "Denies the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["clear_all_browsing_data"] } + }, + "deny-create-webview": { + "identifier": "deny-create-webview", + "description": "Denies the create_webview command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview"] } + }, + "deny-create-webview-window": { + "identifier": "deny-create-webview-window", + "description": "Denies the create_webview_window command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview_window"] } + }, + "deny-get-all-webviews": { + "identifier": "deny-get-all-webviews", + "description": "Denies the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_all_webviews"] } + }, + "deny-internal-toggle-devtools": { + "identifier": "deny-internal-toggle-devtools", + "description": "Denies the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["internal_toggle_devtools"] } + }, + "deny-print": { + "identifier": "deny-print", + "description": "Denies the print command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["print"] } + }, + "deny-reparent": { + "identifier": "deny-reparent", + "description": "Denies the reparent command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["reparent"] } + }, + "deny-set-webview-auto-resize": { + "identifier": "deny-set-webview-auto-resize", + "description": "Denies the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_auto_resize"] } + }, + "deny-set-webview-background-color": { + "identifier": "deny-set-webview-background-color", + "description": "Denies the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_background_color"] } + }, + "deny-set-webview-focus": { + "identifier": "deny-set-webview-focus", + "description": "Denies the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_focus"] } + }, + "deny-set-webview-position": { + "identifier": "deny-set-webview-position", + "description": "Denies the set_webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_position"] } + }, + "deny-set-webview-size": { + "identifier": "deny-set-webview-size", + "description": "Denies the set_webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_size"] } + }, + "deny-set-webview-zoom": { + "identifier": "deny-set-webview-zoom", + "description": "Denies the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_zoom"] } + }, + "deny-webview-close": { + "identifier": "deny-webview-close", + "description": "Denies the webview_close command without any pre-configured 
scope.", + "commands": { "allow": [], "deny": ["webview_close"] } + }, + "deny-webview-hide": { + "identifier": "deny-webview-hide", + "description": "Denies the webview_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_hide"] } + }, + "deny-webview-position": { + "identifier": "deny-webview-position", + "description": "Denies the webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_position"] } + }, + "deny-webview-show": { + "identifier": "deny-webview-show", + "description": "Denies the webview_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_show"] } + }, + "deny-webview-size": { + "identifier": "deny-webview-size", + "description": "Denies the webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:window": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-get-all-windows", + "allow-scale-factor", + "allow-inner-position", + "allow-outer-position", + "allow-inner-size", + "allow-outer-size", + "allow-is-fullscreen", + "allow-is-minimized", + "allow-is-maximized", + "allow-is-focused", + "allow-is-decorated", + "allow-is-resizable", + "allow-is-maximizable", + "allow-is-minimizable", + "allow-is-closable", + "allow-is-visible", + "allow-is-enabled", + "allow-title", + "allow-current-monitor", + "allow-primary-monitor", + "allow-monitor-from-point", + "allow-available-monitors", + "allow-cursor-position", + "allow-theme", + "allow-is-always-on-top", + "allow-internal-toggle-maximize" + ] + }, + "permissions": { + "allow-available-monitors": { + "identifier": "allow-available-monitors", + "description": "Enables the available_monitors command without any pre-configured scope.", + "commands": { 
"allow": ["available_monitors"], "deny": [] } + }, + "allow-center": { + "identifier": "allow-center", + "description": "Enables the center command without any pre-configured scope.", + "commands": { "allow": ["center"], "deny": [] } + }, + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "allow-create": { + "identifier": "allow-create", + "description": "Enables the create command without any pre-configured scope.", + "commands": { "allow": ["create"], "deny": [] } + }, + "allow-current-monitor": { + "identifier": "allow-current-monitor", + "description": "Enables the current_monitor command without any pre-configured scope.", + "commands": { "allow": ["current_monitor"], "deny": [] } + }, + "allow-cursor-position": { + "identifier": "allow-cursor-position", + "description": "Enables the cursor_position command without any pre-configured scope.", + "commands": { "allow": ["cursor_position"], "deny": [] } + }, + "allow-destroy": { + "identifier": "allow-destroy", + "description": "Enables the destroy command without any pre-configured scope.", + "commands": { "allow": ["destroy"], "deny": [] } + }, + "allow-get-all-windows": { + "identifier": "allow-get-all-windows", + "description": "Enables the get_all_windows command without any pre-configured scope.", + "commands": { "allow": ["get_all_windows"], "deny": [] } + }, + "allow-hide": { + "identifier": "allow-hide", + "description": "Enables the hide command without any pre-configured scope.", + "commands": { "allow": ["hide"], "deny": [] } + }, + "allow-inner-position": { + "identifier": "allow-inner-position", + "description": "Enables the inner_position command without any pre-configured scope.", + "commands": { "allow": ["inner_position"], "deny": [] } + }, + "allow-inner-size": { + "identifier": "allow-inner-size", + "description": "Enables the inner_size command without any 
pre-configured scope.", + "commands": { "allow": ["inner_size"], "deny": [] } + }, + "allow-internal-toggle-maximize": { + "identifier": "allow-internal-toggle-maximize", + "description": "Enables the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_maximize"], "deny": [] } + }, + "allow-is-always-on-top": { + "identifier": "allow-is-always-on-top", + "description": "Enables the is_always_on_top command without any pre-configured scope.", + "commands": { "allow": ["is_always_on_top"], "deny": [] } + }, + "allow-is-closable": { + "identifier": "allow-is-closable", + "description": "Enables the is_closable command without any pre-configured scope.", + "commands": { "allow": ["is_closable"], "deny": [] } + }, + "allow-is-decorated": { + "identifier": "allow-is-decorated", + "description": "Enables the is_decorated command without any pre-configured scope.", + "commands": { "allow": ["is_decorated"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-is-focused": { + "identifier": "allow-is-focused", + "description": "Enables the is_focused command without any pre-configured scope.", + "commands": { "allow": ["is_focused"], "deny": [] } + }, + "allow-is-fullscreen": { + "identifier": "allow-is-fullscreen", + "description": "Enables the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["is_fullscreen"], "deny": [] } + }, + "allow-is-maximizable": { + "identifier": "allow-is-maximizable", + "description": "Enables the is_maximizable command without any pre-configured scope.", + "commands": { "allow": ["is_maximizable"], "deny": [] } + }, + "allow-is-maximized": { + "identifier": "allow-is-maximized", + "description": "Enables the is_maximized command without any pre-configured scope.", + "commands": 
{ "allow": ["is_maximized"], "deny": [] } + }, + "allow-is-minimizable": { + "identifier": "allow-is-minimizable", + "description": "Enables the is_minimizable command without any pre-configured scope.", + "commands": { "allow": ["is_minimizable"], "deny": [] } + }, + "allow-is-minimized": { + "identifier": "allow-is-minimized", + "description": "Enables the is_minimized command without any pre-configured scope.", + "commands": { "allow": ["is_minimized"], "deny": [] } + }, + "allow-is-resizable": { + "identifier": "allow-is-resizable", + "description": "Enables the is_resizable command without any pre-configured scope.", + "commands": { "allow": ["is_resizable"], "deny": [] } + }, + "allow-is-visible": { + "identifier": "allow-is-visible", + "description": "Enables the is_visible command without any pre-configured scope.", + "commands": { "allow": ["is_visible"], "deny": [] } + }, + "allow-maximize": { + "identifier": "allow-maximize", + "description": "Enables the maximize command without any pre-configured scope.", + "commands": { "allow": ["maximize"], "deny": [] } + }, + "allow-minimize": { + "identifier": "allow-minimize", + "description": "Enables the minimize command without any pre-configured scope.", + "commands": { "allow": ["minimize"], "deny": [] } + }, + "allow-monitor-from-point": { + "identifier": "allow-monitor-from-point", + "description": "Enables the monitor_from_point command without any pre-configured scope.", + "commands": { "allow": ["monitor_from_point"], "deny": [] } + }, + "allow-outer-position": { + "identifier": "allow-outer-position", + "description": "Enables the outer_position command without any pre-configured scope.", + "commands": { "allow": ["outer_position"], "deny": [] } + }, + "allow-outer-size": { + "identifier": "allow-outer-size", + "description": "Enables the outer_size command without any pre-configured scope.", + "commands": { "allow": ["outer_size"], "deny": [] } + }, + "allow-primary-monitor": { + "identifier": 
"allow-primary-monitor", + "description": "Enables the primary_monitor command without any pre-configured scope.", + "commands": { "allow": ["primary_monitor"], "deny": [] } + }, + "allow-request-user-attention": { + "identifier": "allow-request-user-attention", + "description": "Enables the request_user_attention command without any pre-configured scope.", + "commands": { "allow": ["request_user_attention"], "deny": [] } + }, + "allow-scale-factor": { + "identifier": "allow-scale-factor", + "description": "Enables the scale_factor command without any pre-configured scope.", + "commands": { "allow": ["scale_factor"], "deny": [] } + }, + "allow-set-always-on-bottom": { + "identifier": "allow-set-always-on-bottom", + "description": "Enables the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_bottom"], "deny": [] } + }, + "allow-set-always-on-top": { + "identifier": "allow-set-always-on-top", + "description": "Enables the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_top"], "deny": [] } + }, + "allow-set-background-color": { + "identifier": "allow-set-background-color", + "description": "Enables the set_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_background_color"], "deny": [] } + }, + "allow-set-badge-count": { + "identifier": "allow-set-badge-count", + "description": "Enables the set_badge_count command without any pre-configured scope.", + "commands": { "allow": ["set_badge_count"], "deny": [] } + }, + "allow-set-badge-label": { + "identifier": "allow-set-badge-label", + "description": "Enables the set_badge_label command without any pre-configured scope.", + "commands": { "allow": ["set_badge_label"], "deny": [] } + }, + "allow-set-closable": { + "identifier": "allow-set-closable", + "description": "Enables the set_closable command without any pre-configured scope.", + "commands": { "allow": 
["set_closable"], "deny": [] } + }, + "allow-set-content-protected": { + "identifier": "allow-set-content-protected", + "description": "Enables the set_content_protected command without any pre-configured scope.", + "commands": { "allow": ["set_content_protected"], "deny": [] } + }, + "allow-set-cursor-grab": { + "identifier": "allow-set-cursor-grab", + "description": "Enables the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_grab"], "deny": [] } + }, + "allow-set-cursor-icon": { + "identifier": "allow-set-cursor-icon", + "description": "Enables the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_icon"], "deny": [] } + }, + "allow-set-cursor-position": { + "identifier": "allow-set-cursor-position", + "description": "Enables the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_position"], "deny": [] } + }, + "allow-set-cursor-visible": { + "identifier": "allow-set-cursor-visible", + "description": "Enables the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_visible"], "deny": [] } + }, + "allow-set-decorations": { + "identifier": "allow-set-decorations", + "description": "Enables the set_decorations command without any pre-configured scope.", + "commands": { "allow": ["set_decorations"], "deny": [] } + }, + "allow-set-effects": { + "identifier": "allow-set-effects", + "description": "Enables the set_effects command without any pre-configured scope.", + "commands": { "allow": ["set_effects"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-focus": { + "identifier": "allow-set-focus", + "description": "Enables the set_focus command without any pre-configured scope.", + 
"commands": { "allow": ["set_focus"], "deny": [] } + }, + "allow-set-focusable": { + "identifier": "allow-set-focusable", + "description": "Enables the set_focusable command without any pre-configured scope.", + "commands": { "allow": ["set_focusable"], "deny": [] } + }, + "allow-set-fullscreen": { + "identifier": "allow-set-fullscreen", + "description": "Enables the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_fullscreen"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-ignore-cursor-events": { + "identifier": "allow-set-ignore-cursor-events", + "description": "Enables the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": ["set_ignore_cursor_events"], "deny": [] } + }, + "allow-set-max-size": { + "identifier": "allow-set-max-size", + "description": "Enables the set_max_size command without any pre-configured scope.", + "commands": { "allow": ["set_max_size"], "deny": [] } + }, + "allow-set-maximizable": { + "identifier": "allow-set-maximizable", + "description": "Enables the set_maximizable command without any pre-configured scope.", + "commands": { "allow": ["set_maximizable"], "deny": [] } + }, + "allow-set-min-size": { + "identifier": "allow-set-min-size", + "description": "Enables the set_min_size command without any pre-configured scope.", + "commands": { "allow": ["set_min_size"], "deny": [] } + }, + "allow-set-minimizable": { + "identifier": "allow-set-minimizable", + "description": "Enables the set_minimizable command without any pre-configured scope.", + "commands": { "allow": ["set_minimizable"], "deny": [] } + }, + "allow-set-overlay-icon": { + "identifier": "allow-set-overlay-icon", + "description": "Enables the set_overlay_icon command without any pre-configured scope.", + "commands": { 
"allow": ["set_overlay_icon"], "deny": [] } + }, + "allow-set-position": { + "identifier": "allow-set-position", + "description": "Enables the set_position command without any pre-configured scope.", + "commands": { "allow": ["set_position"], "deny": [] } + }, + "allow-set-progress-bar": { + "identifier": "allow-set-progress-bar", + "description": "Enables the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": ["set_progress_bar"], "deny": [] } + }, + "allow-set-resizable": { + "identifier": "allow-set-resizable", + "description": "Enables the set_resizable command without any pre-configured scope.", + "commands": { "allow": ["set_resizable"], "deny": [] } + }, + "allow-set-shadow": { + "identifier": "allow-set-shadow", + "description": "Enables the set_shadow command without any pre-configured scope.", + "commands": { "allow": ["set_shadow"], "deny": [] } + }, + "allow-set-simple-fullscreen": { + "identifier": "allow-set-simple-fullscreen", + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_simple_fullscreen"], "deny": [] } + }, + "allow-set-size": { + "identifier": "allow-set-size", + "description": "Enables the set_size command without any pre-configured scope.", + "commands": { "allow": ["set_size"], "deny": [] } + }, + "allow-set-size-constraints": { + "identifier": "allow-set-size-constraints", + "description": "Enables the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": ["set_size_constraints"], "deny": [] } + }, + "allow-set-skip-taskbar": { + "identifier": "allow-set-skip-taskbar", + "description": "Enables the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": ["set_skip_taskbar"], "deny": [] } + }, + "allow-set-theme": { + "identifier": "allow-set-theme", + "description": "Enables the set_theme command without any pre-configured scope.", + "commands": { "allow": 
["set_theme"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-title-bar-style": { + "identifier": "allow-set-title-bar-style", + "description": "Enables the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": ["set_title_bar_style"], "deny": [] } + }, + "allow-set-visible-on-all-organizations": { + "identifier": "allow-set-visible-on-all-organizations", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", + "commands": { "allow": ["set_visible_on_all_organizations"], "deny": [] } + }, + "allow-show": { + "identifier": "allow-show", + "description": "Enables the show command without any pre-configured scope.", + "commands": { "allow": ["show"], "deny": [] } + }, + "allow-start-dragging": { + "identifier": "allow-start-dragging", + "description": "Enables the start_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_dragging"], "deny": [] } + }, + "allow-start-resize-dragging": { + "identifier": "allow-start-resize-dragging", + "description": "Enables the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_resize_dragging"], "deny": [] } + }, + "allow-theme": { + "identifier": "allow-theme", + "description": "Enables the theme command without any pre-configured scope.", + "commands": { "allow": ["theme"], "deny": [] } + }, + "allow-title": { + "identifier": "allow-title", + "description": "Enables the title command without any pre-configured scope.", + "commands": { "allow": ["title"], "deny": [] } + }, + "allow-toggle-maximize": { + "identifier": "allow-toggle-maximize", + "description": "Enables the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["toggle_maximize"], "deny": [] } 
+ }, + "allow-unmaximize": { + "identifier": "allow-unmaximize", + "description": "Enables the unmaximize command without any pre-configured scope.", + "commands": { "allow": ["unmaximize"], "deny": [] } + }, + "allow-unminimize": { + "identifier": "allow-unminimize", + "description": "Enables the unminimize command without any pre-configured scope.", + "commands": { "allow": ["unminimize"], "deny": [] } + }, + "deny-available-monitors": { + "identifier": "deny-available-monitors", + "description": "Denies the available_monitors command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["available_monitors"] } + }, + "deny-center": { + "identifier": "deny-center", + "description": "Denies the center command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["center"] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + }, + "deny-create": { + "identifier": "deny-create", + "description": "Denies the create command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create"] } + }, + "deny-current-monitor": { + "identifier": "deny-current-monitor", + "description": "Denies the current_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["current_monitor"] } + }, + "deny-cursor-position": { + "identifier": "deny-cursor-position", + "description": "Denies the cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["cursor_position"] } + }, + "deny-destroy": { + "identifier": "deny-destroy", + "description": "Denies the destroy command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["destroy"] } + }, + "deny-get-all-windows": { + "identifier": "deny-get-all-windows", + "description": "Denies the get_all_windows command without any pre-configured scope.", + "commands": 
{ "allow": [], "deny": ["get_all_windows"] } + }, + "deny-hide": { + "identifier": "deny-hide", + "description": "Denies the hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["hide"] } + }, + "deny-inner-position": { + "identifier": "deny-inner-position", + "description": "Denies the inner_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_position"] } + }, + "deny-inner-size": { + "identifier": "deny-inner-size", + "description": "Denies the inner_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_size"] } + }, + "deny-internal-toggle-maximize": { + "identifier": "deny-internal-toggle-maximize", + "description": "Denies the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["internal_toggle_maximize"] } + }, + "deny-is-always-on-top": { + "identifier": "deny-is-always-on-top", + "description": "Denies the is_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_always_on_top"] } + }, + "deny-is-closable": { + "identifier": "deny-is-closable", + "description": "Denies the is_closable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_closable"] } + }, + "deny-is-decorated": { + "identifier": "deny-is-decorated", + "description": "Denies the is_decorated command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_decorated"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-is-focused": { + "identifier": "deny-is-focused", + "description": "Denies the is_focused command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_focused"] } + }, + "deny-is-fullscreen": { + "identifier": 
"deny-is-fullscreen", + "description": "Denies the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_fullscreen"] } + }, + "deny-is-maximizable": { + "identifier": "deny-is-maximizable", + "description": "Denies the is_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximizable"] } + }, + "deny-is-maximized": { + "identifier": "deny-is-maximized", + "description": "Denies the is_maximized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximized"] } + }, + "deny-is-minimizable": { + "identifier": "deny-is-minimizable", + "description": "Denies the is_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimizable"] } + }, + "deny-is-minimized": { + "identifier": "deny-is-minimized", + "description": "Denies the is_minimized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimized"] } + }, + "deny-is-resizable": { + "identifier": "deny-is-resizable", + "description": "Denies the is_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_resizable"] } + }, + "deny-is-visible": { + "identifier": "deny-is-visible", + "description": "Denies the is_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_visible"] } + }, + "deny-maximize": { + "identifier": "deny-maximize", + "description": "Denies the maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["maximize"] } + }, + "deny-minimize": { + "identifier": "deny-minimize", + "description": "Denies the minimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["minimize"] } + }, + "deny-monitor-from-point": { + "identifier": "deny-monitor-from-point", + "description": "Denies the monitor_from_point command without any pre-configured scope.", + "commands": 
{ "allow": [], "deny": ["monitor_from_point"] } + }, + "deny-outer-position": { + "identifier": "deny-outer-position", + "description": "Denies the outer_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["outer_position"] } + }, + "deny-outer-size": { + "identifier": "deny-outer-size", + "description": "Denies the outer_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["outer_size"] } + }, + "deny-primary-monitor": { + "identifier": "deny-primary-monitor", + "description": "Denies the primary_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["primary_monitor"] } + }, + "deny-request-user-attention": { + "identifier": "deny-request-user-attention", + "description": "Denies the request_user_attention command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["request_user_attention"] } + }, + "deny-scale-factor": { + "identifier": "deny-scale-factor", + "description": "Denies the scale_factor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["scale_factor"] } + }, + "deny-set-always-on-bottom": { + "identifier": "deny-set-always-on-bottom", + "description": "Denies the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_bottom"] } + }, + "deny-set-always-on-top": { + "identifier": "deny-set-always-on-top", + "description": "Denies the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_top"] } + }, + "deny-set-background-color": { + "identifier": "deny-set-background-color", + "description": "Denies the set_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_background_color"] } + }, + "deny-set-badge-count": { + "identifier": "deny-set-badge-count", + "description": "Denies the set_badge_count command without any 
pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_count"] } + }, + "deny-set-badge-label": { + "identifier": "deny-set-badge-label", + "description": "Denies the set_badge_label command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_label"] } + }, + "deny-set-closable": { + "identifier": "deny-set-closable", + "description": "Denies the set_closable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_closable"] } + }, + "deny-set-content-protected": { + "identifier": "deny-set-content-protected", + "description": "Denies the set_content_protected command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_content_protected"] } + }, + "deny-set-cursor-grab": { + "identifier": "deny-set-cursor-grab", + "description": "Denies the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_grab"] } + }, + "deny-set-cursor-icon": { + "identifier": "deny-set-cursor-icon", + "description": "Denies the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_icon"] } + }, + "deny-set-cursor-position": { + "identifier": "deny-set-cursor-position", + "description": "Denies the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_position"] } + }, + "deny-set-cursor-visible": { + "identifier": "deny-set-cursor-visible", + "description": "Denies the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_visible"] } + }, + "deny-set-decorations": { + "identifier": "deny-set-decorations", + "description": "Denies the set_decorations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_decorations"] } + }, + "deny-set-effects": { + "identifier": "deny-set-effects", + "description": "Denies the set_effects 
command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_effects"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-focus": { + "identifier": "deny-set-focus", + "description": "Denies the set_focus command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_focus"] } + }, + "deny-set-focusable": { + "identifier": "deny-set-focusable", + "description": "Denies the set_focusable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_focusable"] } + }, + "deny-set-fullscreen": { + "identifier": "deny-set-fullscreen", + "description": "Denies the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_fullscreen"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-ignore-cursor-events": { + "identifier": "deny-set-ignore-cursor-events", + "description": "Denies the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_ignore_cursor_events"] } + }, + "deny-set-max-size": { + "identifier": "deny-set-max-size", + "description": "Denies the set_max_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_max_size"] } + }, + "deny-set-maximizable": { + "identifier": "deny-set-maximizable", + "description": "Denies the set_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_maximizable"] } + }, + "deny-set-min-size": { + "identifier": "deny-set-min-size", + "description": "Denies the set_min_size command without any pre-configured scope.", + "commands": { "allow": [], 
"deny": ["set_min_size"] } + }, + "deny-set-minimizable": { + "identifier": "deny-set-minimizable", + "description": "Denies the set_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_minimizable"] } + }, + "deny-set-overlay-icon": { + "identifier": "deny-set-overlay-icon", + "description": "Denies the set_overlay_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_overlay_icon"] } + }, + "deny-set-position": { + "identifier": "deny-set-position", + "description": "Denies the set_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_position"] } + }, + "deny-set-progress-bar": { + "identifier": "deny-set-progress-bar", + "description": "Denies the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_progress_bar"] } + }, + "deny-set-resizable": { + "identifier": "deny-set-resizable", + "description": "Denies the set_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_resizable"] } + }, + "deny-set-shadow": { + "identifier": "deny-set-shadow", + "description": "Denies the set_shadow command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_shadow"] } + }, + "deny-set-simple-fullscreen": { + "identifier": "deny-set-simple-fullscreen", + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_simple_fullscreen"] } + }, + "deny-set-size": { + "identifier": "deny-set-size", + "description": "Denies the set_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size"] } + }, + "deny-set-size-constraints": { + "identifier": "deny-set-size-constraints", + "description": "Denies the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size_constraints"] } 
+ }, + "deny-set-skip-taskbar": { + "identifier": "deny-set-skip-taskbar", + "description": "Denies the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_skip_taskbar"] } + }, + "deny-set-theme": { + "identifier": "deny-set-theme", + "description": "Denies the set_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_theme"] } + }, + "deny-set-title": { + "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-title-bar-style": { + "identifier": "deny-set-title-bar-style", + "description": "Denies the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title_bar_style"] } + }, + "deny-set-visible-on-all-organizations": { + "identifier": "deny-set-visible-on-all-organizations", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible_on_all_organizations"] } + }, + "deny-show": { + "identifier": "deny-show", + "description": "Denies the show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["show"] } + }, + "deny-start-dragging": { + "identifier": "deny-start-dragging", + "description": "Denies the start_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_dragging"] } + }, + "deny-start-resize-dragging": { + "identifier": "deny-start-resize-dragging", + "description": "Denies the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_resize_dragging"] } + }, + "deny-theme": { + "identifier": "deny-theme", + "description": "Denies the theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["theme"] } + }, + "deny-title": { + 
"identifier": "deny-title", + "description": "Denies the title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["title"] } + }, + "deny-toggle-maximize": { + "identifier": "deny-toggle-maximize", + "description": "Denies the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["toggle_maximize"] } + }, + "deny-unmaximize": { + "identifier": "deny-unmaximize", + "description": "Denies the unmaximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unmaximize"] } + }, + "deny-unminimize": { + "identifier": "deny-unminimize", + "description": "Denies the unminimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unminimize"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "shell": { + "default_permission": { + "identifier": "default", + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n", + "permissions": ["allow-open"] + }, + "permissions": { + "allow-execute": { + "identifier": "allow-execute", + "description": "Enables the execute command without any pre-configured scope.", + "commands": { "allow": ["execute"], "deny": [] } + }, + "allow-kill": { + "identifier": "allow-kill", + "description": "Enables the kill command without any pre-configured scope.", + "commands": { "allow": ["kill"], "deny": [] } + }, + "allow-open": { + "identifier": "allow-open", + "description": "Enables the open command without any pre-configured scope.", + "commands": { "allow": ["open"], "deny": [] } + }, + "allow-spawn": { + "identifier": "allow-spawn", + "description": "Enables the spawn command without any pre-configured scope.", + "commands": { "allow": ["spawn"], "deny": [] } + }, + "allow-stdin-write": { + "identifier": "allow-stdin-write", + "description": "Enables the stdin_write command without any pre-configured scope.", + "commands": { "allow": ["stdin_write"], "deny": [] } + }, + "deny-execute": { + "identifier": "deny-execute", + "description": "Denies the execute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["execute"] } + }, + "deny-kill": { + "identifier": "deny-kill", + "description": "Denies the kill command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["kill"] } + }, + "deny-open": { + "identifier": "deny-open", + "description": "Denies the open command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["open"] } + }, + "deny-spawn": { + "identifier": "deny-spawn", + "description": "Denies the spawn command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["spawn"] } + }, + "deny-stdin-write": { + "identifier": "deny-stdin-write", + "description": "Denies the stdin_write command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["stdin_write"] } + } + 
}, + "permission_sets": {}, + "global_scope_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "required": ["cmd", "name"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." 
}, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { "description": "If this command is a sidecar command.", "type": "boolean" } + }, + "required": ["name", "sidecar"], + "type": "object" + } + ], + "definitions": { + "ShellScopeEntryAllowedArg": { + "anyOf": [ + { "description": "A non-configurable argument that is passed to the command in the order it was specified.", "type": "string" }, + { + "additionalProperties": false, + "description": "A variable that is set while calling the command from the webview API.", + "properties": { + "raw": { + "default": false, + "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.", + "type": "boolean" + }, + "validator": { + "description": "[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ", + "type": "string" + } + }, + "required": ["validator"], + "type": "object" + } + ], + "description": "A command argument allowed to be executed by the webview API." 
+ }, + "ShellScopeEntryAllowedArgs": { + "anyOf": [ + { "description": "Use a simple boolean to allow all or disable all arguments to this command configuration.", "type": "boolean" }, + { + "description": "A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.", + "items": { "$ref": "#/definitions/ShellScopeEntryAllowedArg" }, + "type": "array" + } + ], + "description": "A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration." + } + }, + "description": "Shell scope entry.", + "title": "ShellScopeEntry" + } + } +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json index f827fe1..34f0a61 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json @@ -21,9 +21,7 @@ { "description": "A list of capabilities.", "type": "object", - "required": [ - "capabilities" - ], + "required": ["capabilities"], "properties": { "capabilities": { "description": "The list of capabilities.", @@ -39,10 +37,7 @@ "Capability": { "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. 
`main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", "type": "object", - "required": [ - "identifier", - "permissions" - ], + "required": ["identifier", "permissions"], "properties": { "identifier": { "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", @@ -93,10 +88,7 @@ }, "platforms": { "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Target" } @@ -106,9 +98,7 @@ "CapabilityRemote": { "description": "Configuration for remote URLs that are associated with the capability.", "type": "object", - "required": [ - "urls" - ], + "required": ["urls"], "properties": { "urls": { "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", @@ -218,10 +208,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -244,10 +231,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + 
"required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -278,10 +262,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -304,10 +285,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -356,20 +334,14 @@ }, "allow": { "description": "Data that defines what is allowed by the scope.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } }, "deny": { "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } @@ -377,9 +349,7 @@ } } ], - "required": [ - "identifier" - ] + "required": ["identifier"] } ] }, @@ -1845,10 +1815,10 @@ "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." }, { - "description": "Enables the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:allow-set-visible-on-all-workspaces", - "markdownDescription": "Enables the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." 
}, { "description": "Enables the show command without any pre-configured scope.", @@ -2301,10 +2271,10 @@ "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." }, { - "description": "Denies the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:deny-set-visible-on-all-workspaces", - "markdownDescription": "Denies the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Denies the show command without any pre-configured scope.", @@ -2482,37 +2452,27 @@ { "description": "MacOS.", "type": "string", - "enum": [ - "macOS" - ] + "enum": ["macOS"] }, { "description": "Windows.", "type": "string", - "enum": [ - "windows" - ] + "enum": ["windows"] }, { "description": "Linux.", "type": "string", - "enum": [ - "linux" - ] + "enum": ["linux"] }, { "description": "Android.", "type": "string", - "enum": [ - "android" - ] + "enum": ["android"] }, { "description": "iOS.", "type": "string", - "enum": [ - "iOS" - ] + "enum": ["iOS"] } ] }, @@ -2526,9 +2486,7 @@ { "description": "A variable that is set while calling the command from the webview API.", "type": "object", - "required": [ - "validator" - ], + "required": ["validator"], "properties": { "raw": { "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. 
When using this option, make sure your regex is correct.", @@ -2561,4 +2519,4 @@ ] } } -} \ No newline at end of file +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json index f827fe1..34f0a61 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json @@ -21,9 +21,7 @@ { "description": "A list of capabilities.", "type": "object", - "required": [ - "capabilities" - ], + "required": ["capabilities"], "properties": { "capabilities": { "description": "The list of capabilities.", @@ -39,10 +37,7 @@ "Capability": { "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. 
A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", "type": "object", - "required": [ - "identifier", - "permissions" - ], + "required": ["identifier", "permissions"], "properties": { "identifier": { "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", @@ -93,10 +88,7 @@ }, "platforms": { "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Target" } @@ -106,9 +98,7 @@ "CapabilityRemote": { "description": "Configuration for remote URLs that are associated with the capability.", "type": "object", - "required": [ - "urls" - ], + "required": ["urls"], "properties": { "urls": { "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", @@ -218,10 +208,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -244,10 +231,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { 
"description": "The allowed arguments for the command execution.", @@ -278,10 +262,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -304,10 +285,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -356,20 +334,14 @@ }, "allow": { "description": "Data that defines what is allowed by the scope.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } }, "deny": { "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } @@ -377,9 +349,7 @@ } } ], - "required": [ - "identifier" - ] + "required": ["identifier"] } ] }, @@ -1845,10 +1815,10 @@ "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." }, { - "description": "Enables the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:allow-set-visible-on-all-workspaces", - "markdownDescription": "Enables the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Enables the show command without any pre-configured scope.", @@ -2301,10 +2271,10 @@ "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." 
}, { - "description": "Denies the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:deny-set-visible-on-all-workspaces", - "markdownDescription": "Denies the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Denies the show command without any pre-configured scope.", @@ -2482,37 +2452,27 @@ { "description": "MacOS.", "type": "string", - "enum": [ - "macOS" - ] + "enum": ["macOS"] }, { "description": "Windows.", "type": "string", - "enum": [ - "windows" - ] + "enum": ["windows"] }, { "description": "Linux.", "type": "string", - "enum": [ - "linux" - ] + "enum": ["linux"] }, { "description": "Android.", "type": "string", - "enum": [ - "android" - ] + "enum": ["android"] }, { "description": "iOS.", "type": "string", - "enum": [ - "iOS" - ] + "enum": ["iOS"] } ] }, @@ -2526,9 +2486,7 @@ { "description": "A variable that is set while calling the command from the webview API.", "type": "object", - "required": [ - "validator" - ], + "required": ["validator"], "properties": { "raw": { "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. 
When using this option, make sure your regex is correct.", @@ -2561,4 +2519,4 @@ ] } } -} \ No newline at end of file +} diff --git a/foundry/packages/frontend/src/app/router.tsx b/foundry/packages/frontend/src/app/router.tsx index 8ee0855..dd22724 100644 --- a/foundry/packages/frontend/src/app/router.tsx +++ b/foundry/packages/frontend/src/app/router.tsx @@ -1,6 +1,6 @@ import { type ReactNode, useEffect } from "react"; import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; import { Navigate, Outlet, createRootRoute, createRoute, createRouter } from "@tanstack/react-router"; import { MockLayout } from "../components/mock-layout"; import { @@ -11,8 +11,8 @@ import { MockOrganizationSettingsPage, MockSignInPage, } from "../components/mock-onboarding"; -import { defaultWorkspaceId, isMockFrontendClient } from "../lib/env"; -import { interestManager } from "../lib/interest"; +import { defaultOrganizationId, isMockFrontendClient } from "../lib/env"; +import { subscriptionManager } from "../lib/subscription"; import { activeMockOrganization, getMockOrganizationById, isAppSnapshotBootstrapping, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; const rootRoute = createRootRoute({ @@ -61,20 +61,20 @@ const organizationCheckoutRoute = createRoute({ component: OrganizationCheckoutRoute, }); -const workspaceRoute = createRoute({ +const organizationRoute = createRoute({ getParentRoute: () => rootRoute, - path: "/workspaces/$workspaceId", - component: WorkspaceLayoutRoute, + path: "/organizations/$organizationId", + component: OrganizationLayoutRoute, }); -const workspaceIndexRoute = createRoute({ - getParentRoute: () => workspaceRoute, +const organizationIndexRoute = createRoute({ + getParentRoute: () => organizationRoute, path: "/", - component: WorkspaceRoute, + component: OrganizationRoute, }); const taskRoute = 
createRoute({ - getParentRoute: () => workspaceRoute, + getParentRoute: () => organizationRoute, path: "tasks/$taskId", validateSearch: (search: Record) => ({ sessionId: typeof search.sessionId === "string" && search.sessionId.trim().length > 0 ? search.sessionId : undefined, @@ -83,7 +83,7 @@ const taskRoute = createRoute({ }); const repoRoute = createRoute({ - getParentRoute: () => workspaceRoute, + getParentRoute: () => organizationRoute, path: "repos/$repoId", component: RepoRoute, }); @@ -96,7 +96,7 @@ const routeTree = rootRoute.addChildren([ organizationSettingsRoute, organizationBillingRoute, organizationCheckoutRoute, - workspaceRoute.addChildren([workspaceIndexRoute, taskRoute, repoRoute]), + organizationRoute.addChildren([organizationIndexRoute, taskRoute, repoRoute]), ]); export const router = createRouter({ routeTree }); @@ -107,7 +107,7 @@ declare module "@tanstack/react-router" { } } -function WorkspaceLayoutRoute() { +function OrganizationLayoutRoute() { return ; } @@ -142,7 +142,7 @@ function IndexRoute() { const activeOrganization = activeMockOrganization(snapshot); if (activeOrganization) { - return ; + return ; } return ; @@ -238,54 +238,54 @@ function OrganizationCheckoutRoute() { return ; } -function WorkspaceRoute() { - const { workspaceId } = workspaceRoute.useParams(); +function OrganizationRoute() { + const { organizationId } = organizationRoute.useParams(); return ( - - - + + + ); } -function WorkspaceView({ - workspaceId, +function OrganizationView({ + organizationId, selectedTaskId, selectedSessionId, }: { - workspaceId: string; + organizationId: string; selectedTaskId: string | null; selectedSessionId: string | null; }) { - return ; + return ; } function TaskRoute() { - const { workspaceId, taskId } = taskRoute.useParams(); + const { organizationId, taskId } = taskRoute.useParams(); const { sessionId } = taskRoute.useSearch(); return ( - - - + + + ); } -function TaskView({ workspaceId, taskId, sessionId }: { workspaceId: string; 
taskId: string; sessionId: string | null }) { - return ; +function TaskView({ organizationId, taskId, sessionId }: { organizationId: string; taskId: string; sessionId: string | null }) { + return ; } function RepoRoute() { - const { workspaceId, repoId } = repoRoute.useParams(); + const { organizationId, repoId } = repoRoute.useParams(); return ( - - - + + + ); } -function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; children: ReactNode }) { +function AppOrganizationGate({ organizationId, children }: { organizationId: string; children: ReactNode }) { const client = useMockAppClient(); const snapshot = useMockAppSnapshot(); - const organization = snapshot.organizations.find((candidate) => candidate.workspaceId === workspaceId) ?? null; + const organization = snapshot.organizations.find((candidate) => candidate.organizationId === organizationId) ?? null; useEffect(() => { if (organization && snapshot.activeOrganizationId !== organization.id) { @@ -294,7 +294,7 @@ function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; chil }, [client, organization, snapshot.activeOrganizationId]); if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { - return ; + return ; } if (snapshot.auth.status === "signed_out") { @@ -308,13 +308,15 @@ function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; chil return <>{children}; } -function RepoRouteInner({ workspaceId, repoId }: { workspaceId: string; repoId: string }) { - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); - const activeTaskId = workspaceState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id; +function RepoRouteInner({ organizationId, repoId }: { organizationId: string; repoId: string }) { + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const activeTaskId = organizationState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id; if 
(!activeTaskId) { - return ; + return ; } - return ; + return ( + + ); } function RootLayout() { diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index 061eff1..56907ff 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -2,8 +2,9 @@ import { memo, useEffect, useMemo, useState } from "react"; import { useStyletron } from "baseui"; import { useFoundryTokens } from "../app/theme"; import { isMockFrontendClient } from "../lib/env"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import type { + FoundryAppSnapshot, FoundryOrganization, TaskStatus, TaskWorkbenchSnapshot, @@ -11,11 +12,12 @@ import type { WorkbenchSessionSummary, WorkbenchTaskStatus, } from "@sandbox-agent/foundry-shared"; -import type { DebugInterestTopic } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; +import type { DebugSubscriptionTopic } from "@sandbox-agent/foundry-client"; import { describeTaskState } from "../features/tasks/status"; interface DevPanelProps { - workspaceId: string; + organizationId: string; snapshot: TaskWorkbenchSnapshot; organization?: FoundryOrganization | null; focusedTask?: DevPanelFocusedTask | null; @@ -46,12 +48,12 @@ interface TopicInfo { lastRefresh: number | null; } -function topicLabel(topic: DebugInterestTopic): string { +function topicLabel(topic: DebugSubscriptionTopic): string { switch (topic.topicKey) { case "app": return "App"; - case "workspace": - return "Workspace"; + case "organization": + return "Organization"; case "task": return "Task"; case "session": @@ -62,7 +64,7 @@ function topicLabel(topic: DebugInterestTopic): string { } /** Extract the params portion of a cache key (everything after the first `:`) */ -function topicParams(topic: DebugInterestTopic): string { +function 
topicParams(topic: DebugSubscriptionTopic): string { const idx = topic.cacheKey.indexOf(":"); return idx >= 0 ? topic.cacheKey.slice(idx + 1) : ""; } @@ -133,7 +135,7 @@ function thinkingLabel(sinceMs: number | null, now: number): string | null { return `thinking ${elapsed}s`; } -export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organization, focusedTask }: DevPanelProps) { +export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organization, focusedTask }: DevPanelProps) { const [css] = useStyletron(); const t = useFoundryTokens(); const [now, setNow] = useState(Date.now()); @@ -145,7 +147,7 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza }, []); const topics = useMemo((): TopicInfo[] => { - return interestManager.listDebugTopics().map((topic) => ({ + return subscriptionManager.listDebugTopics().map((topic) => ({ label: topicLabel(topic), key: topic.cacheKey, params: topicParams(topic), @@ -156,12 +158,18 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza })); }, [now]); + const appState = useSubscription(subscriptionManager, "app", {}); + const appSnapshot: FoundryAppSnapshot | null = appState.data ?? null; + const repos = snapshot.repos ?? []; - const prCount = (snapshot.tasks ?? []).filter((task) => task.pullRequest != null).length; + const tasks = snapshot.tasks ?? []; + const prCount = tasks.filter((task) => task.pullRequest != null).length; const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); const lastWebhookAt = organization?.github.lastWebhookAt ?? null; const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; + const totalOrgs = appSnapshot?.organizations.length ?? 0; + const authStatus = appSnapshot?.auth.status ?? 
"unknown"; const mono = css({ fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace", @@ -218,8 +226,8 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza {/* Body */}
- {/* Interest Topics */} -
+ {/* Subscription Topics */} +
{topics.map((topic) => (
No active subscriptions}
+ {/* App State */} +
+
+
+ + Auth + {authStatus.replace(/_/g, " ")} +
+
+ + +
+
app topic: {appState.status}
+
+
+ {/* Snapshot Summary */} -
+
- + +
@@ -395,7 +428,7 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza {sandbox.sandboxId.slice(0, 16)} {isActive ? " *" : ""} - {sandbox.providerId} + {sandbox.sandboxProviderId}
{sandbox.cwd &&
cwd: {sandbox.cwd}
} @@ -408,8 +441,8 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza )} {/* GitHub */} - {organization && ( -
+
+ {organization ? (
- App + App Install {organization.github.installationStatus.replace(/_/g, " ")} @@ -438,6 +471,9 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza /> Sync {organization.github.syncStatus} + {organization.github.lastSyncAt != null && ( + {timeAgo(organization.github.lastSyncAt)} + )}
) : ( - never received + never received )}
- - + +
{organization.github.connectedAccount && (
@{organization.github.connectedAccount}
@@ -469,12 +505,14 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
last sync: {organization.github.lastSyncLabel}
)}
-
- )} + ) : ( + No organization data loaded + )} +
- {/* Workspace */} -
-
{workspaceId}
+ {/* Organization */} +
+
{organizationId}
{organization && (
org: {organization.settings.displayName} ({organization.kind}) diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index e0f6803..d922ce2 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -10,7 +10,7 @@ import { type WorkbenchTaskDetail, type WorkbenchTaskSummary, } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; import { CircleAlert, PanelLeft, PanelRight } from "lucide-react"; import { useFoundryTokens } from "../app/theme"; @@ -21,7 +21,7 @@ import { MessageList } from "./mock-layout/message-list"; import { PromptComposer } from "./mock-layout/prompt-composer"; import { RightSidebar } from "./mock-layout/right-sidebar"; import { Sidebar } from "./mock-layout/sidebar"; -import { TabStrip } from "./mock-layout/tab-strip"; +import { SessionStrip } from "./mock-layout/session-strip"; import { TerminalPane } from "./mock-layout/terminal-pane"; import { TranscriptHeader } from "./mock-layout/transcript-header"; import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell, SpinnerDot } from "./mock-layout/ui"; @@ -41,11 +41,11 @@ import { } from "./mock-layout/view-model"; import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app"; import { backendClient } from "../lib/backend"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; -function firstAgentTabId(task: Task): string | null { - return task.tabs[0]?.id ?? null; +function firstAgentSessionId(task: Task): string | null { + return task.sessions[0]?.id ?? 
null; } function sanitizeOpenDiffs(task: Task, paths: string[] | undefined): string[] { @@ -56,25 +56,25 @@ function sanitizeOpenDiffs(task: Task, paths: string[] | undefined): string[] { return paths.filter((path) => task.diffs[path] != null); } -function sanitizeLastAgentTabId(task: Task, tabId: string | null | undefined): string | null { - if (tabId && task.tabs.some((tab) => tab.id === tabId)) { - return tabId; +function sanitizeLastAgentSessionId(task: Task, sessionId: string | null | undefined): string | null { + if (sessionId && task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; } - return firstAgentTabId(task); + return firstAgentSessionId(task); } -function sanitizeActiveTabId(task: Task, tabId: string | null | undefined, openDiffs: string[], lastAgentTabId: string | null): string | null { - if (tabId) { - if (task.tabs.some((tab) => tab.id === tabId)) { - return tabId; +function sanitizeActiveSessionId(task: Task, sessionId: string | null | undefined, openDiffs: string[], lastAgentSessionId: string | null): string | null { + if (sessionId) { + if (task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; } - if (isDiffTab(tabId) && openDiffs.includes(diffPath(tabId))) { - return tabId; + if (isDiffTab(sessionId) && openDiffs.includes(diffPath(sessionId))) { + return sessionId; } } - return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentTabId; + return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentSessionId; } function githubInstallationWarningTitle(organization: FoundryOrganization): string { @@ -85,7 +85,7 @@ function githubInstallationWarningDetail(organization: FoundryOrganization): str const statusDetail = organization.github.lastSyncLabel.trim(); const requirementDetail = organization.github.installationStatus === "install_required" - ? "Webhooks are required for Foundry to function. 
Repo sync and PR updates will not work until the GitHub App is installed for this workspace." + ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this organization." : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; return statusDetail ? `${requirementDetail} ${statusDetail}.` : requirementDetail; } @@ -130,10 +130,10 @@ function GithubInstallationWarning({ ); } -function toLegacyTab( +function toSessionModel( summary: WorkbenchSessionSummary, - sessionDetail?: { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }, -): Task["tabs"][number] { + sessionDetail?: { draft: Task["sessions"][number]["draft"]; transcript: Task["sessions"][number]["transcript"] }, +): Task["sessions"][number] { return { id: summary.id, sessionId: summary.sessionId, @@ -154,10 +154,10 @@ function toLegacyTab( }; } -function toLegacyTask( +function toTaskModel( summary: WorkbenchTaskSummary, detail?: WorkbenchTaskDetail, - sessionCache?: Map, + sessionCache?: Map, ): Task { const sessions = detail?.sessionsSummary ?? summary.sessionsSummary; return { @@ -171,7 +171,7 @@ function toLegacyTask( updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, branch: detail?.branch ?? summary.branch, pullRequest: detail?.pullRequest ?? summary.pullRequest, - tabs: sessions.map((session) => toLegacyTab(session, sessionCache?.get(session.id))), + sessions: sessions.map((session) => toSessionModel(session, sessionCache?.get(session.id))), fileChanges: detail?.fileChanges ?? [], diffs: detail?.diffs ?? {}, fileTree: detail?.fileTree ?? 
[], @@ -190,7 +190,7 @@ function isOpenPrTaskId(taskId: string): boolean { return taskId.startsWith(OPEN_PR_TASK_PREFIX); } -function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { +function toOpenPrTaskModel(pullRequest: WorkbenchOpenPrSummary): Task { return { id: openPrTaskId(pullRequest.prId), repoId: pullRequest.repoId, @@ -205,7 +205,7 @@ function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { number: pullRequest.number, status: pullRequest.isDraft ? "draft" : "ready", }, - tabs: [], + sessions: [], fileChanges: [], diffs: {}, fileTree: [], @@ -214,7 +214,7 @@ function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { }; } -function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null { +function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): string | null { if (!tab) { return null; } @@ -230,7 +230,7 @@ function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): stri return null; } -function groupProjects(repos: Array<{ id: string; label: string }>, tasks: Task[]) { +function groupRepositories(repos: Array<{ id: string; label: string }>, tasks: Task[]) { return repos .map((repo) => ({ id: repo.id, @@ -249,21 +249,21 @@ interface WorkbenchActions { branch?: string; onBranch?: string; model?: ModelId; - }): Promise<{ taskId: string; tabId?: string }>; + }): Promise<{ taskId: string; sessionId?: string }>; markTaskUnread(input: { taskId: string }): Promise; renameTask(input: { taskId: string; value: string }): Promise; renameBranch(input: { taskId: string; value: string }): Promise; archiveTask(input: { taskId: string }): Promise; publishPr(input: { taskId: string }): Promise; revertFile(input: { taskId: string; path: string }): Promise; - updateDraft(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): Promise; - sendMessage(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): 
Promise; - stopAgent(input: { taskId: string; tabId: string }): Promise; - setSessionUnread(input: { taskId: string; tabId: string; unread: boolean }): Promise; - renameSession(input: { taskId: string; tabId: string; title: string }): Promise; - closeTab(input: { taskId: string; tabId: string }): Promise; - addTab(input: { taskId: string; model?: string }): Promise<{ tabId: string }>; - changeModel(input: { taskId: string; tabId: string; model: ModelId }): Promise; + updateDraft(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + sendMessage(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + stopAgent(input: { taskId: string; sessionId: string }): Promise; + setSessionUnread(input: { taskId: string; sessionId: string; unread: boolean }): Promise; + renameSession(input: { taskId: string; sessionId: string; title: string }): Promise; + closeSession(input: { taskId: string; sessionId: string }): Promise; + addSession(input: { taskId: string; model?: string }): Promise<{ sessionId: string }>; + changeModel(input: { taskId: string; sessionId: string; model: ModelId }): Promise; reloadGithubOrganization(): Promise; reloadGithubPullRequests(): Promise; reloadGithubRepository(repoId: string): Promise; @@ -274,12 +274,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ taskWorkbenchClient, task, hasSandbox, - activeTabId, - lastAgentTabId, + activeSessionId, + lastAgentSessionId, openDiffs, onSyncRouteSession, - onSetActiveTabId, - onSetLastAgentTabId, + onSetActiveSessionId, + onSetLastAgentSessionId, onSetOpenDiffs, sidebarCollapsed, onToggleSidebar, @@ -293,12 +293,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ taskWorkbenchClient: WorkbenchActions; task: Task; hasSandbox: boolean; - activeTabId: string | null; - lastAgentTabId: string | null; + activeSessionId: string | null; + lastAgentSessionId: string | null; openDiffs: string[]; 
onSyncRouteSession: (taskId: string, sessionId: string | null, replace?: boolean) => void; - onSetActiveTabId: (tabId: string | null) => void; - onSetLastAgentTabId: (tabId: string | null) => void; + onSetActiveSessionId: (sessionId: string | null) => void; + onSetLastAgentSessionId: (sessionId: string | null) => void; onSetOpenDiffs: (paths: string[]) => void; sidebarCollapsed?: boolean; onToggleSidebar?: () => void; @@ -313,37 +313,38 @@ const TranscriptPanel = memo(function TranscriptPanel({ const [defaultModel, setDefaultModel] = useState("claude-sonnet-4"); const [editingField, setEditingField] = useState<"title" | "branch" | null>(null); const [editValue, setEditValue] = useState(""); - const [editingSessionTabId, setEditingSessionTabId] = useState(null); + const [editingSessionId, setEditingSessionId] = useState(null); const [editingSessionName, setEditingSessionName] = useState(""); - const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; tabId: string } | null>(null); + const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; sessionId: string } | null>(null); const [copiedMessageId, setCopiedMessageId] = useState(null); const [timerNowMs, setTimerNowMs] = useState(() => Date.now()); const [localDraft, setLocalDraft] = useState(""); const [localAttachments, setLocalAttachments] = useState([]); + const [pendingMessage, setPendingMessage] = useState<{ text: string; sessionId: string; sentAt: number } | null>(null); const lastEditTimeRef = useRef(0); const throttleTimerRef = useRef | null>(null); const pendingDraftRef = useRef<{ text: string; attachments: LineAttachment[] } | null>(null); const scrollRef = useRef(null); const textareaRef = useRef(null); const messageRefs = useRef(new Map()); - const activeDiff = activeTabId && isDiffTab(activeTabId) ? diffPath(activeTabId) : null; - const activeAgentTab = activeDiff ? null : (task.tabs.find((candidate) => candidate.id === activeTabId) ?? 
task.tabs[0] ?? null); - const promptTab = task.tabs.find((candidate) => candidate.id === lastAgentTabId) ?? task.tabs[0] ?? null; + const activeDiff = activeSessionId && isDiffTab(activeSessionId) ? diffPath(activeSessionId) : null; + const activeAgentSession = activeDiff ? null : (task.sessions.find((candidate) => candidate.id === activeSessionId) ?? task.sessions[0] ?? null); + const promptSession = task.sessions.find((candidate) => candidate.id === lastAgentSessionId) ?? task.sessions[0] ?? null; const isTerminal = task.status === "archived"; - const historyEvents = useMemo(() => buildHistoryEvents(task.tabs), [task.tabs]); - const activeMessages = useMemo(() => buildDisplayMessages(activeAgentTab), [activeAgentTab]); + const historyEvents = useMemo(() => buildHistoryEvents(task.sessions), [task.sessions]); + const activeMessages = useMemo(() => buildDisplayMessages(activeAgentSession), [activeAgentSession]); const taskRuntimeStatus = task.runtimeStatus ?? task.status; const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? null); const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus); const taskProvisioningMessage = taskState.detail; - const activeSessionMessage = sessionStateMessage(activeAgentTab); + const activeSessionMessage = sessionStateMessage(activeAgentSession); const showPendingSessionState = !activeDiff && - !!activeAgentTab && - (activeAgentTab.status === "pending_provision" || activeAgentTab.status === "pending_session_create" || activeAgentTab.status === "error") && + !!activeAgentSession && + (activeAgentSession.status === "pending_provision" || activeAgentSession.status === "pending_session_create" || activeAgentSession.status === "error") && activeMessages.length === 0; - const serverDraft = promptTab?.draft.text ?? ""; - const serverAttachments = promptTab?.draft.attachments ?? []; + const serverDraft = promptSession?.draft.text ?? ""; + const serverAttachments = promptSession?.draft.attachments ?? 
[]; // Sync server → local only when user hasn't typed recently (3s cooldown) const DRAFT_SYNC_COOLDOWN_MS = 3_000; @@ -354,12 +355,26 @@ const TranscriptPanel = memo(function TranscriptPanel({ } }, [serverDraft, serverAttachments]); - // Reset local draft immediately on tab/task switch + // Reset local draft immediately on session/task switch useEffect(() => { lastEditTimeRef.current = 0; - setLocalDraft(promptTab?.draft.text ?? ""); - setLocalAttachments(promptTab?.draft.attachments ?? []); - }, [promptTab?.id, task.id]); + setLocalDraft(promptSession?.draft.text ?? ""); + setLocalAttachments(promptSession?.draft.attachments ?? []); + }, [promptSession?.id, task.id]); + + // Clear pending message once the real transcript contains a client message newer than when we sent + const pendingMessageClientCount = useRef(0); + useEffect(() => { + if (!pendingMessage) return; + + const targetSession = task.sessions.find((s) => s.id === pendingMessage.sessionId); + if (!targetSession) return; + + const clientEventCount = targetSession.transcript.filter((event) => event.sender === "client").length; + if (clientEventCount > pendingMessageClientCount.current) { + setPendingMessage(null); + } + }, [task.sessions, pendingMessage]); const draft = localDraft; const attachments = localAttachments; @@ -372,10 +387,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ useEffect(() => { textareaRef.current?.focus(); - }, [activeTabId, task.id]); + }, [activeSessionId, task.id]); useEffect(() => { - setEditingSessionTabId(null); + setEditingSessionId(null); setEditingSessionName(""); }, [task.id]); @@ -389,7 +404,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ const nextHeight = Math.min(textarea.scrollHeight, PROMPT_TEXTAREA_MAX_HEIGHT); textarea.style.height = `${Math.max(PROMPT_TEXTAREA_MIN_HEIGHT, nextHeight)}px`; textarea.style.overflowY = textarea.scrollHeight > PROMPT_TEXTAREA_MAX_HEIGHT ? 
"auto" : "hidden"; - }, [draft, activeTabId, task.id]); + }, [draft, activeSessionId, task.id]); useEffect(() => { if (!copiedMessageId) { @@ -404,7 +419,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, [copiedMessageId]); useEffect(() => { - if (!activeAgentTab || activeAgentTab.status !== "running" || activeAgentTab.thinkingSinceMs === null) { + if (!activeAgentSession || activeAgentSession.status !== "running" || activeAgentSession.thinkingSinceMs === null) { return; } @@ -414,19 +429,19 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, 1_000); return () => window.clearInterval(timer); - }, [activeAgentTab?.id, activeAgentTab?.status, activeAgentTab?.thinkingSinceMs]); + }, [activeAgentSession?.id, activeAgentSession?.status, activeAgentSession?.thinkingSinceMs]); useEffect(() => { - if (!activeAgentTab?.unread) { + if (!activeAgentSession?.unread) { return; } void taskWorkbenchClient.setSessionUnread({ taskId: task.id, - tabId: activeAgentTab.id, + sessionId: activeAgentSession.id, unread: false, }); - }, [activeAgentTab?.id, activeAgentTab?.unread, task.id]); + }, [activeAgentSession?.id, activeAgentSession?.unread, task.id]); const startEditingField = useCallback((field: "title" | "branch", value: string) => { setEditingField(field); @@ -458,10 +473,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ const DRAFT_THROTTLE_MS = 500; const flushDraft = useCallback( - (text: string, nextAttachments: LineAttachment[], tabId: string) => { + (text: string, nextAttachments: LineAttachment[], sessionId: string) => { void taskWorkbenchClient.updateDraft({ taskId: task.id, - tabId, + sessionId, text, attachments: nextAttachments, }); @@ -480,7 +495,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ const updateDraft = useCallback( (nextText: string, nextAttachments: LineAttachment[]) => { - if (!promptTab) { + if (!promptSession) { return; } @@ -495,162 +510,172 @@ const TranscriptPanel = memo(function TranscriptPanel({ 
throttleTimerRef.current = setTimeout(() => { throttleTimerRef.current = null; if (pendingDraftRef.current) { - flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptTab.id); + flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptSession.id); pendingDraftRef.current = null; } }, DRAFT_THROTTLE_MS); } }, - [promptTab, flushDraft], + [promptSession, flushDraft], ); const sendMessage = useCallback(() => { const text = draft.trim(); - if (!text || !promptTab) { + if (!text || !promptSession) { return; } - onSetActiveTabId(promptTab.id); - onSetLastAgentTabId(promptTab.id); + // Clear draft and show optimistic message immediately (don't wait for server round-trip) + setLocalDraft(""); + setLocalAttachments([]); + lastEditTimeRef.current = Date.now(); + // Snapshot current client message count so we can detect when the server adds ours + pendingMessageClientCount.current = promptSession.transcript.filter((event) => event.sender === "client").length; + setPendingMessage({ text, sessionId: promptSession.id, sentAt: Date.now() }); + + onSetActiveSessionId(promptSession.id); + onSetLastAgentSessionId(promptSession.id); void taskWorkbenchClient.sendMessage({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, text, attachments, }); - }, [attachments, draft, task.id, onSetActiveTabId, onSetLastAgentTabId, promptTab]); + }, [attachments, draft, task.id, onSetActiveSessionId, onSetLastAgentSessionId, promptSession]); const stopAgent = useCallback(() => { - if (!promptTab) { + if (!promptSession) { return; } void taskWorkbenchClient.stopAgent({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, }); - }, [task.id, promptTab]); + }, [task.id, promptSession]); - const switchTab = useCallback( - (tabId: string) => { - onSetActiveTabId(tabId); + const switchSession = useCallback( + (sessionId: string) => { + onSetActiveSessionId(sessionId); - if (!isDiffTab(tabId)) { - 
onSetLastAgentTabId(tabId); - const tab = task.tabs.find((candidate) => candidate.id === tabId); - if (tab?.unread) { + if (!isDiffTab(sessionId)) { + onSetLastAgentSessionId(sessionId); + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (session?.unread) { void taskWorkbenchClient.setSessionUnread({ taskId: task.id, - tabId, + sessionId, unread: false, }); } - onSyncRouteSession(task.id, tabId); + onSyncRouteSession(task.id, sessionId); } }, - [task.id, task.tabs, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession], + [task.id, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); - const setTabUnread = useCallback( - (tabId: string, unread: boolean) => { - void taskWorkbenchClient.setSessionUnread({ taskId: task.id, tabId, unread }); + const setSessionUnread = useCallback( + (sessionId: string, unread: boolean) => { + void taskWorkbenchClient.setSessionUnread({ taskId: task.id, sessionId, unread }); }, [task.id], ); - const startRenamingTab = useCallback( - (tabId: string) => { - const targetTab = task.tabs.find((candidate) => candidate.id === tabId); - if (!targetTab) { - throw new Error(`Unable to rename missing session tab ${tabId}`); + const startRenamingSession = useCallback( + (sessionId: string) => { + const targetSession = task.sessions.find((candidate) => candidate.id === sessionId); + if (!targetSession) { + throw new Error(`Unable to rename missing session ${sessionId}`); } - setEditingSessionTabId(tabId); - setEditingSessionName(targetTab.sessionName); + setEditingSessionId(sessionId); + setEditingSessionName(targetSession.sessionName); }, - [task.tabs], + [task.sessions], ); - const cancelTabRename = useCallback(() => { - setEditingSessionTabId(null); + const cancelSessionRename = useCallback(() => { + setEditingSessionId(null); setEditingSessionName(""); }, []); - const commitTabRename = useCallback(() => { - if (!editingSessionTabId) { + const commitSessionRename = 
useCallback(() => { + if (!editingSessionId) { return; } const trimmedName = editingSessionName.trim(); if (!trimmedName) { - cancelTabRename(); + cancelSessionRename(); return; } void taskWorkbenchClient.renameSession({ taskId: task.id, - tabId: editingSessionTabId, + sessionId: editingSessionId, title: trimmedName, }); - cancelTabRename(); - }, [cancelTabRename, editingSessionName, editingSessionTabId, task.id]); + cancelSessionRename(); + }, [cancelSessionRename, editingSessionName, editingSessionId, task.id]); - const closeTab = useCallback( - (tabId: string) => { - const remainingTabs = task.tabs.filter((candidate) => candidate.id !== tabId); - const nextTabId = remainingTabs[0]?.id ?? null; + const closeSession = useCallback( + (sessionId: string) => { + const remainingSessions = task.sessions.filter((candidate) => candidate.id !== sessionId); + const nextSessionId = remainingSessions[0]?.id ?? null; - if (activeTabId === tabId) { - onSetActiveTabId(nextTabId); + if (activeSessionId === sessionId) { + onSetActiveSessionId(nextSessionId); } - if (lastAgentTabId === tabId) { - onSetLastAgentTabId(nextTabId); + if (lastAgentSessionId === sessionId) { + onSetLastAgentSessionId(nextSessionId); } - onSyncRouteSession(task.id, nextTabId); - void taskWorkbenchClient.closeTab({ taskId: task.id, tabId }); + onSyncRouteSession(task.id, nextSessionId); + void taskWorkbenchClient.closeSession({ taskId: task.id, sessionId }); }, - [activeTabId, task.id, task.tabs, lastAgentTabId, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession], + [activeSessionId, task.id, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const closeDiffTab = useCallback( (path: string) => { const nextOpenDiffs = openDiffs.filter((candidate) => candidate !== path); onSetOpenDiffs(nextOpenDiffs); - if (activeTabId === diffTabId(path)) { - onSetActiveTabId(nextOpenDiffs.length > 0 ? diffTabId(nextOpenDiffs[nextOpenDiffs.length - 1]!) 
: (lastAgentTabId ?? firstAgentTabId(task))); + if (activeSessionId === diffTabId(path)) { + onSetActiveSessionId( + nextOpenDiffs.length > 0 ? diffTabId(nextOpenDiffs[nextOpenDiffs.length - 1]!) : (lastAgentSessionId ?? firstAgentSessionId(task)), + ); } }, - [activeTabId, task, lastAgentTabId, onSetActiveTabId, onSetOpenDiffs, openDiffs], + [activeSessionId, task, lastAgentSessionId, onSetActiveSessionId, onSetOpenDiffs, openDiffs], ); - const addTab = useCallback(() => { + const addSession = useCallback(() => { void (async () => { - const { tabId } = await taskWorkbenchClient.addTab({ taskId: task.id }); - onSetLastAgentTabId(tabId); - onSetActiveTabId(tabId); - onSyncRouteSession(task.id, tabId); + const { sessionId } = await taskWorkbenchClient.addSession({ taskId: task.id }); + onSetLastAgentSessionId(sessionId); + onSetActiveSessionId(sessionId); + onSyncRouteSession(task.id, sessionId); })(); - }, [task.id, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession]); + }, [task.id, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); const changeModel = useCallback( (model: ModelId) => { - if (!promptTab) { - throw new Error(`Unable to change model for task ${task.id} without an active prompt tab`); + if (!promptSession) { + throw new Error(`Unable to change model for task ${task.id} without an active prompt session`); } void taskWorkbenchClient.changeModel({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, model, }); }, - [task.id, promptTab], + [task.id, promptSession], ); const addAttachment = useCallback( (filePath: string, lineNumber: number, lineContent: string) => { - if (!promptTab) { + if (!promptSession) { return; } @@ -661,7 +686,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ updateDraft(draft, [...attachments, nextAttachment]); }, - [attachments, draft, promptTab, updateDraft], + [attachments, draft, promptSession, updateDraft], ); const removeAttachment = useCallback( @@ -676,13 +701,13 @@ 
const TranscriptPanel = memo(function TranscriptPanel({ const jumpToHistoryEvent = useCallback( (event: HistoryEvent) => { - setPendingHistoryTarget({ messageId: event.messageId, tabId: event.tabId }); + setPendingHistoryTarget({ messageId: event.messageId, sessionId: event.sessionId }); - if (activeTabId !== event.tabId) { - switchTab(event.tabId); + if (activeSessionId !== event.sessionId) { + switchSession(event.sessionId); } }, - [activeTabId, switchTab], + [activeSessionId, switchSession], ); const copyMessage = useCallback(async (message: Message) => { @@ -704,26 +729,29 @@ const TranscriptPanel = memo(function TranscriptPanel({ } }, []); + const isOptimisticThinking = pendingMessage !== null && activeAgentSession?.id === pendingMessage.sessionId; const thinkingTimerLabel = - activeAgentTab?.status === "running" && activeAgentTab.thinkingSinceMs !== null - ? formatThinkingDuration(timerNowMs - activeAgentTab.thinkingSinceMs) - : null; + activeAgentSession?.status === "running" && activeAgentSession.thinkingSinceMs !== null + ? formatThinkingDuration(timerNowMs - activeAgentSession.thinkingSinceMs) + : isOptimisticThinking + ? formatThinkingDuration(timerNowMs - pendingMessage.sentAt) + : null; return ( { - if (activeAgentTab) { - setTabUnread(activeAgentTab.id, unread); + onSetActiveSessionUnread={(unread) => { + if (activeAgentSession) { + setSessionUnread(activeAgentSession.id, unread); } }} sidebarCollapsed={sidebarCollapsed} @@ -749,21 +777,21 @@ const TranscriptPanel = memo(function TranscriptPanel({ border: `1px solid ${t.borderDefault}`, }} > - {activeDiff ? ( @@ -773,7 +801,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ diff={task.diffs[activeDiff]} onAddAttachment={addAttachment} /> - ) : task.tabs.length === 0 ? ( + ) : task.sessions.length === 0 ? (
Sessions are where you chat with the agent. Start one now to send the first prompt on this task.

@@ -560,127 +664,230 @@ export const Sidebar = memo(function Sidebar({ ); } - const { project, task } = item; - const isActive = task.id === activeId; - const isPullRequestItem = isPullRequestSidebarItem(task); - const isRunning = task.tabs.some((tab) => tab.status === "running"); - const isProvisioning = - !isPullRequestItem && - (String(task.status).startsWith("init_") || - task.status === "new" || - task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create")); - const hasUnread = task.tabs.some((tab) => tab.unread); - const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; - const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); - const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); - const hasDiffs = totalAdded > 0 || totalRemoved > 0; + if (item.type === "task") { + const { repository, task, taskIndex } = item; + const isActive = task.id === activeId; + const isPullRequestItem = isPullRequestSidebarItem(task); + const isRunning = task.sessions.some((s) => s.status === "running"); + const isProvisioning = + !isPullRequestItem && + (String(task.status).startsWith("init_") || + task.status === "new" || + task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create")); + const hasUnread = task.sessions.some((s) => s.unread); + const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; + const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); + const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); + const hasDiffs = totalAdded > 0 || totalRemoved > 0; + const isTaskDropTarget = + drag?.type === "task" && drag.repositoryId === repository.id && drag.overIdx === taskIndex && drag.fromIdx !== taskIndex; + const isTaskBeingDragged = drag?.type === "task" && drag.repositoryId === repository.id && drag.fromIdx === taskIndex && 
didDragRef.current; - return ( -
{ - if (node) { - virtualizer.measureElement(node); - } - }} - style={{ - left: 0, - position: "absolute", - top: 0, - transform: `translateY(${virtualItem.start}px)`, - width: "100%", - }} - > -
-
onSelect(task.id)} - onContextMenu={(event) => { - if (isPullRequestItem && task.pullRequest) { + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + opacity: isTaskBeingDragged ? 0.4 : 1, + transition: "opacity 150ms ease", + }} + onMouseDown={(event) => { + if (event.button !== 0) return; + if (dragRef.current) return; + event.stopPropagation(); + startYRef.current = event.clientY; + didDragRef.current = false; + const state: DragState = { type: "task", repositoryId: repository.id, fromIdx: taskIndex, overIdx: null }; + dragRef.current = state; + setDrag(state); + }} + > + {isTaskDropTarget ? ( +
+ ) : null} +
+
onSelect(task.id)} + onContextMenu={(event) => { + if (isPullRequestItem && task.pullRequest) { + contextMenu.open(event, [ + { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, + { label: "Create task", onClick: () => onSelect(task.id) }, + ]); + return; + } contextMenu.open(event, [ - { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, - { label: "Create task", onClick: () => onSelect(task.id) }, + { label: "Rename task", onClick: () => onRenameTask(task.id) }, + { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, + { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, ]); - return; - } - contextMenu.open(event, [ - { label: "Rename task", onClick: () => onRenameTask(task.id) }, - { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, - { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]); - }} - className={css({ - padding: "8px 12px", - borderRadius: "8px", - backgroundColor: isActive ? t.interactiveHover : "transparent", - cursor: "pointer", - transition: "all 150ms ease", - ":hover": { - backgroundColor: t.interactiveHover, - }, - })} - > -
-
- {isPullRequestItem ? ( - - ) : ( - - )} -
-
- +
+
- {task.title} - - {isPullRequestItem && task.statusMessage ? ( - - {task.statusMessage} - - ) : null} -
- {task.pullRequest != null ? ( - - - #{task.pullRequest.number} - - {task.pullRequest.status === "draft" ? : null} - - ) : ( - - )} - {hasDiffs ? ( -
- +{totalAdded} - -{totalRemoved} + {isPullRequestItem ? ( + + ) : ( + + )}
- ) : null} - - {formatRelativeAge(task.updatedAtMs)} - +
+ + {task.title} + + {isPullRequestItem && task.statusMessage ? ( + + {task.statusMessage} + + ) : null} +
+ {task.pullRequest != null ? ( + + + #{task.pullRequest.number} + + {task.pullRequest.status === "draft" ? : null} + + ) : ( + + )} + {hasDiffs ? ( +
+ +{totalAdded} + -{totalRemoved} +
+ ) : null} + + {formatRelativeAge(task.updatedAtMs)} + +
-
- ); + ); + } + + if (item.type === "task-drop-zone") { + const { repository, taskCount } = item; + const isDropTarget = + drag?.type === "task" && + drag.repositoryId === repository.id && + drag.overIdx === taskCount && + drag.fromIdx !== taskCount; + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + className={css({ + minHeight: "4px", + position: "relative", + "::before": { + content: '""', + position: "absolute", + top: 0, + left: 0, + right: 0, + height: "2px", + backgroundColor: isDropTarget ? t.textPrimary : "transparent", + transition: "background-color 100ms ease", + }, + })} + /> + ); + } + + if (item.type === "repository-drop-zone") { + const isDropTarget = + drag?.type === "repository" && drag.overIdx === item.repositoryCount && drag.fromIdx !== item.repositoryCount; + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + className={css({ + minHeight: "4px", + position: "relative", + "::before": { + content: '""', + position: "absolute", + top: 0, + left: 0, + right: 0, + height: "2px", + backgroundColor: isDropTarget ? t.textPrimary : "transparent", + transition: "background-color 100ms ease", + }, + })} + /> + ); + } + + return null; })}
@@ -717,19 +924,19 @@ function SidebarFooter() { const snapshot = useMockAppSnapshot(); const organization = activeMockOrganization(snapshot); const [open, setOpen] = useState(false); - const [workspaceFlyoutOpen, setWorkspaceFlyoutOpen] = useState(false); + const [organizationFlyoutOpen, setOrganizationFlyoutOpen] = useState(false); const containerRef = useRef(null); const flyoutTimerRef = useRef | null>(null); - const workspaceTriggerRef = useRef(null); + const organizationTriggerRef = useRef(null); const flyoutRef = useRef(null); const [flyoutPos, setFlyoutPos] = useState<{ top: number; left: number } | null>(null); useLayoutEffect(() => { - if (workspaceFlyoutOpen && workspaceTriggerRef.current) { - const rect = workspaceTriggerRef.current.getBoundingClientRect(); + if (organizationFlyoutOpen && organizationTriggerRef.current) { + const rect = organizationTriggerRef.current.getBoundingClientRect(); setFlyoutPos({ top: rect.top, left: rect.right + 4 }); } - }, [workspaceFlyoutOpen]); + }, [organizationFlyoutOpen]); useEffect(() => { if (!open) return; @@ -739,7 +946,7 @@ function SidebarFooter() { const inFlyout = flyoutRef.current?.contains(target); if (!inContainer && !inFlyout) { setOpen(false); - setWorkspaceFlyoutOpen(false); + setOrganizationFlyoutOpen(false); } } document.addEventListener("mousedown", handleClick); @@ -749,10 +956,10 @@ function SidebarFooter() { const switchToOrg = useCallback( (org: (typeof snapshot.organizations)[number]) => { setOpen(false); - setWorkspaceFlyoutOpen(false); + setOrganizationFlyoutOpen(false); void (async () => { await client.selectOrganization(org.id); - await navigate({ to: `/workspaces/${org.workspaceId}` as never }); + await navigate({ to: `/organizations/${org.organizationId}` as never }); })(); }, [client, navigate], @@ -760,11 +967,11 @@ function SidebarFooter() { const openFlyout = useCallback(() => { if (flyoutTimerRef.current) clearTimeout(flyoutTimerRef.current); - setWorkspaceFlyoutOpen(true); + 
setOrganizationFlyoutOpen(true); }, []); const closeFlyout = useCallback(() => { - flyoutTimerRef.current = setTimeout(() => setWorkspaceFlyoutOpen(false), 150); + flyoutTimerRef.current = setTimeout(() => setOrganizationFlyoutOpen(false), 150); }, []); const menuItems: Array<{ icon: React.ReactNode; label: string; danger?: boolean; onClick: () => void }> = []; @@ -838,14 +1045,14 @@ function SidebarFooter() { })} >
- {/* Workspace flyout trigger */} + {/* Organization flyout trigger */} {organization ? ( -
+
) : null} - {/* Workspace flyout portal */} - {workspaceFlyoutOpen && organization && flyoutPos + {/* Organization flyout portal */} + {organizationFlyoutOpen && organization && flyoutPos ? createPortal(
{ setOpen((prev) => { - if (prev) setWorkspaceFlyoutOpen(false); + if (prev) setOrganizationFlyoutOpen(false); return !prev; }); }} diff --git a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx index ca9326a..95e6876 100644 --- a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx @@ -1,4 +1,4 @@ -import { type SandboxProcessRecord, useInterest } from "@sandbox-agent/foundry-client"; +import { type SandboxProcessRecord, useSubscription } from "@sandbox-agent/foundry-client"; import { ProcessTerminal } from "@sandbox-agent/react"; import { useQuery } from "@tanstack/react-query"; import { useStyletron } from "baseui"; @@ -7,10 +7,10 @@ import { ChevronDown, ChevronUp, Plus, SquareTerminal, Trash2 } from "lucide-rea import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { SandboxAgent } from "sandbox-agent"; import { backendClient } from "../../lib/backend"; -import { interestManager } from "../../lib/interest"; +import { subscriptionManager } from "../../lib/subscription"; interface TerminalPaneProps { - workspaceId: string; + organizationId: string; taskId: string | null; isExpanded?: boolean; onExpand?: () => void; @@ -95,10 +95,10 @@ function HeaderIconButton({ ); } -export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onCollapse, onStartResize }: TerminalPaneProps) { +export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onCollapse, onStartResize }: TerminalPaneProps) { const [css] = useStyletron(); const t = useFoundryTokens(); - const [activeTabId, setActiveTabId] = useState(null); + const [activeSessionId, setActiveTabId] = useState(null); const [processTabs, setProcessTabs] = useState([]); const [creatingProcess, setCreatingProcess] = useState(false); const [hoveredTabId, setHoveredTabId] = 
useState(null); @@ -184,17 +184,17 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl [listWidth], ); - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); const taskSummary = useMemo( - () => (taskId ? (workspaceState.data?.taskSummaries.find((task) => task.id === taskId) ?? null) : null), - [taskId, workspaceState.data?.taskSummaries], + () => (taskId ? (organizationState.data?.taskSummaries.find((task) => task.id === taskId) ?? null) : null), + [taskId, organizationState.data?.taskSummaries], ); - const taskState = useInterest( - interestManager, + const taskState = useSubscription( + subscriptionManager, "task", taskSummary ? { - workspaceId, + organizationId, repoId: taskSummary.repoId, taskId: taskSummary.id, } @@ -211,7 +211,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl }, [taskState.data]); const connectionQuery = useQuery({ - queryKey: ["mock-layout", "sandbox-agent-connection", workspaceId, activeSandbox?.providerId ?? "", activeSandbox?.sandboxId ?? ""], + queryKey: ["mock-layout", "sandbox-agent-connection", organizationId, activeSandbox?.sandboxProviderId ?? "", activeSandbox?.sandboxId ?? 
""], enabled: Boolean(activeSandbox?.sandboxId), staleTime: 30_000, refetchOnWindowFocus: false, @@ -220,17 +220,17 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl throw new Error("Cannot load a sandbox connection without an active sandbox."); } - return await backendClient.getSandboxAgentConnection(workspaceId, activeSandbox.providerId, activeSandbox.sandboxId); + return await backendClient.getSandboxAgentConnection(organizationId, activeSandbox.sandboxProviderId, activeSandbox.sandboxId); }, }); - const processesState = useInterest( - interestManager, + const processesState = useSubscription( + subscriptionManager, "sandboxProcesses", activeSandbox ? { - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, } : null, @@ -325,11 +325,11 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl }); }, []); - const closeTerminalTab = useCallback((tabId: string) => { + const closeTerminalTab = useCallback((sessionId: string) => { setProcessTabs((current) => { - const next = current.filter((tab) => tab.id !== tabId); + const next = current.filter((tab) => tab.id !== sessionId); setActiveTabId((currentActive) => { - if (currentActive === tabId) { + if (currentActive === sessionId) { return next.length > 0 ? 
next[next.length - 1]!.id : null; } return currentActive; @@ -346,8 +346,8 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl setCreatingProcess(true); try { const created = await backendClient.createSandboxProcess({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, request: defaultShellRequest(activeSandbox.cwd), }); @@ -355,10 +355,10 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl } finally { setCreatingProcess(false); } - }, [activeSandbox, openTerminalTab, workspaceId]); + }, [activeSandbox, openTerminalTab, organizationId]); const processTabsById = useMemo(() => new Map(processTabs.map((tab) => [tab.id, tab])), [processTabs]); - const activeProcessTab = activeTabId ? (processTabsById.get(activeTabId) ?? null) : null; + const activeProcessTab = activeSessionId ? (processTabsById.get(activeSessionId) ?? null) : null; const activeTerminalProcess = useMemo( () => (activeProcessTab ? (processes.find((process) => process.id === activeProcessTab.processId) ?? 
null) : null), [activeProcessTab, processes], @@ -571,9 +571,9 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl css={css} t={t} label="Kill terminal" - disabled={!activeTabId} + disabled={!activeSessionId} onClick={() => { - if (activeTabId) closeTerminalTab(activeTabId); + if (activeSessionId) closeTerminalTab(activeSessionId); }} > @@ -622,7 +622,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl })} > {processTabs.map((tab, tabIndex) => { - const isActive = activeTabId === tab.id; + const isActive = activeSessionId === tab.id; const isHovered = hoveredTabId === tab.id; const isDropTarget = tabDrag !== null && tabDrag.overIdx === tabIndex && tabDrag.fromIdx !== tabIndex; const isBeingDragged = tabDrag !== null && tabDrag.fromIdx === tabIndex && didTabDrag.current; diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx index 808c4a6..a024871 100644 --- a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -6,19 +6,19 @@ import { Clock, PanelLeft, PanelRight } from "lucide-react"; import { useFoundryTokens } from "../../app/theme"; import { deriveHeaderStatus } from "../../features/tasks/status"; import { HeaderStatusPill, PanelHeaderBar } from "./ui"; -import { type AgentTab, type Task } from "./view-model"; +import { type AgentSession, type Task } from "./view-model"; export const TranscriptHeader = memo(function TranscriptHeader({ task, hasSandbox, - activeTab, + activeSession, editingField, editValue, onEditValueChange, onStartEditingField, onCommitEditingField, onCancelEditingField, - onSetActiveTabUnread, + onSetActiveSessionUnread, sidebarCollapsed, onToggleSidebar, onSidebarPeekStart, @@ -29,14 +29,14 @@ export const TranscriptHeader = memo(function TranscriptHeader({ }: { 
task: Task; hasSandbox: boolean; - activeTab: AgentTab | null | undefined; + activeSession: AgentSession | null | undefined; editingField: "title" | "branch" | null; editValue: string; onEditValueChange: (value: string) => void; onStartEditingField: (field: "title" | "branch", value: string) => void; onCommitEditingField: (field: "title" | "branch") => void; onCancelEditingField: () => void; - onSetActiveTabUnread: (unread: boolean) => void; + onSetActiveSessionUnread: (unread: boolean) => void; sidebarCollapsed?: boolean; onToggleSidebar?: () => void; onSidebarPeekStart?: () => void; @@ -51,8 +51,8 @@ export const TranscriptHeader = memo(function TranscriptHeader({ const needsTrafficLightInset = isDesktop && sidebarCollapsed; const taskStatus = task.runtimeStatus ?? task.status; const headerStatus = useMemo( - () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeTab?.status ?? null, activeTab?.errorMessage ?? null, hasSandbox), - [taskStatus, task.statusMessage, activeTab?.status, activeTab?.errorMessage, hasSandbox], + () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeSession?.status ?? null, activeSession?.errorMessage ?? 
null, hasSandbox), + [taskStatus, task.statusMessage, activeSession?.status, activeSession?.errorMessage, hasSandbox], ); return ( diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx index a036030..d39a408 100644 --- a/foundry/packages/frontend/src/components/mock-layout/ui.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -4,7 +4,7 @@ import { GitPullRequest, GitPullRequestDraft } from "lucide-react"; import { useFoundryTokens } from "../../app/theme"; import { getFoundryTokens } from "../../styles/tokens"; -import type { AgentKind, AgentTab } from "./view-model"; +import type { AgentKind, AgentSession } from "./view-model"; export interface ContextMenuItem { label: string; @@ -251,10 +251,10 @@ export const HeaderStatusPill = memo(function HeaderStatusPill({ status }: { sta ); }); -export const TabAvatar = memo(function TabAvatar({ tab }: { tab: AgentTab }) { - if (tab.status === "running" || tab.status === "pending_provision" || tab.status === "pending_session_create") return ; - if (tab.unread) return ; - return ; +export const SessionAvatar = memo(function SessionAvatar({ session }: { session: AgentSession }) { + if (session.status === "running" || session.status === "pending_provision" || session.status === "pending_session_create") return ; + if (session.unread) return ; + return ; }); export const Shell = styled("div", ({ $theme }) => { diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts index 810b065..21228fc 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -1,10 +1,10 @@ import { describe, expect, it } from "vitest"; -import type { WorkbenchAgentTab } from "@sandbox-agent/foundry-shared"; +import type { WorkbenchSession } from 
"@sandbox-agent/foundry-shared"; import { buildDisplayMessages } from "./view-model"; -function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab { +function makeSession(transcript: WorkbenchSession["transcript"]): WorkbenchSession { return { - id: "tab-1", + id: "session-1", sessionId: "session-1", sessionName: "Session 1", agent: "Codex", @@ -25,7 +25,7 @@ function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab describe("buildDisplayMessages", () => { it("collapses chunked agent output into a single display message", () => { const messages = buildDisplayMessages( - makeTab([ + makeSession([ { id: "evt-setup", eventIndex: 0, @@ -139,7 +139,7 @@ describe("buildDisplayMessages", () => { it("hides non-message session update envelopes", () => { const messages = buildDisplayMessages( - makeTab([ + makeSession([ { id: "evt-client", eventIndex: 1, diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts index bb5e72b..83f5c7a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -1,6 +1,6 @@ import type { WorkbenchAgentKind as AgentKind, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchDiffLineKind as DiffLineKind, WorkbenchFileChange as FileChange, WorkbenchFileTreeNode as FileTreeNode, @@ -10,12 +10,12 @@ import type { WorkbenchModelGroup as ModelGroup, WorkbenchModelId as ModelId, WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchProjectSection as ProjectSection, + WorkbenchRepositorySection as RepositorySection, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { extractEventText } from "../../features/sessions/model"; -export type { ProjectSection }; +export type { RepositorySection }; export const MODEL_GROUPS: ModelGroup[] = [ { @@ -138,17 +138,17 @@ 
function historyDetail(event: TranscriptEvent): string { return content || "Untitled event"; } -export function buildHistoryEvents(tabs: AgentTab[]): HistoryEvent[] { - return tabs - .flatMap((tab) => - tab.transcript +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript .filter((event) => event.sender === "client") .map((event) => ({ - id: `history-${tab.id}-${event.id}`, + id: `history-${session.id}-${event.id}`, messageId: event.id, preview: historyPreview(event), - sessionName: tab.sessionName, - tabId: tab.id, + sessionName: session.sessionName, + sessionId: session.id, createdAtMs: event.createdAt, detail: historyDetail(event), })), @@ -255,8 +255,8 @@ function shouldDisplayEvent(event: TranscriptEvent): boolean { return Boolean(extractEventText(payload).trim()); } -export function buildDisplayMessages(tab: AgentTab | null | undefined): Message[] { - if (!tab) { +export function buildDisplayMessages(session: AgentSession | null | undefined): Message[] { + if (!session) { return []; } @@ -270,7 +270,7 @@ export function buildDisplayMessages(tab: AgentTab | null | undefined): Message[ pendingAgentMessage = null; }; - for (const event of tab.transcript) { + for (const event of session.transcript) { const chunkText = isAgentChunkEvent(event); if (chunkText !== null) { if (!pendingAgentMessage) { @@ -329,7 +329,7 @@ export function parseDiffLines(diff: string): ParsedDiffLine[] { export type { AgentKind, - AgentTab, + AgentSession, DiffLineKind, FileChange, FileTreeNode, diff --git a/foundry/packages/frontend/src/components/mock-onboarding.tsx b/foundry/packages/frontend/src/components/mock-onboarding.tsx index 66bcfcc..4528695 100644 --- a/foundry/packages/frontend/src/components/mock-onboarding.tsx +++ b/foundry/packages/frontend/src/components/mock-onboarding.tsx @@ -103,8 +103,8 @@ function formatDate(value: string | null): string { return dateFormatter.format(new 
Date(value)); } -function workspacePath(organization: FoundryOrganization): string { - return `/workspaces/${organization.workspaceId}`; +function organizationPath(organization: FoundryOrganization): string { + return `/organizations/${organization.organizationId}`; } function settingsPath(organization: FoundryOrganization): string { @@ -121,7 +121,7 @@ function checkoutPath(organization: FoundryOrganization, planId: FoundryBillingP function statusBadge(t: FoundryTokens, organization: FoundryOrganization) { if (organization.kind === "personal") { - return Personal workspace; + return Personal organization; } return GitHub organization; } @@ -347,11 +347,11 @@ export function MockOrganizationSelectorPage() { /> -

Select a workspace

+

Select a organization

Choose where you want to work.

- {/* Workspace list */} + {/* Organization list */}
{ void (async () => { await client.selectOrganization(organization.id); - await navigate({ to: workspacePath(organization) }); + await navigate({ to: organizationPath(organization) }); })(); }} style={{ @@ -580,13 +580,13 @@ function SettingsLayout({ overflowY: "auto", }} > - {/* Back to workspace */} + {/* Back to organization */} {/* User header */} @@ -775,7 +775,7 @@ export function MockOrganizationSettingsPage({ organization }: { organization: F
{[ "Hand off tasks to teammates for review or continuation", - "Shared workspace with unified billing across your org", + "Shared organization with unified billing across your org", "200 task hours per seat, with bulk hour purchases available", "Collaborative task history and audit trail", ].map((feature) => ( @@ -1132,7 +1132,7 @@ export function MockAccountSettingsPage() { }} > - Back to workspace + Back to organization
diff --git a/foundry/packages/frontend/src/components/workspace-dashboard.tsx b/foundry/packages/frontend/src/components/organization-dashboard.tsx similarity index 79% rename from foundry/packages/frontend/src/components/workspace-dashboard.tsx rename to foundry/packages/frontend/src/components/organization-dashboard.tsx index 569c4bf..461ee90 100644 --- a/foundry/packages/frontend/src/components/workspace-dashboard.tsx +++ b/foundry/packages/frontend/src/components/organization-dashboard.tsx @@ -1,6 +1,6 @@ import { useEffect, useMemo, useState, type ReactNode } from "react"; -import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import type { AgentType, RepoBranchRecord, RepoOverview, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; +import { currentFoundryOrganization, useSubscription } from "@sandbox-agent/foundry-client"; import { useMutation, useQuery } from "@tanstack/react-query"; import { Link, useNavigate } from "@tanstack/react-router"; import { Button } from "baseui/button"; @@ -13,17 +13,17 @@ import { Textarea } from "baseui/textarea"; import { StyledDivider } from "baseui/divider"; import { styled, useStyletron } from "baseui"; import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; -import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal, Shuffle } from "lucide-react"; +import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal } from "lucide-react"; import { formatDiffStat } from "../features/tasks/model"; import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; import { HeaderStatusPill } from "./mock-layout/ui"; import { buildTranscript, resolveSessionSelection } from "../features/sessions/model"; import { 
backendClient } from "../lib/backend"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import { DevPanel, useDevPanel } from "./dev-panel"; -interface WorkspaceDashboardProps { - workspaceId: string; +interface OrganizationDashboardProps { + organizationId: string; selectedTaskId?: string; selectedRepoId?: string; } @@ -142,8 +142,6 @@ function repoSummary(overview: RepoOverview | undefined): { total: number; mapped: number; unmapped: number; - conflicts: number; - needsRestack: number; openPrs: number; } { if (!overview) { @@ -151,27 +149,17 @@ function repoSummary(overview: RepoOverview | undefined): { total: 0, mapped: 0, unmapped: 0, - conflicts: 0, - needsRestack: 0, openPrs: 0, }; } let mapped = 0; - let conflicts = 0; - let needsRestack = 0; let openPrs = 0; for (const row of overview.branches) { if (row.taskId) { mapped += 1; } - if (row.conflictsWithMain) { - conflicts += 1; - } - if (row.trackedInStack && row.parentBranch && row.hasUnpushed) { - needsRestack += 1; - } if (row.prNumber && row.prState !== "MERGED" && row.prState !== "CLOSED") { openPrs += 1; } @@ -181,16 +169,11 @@ function repoSummary(overview: RepoOverview | undefined): { total: overview.branches.length, mapped, unmapped: Math.max(0, overview.branches.length - mapped), - conflicts, - needsRestack, openPrs, }; } function branchKind(row: RepoBranchRecord): StatusTagKind { - if (row.conflictsWithMain) { - return "negative"; - } if (row.prState === "OPEN" || row.prState === "DRAFT") { return "warning"; } @@ -333,7 +316,7 @@ function MetaRow({ label, value, mono = false }: { label: string; value: string; ); } -export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }: WorkspaceDashboardProps) { +export function OrganizationDashboard({ organizationId, selectedTaskId, selectedRepoId }: OrganizationDashboardProps) { const [css, theme] = useStyletron(); const navigate = useNavigate(); const showDevPanel = 
useDevPanel(); @@ -346,16 +329,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const [newTitle, setNewTitle] = useState(""); const [newBranchName, setNewBranchName] = useState(""); const [createOnBranch, setCreateOnBranch] = useState(null); - const [addRepoOpen, setAddRepoOpen] = useState(false); const [createTaskOpen, setCreateTaskOpen] = useState(false); - const [addRepoRemote, setAddRepoRemote] = useState(""); - const [addRepoError, setAddRepoError] = useState(null); - const [stackActionError, setStackActionError] = useState(null); - const [stackActionMessage, setStackActionMessage] = useState(null); const [selectedOverviewBranch, setSelectedOverviewBranch] = useState(null); const [overviewFilter, setOverviewFilter] = useState("active"); - const [reparentBranchName, setReparentBranchName] = useState(null); - const [reparentParentBranch, setReparentParentBranch] = useState(""); const [newAgentType, setNewAgentType] = useState(() => { try { const raw = globalThis.localStorage?.getItem("hf.settings.agentType"); @@ -366,16 +342,19 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }); const [createError, setCreateError] = useState(null); - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); - const repos = workspaceState.data?.repos ?? []; - const rows = workspaceState.data?.taskSummaries ?? []; + const appState = useSubscription(subscriptionManager, "app", {}); + const activeOrg = appState.data ? currentFoundryOrganization(appState.data) : null; + + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const repos = organizationState.data?.repos ?? []; + const rows = organizationState.data?.taskSummaries ?? []; const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? 
null, [rows, selectedTaskId]); - const taskState = useInterest( - interestManager, + const taskState = useSubscription( + subscriptionManager, "task", !repoOverviewMode && selectedSummary ? { - workspaceId, + organizationId, repoId: selectedSummary.repoId, taskId: selectedSummary.id, } @@ -384,13 +363,13 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const activeRepoId = selectedRepoId ?? createRepoId; const repoOverviewQuery = useQuery({ - queryKey: ["workspace", workspaceId, "repo-overview", activeRepoId], + queryKey: ["organization", organizationId, "repo-overview", activeRepoId], enabled: Boolean(repoOverviewMode && activeRepoId), queryFn: async () => { if (!activeRepoId) { throw new Error("No repo selected"); } - return backendClient.getRepoOverview(workspaceId, activeRepoId); + return backendClient.getRepoOverview(organizationId, activeRepoId); }, }); @@ -455,16 +434,16 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId useEffect(() => { if (!repoOverviewMode && !selectedTaskId && rows.length > 0) { void navigate({ - to: "/workspaces/$workspaceId/tasks/$taskId", + to: "/organizations/$organizationId/tasks/$taskId", params: { - workspaceId, + organizationId, taskId: rows[0]!.id, }, search: { sessionId: undefined }, replace: true, }); } - }, [navigate, repoOverviewMode, rows, selectedTaskId, workspaceId]); + }, [navigate, repoOverviewMode, rows, selectedTaskId, organizationId]); useEffect(() => { setActiveSessionId(null); @@ -494,12 +473,12 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId ); const resolvedSessionId = sessionSelection.sessionId; const staleSessionId = sessionSelection.staleSessionId; - const sessionState = useInterest( - interestManager, + const sessionState = useSubscription( + subscriptionManager, "session", selectedForSession && resolvedSessionId ? 
{ - workspaceId, + organizationId, repoId: selectedForSession.repoId, taskId: selectedForSession.id, sessionId: resolvedSessionId, @@ -537,9 +516,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }, [repoOverviewMode, selectedForSession, selectedSummary]); const devPanelSnapshot = useMemo( (): TaskWorkbenchSnapshot => ({ - workspaceId, + organizationId, repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), - projects: [], + repositories: [], tasks: rows.map((task) => ({ id: task.id, repoId: task.repoId, @@ -551,7 +530,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId updatedAtMs: task.updatedAtMs, branch: task.branch ?? null, pullRequest: task.pullRequest, - tabs: task.sessionsSummary.map((session) => ({ + sessions: task.sessionsSummary.map((session) => ({ ...session, draft: { text: "", @@ -567,7 +546,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null, })), }), - [repos, rows, selectedForSession, workspaceId], + [repos, rows, selectedForSession, organizationId], ); const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { @@ -575,8 +554,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId throw new Error("No sandbox is available for this task"); } return backendClient.createSandboxSession({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, prompt: selectedForSession.task, cwd: activeSandbox.cwd ?? 
undefined, @@ -607,8 +586,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId } const sessionId = await ensureSessionForPrompt(); await backendClient.sendSandboxPrompt({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, sessionId, prompt, @@ -634,7 +613,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const draftBranchName = newBranchName.trim(); return backendClient.createTask({ - workspaceId, + organizationId, repoId, task, agentType: newAgentType, @@ -651,9 +630,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId setCreateOnBranch(null); setCreateTaskOpen(false); await navigate({ - to: "/workspaces/$workspaceId/tasks/$taskId", + to: "/organizations/$organizationId/tasks/$taskId", params: { - workspaceId, + organizationId, taskId: task.taskId, }, search: { sessionId: undefined }, @@ -664,63 +643,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }, }); - const addRepo = useMutation({ - mutationFn: async (remoteUrl: string) => { - const trimmed = remoteUrl.trim(); - if (!trimmed) { - throw new Error("Remote URL is required"); - } - return backendClient.addRepo(workspaceId, trimmed); - }, - onSuccess: async (created) => { - setAddRepoError(null); - setAddRepoRemote(""); - setAddRepoOpen(false); - setCreateRepoId(created.repoId); - if (repoOverviewMode) { - await navigate({ - to: "/workspaces/$workspaceId/repos/$repoId", - params: { - workspaceId, - repoId: created.repoId, - }, - }); - } - }, - onError: (error) => { - setAddRepoError(error instanceof Error ? 
error.message : String(error)); - }, - }); - - const runStackAction = useMutation({ - mutationFn: async (input: { action: RepoStackAction; branchName?: string; parentBranch?: string }) => { - if (!activeRepoId) { - throw new Error("No repository selected"); - } - return backendClient.runRepoStackAction({ - workspaceId, - repoId: activeRepoId, - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - }); - }, - onSuccess: async (result) => { - if (result.executed) { - setStackActionError(null); - setStackActionMessage(result.message); - } else { - setStackActionMessage(null); - setStackActionError(result.message); - } - await repoOverviewQuery.refetch(); - }, - onError: (error) => { - setStackActionMessage(null); - setStackActionError(error instanceof Error ? error.message : String(error)); - }, - }); - const openCreateFromBranch = (repoId: string, branchName: string): void => { setCreateRepoId(repoId); setCreateOnBranch(branchName); @@ -747,7 +669,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const overview = repoOverviewQuery.data; const overviewStats = repoSummary(overview); - const stackActionsEnabled = Boolean(overview?.stackAvailable) && !runStackAction.isPending; const filteredOverviewBranches = useMemo(() => { if (!overview?.branches?.length) { return []; @@ -774,26 +695,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId } }, [filteredOverviewBranches, selectedOverviewBranch]); - const handleReparentSubmit = (): void => { - if (!reparentBranchName || !reparentParentBranch.trim()) { - return; - } - setStackActionError(null); - void runStackAction - .mutateAsync({ - action: "reparent_branch", - branchName: reparentBranchName, - parentBranch: reparentParentBranch.trim(), - }) - .then(() => { - setReparentBranchName(null); - setReparentParentBranch(""); - }) - .catch(() => { - // mutation state is surfaced above - }); - }; - const modalOverrides = 
useMemo( () => ({ Dialog: { @@ -834,7 +735,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId gap: "2px", })} > - Workspace + Organization
- {workspaceId} + {organizationId}
@@ -853,12 +754,14 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId size="compact" kind="secondary" onClick={() => { - setAddRepoError(null); - setAddRepoOpen(true); + void navigate({ + to: "/organizations/$organizationId/settings", + params: { organizationId }, + }); }} - data-testid="repo-add-open" + data-testid="organization-settings-open" > - Add Repo + GitHub Settings
@@ -873,14 +776,14 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId - {workspaceState.status === "loading" ? ( + {organizationState.status === "loading" ? ( <> ) : null} - {workspaceState.status !== "loading" && repoGroups.length === 0 ? ( - No repos or tasks yet. Add a repo to start a workspace. + {organizationState.status !== "loading" && repoGroups.length === 0 ? ( + No repos or tasks yet. Create the repository in GitHub, then sync repos from organization settings. ) : null} {repoGroups.map((group) => ( @@ -894,8 +797,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId })} >
- - - -
@@ -1099,28 +967,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId Branches {overviewStats.total} Mapped {overviewStats.mapped} Unmapped {overviewStats.unmapped} - Conflicts {overviewStats.conflicts} Open PRs {overviewStats.openPrs} - Needs restack {overviewStats.needsRestack}
- - {overview && !overview.stackAvailable ? ( - - git-spice is unavailable for this repo. Stack actions are disabled. - - ) : null} - - {stackActionError ? ( - - {stackActionError} - - ) : null} - - {stackActionMessage ? ( - - {stackActionMessage} - - ) : null} @@ -1139,10 +987,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId className={css({ minWidth: "980px", display: "grid", - gridTemplateColumns: "2fr 1.3fr 0.8fr 1fr 1fr 1.4fr", + gridTemplateColumns: "2fr 1.3fr 1fr 1fr 0.9fr 1.2fr", })} > - {["Branch", "Parent", "Ahead", "PR", "CI/Review", "Actions"].map((label) => ( + {["Branch", "Task", "PR", "CI / Review", "Updated", "Actions"].map((label) => (
- - {formatRelativeAge(branch.updatedAt)} - {branch.taskId ? "task" : "unmapped"} - {branch.trackedInStack ? stack : null} + + {branch.commitSha.slice(0, 10) || "-"} +
-
{branch.parentBranch ?? "-"}
-
{branch.hasUnpushed ? "yes" : "-"}
+
{branch.taskTitle ?? branch.taskId ?? "-"}
+
{formatRelativeAge(branch.updatedAt)}
- - - - - - {!branch.taskId ? (
@@ -1636,7 +1434,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId > -
@@ -1659,10 +1456,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId })} > - - + + )}
@@ -1764,49 +1561,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId - setAddRepoOpen(false)} overrides={modalOverrides}> - Add Repo - -
- - Add a git remote URL to this workspace. - - setAddRepoRemote(event.target.value)} - overrides={inputTestIdOverrides("repo-add-remote")} - /> - {addRepoError ? ( - - {addRepoError} - - ) : null} -
-
- - - - -
- { @@ -1847,34 +1601,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId overrides={selectTestIdOverrides("task-create-repo")} /> {repos.length === 0 ? ( -
- - No repos yet. - - -
+ + No imported repos yet. Create the repository in GitHub first, then sync repos from organization settings. + ) : null} @@ -1967,52 +1696,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
- - { - setReparentBranchName(null); - setReparentParentBranch(""); - }} - overrides={modalOverrides} - > - Reparent Branch - -
- - {reparentBranchName ? `Move ${reparentBranchName} onto a different parent branch.` : ""} - - setReparentParentBranch(event.target.value)} - placeholder="Parent branch" - overrides={inputTestIdOverrides("repo-overview-reparent-input")} - /> -
-
- - - - -
- {showDevPanel ? : null} + {showDevPanel ? ( + + ) : null} ); } diff --git a/foundry/packages/frontend/src/features/tasks/model.test.ts b/foundry/packages/frontend/src/features/tasks/model.test.ts index dc62f30..6db9bb5 100644 --- a/foundry/packages/frontend/src/features/tasks/model.test.ts +++ b/foundry/packages/frontend/src/features/tasks/model.test.ts @@ -3,14 +3,14 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { formatDiffStat, groupTasksByRepo } from "./model"; const base: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/one", title: "Feature one", task: "Ship one", - providerId: "local", + sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,7 +18,7 @@ const base: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, diff --git a/foundry/packages/frontend/src/lib/backend.ts b/foundry/packages/frontend/src/lib/backend.ts index 158e701..b57cc51 100644 --- a/foundry/packages/frontend/src/lib/backend.ts +++ b/foundry/packages/frontend/src/lib/backend.ts @@ -1,8 +1,8 @@ import { createBackendClient } from "@sandbox-agent/foundry-client"; -import { backendEndpoint, defaultWorkspaceId, frontendClientMode } from "./env"; +import { backendEndpoint, defaultOrganizationId, frontendClientMode } from "./env"; export const backendClient = createBackendClient({ endpoint: backendEndpoint, - defaultWorkspaceId, + defaultOrganizationId, mode: frontendClientMode, }); diff --git a/foundry/packages/frontend/src/lib/env.ts b/foundry/packages/frontend/src/lib/env.ts index ea53e85..5476f83 100644 --- a/foundry/packages/frontend/src/lib/env.ts +++ b/foundry/packages/frontend/src/lib/env.ts @@ -1,6 +1,6 @@ type FoundryRuntimeConfig = { backendEndpoint?: 
string; - defaultWorkspaceId?: string; + defaultOrganizationId?: string; frontendClientMode?: string; }; @@ -26,7 +26,7 @@ const runtimeConfig = typeof window !== "undefined" ? window.__FOUNDRY_RUNTIME_C export const backendEndpoint = runtimeConfig?.backendEndpoint?.trim() || import.meta.env.VITE_HF_BACKEND_ENDPOINT?.trim() || resolveDefaultBackendEndpoint(); -export const defaultWorkspaceId = runtimeConfig?.defaultWorkspaceId?.trim() || import.meta.env.VITE_HF_WORKSPACE?.trim() || "default"; +export const defaultOrganizationId = runtimeConfig?.defaultOrganizationId?.trim() || import.meta.env.VITE_HF_WORKSPACE?.trim() || "default"; function resolveFrontendClientMode(): "mock" | "remote" { const raw = runtimeConfig?.frontendClientMode?.trim().toLowerCase() || frontendEnv.FOUNDRY_FRONTEND_CLIENT_MODE?.trim().toLowerCase(); diff --git a/foundry/packages/frontend/src/lib/interest.ts b/foundry/packages/frontend/src/lib/interest.ts deleted file mode 100644 index a736e71..0000000 --- a/foundry/packages/frontend/src/lib/interest.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { MockInterestManager, RemoteInterestManager } from "@sandbox-agent/foundry-client"; -import { backendClient } from "./backend"; -import { frontendClientMode } from "./env"; - -export const interestManager = frontendClientMode === "mock" ? 
new MockInterestManager() : new RemoteInterestManager(backendClient); diff --git a/foundry/packages/frontend/src/lib/mock-app.ts b/foundry/packages/frontend/src/lib/mock-app.ts index c72a708..acf3009 100644 --- a/foundry/packages/frontend/src/lib/mock-app.ts +++ b/foundry/packages/frontend/src/lib/mock-app.ts @@ -1,7 +1,7 @@ import { useSyncExternalStore } from "react"; import { createFoundryAppClient, - useInterest, + useSubscription, currentFoundryOrganization, currentFoundryUser, eligibleFoundryOrganizations, @@ -9,7 +9,7 @@ import { } from "@sandbox-agent/foundry-client"; import type { FoundryAppSnapshot, FoundryBillingPlanId, FoundryOrganization, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared"; import { backendClient } from "./backend"; -import { interestManager } from "./interest"; +import { subscriptionManager } from "./subscription"; import { frontendClientMode } from "./env"; const REMOTE_APP_SESSION_STORAGE_KEY = "sandbox-agent-foundry:remote-app-session"; @@ -37,10 +37,10 @@ const legacyAppClient: FoundryAppClient = createFoundryAppClient({ const remoteAppClient: FoundryAppClient = { getSnapshot(): FoundryAppSnapshot { - return interestManager.getSnapshot("app", {}) ?? EMPTY_APP_SNAPSHOT; + return subscriptionManager.getSnapshot("app", {}) ?? 
EMPTY_APP_SNAPSHOT; }, subscribe(listener: () => void): () => void { - return interestManager.subscribe("app", {}, listener); + return subscriptionManager.subscribe("app", {}, listener); }, async signInWithGithub(userId?: string): Promise { void userId; @@ -79,8 +79,8 @@ const remoteAppClient: FoundryAppClient = { async reconnectGithub(organizationId: string): Promise { await backendClient.reconnectAppGithub(organizationId); }, - async recordSeatUsage(workspaceId: string): Promise { - await backendClient.recordAppSeatUsage(workspaceId); + async recordSeatUsage(organizationId: string): Promise { + await backendClient.recordAppSeatUsage(organizationId); }, }; @@ -88,7 +88,7 @@ const appClient: FoundryAppClient = frontendClientMode === "remote" ? remoteAppC export function useMockAppSnapshot(): FoundryAppSnapshot { if (frontendClientMode === "remote") { - const app = useInterest(interestManager, "app", {}); + const app = useSubscription(subscriptionManager, "app", {}); if (app.status !== "loading") { firstSnapshotDelivered = true; } diff --git a/foundry/packages/frontend/src/lib/subscription.ts b/foundry/packages/frontend/src/lib/subscription.ts new file mode 100644 index 0000000..c1618fb --- /dev/null +++ b/foundry/packages/frontend/src/lib/subscription.ts @@ -0,0 +1,5 @@ +import { MockSubscriptionManager, RemoteSubscriptionManager } from "@sandbox-agent/foundry-client"; +import { backendClient } from "./backend"; +import { frontendClientMode } from "./env"; + +export const subscriptionManager = frontendClientMode === "mock" ? 
new MockSubscriptionManager() : new RemoteSubscriptionManager(backendClient); diff --git a/foundry/packages/shared/src/app-shell.ts b/foundry/packages/shared/src/app-shell.ts index 31ee235..93d3b02 100644 --- a/foundry/packages/shared/src/app-shell.ts +++ b/foundry/packages/shared/src/app-shell.ts @@ -65,7 +65,7 @@ export interface FoundryOrganizationSettings { export interface FoundryOrganization { id: string; - workspaceId: string; + organizationId: string; kind: FoundryOrganizationKind; settings: FoundryOrganizationSettings; github: FoundryGithubState; diff --git a/foundry/packages/shared/src/config.ts b/foundry/packages/shared/src/config.ts index 3f7e0b0..44ea722 100644 --- a/foundry/packages/shared/src/config.ts +++ b/foundry/packages/shared/src/config.ts @@ -15,7 +15,7 @@ export const ConfigSchema = z.object({ }) .optional(), notify: z.array(NotifyBackendSchema).default(["terminal"]), - workspace: z + organization: z .object({ default: z.string().min(1).default("default"), }) @@ -39,7 +39,7 @@ export const ConfigSchema = z.object({ backup_interval_secs: 3600, backup_retention_days: 7, }), - providers: z + sandboxProviders: z .object({ local: z .object({ diff --git a/foundry/packages/shared/src/contracts.ts b/foundry/packages/shared/src/contracts.ts index 40c4f53..d6725f7 100644 --- a/foundry/packages/shared/src/contracts.ts +++ b/foundry/packages/shared/src/contracts.ts @@ -1,14 +1,14 @@ import { z } from "zod"; -export const WorkspaceIdSchema = z +export const OrganizationIdSchema = z .string() .min(1) .max(64) .regex(/^[a-zA-Z0-9._-]+$/); -export type WorkspaceId = z.infer; +export type OrganizationId = z.infer; -export const ProviderIdSchema = z.enum(["e2b", "local"]); -export type ProviderId = z.infer; +export const SandboxProviderIdSchema = z.enum(["e2b", "local"]); +export type SandboxProviderId = z.infer; export const AgentTypeSchema = z.enum(["claude", "codex"]); export type AgentType = z.infer; @@ -39,7 +39,7 @@ export const TaskStatusSchema = 
z.enum([ export type TaskStatus = z.infer; export const RepoRecordSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, remoteUrl: RepoRemoteSchema, createdAt: z.number().int(), @@ -47,33 +47,27 @@ export const RepoRecordSchema = z.object({ }); export type RepoRecord = z.infer; -export const AddRepoInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, - remoteUrl: RepoRemoteSchema, -}); -export type AddRepoInput = z.infer; - export const CreateTaskInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, task: z.string().min(1), explicitTitle: z.string().trim().min(1).optional(), explicitBranchName: z.string().trim().min(1).optional(), - providerId: ProviderIdSchema.optional(), + sandboxProviderId: SandboxProviderIdSchema.optional(), agentType: AgentTypeSchema.optional(), onBranch: z.string().trim().min(1).optional(), }); export type CreateTaskInput = z.infer; export const TaskRecordSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: z.string().min(1), repoRemote: RepoRemoteSchema, taskId: z.string().min(1), branchName: z.string().min(1).nullable(), title: z.string().min(1).nullable(), task: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, status: TaskStatusSchema, statusMessage: z.string().nullable(), activeSandboxId: z.string().nullable(), @@ -81,7 +75,7 @@ export const TaskRecordSchema = z.object({ sandboxes: z.array( z.object({ sandboxId: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, sandboxActorId: z.string().nullable(), switchTarget: z.string().min(1), cwd: z.string().nullable(), @@ -106,7 +100,7 @@ export const TaskRecordSchema = z.object({ export type TaskRecord = z.infer; export const TaskSummarySchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: 
OrganizationIdSchema, repoId: z.string().min(1), taskId: z.string().min(1), branchName: z.string().min(1).nullable(), @@ -117,21 +111,21 @@ export const TaskSummarySchema = z.object({ export type TaskSummary = z.infer; export const TaskActionInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), }); export type TaskActionInput = z.infer; export const SwitchResultSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, switchTarget: z.string().min(1), }); export type SwitchResult = z.infer; export const ListTasksInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema.optional(), }); export type ListTasksInput = z.infer; @@ -139,11 +133,6 @@ export type ListTasksInput = z.infer; export const RepoBranchRecordSchema = z.object({ branchName: z.string().min(1), commitSha: z.string().min(1), - parentBranch: z.string().nullable(), - trackedInStack: z.boolean(), - diffStat: z.string().nullable(), - hasUnpushed: z.boolean(), - conflictsWithMain: z.boolean(), taskId: z.string().nullable(), taskTitle: z.string().nullable(), taskStatus: TaskStatusSchema.nullable(), @@ -153,69 +142,27 @@ export const RepoBranchRecordSchema = z.object({ ciStatus: z.string().nullable(), reviewStatus: z.string().nullable(), reviewer: z.string().nullable(), - firstSeenAt: z.number().int().nullable(), - lastSeenAt: z.number().int().nullable(), updatedAt: z.number().int(), }); export type RepoBranchRecord = z.infer; export const RepoOverviewSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, remoteUrl: RepoRemoteSchema, baseRef: z.string().nullable(), - stackAvailable: z.boolean(), fetchedAt: z.number().int(), - branchSyncAt: z.number().int().nullable(), - prSyncAt: 
z.number().int().nullable(), - branchSyncStatus: z.enum(["pending", "syncing", "synced", "error"]), - prSyncStatus: z.enum(["pending", "syncing", "synced", "error"]), - repoActionJobs: z.array( - z.object({ - jobId: z.string().min(1), - action: z.enum(["sync_repo", "restack_repo", "restack_subtree", "rebase_branch", "reparent_branch"]), - branchName: z.string().nullable(), - parentBranch: z.string().nullable(), - status: z.enum(["queued", "running", "completed", "error"]), - message: z.string().min(1), - createdAt: z.number().int(), - updatedAt: z.number().int(), - completedAt: z.number().int().nullable(), - }), - ), branches: z.array(RepoBranchRecordSchema), }); export type RepoOverview = z.infer; -export const RepoStackActionSchema = z.enum(["sync_repo", "restack_repo", "restack_subtree", "rebase_branch", "reparent_branch"]); -export type RepoStackAction = z.infer; - -export const RepoStackActionInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, - repoId: RepoIdSchema, - action: RepoStackActionSchema, - branchName: z.string().trim().min(1).optional(), - parentBranch: z.string().trim().min(1).optional(), +export const OrganizationUseInputSchema = z.object({ + organizationId: OrganizationIdSchema, }); -export type RepoStackActionInput = z.infer; - -export const RepoStackActionResultSchema = z.object({ - jobId: z.string().min(1).nullable().optional(), - action: RepoStackActionSchema, - executed: z.boolean(), - status: z.enum(["queued", "running", "completed", "error"]).optional(), - message: z.string().min(1), - at: z.number().int(), -}); -export type RepoStackActionResult = z.infer; - -export const WorkspaceUseInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, -}); -export type WorkspaceUseInput = z.infer; +export type OrganizationUseInput = z.infer; export const StarSandboxAgentRepoInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, }); export type StarSandboxAgentRepoInput = z.infer; @@ -226,7 +173,7 
@@ export const StarSandboxAgentRepoResultSchema = z.object({ export type StarSandboxAgentRepoResult = z.infer; export const HistoryQueryInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, limit: z.number().int().positive().max(500).optional(), branch: z.string().min(1).optional(), taskId: z.string().min(1).optional(), @@ -235,7 +182,7 @@ export type HistoryQueryInput = z.infer; export const HistoryEventSchema = z.object({ id: z.number().int(), - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: z.string().nullable(), taskId: z.string().nullable(), branchName: z.string().nullable(), @@ -246,14 +193,14 @@ export const HistoryEventSchema = z.object({ export type HistoryEvent = z.infer; export const PruneInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, dryRun: z.boolean(), yes: z.boolean(), }); export type PruneInput = z.infer; export const KillInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), deleteBranch: z.boolean(), abandon: z.boolean(), @@ -261,13 +208,13 @@ export const KillInputSchema = z.object({ export type KillInput = z.infer; export const StatuslineInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, format: z.enum(["table", "claude-code"]), }); export type StatuslineInput = z.infer; export const ListInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, format: z.enum(["table", "json"]), full: z.boolean(), }); diff --git a/foundry/packages/shared/src/index.ts b/foundry/packages/shared/src/index.ts index be629a6..754bf21 100644 --- a/foundry/packages/shared/src/index.ts +++ b/foundry/packages/shared/src/index.ts @@ -4,4 +4,4 @@ export * from "./config.js"; export * from "./logging.js"; export * from "./realtime-events.js"; export * from "./workbench.js"; -export * from 
"./workspace.js"; +export * from "./organization.js"; diff --git a/foundry/packages/shared/src/organization.ts b/foundry/packages/shared/src/organization.ts new file mode 100644 index 0000000..73e1867 --- /dev/null +++ b/foundry/packages/shared/src/organization.ts @@ -0,0 +1,13 @@ +import type { AppConfig } from "./config.js"; + +export function resolveOrganizationId(flagOrganization: string | undefined, config: AppConfig): string { + if (flagOrganization && flagOrganization.trim().length > 0) { + return flagOrganization.trim(); + } + + if (config.organization.default.trim().length > 0) { + return config.organization.default.trim(); + } + + return "default"; +} diff --git a/foundry/packages/shared/src/realtime-events.ts b/foundry/packages/shared/src/realtime-events.ts index 739dd51..ddb5c2b 100644 --- a/foundry/packages/shared/src/realtime-events.ts +++ b/foundry/packages/shared/src/realtime-events.ts @@ -1,5 +1,5 @@ import type { FoundryAppSnapshot } from "./app-shell.js"; -import type { WorkbenchOpenPrSummary, WorkbenchRepoSummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; +import type { WorkbenchOpenPrSummary, WorkbenchRepositorySummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; export interface SandboxProcessSnapshot { id: string; @@ -15,12 +15,12 @@ export interface SandboxProcessSnapshot { tty: boolean; } -/** Workspace-level events broadcast by the workspace actor. */ -export type WorkspaceEvent = +/** Organization-level events broadcast by the organization actor. 
*/ +export type OrganizationEvent = | { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary } | { type: "taskRemoved"; taskId: string } - | { type: "repoAdded"; repo: WorkbenchRepoSummary } - | { type: "repoUpdated"; repo: WorkbenchRepoSummary } + | { type: "repoAdded"; repo: WorkbenchRepositorySummary } + | { type: "repoUpdated"; repo: WorkbenchRepositorySummary } | { type: "repoRemoved"; repoId: string } | { type: "pullRequestUpdated"; pullRequest: WorkbenchOpenPrSummary } | { type: "pullRequestRemoved"; prId: string }; @@ -31,7 +31,7 @@ export type TaskEvent = { type: "taskDetailUpdated"; detail: WorkbenchTaskDetail /** Session-level events broadcast by the task actor and filtered by sessionId on the client. */ export type SessionEvent = { type: "sessionUpdated"; session: WorkbenchSessionDetail }; -/** App-level events broadcast by the app workspace actor. */ +/** App-level events broadcast by the app organization actor. */ export type AppEvent = { type: "appUpdated"; snapshot: FoundryAppSnapshot }; /** Sandbox process events broadcast by the sandbox instance actor. */ diff --git a/foundry/packages/shared/src/workbench.ts b/foundry/packages/shared/src/workbench.ts index 078ed31..6a0df2e 100644 --- a/foundry/packages/shared/src/workbench.ts +++ b/foundry/packages/shared/src/workbench.ts @@ -1,4 +1,4 @@ -import type { AgentType, ProviderId, TaskStatus } from "./contracts.js"; +import type { AgentType, SandboxProviderId, TaskStatus } from "./contracts.js"; export type WorkbenchTaskStatus = TaskStatus | "new"; export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor"; @@ -32,7 +32,10 @@ export interface WorkbenchComposerDraft { /** Session metadata without transcript content. */ export interface WorkbenchSessionSummary { id: string; - sessionId: string | null; + /** Stable UI session id used for routing and task-local identity. */ + sessionId: string; + /** Underlying sandbox session id when provisioning has completed. 
*/ + sandboxSessionId?: string | null; sessionName: string; agent: WorkbenchAgentKind; model: WorkbenchModelId; @@ -43,11 +46,10 @@ export interface WorkbenchSessionSummary { errorMessage?: string | null; } -/** Full session content — only fetched when viewing a specific session tab. */ +/** Full session content — only fetched when viewing a specific session. */ export interface WorkbenchSessionDetail { - /** Stable UI tab id used for the session topic key and routing. */ + /** Stable UI session id used for the session topic key and routing. */ sessionId: string; - tabId: string; sandboxSessionId: string | null; sessionName: string; agent: WorkbenchAgentKind; @@ -87,7 +89,7 @@ export interface WorkbenchHistoryEvent { messageId: string; preview: string; sessionName: string; - tabId: string; + sessionId: string; createdAtMs: number; detail: string; } @@ -121,12 +123,12 @@ export interface WorkbenchOpenPrSummary { } export interface WorkbenchSandboxSummary { - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; sandboxId: string; cwd: string | null; } -/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */ +/** Sidebar-level task data. Materialized in the organization actor's SQLite. */ export interface WorkbenchTaskSummary { id: string; repoId: string; @@ -162,8 +164,8 @@ export interface WorkbenchTaskDetail extends WorkbenchTaskSummary { activeSandboxId: string | null; } -/** Repo-level summary for workspace sidebar. */ -export interface WorkbenchRepoSummary { +/** Repo-level summary for organization sidebar. */ +export interface WorkbenchRepositorySummary { id: string; label: string; /** Aggregated branch/task overview state (replaces getRepoOverview polling). */ @@ -171,19 +173,15 @@ export interface WorkbenchRepoSummary { latestActivityMs: number; } -/** Workspace-level snapshot — initial fetch for the workspace topic. 
*/ -export interface WorkspaceSummarySnapshot { - workspaceId: string; - repos: WorkbenchRepoSummary[]; +/** Organization-level snapshot — initial fetch for the organization topic. */ +export interface OrganizationSummarySnapshot { + organizationId: string; + repos: WorkbenchRepositorySummary[]; taskSummaries: WorkbenchTaskSummary[]; openPullRequests: WorkbenchOpenPrSummary[]; } -/** - * Deprecated compatibility aliases for older mock/view-model code. - * New code should use the summary/detail/topic-specific types above. - */ -export interface WorkbenchAgentTab extends WorkbenchSessionSummary { +export interface WorkbenchSession extends WorkbenchSessionSummary { draft: WorkbenchComposerDraft; transcript: WorkbenchTranscriptEvent[]; } @@ -199,7 +197,7 @@ export interface WorkbenchTask { updatedAtMs: number; branch: string | null; pullRequest: WorkbenchPullRequestSummary | null; - tabs: WorkbenchAgentTab[]; + sessions: WorkbenchSession[]; fileChanges: WorkbenchFileChange[]; diffs: Record; fileTree: WorkbenchFileTreeNode[]; @@ -212,7 +210,7 @@ export interface WorkbenchRepo { label: string; } -export interface WorkbenchProjectSection { +export interface WorkbenchRepositorySection { id: string; label: string; updatedAtMs: number; @@ -220,9 +218,9 @@ export interface WorkbenchProjectSection { } export interface TaskWorkbenchSnapshot { - workspaceId: string; + organizationId: string; repos: WorkbenchRepo[]; - projects: WorkbenchProjectSection[]; + repositories: WorkbenchRepositorySection[]; tasks: WorkbenchTask[]; } @@ -256,30 +254,30 @@ export interface TaskWorkbenchRenameInput { export interface TaskWorkbenchSendMessageInput { taskId: string; - tabId: string; + sessionId: string; text: string; attachments: WorkbenchLineAttachment[]; } -export interface TaskWorkbenchTabInput { +export interface TaskWorkbenchSessionInput { taskId: string; - tabId: string; + sessionId: string; } -export interface TaskWorkbenchRenameSessionInput extends TaskWorkbenchTabInput { +export 
interface TaskWorkbenchRenameSessionInput extends TaskWorkbenchSessionInput { title: string; } -export interface TaskWorkbenchChangeModelInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchChangeModelInput extends TaskWorkbenchSessionInput { model: WorkbenchModelId; } -export interface TaskWorkbenchUpdateDraftInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchUpdateDraftInput extends TaskWorkbenchSessionInput { text: string; attachments: WorkbenchLineAttachment[]; } -export interface TaskWorkbenchSetSessionUnreadInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchSetSessionUnreadInput extends TaskWorkbenchSessionInput { unread: boolean; } @@ -290,9 +288,9 @@ export interface TaskWorkbenchDiffInput { export interface TaskWorkbenchCreateTaskResponse { taskId: string; - tabId?: string; + sessionId?: string; } -export interface TaskWorkbenchAddTabResponse { - tabId: string; +export interface TaskWorkbenchAddSessionResponse { + sessionId: string; } diff --git a/foundry/packages/shared/src/workspace.ts b/foundry/packages/shared/src/workspace.ts deleted file mode 100644 index fb8e1b7..0000000 --- a/foundry/packages/shared/src/workspace.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { AppConfig } from "./config.js"; - -export function resolveWorkspaceId(flagWorkspace: string | undefined, config: AppConfig): string { - if (flagWorkspace && flagWorkspace.trim().length > 0) { - return flagWorkspace.trim(); - } - - if (config.workspace.default.trim().length > 0) { - return config.workspace.default.trim(); - } - - return "default"; -} diff --git a/foundry/packages/shared/test/workspace.test.ts b/foundry/packages/shared/test/organization.test.ts similarity index 59% rename from foundry/packages/shared/test/workspace.test.ts rename to foundry/packages/shared/test/organization.test.ts index c57173a..f1cd3f6 100644 --- a/foundry/packages/shared/test/workspace.test.ts +++ b/foundry/packages/shared/test/organization.test.ts @@ -1,10 
+1,10 @@ import { describe, expect, it } from "vitest"; -import { ConfigSchema, resolveWorkspaceId, type AppConfig } from "../src/index.js"; +import { ConfigSchema, resolveOrganizationId, type AppConfig } from "../src/index.js"; const cfg: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "team-a" }, + organization: { default: "team-a" }, backend: { host: "127.0.0.1", port: 7741, @@ -14,27 +14,27 @@ const cfg: AppConfig = ConfigSchema.parse({ backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, }); -describe("resolveWorkspaceId", () => { +describe("resolveOrganizationId", () => { it("prefers explicit flag", () => { - expect(resolveWorkspaceId("feature", cfg)).toBe("feature"); + expect(resolveOrganizationId("feature", cfg)).toBe("feature"); }); it("falls back to config default", () => { - expect(resolveWorkspaceId(undefined, cfg)).toBe("team-a"); + expect(resolveOrganizationId(undefined, cfg)).toBe("team-a"); }); it("falls back to literal default when config value is empty", () => { const empty = { ...cfg, - workspace: { default: "" }, + organization: { default: "" }, } as AppConfig; - expect(resolveWorkspaceId(undefined, empty)).toBe("default"); + expect(resolveOrganizationId(undefined, empty)).toBe("default"); }); }); diff --git a/foundry/research/friction/general.mdx b/foundry/research/friction/general.mdx index b152287..fce920b 100644 --- a/foundry/research/friction/general.mdx +++ b/foundry/research/friction/general.mdx @@ -15,8 +15,8 @@ The root cause of the duplicate HTTP request is unknown. It is not `appWorkspace ### Attempted Fix / Workaround 1. Made `completeAppGithubAuth` clear `oauthState`/`oauthStateExpiresAt` immediately after validation and before `exchangeCode`, so any duplicate request fails the state check instead of hitting GitHub with a consumed code. -2. 
Split `syncGithubSessionFromToken` into a fast path (`initGithubSession` — exchange code, get viewer, store token+identity) and a slow path (`syncGithubOrganizations` — list orgs, list installations, sync each workspace). -3. `completeAppGithubAuth` now uses the fast path and enqueues the slow org sync to the workspace workflow queue (`workspace.command.syncGithubSession`, fire-and-forget). The HTTP callback returns a 302 redirect in ~2s instead of ~18s, eliminating the proxy timeout window. +2. Split `syncGithubSessionFromToken` into a fast path (`initGithubSession` — exchange code, get viewer, store token+identity) and a slow path (`syncGithubOrganizations` — list orgs, list installations, sync each organization). +3. `completeAppGithubAuth` now uses the fast path and enqueues the slow org sync to the organization workflow queue (`organization.command.syncGithubSession`, fire-and-forget). The HTTP callback returns a 302 redirect in ~2s instead of ~18s, eliminating the proxy timeout window. 4. The frontend already polls `getAppSnapshot` every 500ms when any org has `syncStatus === "syncing"`, so the deferred sync is transparent to the user. 5. `bootstrapAppGithubSession` (dev-only) still calls the full synchronous `syncGithubSessionFromToken` since proxy timeouts are not a concern in dev and it needs the session fully populated before returning. @@ -38,14 +38,14 @@ Verifying the BaseUI frontend against the real `rivet-dev/sandbox-agent-testing` Three separate issues stacked together during live verification: -1. A half-created task actor remained in project indexes after earlier runtime failures. The actor state existed, but its durable task row did not, so repo overview polling spammed `Task not found` and kept trying to load an orphaned task. +1. A half-created task actor remained in repository indexes after earlier runtime failures. 
The actor state existed, but its durable task row did not, so repo overview polling spammed `Task not found` and kept trying to load an orphaned task. 2. Rebuilding the backend container outside `just dev` dropped injected GitHub auth, which made repo overview fall back to `Open PRs 0` until `GITHUB_TOKEN`/`GH_TOKEN` were passed back into `docker compose`. 3. In the create-task modal, the BaseUI-controlled form looked populated in the browser, but submit gating/click behavior was unreliable under browser automation, making it hard to distinguish frontend state bugs from backend failures. ### Attempted Fix / Workaround -1. Updated project-actor stale task pruning to treat `Task not found:` the same as actor-not-found and rebuilt the backend image. -2. Recovered the orphaned task by forcing an initialize attempt, which surfaced a missing `body?.providerId` guard in the task init workflow and led to pruning the stale project index row. +1. Updated repository-actor stale task pruning to treat `Task not found:` the same as actor-not-found and rebuilt the backend image. +2. Recovered the orphaned task by forcing an initialize attempt, which surfaced a missing `body?.providerId` guard in the task init workflow and led to pruning the stale repository index row. 3. Recreated the backend with `GITHUB_TOKEN="$(gh auth token)" GH_TOKEN="$(gh auth token)" docker compose ... up -d --build backend` so PR sync could see live GitHub data again. 4. Used `agent-browser` plus screenshots to separate working paths (repo overview + PR visibility) from the remaining broken path (modal submit / task creation UI). @@ -80,22 +80,22 @@ The Docker dev backend container was starting on Bun `1.2.23` and accepting TCP ### What I Was Working On -Implementing Daytona snapshot-based sandbox creation and running required workspace validation. +Implementing Daytona snapshot-based sandbox creation and running required organization validation. 
### Friction / Issue -The workspace `node_modules` tree is partially root-owned in this environment. `pnpm install`/cleanup failed with `EACCES` and left missing local tool entrypoints (for example `turbo`/`typescript`), which blocked `pnpm -w typecheck/build/test` from running end-to-end. +The organization `node_modules` tree is partially root-owned in this environment. `pnpm install`/cleanup failed with `EACCES` and left missing local tool entrypoints (for example `turbo`/`typescript`), which blocked `pnpm -w typecheck/build/test` from running end-to-end. ### Attempted Fix / Workaround -1. Attempted workspace reinstall (`pnpm install`, `CI=true pnpm install`) and package-level reinstall. +1. Attempted organization reinstall (`pnpm install`, `CI=true pnpm install`) and package-level reinstall. 2. Attempted cleanup/recreate of `node_modules`, but root-owned files could not be removed. 3. Added temporary local shims for missing tool entrypoints to continue targeted validation. ### Outcome - Daytona-specific changes and backend tests were validated. -- Full workspace validation remains blocked until `node_modules` ownership is repaired (or container is recreated). +- Full organization validation remains blocked until `node_modules` ownership is repaired (or container is recreated). ## 2026-02-16 - uncommitted @@ -187,7 +187,7 @@ Vitest ESM module namespace exports are non-configurable, so `vi.spyOn(childProc ### Outcome - Backend manager tests are stable under ESM. -- Full workspace tests pass with lifecycle coverage for outdated-backend restart behavior. +- Full organization tests pass with lifecycle coverage for outdated-backend restart behavior. ## 2026-02-08 - uncommitted @@ -202,8 +202,8 @@ The environment did not provide `rg`, and docs/policy files still described Rust ### Attempted Fix / Workaround 1. Switched repository discovery to `find`/`grep`. -2. 
Rewrote project guidance files (`CLAUDE.md`, `skills/SKILL.md`, docs, `SPEC.md`) to match the TypeScript architecture. -3. Added missing TUI test coverage so workspace-wide test runs no longer fail on packages without tests. +2. Rewrote repository guidance files (`CLAUDE.md`, `skills/SKILL.md`, docs, `SPEC.md`) to match the TypeScript architecture. +3. Added missing TUI test coverage so monorepo-wide test runs no longer fail on packages without tests. ### Outcome @@ -214,7 +214,7 @@ The environment did not provide `rg`, and docs/policy files still described Rust ### What I Was Working On -Running full workspace test validation (`pnpm -w test`) for the migrated monorepo. +Running full organization test validation (`pnpm -w test`) for the migrated monorepo. ### Friction / Issue @@ -228,7 +228,7 @@ Backend integration tests depend on native `better-sqlite3` bindings, which were ### Outcome -- Full workspace test suite passes consistently. +- Full organization test suite passes consistently. - Backend unit coverage always runs; DB integration tests run automatically on environments with native bindings. ## 2026-02-09 - aab1012 (working tree) @@ -309,13 +309,13 @@ Running backend tests with the integration flag enabled triggered unrelated acto ### Attempted Fix / Workaround 1. Switched to package-targeted test runs for deterministic coverage (`@sandbox-agent/foundry-backend` + `@sandbox-agent/foundry-frontend`). -2. Relied on required workspace validation (`pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test`) plus targeted stack test files. +2. Relied on required organization validation (`pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test`) plus targeted stack test files. 3. Stopped the runaway integration run and recorded this friction for follow-up. ### Outcome - New stack-focused tests pass in deterministic targeted runs. -- Full required workspace checks pass. +- Full required organization checks pass. 
- Integration-gated suite remains noisy and needs separate stabilization. ## 2026-03-05 - uncommitted @@ -326,7 +326,7 @@ Reviewing architecture for simplification opportunities. ### Friction / Issue -Considered merging `projectPrSync` (30s) and `projectBranchSync` (5s) into a single `projectSync` actor that polls at the faster cadence and does PR fetches every Nth tick. This would reduce actor count by one per repo but violates the single-responsibility-per-actor pattern established in the codebase. Mixed cadences within one actor add conditional tick logic, make the polling intervals harder to reason about independently, and couple two unrelated data sources (git branches vs GitHub API) into one failure domain. +Considered merging `repositoryPrSync` (30s) and `repositoryBranchSync` (5s) into a single `repositorySync` actor that polls at the faster cadence and does PR fetches every Nth tick. This would reduce actor count by one per repo but violates the single-responsibility-per-actor pattern established in the codebase. Mixed cadences within one actor add conditional tick logic, make the polling intervals harder to reason about independently, and couple two unrelated data sources (git branches vs GitHub API) into one failure domain. ### Attempted Fix / Workaround @@ -334,7 +334,7 @@ None — rejected the idea during review. ### Outcome -- Keep `projectPrSync` and `projectBranchSync` as separate actors. +- Keep `repositoryPrSync` and `repositoryBranchSync` as separate actors. - Single-responsibility-per-sync-actor is the right pattern for this codebase. ## 2026-03-06 - 77341ff @@ -345,13 +345,13 @@ Bringing up the Docker-based local dev stack with `just dev` after the BaseUI fr ### Friction / Issue -Docker Desktop recovered, but the frontend container failed immediately with `Cannot find module @rollup/rollup-linux-arm64-gnu`. 
The dev compose setup bind-mounted the host workspace into `/app`, so the Linux container picked up macOS `node_modules` and missed Rollup's Linux optional package. +Docker Desktop recovered, but the frontend container failed immediately with `Cannot find module @rollup/rollup-linux-arm64-gnu`. The dev compose setup bind-mounted the host organization into `/app`, so the Linux container picked up macOS `node_modules` and missed Rollup's Linux optional package. ### Attempted Fix / Workaround 1. Confirmed Docker itself was healthy again by checking the Unix socket, `docker version`, and the backend health endpoint. 2. Reproduced the frontend crash inside `docker compose`. -3. Changed the frontend dev service to use named volumes for workspace `node_modules` and the pnpm store, and to run `pnpm install --frozen-lockfile` inside the container before starting Vite. +3. Changed the frontend dev service to use named volumes for organization `node_modules` and the pnpm store, and to run `pnpm install --frozen-lockfile` inside the container before starting Vite. ### Outcome diff --git a/foundry/research/friction/rivet.mdx b/foundry/research/friction/rivet.mdx index c9cb8eb..a2e4649 100644 --- a/foundry/research/friction/rivet.mdx +++ b/foundry/research/friction/rivet.mdx @@ -12,7 +12,7 @@ Resolving GitHub OAuth callback failures caused by stale actor state after squas 2. **No programmatic way to list or destroy actors on Rivet Cloud without the service key.** The public runner token (`pk_*`) lacks permissions for actor management (list/destroy). The Cloud API token (`cloud_api_*`) in our `.env` was returning "token not found". The actual working token format is the service key (`sk_*`) from the namespace connection URL. This was not documented — the destroy docs reference "admin tokens" which are described as "currently not supported on Rivet Cloud" ([#3530](https://github.com/rivet-dev/rivet/issues/3530)), but the `sk_*` token works. 
The disconnect between the docs and reality cost significant debugging time. -3. **Actor errors during `getOrCreate` are opaque.** When the `workspace.completeAppGithubAuth` action triggered `getOrCreate` for org workspace actors, the migration failure inside the newly-woken actor was surfaced as `"Internal error"` with no indication that it was a migration/schema issue. The actual error (`table already exists`) was only visible in actor-level logs, not in the action response or the calling backend's logs. +3. **Actor errors during `getOrCreate` are opaque.** When the `organization.completeAppGithubAuth` action triggered `getOrCreate` for per-org organization actors, the migration failure inside the newly-woken actor was surfaced as `"Internal error"` with no indication that it was a migration/schema issue. The actual error (`table already exists`) was only visible in actor-level logs, not in the action response or the calling backend's logs. ### Attempted Fix / Workaround @@ -22,7 +22,7 @@ Resolving GitHub OAuth callback failures caused by stale actor state after squas ### Outcome -- All 4 stale workspace actors destroyed (3 org workspaces + 1 old v2-prefixed app workspace). +- All 4 stale organization actors destroyed (3 per-org organization actors + 1 old v2-prefixed app organization actor). - Reverted `IF NOT EXISTS` migration changes so Drizzle migrations remain standard. - After redeploy, new actors will be created fresh with the correct squashed migration journal. - **RivetKit improvement opportunities:**
Project snapshots swallowed missing task actors and only logged warnings, so stale `task_index` rows persisted and appeared as stuck/ghost tasks in the UI. +2. Repository snapshots swallowed missing task actors and only logged warnings, so stale `task_index` rows persisted and appeared as stuck/ghost tasks in the UI. ### Attempted Fix / Workaround 1. In RivetKit file-system driver writes, force persisted `kvStorage` to `[]` (runtime KV is SQLite-only) so oversized legacy payloads are never re-serialized. -2. In backend project actor flows (`hydrate`, `snapshot`, `repo overview`, branch registration, PR-close archive), detect `Actor not found` and prune stale `task_index` rows immediately. +2. In backend repository actor flows (`hydrate`, `snapshot`, `repo overview`, branch registration, PR-close archive), detect `Actor not found` and prune stale `task_index` rows immediately. ### Outcome - Prevents repeated serialization crashes caused by legacy oversized state blobs. -- Missing task actors are now self-healed from project indexes instead of repeatedly surfacing as silent warnings. +- Missing task actors are now self-healed from repository indexes instead of repeatedly surfacing as silent warnings. ## 2026-02-12 - uncommitted @@ -193,7 +193,7 @@ Adopt these concrete repo conventions: - Schema rule (critical): - SQLite is **per actor instance**, not a shared DB across all instances. -- Do not “namespace” rows with `workspaceId`/`repoId`/`taskId` columns when those identifiers already live in the actor key/state. +- Do not “namespace” rows with `organizationId`/`repoId`/`taskId` columns when those identifiers already live in the actor key/state. - Prefer single-row tables for single-instance storage (e.g. `id=1`) when appropriate. 
- Migration generation flow (Bun + DrizzleKit): @@ -247,7 +247,7 @@ Verifying Daytona-backed task/session flows for the new frontend and sandbox-ins ### Friction / Issue -Task workflow steps intermittently entered failed state with `StepExhaustedError` and `unknown error` during initialization replay (`init-start-sandbox-instance`, then `init-write-db`), which caused `task.get` to time out and cascaded into `project snapshot timed out` / `workspace list_tasks timed out`. +Task workflow steps intermittently entered failed state with `StepExhaustedError` and `unknown error` during initialization replay (`init-start-sandbox-instance`, then `init-write-db`), which caused `task.get` to time out and cascaded into `repository snapshot timed out` / `organization list_tasks timed out`. ### Attempted Fix / Workaround @@ -305,7 +305,7 @@ if (msg.type === "TickProjectRefresh") { // Coalesce duplicate ticks for a short window. while (Date.now() < deadline) { - const next = await c.queue.next("project", { timeout: deadline - Date.now() }); + const next = await c.queue.next("repository", { timeout: deadline - Date.now() }); if (!next) break; // timeout if (next.type === "TickProjectRefresh") { @@ -348,7 +348,7 @@ Two mistakes in the prior proposal: 2. **Coalesce by message names, not `msg.type`.** - Keep one message name per command/tick channel. -- When a tick window opens, drain and coalesce multiple tick names (e.g. `tick.project.refresh`, `tick.pr.refresh`, `tick.sandbox.health`) into one execution per name. +- When a tick window opens, drain and coalesce multiple tick names (e.g. `tick.repository.refresh`, `tick.pr.refresh`, `tick.sandbox.health`) into one execution per name. 3. **Tick coalesce pattern with timeout (single loop):** @@ -375,7 +375,7 @@ while (true) { // Timeout reached => one or more ticks are due. 
const due = new Set(); const at = Date.now(); - if (at >= nextProjectRefreshAt) due.add("tick.project.refresh"); + if (at >= nextProjectRefreshAt) due.add("tick.repository.refresh"); if (at >= nextPrRefreshAt) due.add("tick.pr.refresh"); if (at >= nextSandboxHealthAt) due.add("tick.sandbox.health"); @@ -388,7 +388,7 @@ while (true) { } // Execute each due tick once, in deterministic order. - if (due.has("tick.project.refresh")) { + if (due.has("tick.repository.refresh")) { await refreshProjectSnapshot(); nextProjectRefreshAt = Date.now() + 5_000; } @@ -424,7 +424,7 @@ Even with queue-timeout ticks, packing multiple independent timer cadences into ### Final Pattern 1. **Parent actors are command-only loops with no timeout.** -- `WorkspaceActor`, `ProjectActor`, `TaskActor`, and `HistoryActor` wait on queue messages only. +- `OrganizationActor`, `RepositoryActor`, `TaskActor`, and `HistoryActor` wait on queue messages only. 2. **Periodic work moves to dedicated child sync actors.** - Each child actor has exactly one timeout cadence (e.g. PR sync, branch sync, task status sync). @@ -439,7 +439,7 @@ Even with queue-timeout ticks, packing multiple independent timer cadences into ### Example Structure -- `ProjectActor` (no timeout): handles commands + applies `project.pr_sync.result` / `project.branch_sync.result` writes. +- `RepositoryActor` (no timeout): handles commands + applies `repository.pr_sync.result` / `repository.branch_sync.result` writes. - `ProjectPrSyncActor` (timeout 30s): polls PR data, sends result message. - `ProjectBranchSyncActor` (timeout 5s): polls branch data, sends result message. - `TaskActor` (no timeout): handles lifecycle + applies `task.status_sync.result` writes. @@ -502,7 +502,7 @@ Removing custom backend REST endpoints and migrating CLI/TUI calls to direct `ri ### Friction / Issue -We had implemented a `/v1/*` HTTP shim (`/v1/tasks`, `/v1/workspaces/use`, etc.) 
between clients and actors, which duplicated actor APIs and introduced an unnecessary transport layer. +We had implemented a `/v1/*` HTTP shim (`/v1/tasks`, `/v1/organizations/use`, etc.) between clients and actors, which duplicated actor APIs and introduced an unnecessary transport layer. ### Attempted Fix / Workaround @@ -575,21 +575,21 @@ Removing `*Actor` suffix from all actor export names and registry keys. ### Friction / Issue -RivetKit's `setup({ use: { ... } })` uses property names as actor identifiers in `client.` calls. All 8 actors were exported as `workspaceActor`, `projectActor`, `taskActor`, etc., which meant client code used verbose `client.workspaceActor.getOrCreate(...)` instead of `client.workspace.getOrCreate(...)`. +RivetKit's `setup({ use: { ... } })` uses property names as actor identifiers in `client.` calls. All 8 actors were exported as `organizationActor`, `repositoryActor`, `taskActor`, etc., which meant client code used verbose `client.organizationActor.getOrCreate(...)` instead of `client.organization.getOrCreate(...)`. The `Actor` suffix is redundant — everything in the registry is an actor by definition. It also leaked into type names (`WorkspaceActorHandle`, `ProjectActorInput`, `HistoryActorInput`) and local function names (`workspaceActorKey`, `taskActorKey`). ### Attempted Fix / Workaround -1. Renamed all 8 actor exports: `workspaceActor` → `workspace`, `projectActor` → `project`, `taskActor` → `task`, `sandboxInstanceActor` → `sandboxInstance`, `historyActor` → `history`, `projectPrSyncActor` → `projectPrSync`, `projectBranchSyncActor` → `projectBranchSync`, `taskStatusSyncActor` → `taskStatusSync`. +1. Renamed all 8 actor exports: `organizationActor` → `organization`, `repositoryActor` → `repository`, `taskActor` → `task`, `sandboxInstanceActor` → `sandboxInstance`, `historyActor` → `history`, `repositoryPrSyncActor` → `repositoryPrSync`, `repositoryBranchSyncActor` → `repositoryBranchSync`, `taskStatusSyncActor` → `taskStatusSync`. 2.
Updated registry keys in `actors/index.ts`. 3. Renamed all `client.Actor` references across 14 files (actor definitions, backend entry, CLI client, tests). -4. Renamed associated types (`ProjectActorInput` → `ProjectInput`, `HistoryActorInput` → `HistoryInput`, `WorkspaceActorHandle` → `WorkspaceHandle`, `TaskActorHandle` → `TaskHandle`). +4. Renamed associated types (`ProjectActorInput` → `RepositoryInput`, `HistoryActorInput` → `HistoryInput`, `WorkspaceActorHandle` → `OrganizationHandle`, `TaskActorHandle` → `TaskHandle`). ### Outcome - Actor names are now concise and match their semantic role. -- Client code reads naturally: `client.workspace.getOrCreate(...)`, `client.task.get(...)`. +- Client code reads naturally: `client.organization.getOrCreate(...)`, `client.task.get(...)`. - No runtime behavior change — registry property names drive actor routing. ## 2026-02-09 - uncommitted @@ -609,8 +609,8 @@ Concrete examples from our codebase: | Actor | Pattern | Why | |-------|---------|-----| -| `workspace` | Plain run | Every handler is a DB query or single actor delegation | -| `project` | Plain run | Handlers are DB upserts or delegate to task actor | +| `organization` | Plain run | Every handler is a DB query or single actor delegation | +| `repository` | Plain run | Handlers are DB upserts or delegate to task actor | | `task` | **Needs workflow** | `initialize` is a 7-step pipeline (createSandbox → ensureAgent → createSession → DB writes → start child actors); post-idle is a 5-step pipeline (commit → push → PR → cache → notify) | | `history` | Plain run | Single DB insert per message | | `sandboxInstance` | Plain run | Single-table CRUD per message | @@ -647,7 +647,7 @@ This matters when reasoning about workflow `listen()` behavior: you might assume RivetKit docs should clarify: 1. Queue names are **per-actor-instance** — two different actor instances can use the same queue name without collision. -2. The dotted naming convention (e.g. 
`project.command.ensure`) is a user convention for readability, not a routing hierarchy. +2. The dotted naming convention (e.g. `repository.command.ensure`) is a user convention for readability, not a routing hierarchy. 3. `c.queue.next(["a", "b"])` listens on queues named `"a"` and `"b"` *within this actor*, not across actors. ### Outcome @@ -662,7 +662,7 @@ Migrating task actor to durable workflows. AI-generated queue names used dotted ### Friction / Issue -When generating actor queue names, the AI (and our own codebase) defaulted to dotted names like `task.command.initialize`, `project.pr_sync.result`, `task.status_sync.control.start`. These work fine in plain `run` loops, but create friction when interacting with the workflow system because `workflowQueueName()` prefixes them with `__workflow:`, producing names like `__workflow:task.command.initialize`. +When generating actor queue names, the AI (and our own codebase) defaulted to dotted names like `task.command.initialize`, `repository.pr_sync.result`, `task.status_sync.control.start`. These work fine in plain `run` loops, but create friction when interacting with the workflow system because `workflowQueueName()` prefixes them with `__workflow:`, producing names like `__workflow:task.command.initialize`. Queue names should always be **camelCase** (e.g. `initializeTask`, `statusSyncResult`, `attachTask`). Dotted names are misleading — they imply hierarchy or routing semantics that don't exist (queues are flat, per-actor-instance strings). They also look like object property paths, which causes confusion when used as dynamic property keys on queue handles (`actor.queue["task.command.initialize"]`). @@ -754,4 +754,4 @@ Using `better-sqlite3` and `node:sqlite` in backend DB bootstrap caused Bun runt - Backend starts successfully under Bun. - Shared Drizzle/SQLite actor DB path still works. -- Workspace build + tests pass. +- Workspace build + tests pass.
diff --git a/foundry/research/friction/sandboxes.mdx b/foundry/research/friction/sandboxes.mdx index e30e85b..38d4b3f 100644 --- a/foundry/research/friction/sandboxes.mdx +++ b/foundry/research/friction/sandboxes.mdx @@ -8,7 +8,7 @@ Implementing provider adapters (`worktree`, `daytona`) under the backend package ### Friction / Issue -Provider interface intentionally keeps `DestroySandboxRequest` minimal (`workspaceId`, `sandboxId`), but local git worktree cleanup may need repo context. +Provider interface intentionally keeps `DestroySandboxRequest` minimal (`organizationId`, `sandboxId`), but local git worktree cleanup may need repo context. ### Attempted Fix / Workaround @@ -54,8 +54,8 @@ The previous end-to-end flow implicitly depended on local filesystem paths (`rep ### Attempted Fix / Workaround -1. Introduced explicit repo remote records (`WorkspaceActor.addRepo`) and validated remotes with `git ls-remote`. -2. Made `ProjectActor` assert a backend-owned local clone exists on wake and fetch remote branch state from that clone. +1. Introduced explicit imported repository records sourced from GitHub sync instead of local organization paths. +2. Made `RepositoryActor` assert a backend-owned local clone exists on wake and fetch remote branch state from that clone. 3. Updated PR creation to avoid requiring a checked-out branch by using `gh pr create --head `. 4. Updated `DaytonaProvider.createSandbox` to clone the repo and checkout the branch into a deterministic workdir and return it as `cwd` for sandbox-agent sessions. diff --git a/foundry/research/realtime-interest-manager-spec.md b/foundry/research/realtime-interest-manager-spec.md index 9c0fc93..dff2aea 100644 --- a/foundry/research/realtime-interest-manager-spec.md +++ b/foundry/research/realtime-interest-manager-spec.md @@ -4,7 +4,7 @@ Replace the current polling + empty-notification + full-refetch architecture with a push-based realtime system. 
The client subscribes to topics, receives the initial state, and then receives full replacement payloads for changed entities over WebSocket. No polling. No re-fetching. -This spec covers three layers: backend (materialized state + broadcast), client library (interest manager), and frontend (hook consumption). Comment architecture-related code throughout so new contributors can understand the data flow from comments alone. +This spec covers three layers: backend (materialized state + broadcast), client library (subscription manager), and frontend (hook consumption). Comment architecture-related code throughout so new contributors can understand the data flow from comments alone. --- @@ -17,7 +17,7 @@ This spec covers three layers: backend (materialized state + broadcast), client Currently `WorkbenchTask` is a single flat type carrying everything (sidebar fields + transcripts + diffs + file tree). Split it: ```typescript -/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */ +/** Sidebar-level task data. Materialized in the organization actor's SQLite. */ export interface WorkbenchTaskSummary { id: string; repoId: string; @@ -44,7 +44,7 @@ export interface WorkbenchSessionSummary { created: boolean; } -/** Repo-level summary for workspace sidebar. */ +/** Repo-level summary for organization sidebar. */ export interface WorkbenchRepoSummary { id: string; label: string; @@ -93,9 +93,9 @@ export interface WorkbenchSessionDetail { transcript: WorkbenchTranscriptEvent[]; } -/** Workspace-level snapshot — initial fetch for the workspace topic. */ -export interface WorkspaceSummarySnapshot { - workspaceId: string; +/** Organization-level snapshot — initial fetch for the organization topic. 
*/ +export interface OrganizationSummarySnapshot { + organizationId: string; repos: WorkbenchRepoSummary[]; taskSummaries: WorkbenchTaskSummary[]; } @@ -110,8 +110,8 @@ Remove the old `TaskWorkbenchSnapshot` type and `WorkbenchTask` type once migrat Each event carries the full new state of the changed entity — not a patch, not an empty notification. ```typescript -/** Workspace-level events broadcast by the workspace actor. */ -export type WorkspaceEvent = +/** Organization-level events broadcast by the organization actor. */ +export type OrganizationEvent = | { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary } | { type: "taskRemoved"; taskId: string } | { type: "repoAdded"; repo: WorkbenchRepoSummary } @@ -126,7 +126,7 @@ export type TaskEvent = export type SessionEvent = | { type: "sessionUpdated"; session: WorkbenchSessionDetail }; -/** App-level events broadcast by the app workspace actor. */ +/** App-level events broadcast by the app organization actor. */ export type AppEvent = | { type: "appUpdated"; snapshot: FoundryAppSnapshot }; @@ -139,13 +139,13 @@ export type SandboxProcessesEvent = ## 2. 
Backend: Materialized State + Broadcasts -### 2.1 Workspace actor — materialized sidebar state +### 2.1 Organization actor — materialized sidebar state **Files:** -- `packages/backend/src/actors/workspace/db/schema.ts` — add tables -- `packages/backend/src/actors/workspace/actions.ts` — replace `buildWorkbenchSnapshot`, add delta handlers +- `packages/backend/src/actors/organization/db/schema.ts` — add tables +- `packages/backend/src/actors/organization/actions.ts` — replace `buildWorkbenchSnapshot`, add delta handlers -Add to workspace actor SQLite schema: +Add to organization actor SQLite schema: ```typescript export const taskSummaries = sqliteTable("task_summaries", { @@ -161,7 +161,7 @@ export const taskSummaries = sqliteTable("task_summaries", { }); ``` -New workspace actions: +New organization actions: ```typescript /** @@ -176,23 +176,23 @@ async applyTaskSummaryUpdate(c, input: { taskSummary: WorkbenchTaskSummary }) { await c.db.insert(taskSummaries).values(toRow(input.taskSummary)) .onConflictDoUpdate({ target: taskSummaries.taskId, set: toRow(input.taskSummary) }).run(); // Broadcast to connected clients - c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary }); + c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary }); } async removeTaskSummary(c, input: { taskId: string }) { await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId }); + c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId }); } /** - * Initial fetch for the workspace topic. + * Initial fetch for the organization topic. * Reads entirely from local SQLite — no fan-out to child actors. 
*/ -async getWorkspaceSummary(c, input: { workspaceId: string }): Promise { +async getWorkspaceSummary(c, input: { organizationId: string }): Promise { const repoRows = await c.db.select().from(repos).orderBy(desc(repos.updatedAt)).all(); const taskRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repos: repoRows.map(toRepoSummary), taskSummaries: taskRows.map(toTaskSummary), }; @@ -201,7 +201,7 @@ async getWorkspaceSummary(c, input: { workspaceId: string }): Promise { ... } async getSessionDetail(c, input: { sessionId: string }): Promise { ... } ``` -### 2.4 App workspace actor +### 2.4 App organization actor -**File:** `packages/backend/src/actors/workspace/app-shell.ts` +**File:** `packages/backend/src/actors/organization/app-shell.ts` Change `c.broadcast("appUpdated", { at: Date.now(), sessionId })` to: ```typescript @@ -304,12 +304,12 @@ function broadcastProcessesUpdated(c: any): void { ```typescript /** - * Topic definitions for the interest manager. + * Topic definitions for the subscription manager. * * Each topic defines how to connect to an actor, fetch initial state, * which event to listen for, and how to apply incoming events to cached state. * - * The interest manager uses these definitions to manage WebSocket connections, + * The subscription manager uses these definitions to manage WebSocket connections, * cached state, and subscriptions for all realtime data flows. 
*/ @@ -331,10 +331,10 @@ export interface TopicDefinition { } export interface AppTopicParams {} -export interface WorkspaceTopicParams { workspaceId: string } -export interface TaskTopicParams { workspaceId: string; repoId: string; taskId: string } -export interface SessionTopicParams { workspaceId: string; repoId: string; taskId: string; sessionId: string } -export interface SandboxProcessesTopicParams { workspaceId: string; providerId: string; sandboxId: string } +export interface OrganizationTopicParams { organizationId: string } +export interface TaskTopicParams { organizationId: string; repoId: string; taskId: string } +export interface SessionTopicParams { organizationId: string; repoId: string; taskId: string; sessionId: string } +export interface SandboxProcessesTopicParams { organizationId: string; providerId: string; sandboxId: string } export const topicDefinitions = { app: { @@ -345,12 +345,12 @@ export const topicDefinitions = { applyEvent: (_current, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, - workspace: { - key: (p) => `workspace:${p.workspaceId}`, - event: "workspaceUpdated", - connect: (b, p) => b.connectWorkspace(p.workspaceId), - fetchInitial: (b, p) => b.getWorkspaceSummary(p.workspaceId), - applyEvent: (current, event: WorkspaceEvent) => { + organization: { + key: (p) => `organization:${p.organizationId}`, + event: "organizationUpdated", + connect: (b, p) => b.connectWorkspace(p.organizationId), + fetchInitial: (b, p) => b.getWorkspaceSummary(p.organizationId), + applyEvent: (current, event: OrganizationEvent) => { switch (event.type) { case "taskSummaryUpdated": return { @@ -375,22 +375,22 @@ export const topicDefinitions = { }; } }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, task: { - key: (p) => `task:${p.workspaceId}:${p.taskId}`, + key: (p) => `task:${p.organizationId}:${p.taskId}`, event: "taskUpdated", - connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId), - fetchInitial: (b, p) 
=> b.getTaskDetail(p.workspaceId, p.repoId, p.taskId), + connect: (b, p) => b.connectTask(p.organizationId, p.repoId, p.taskId), + fetchInitial: (b, p) => b.getTaskDetail(p.organizationId, p.repoId, p.taskId), applyEvent: (_current, event: TaskEvent) => event.detail, } satisfies TopicDefinition, session: { - key: (p) => `session:${p.workspaceId}:${p.taskId}:${p.sessionId}`, + key: (p) => `session:${p.organizationId}:${p.taskId}:${p.sessionId}`, event: "sessionUpdated", // Reuses the task actor connection — same actor, different event. - connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId), - fetchInitial: (b, p) => b.getSessionDetail(p.workspaceId, p.repoId, p.taskId, p.sessionId), + connect: (b, p) => b.connectTask(p.organizationId, p.repoId, p.taskId), + fetchInitial: (b, p) => b.getSessionDetail(p.organizationId, p.repoId, p.taskId, p.sessionId), applyEvent: (current, event: SessionEvent) => { // Filter: only apply if this event is for our session if (event.session.sessionId !== current.sessionId) return current; @@ -399,10 +399,10 @@ export const topicDefinitions = { } satisfies TopicDefinition, sandboxProcesses: { - key: (p) => `sandbox:${p.workspaceId}:${p.sandboxId}`, + key: (p) => `sandbox:${p.organizationId}:${p.sandboxId}`, event: "processesUpdated", - connect: (b, p) => b.connectSandbox(p.workspaceId, p.providerId, p.sandboxId), - fetchInitial: (b, p) => b.listSandboxProcesses(p.workspaceId, p.providerId, p.sandboxId), + connect: (b, p) => b.connectSandbox(p.organizationId, p.providerId, p.sandboxId), + fetchInitial: (b, p) => b.listSandboxProcesses(p.organizationId, p.providerId, p.sandboxId), applyEvent: (_current, event: SandboxProcessesEvent) => event.processes, } satisfies TopicDefinition, } as const; @@ -413,16 +413,16 @@ export type TopicParams = Parameters<(typeof topicDefinition export type TopicData = Awaited>; ``` -### 3.2 Interest manager interface +### 3.2 Subscription manager interface **File:** 
`packages/client/src/interest/manager.ts` (new) ```typescript /** - * The InterestManager owns all realtime actor connections and cached state. + * The SubscriptionManager owns all realtime actor connections and cached state. * * Architecture: - * - Each topic (app, workspace, task, session, sandboxProcesses) maps to an actor + event. + * - Each topic (app, organization, task, session, sandboxProcesses) maps to an actor + event. * - On first subscription, the manager opens a WebSocket connection, fetches initial state, * and listens for events. Events carry full replacement payloads for the changed entity. * - Multiple subscribers to the same topic share one connection and one cached state. @@ -430,7 +430,7 @@ export type TopicData = Awaited { const GRACE_PERIOD_MS = 30_000; /** - * Remote implementation of InterestManager. + * Remote implementation of SubscriptionManager. * Manages WebSocket connections to RivetKit actors via BackendClient. */ -export class RemoteInterestManager implements InterestManager { +export class RemoteSubscriptionManager implements SubscriptionManager { private entries = new Map>(); constructor(private backend: BackendClient) {} @@ -634,7 +634,7 @@ class TopicEntry { **File:** `packages/client/src/interest/mock-manager.ts` (new) -Same `InterestManager` interface. Uses in-memory state. Topic definitions provide mock data. Mutations call `applyEvent` directly on the entry to simulate broadcasts. No WebSocket connections. +Same `SubscriptionManager` interface. Uses in-memory state. Topic definitions provide mock data. Mutations call `applyEvent` directly on the entry to simulate broadcasts. No WebSocket connections. ### 3.5 React hook @@ -651,17 +651,17 @@ import { useSyncExternalStore, useMemo } from "react"; * - Multiple components subscribing to the same topic share one connection. 
* * @example - * // Subscribe to workspace sidebar data - * const workspace = useInterest("workspace", { workspaceId }); + * // Subscribe to organization sidebar data + * const organization = useSubscription("organization", { organizationId }); * * // Subscribe to task detail (only when viewing a task) - * const task = useInterest("task", selectedTaskId ? { workspaceId, repoId, taskId } : null); + * const task = useSubscription("task", selectedTaskId ? { organizationId, repoId, taskId } : null); * * // Subscribe to active session content - * const session = useInterest("session", activeSessionId ? { workspaceId, repoId, taskId, sessionId } : null); + * const session = useSubscription("session", activeSessionId ? { organizationId, repoId, taskId, sessionId } : null); */ -export function useInterest( - manager: InterestManager, +export function useSubscription( + manager: SubscriptionManager, topicKey: K, params: TopicParams | null, ): TopicState { @@ -698,18 +698,18 @@ Add to the `BackendClient` interface: ```typescript // New connection methods (return WebSocket-based ActorConn) -connectWorkspace(workspaceId: string): Promise; -connectTask(workspaceId: string, repoId: string, taskId: string): Promise; -connectSandbox(workspaceId: string, providerId: string, sandboxId: string): Promise; +connectWorkspace(organizationId: string): Promise; +connectTask(organizationId: string, repoId: string, taskId: string): Promise; +connectSandbox(organizationId: string, providerId: string, sandboxId: string): Promise; // New fetch methods (read from materialized state) -getWorkspaceSummary(workspaceId: string): Promise; -getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise; -getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise; +getWorkspaceSummary(organizationId: string): Promise; +getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; +getSessionDetail(organizationId: string, repoId: 
string, taskId: string, sessionId: string): Promise; ``` Remove: -- `subscribeWorkbench`, `subscribeApp`, `subscribeSandboxProcesses` (replaced by interest manager) +- `subscribeWorkbench`, `subscribeApp`, `subscribeSandboxProcesses` (replaced by subscription manager) - `getWorkbench` (replaced by `getWorkspaceSummary` + `getTaskDetail`) --- @@ -721,16 +721,16 @@ Remove: **File:** `packages/frontend/src/lib/interest.ts` (new) ```typescript -import { RemoteInterestManager } from "@sandbox-agent/foundry-client"; +import { RemoteSubscriptionManager } from "@sandbox-agent/foundry-client"; import { backendClient } from "./backend"; -export const interestManager = new RemoteInterestManager(backendClient); +export const subscriptionManager = new RemoteSubscriptionManager(backendClient); ``` Or for mock mode: ```typescript -import { MockInterestManager } from "@sandbox-agent/foundry-client"; -export const interestManager = new MockInterestManager(); +import { MockSubscriptionManager } from "@sandbox-agent/foundry-client"; +export const subscriptionManager = new MockSubscriptionManager(); ``` ### 4.2 Replace MockLayout workbench subscription @@ -739,7 +739,7 @@ export const interestManager = new MockInterestManager(); Before: ```typescript -const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(workspaceId), [workspaceId]); +const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(organizationId), [organizationId]); const viewModel = useSyncExternalStore( taskWorkbenchClient.subscribe.bind(taskWorkbenchClient), taskWorkbenchClient.getSnapshot.bind(taskWorkbenchClient), @@ -749,9 +749,9 @@ const tasks = viewModel.tasks ?? []; After: ```typescript -const workspace = useInterest(interestManager, "workspace", { workspaceId }); -const taskSummaries = workspace.data?.taskSummaries ?? []; -const repos = workspace.data?.repos ?? 
[]; +const organization = useSubscription(subscriptionManager, "organization", { organizationId }); +const taskSummaries = organization.data?.taskSummaries ?? []; +const repos = organization.data?.repos ?? []; ``` ### 4.3 Replace MockLayout task detail @@ -759,8 +759,8 @@ const repos = workspace.data?.repos ?? []; When a task is selected, subscribe to its detail: ```typescript -const taskDetail = useInterest(interestManager, "task", - selectedTaskId ? { workspaceId, repoId: activeRepoId, taskId: selectedTaskId } : null +const taskDetail = useSubscription(subscriptionManager, "task", + selectedTaskId ? { organizationId, repoId: activeRepoId, taskId: selectedTaskId } : null ); ``` @@ -769,25 +769,25 @@ const taskDetail = useInterest(interestManager, "task", When a session tab is active: ```typescript -const sessionDetail = useInterest(interestManager, "session", - activeSessionId ? { workspaceId, repoId, taskId, sessionId: activeSessionId } : null +const sessionDetail = useSubscription(subscriptionManager, "session", + activeSessionId ? 
{ organizationId, repoId, taskId, sessionId: activeSessionId } : null ); ``` -### 4.5 Replace workspace-dashboard.tsx polling +### 4.5 Replace organization-dashboard.tsx polling Remove ALL `useQuery` with `refetchInterval` in this file: -- `tasksQuery` (2.5s polling) → `useInterest("workspace", ...)` -- `taskDetailQuery` (2.5s polling) → `useInterest("task", ...)` -- `reposQuery` (10s polling) → `useInterest("workspace", ...)` -- `repoOverviewQuery` (5s polling) → `useInterest("workspace", ...)` -- `sessionsQuery` (3s polling) → `useInterest("task", ...)` (sessionsSummary field) -- `eventsQuery` (2.5s polling) → `useInterest("session", ...)` +- `tasksQuery` (2.5s polling) → `useSubscription("organization", ...)` +- `taskDetailQuery` (2.5s polling) → `useSubscription("task", ...)` +- `reposQuery` (10s polling) → `useSubscription("organization", ...)` +- `repoOverviewQuery` (5s polling) → `useSubscription("organization", ...)` +- `sessionsQuery` (3s polling) → `useSubscription("task", ...)` (sessionsSummary field) +- `eventsQuery` (2.5s polling) → `useSubscription("session", ...)` ### 4.6 Replace terminal-pane.tsx polling -- `taskQuery` (2s polling) → `useInterest("task", ...)` -- `processesQuery` (3s polling) → `useInterest("sandboxProcesses", ...)` +- `taskQuery` (2s polling) → `useSubscription("task", ...)` +- `processesQuery` (3s polling) → `useSubscription("sandboxProcesses", ...)` - Remove `subscribeSandboxProcesses` useEffect ### 4.7 Replace app client subscription @@ -804,14 +804,14 @@ export function useMockAppSnapshot(): FoundryAppSnapshot { After: ```typescript export function useAppSnapshot(): FoundryAppSnapshot { - const app = useInterest(interestManager, "app", {}); + const app = useSubscription(subscriptionManager, "app", {}); return app.data ?? DEFAULT_APP_SNAPSHOT; } ``` ### 4.8 Mutations -Mutations (`createTask`, `renameTask`, `sendMessage`, etc.) no longer need manual `refetch()` or `refresh()` calls after completion. 
The backend mutation triggers a broadcast, which the interest manager receives and applies automatically. +Mutations (`createTask`, `renameTask`, `sendMessage`, etc.) no longer need manual `refetch()` or `refresh()` calls after completion. The backend mutation triggers a broadcast, which the subscription manager receives and applies automatically. Before: ```typescript @@ -841,24 +841,24 @@ const createSession = useMutation({ | File/Code | Reason | |---|---| -| `packages/client/src/remote/workbench-client.ts` | Replaced by interest manager `workspace` + `task` topics | -| `packages/client/src/remote/app-client.ts` | Replaced by interest manager `app` topic | +| `packages/client/src/remote/workbench-client.ts` | Replaced by subscription manager `organization` + `task` topics | +| `packages/client/src/remote/app-client.ts` | Replaced by subscription manager `app` topic | | `packages/client/src/workbench-client.ts` | Factory for above — no longer needed | | `packages/client/src/app-client.ts` | Factory for above — no longer needed | -| `packages/frontend/src/lib/workbench.ts` | Workbench client singleton — replaced by interest manager | -| `subscribeWorkbench` in `backend-client.ts` | Replaced by `connectWorkspace` + interest manager | -| `subscribeSandboxProcesses` in `backend-client.ts` | Replaced by `connectSandbox` + interest manager | -| `subscribeApp` in `backend-client.ts` | Replaced by `connectWorkspace("app")` + interest manager | -| `buildWorkbenchSnapshot` in `workspace/actions.ts` | Replaced by `getWorkspaceSummary` (local reads). Keep as `reconcileWorkbenchState` for recovery only. 
| -| `notifyWorkbenchUpdated` in `workspace/actions.ts` | Replaced by `applyTaskSummaryUpdate` + `c.broadcast` with payload | +| `packages/frontend/src/lib/workbench.ts` | Workbench client singleton — replaced by subscription manager | +| `subscribeWorkbench` in `backend-client.ts` | Replaced by `connectWorkspace` + subscription manager | +| `subscribeSandboxProcesses` in `backend-client.ts` | Replaced by `connectSandbox` + subscription manager | +| `subscribeApp` in `backend-client.ts` | Replaced by `connectWorkspace("app")` + subscription manager | +| `buildWorkbenchSnapshot` in `organization/actions.ts` | Replaced by `getWorkspaceSummary` (local reads). Keep as `reconcileWorkbenchState` for recovery only. | +| `notifyWorkbenchUpdated` in `organization/actions.ts` | Replaced by `applyTaskSummaryUpdate` + `c.broadcast` with payload | | `notifyWorkbenchUpdated` in `task/workbench.ts` | Replaced by `broadcastTaskUpdate` helper | -| `TaskWorkbenchSnapshot` in `shared/workbench.ts` | Replaced by `WorkspaceSummarySnapshot` + `WorkbenchTaskDetail` | +| `TaskWorkbenchSnapshot` in `shared/workbench.ts` | Replaced by `OrganizationSummarySnapshot` + `WorkbenchTaskDetail` | | `WorkbenchTask` in `shared/workbench.ts` | Split into `WorkbenchTaskSummary` + `WorkbenchTaskDetail` | -| `getWorkbench` action on workspace actor | Replaced by `getWorkspaceSummary` | -| `TaskWorkbenchClient` interface | Replaced by `InterestManager` + `useInterest` hook | -| All `useQuery` with `refetchInterval` in `workspace-dashboard.tsx` | Replaced by `useInterest` | -| All `useQuery` with `refetchInterval` in `terminal-pane.tsx` | Replaced by `useInterest` | -| Mock workbench client (`packages/client/src/mock/workbench-client.ts`) | Replaced by `MockInterestManager` | +| `getWorkbench` action on organization actor | Replaced by `getWorkspaceSummary` | +| `TaskWorkbenchClient` interface | Replaced by `SubscriptionManager` + `useSubscription` hook | +| All `useQuery` with `refetchInterval` in 
`organization-dashboard.tsx` | Replaced by `useSubscription` | +| All `useQuery` with `refetchInterval` in `terminal-pane.tsx` | Replaced by `useSubscription` | +| Mock workbench client (`packages/client/src/mock/workbench-client.ts`) | Replaced by `MockSubscriptionManager` | --- @@ -867,27 +867,27 @@ const createSession = useMutation({ Implement in this order to keep the system working at each step: ### Phase 1: Types and backend materialization -1. Add new types to `packages/shared` (`WorkbenchTaskSummary`, `WorkbenchTaskDetail`, `WorkbenchSessionSummary`, `WorkbenchSessionDetail`, `WorkspaceSummarySnapshot`, event types). -2. Add `taskSummaries` table to workspace actor schema. -3. Add `applyTaskSummaryUpdate`, `removeTaskSummary`, `getWorkspaceSummary` actions to workspace actor. +1. Add new types to `packages/shared` (`WorkbenchTaskSummary`, `WorkbenchTaskDetail`, `WorkbenchSessionSummary`, `WorkbenchSessionDetail`, `OrganizationSummarySnapshot`, event types). +2. Add `taskSummaries` table to organization actor schema. +3. Add `applyTaskSummaryUpdate`, `removeTaskSummary`, `getWorkspaceSummary` actions to organization actor. 4. Add `getTaskDetail`, `getSessionDetail` actions to task actor. 5. Replace all `notifyWorkbenchUpdated` call sites with `broadcastTaskUpdate` that pushes summary + broadcasts detail with payload. 6. Change app actor broadcast to include snapshot payload. 7. Change sandbox actor broadcast to include process list payload. 8. Add one-time reconciliation action to populate `taskSummaries` table from existing task actors (run on startup or on-demand). -### Phase 2: Client interest manager -9. Add `InterestManager` interface, `RemoteInterestManager`, `MockInterestManager` to `packages/client`. +### Phase 2: Client subscription manager +9. Add `SubscriptionManager` interface, `RemoteSubscriptionManager`, `MockSubscriptionManager` to `packages/client`. 10. Add topic definitions registry. -11. Add `useInterest` hook. +11. 
Add `useSubscription` hook. 12. Add `connectWorkspace`, `connectTask`, `connectSandbox`, `getWorkspaceSummary`, `getTaskDetail`, `getSessionDetail` to `BackendClient`. ### Phase 3: Frontend migration -13. Replace `useMockAppSnapshot` with `useInterest("app", ...)`. -14. Replace `MockLayout` workbench subscription with `useInterest("workspace", ...)`. -15. Replace task detail view with `useInterest("task", ...)` + `useInterest("session", ...)`. -16. Replace `workspace-dashboard.tsx` polling queries with `useInterest`. -17. Replace `terminal-pane.tsx` polling queries with `useInterest`. +13. Replace `useMockAppSnapshot` with `useSubscription("app", ...)`. +14. Replace `MockLayout` workbench subscription with `useSubscription("organization", ...)`. +15. Replace task detail view with `useSubscription("task", ...)` + `useSubscription("session", ...)`. +16. Replace `organization-dashboard.tsx` polling queries with `useSubscription`. +17. Replace `terminal-pane.tsx` polling queries with `useSubscription`. 18. Remove manual `refetch()` calls from mutations. ### Phase 4: Cleanup @@ -902,10 +902,10 @@ Implement in this order to keep the system working at each step: Add doc comments at these locations: - **Topic definitions** — explain the materialized state pattern, why events carry full entity state instead of patches, and the relationship between topics. -- **`broadcastTaskUpdate` helper** — explain the dual-broadcast pattern (push summary to workspace + broadcast detail to direct subscribers). -- **`InterestManager` interface** — explain the grace period, deduplication, and why mock/remote share the same interface. -- **`useInterest` hook** — explain `useSyncExternalStore` integration, null params for conditional interest, and how params key stabilization works. -- **Workspace actor `taskSummaries` table** — explain this is a materialized read projection maintained by task actor pushes, not a source of truth. 
+- **`broadcastTaskUpdate` helper** — explain the dual-broadcast pattern (push summary to organization + broadcast detail to direct subscribers). +- **`SubscriptionManager` interface** — explain the grace period, deduplication, and why mock/remote share the same interface. +- **`useSubscription` hook** — explain `useSyncExternalStore` integration, null params for conditional interest, and how params key stabilization works. +- **Organization actor `taskSummaries` table** — explain this is a materialized read projection maintained by task actor pushes, not a source of truth. - **`applyTaskSummaryUpdate` action** — explain this is the write path for the materialized projection, called by task actors, not by clients. - **`getWorkspaceSummary` action** — explain this reads from local SQLite only, no fan-out, and why that's the correct pattern. @@ -913,7 +913,7 @@ Add doc comments at these locations: ## 8. Testing -- Interest manager unit tests: subscribe/unsubscribe lifecycle, grace period, deduplication, event application. -- Mock implementation tests: verify same behavior as remote through shared test suite against the `InterestManager` interface. +- Subscription manager unit tests: subscribe/unsubscribe lifecycle, grace period, deduplication, event application. +- Mock implementation tests: verify same behavior as remote through shared test suite against the `SubscriptionManager` interface. - Backend integration: verify `applyTaskSummaryUpdate` correctly materializes and broadcasts. - E2E: verify that a task mutation (e.g. rename) updates the sidebar in realtime without polling. 
diff --git a/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md b/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md index cd9dcbf..1cb4d37 100644 --- a/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md +++ b/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md @@ -28,7 +28,7 @@ The goal is not just to make individual endpoints faster. The goal is to move Fo ### Workbench -- `getWorkbench` still represents a monolithic workspace read that aggregates repo, project, and task state. +- `getWorkbench` still represents a monolithic organization read that aggregates repo, repository, and task state. - The remote workbench store still responds to every event by pulling a full fresh snapshot. - Some task/workbench detail is still too expensive to compute inline and too broad to refresh after every mutation. @@ -57,7 +57,7 @@ Requests should not block on provider calls, repo sync, sandbox provisioning, tr ### View-model rule - App shell view connects to app/session state and only the org actors visible on screen. -- Workspace/task-list view connects to a workspace-owned summary projection. +- Organization/task-list view connects to an organization-owned summary projection. - Task detail view connects directly to the selected task actor. - Sandbox/session detail connects only when the user opens that detail. @@ -99,7 +99,7 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. #### Changes -1. Introduce a small app-shell projection owned by the app workspace actor: +1. Introduce a small app-shell projection owned by the app organization actor: - auth status - current user summary - active org id @@ -121,7 +121,7 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. 
#### Likely files -- `foundry/packages/backend/src/actors/workspace/app-shell.ts` +- `foundry/packages/backend/src/actors/organization/app-shell.ts` - `foundry/packages/client/src/backend-client.ts` - `foundry/packages/client/src/remote/app-client.ts` - `foundry/packages/shared/src/app-shell.ts` @@ -133,42 +133,42 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. - Selecting an org returns quickly and the UI updates from actor events. - App shell refresh cost is bounded by visible state, not every eligible organization on every poll. -### 3. Workspace summary becomes a projection, not a full snapshot +### 3. Organization summary becomes a projection, not a full snapshot -The task list should read a workspace-owned summary projection instead of calling into every task actor on each refresh. +The task list should read an organization-owned summary projection instead of calling into every task actor on each refresh. #### Changes -1. Define a durable workspace summary model with only list-screen fields: +1. Define a durable organization summary model with only list-screen fields: - repo summary - - project summary + - repository summary - task summary - selected/open task ids - unread/session status summary - coarse git/PR state summary -2. Update workspace actor workflows so task/project changes incrementally update this projection. +2. Update organization actor workflows so task/repository changes incrementally update this projection. 3. Change `getWorkbench` to return the projection only. 4. Change `workbenchUpdated` from "invalidate and refetch everything" to "here is the updated projection version or changed entity ids". 5. Remove task-actor fan-out from the default list read path. 
#### Likely files -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` - `foundry/packages/backend/src/actors/task/index.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` -- task/workspace DB schema and migrations +- task/organization DB schema and migrations - `foundry/packages/client/src/remote/workbench-client.ts` #### Acceptance criteria - Workbench list refresh does not call every task actor. - A websocket event does not force a full cross-actor rebuild. -- Initial task-list load time scales roughly with workspace summary size, not repo count times task count times detail reads. +- Initial task-list load time scales roughly with organization summary size, not repo count times task count times detail reads. ### 4. Task detail moves to direct actor reads and events -Heavy task detail should move out of the workspace summary and into the selected task actor. +Heavy task detail should move out of the organization summary and into the selected task actor. #### Changes @@ -258,7 +258,7 @@ Do not delete bootstrap endpoints first. Shrink them after the subscription mode 4. `06-daytona-provisioning-staged-background-flow.md` 5. App shell realtime subscription model 6. `02-repo-overview-from-cached-projection.md` -7. Workspace summary projection +7. Organization summary projection 8. `04-workbench-session-creation-without-inline-provisioning.md` 9. `05-workbench-snapshot-from-derived-state.md` 10. Task-detail direct actor reads/subscriptions @@ -270,7 +270,7 @@ Do not delete bootstrap endpoints first. Shrink them after the subscription mode - Runtime hardening removes the most dangerous correctness bug before more UI load shifts onto actor connections. - The first async workflow items reduce the biggest user-visible stalls quickly. 
- App shell realtime is smaller and lower-risk than the workbench migration, and it removes the current polling loop. -- Workspace summary and task-detail split should happen after the async workflow moves so the projection model does not encode old synchronous assumptions. +- Organization summary and task-detail split should happen after the async workflow moves so the projection model does not encode old synchronous assumptions. - Auth simplification is valuable but not required to remove the current refresh/polling/runtime problems. ## Observability Requirements @@ -291,7 +291,7 @@ Each log line should include a request id or actor/event correlation id where po 1. Ship runtime hardening and observability first. 2. Ship app-shell realtime behind a client flag while keeping snapshot bootstrap. -3. Ship workspace summary projection behind a separate flag. +3. Ship organization summary projection behind a separate flag. 4. Migrate one heavy detail pane at a time off the monolithic workbench payload. 5. Remove polling once the matching event path is proven stable. 6. Only then remove or demote the old snapshot-heavy steady-state flows. 
diff --git a/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md b/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md index 2aa9f50..1eb1594 100644 --- a/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md +++ b/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md @@ -10,8 +10,8 @@ That makes a user-facing action depend on queue-backed and provider-backed work ## Current Code Context -- Workspace entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project task creation path: `foundry/packages/backend/src/actors/project/actions.ts` +- Organization entry point: `foundry/packages/backend/src/actors/organization/actions.ts` +- Repository task creation path: `foundry/packages/backend/src/actors/repository/actions.ts` - Task action surface: `foundry/packages/backend/src/actors/task/index.ts` - Task workflow: `foundry/packages/backend/src/actors/task/workflow/index.ts` - Task init/provision steps: `foundry/packages/backend/src/actors/task/workflow/init.ts` @@ -33,8 +33,8 @@ That makes a user-facing action depend on queue-backed and provider-backed work - persisting any immediately-known metadata - returning the current task record 3. After initialize completes, enqueue `task.command.provision` with `wait: false`. -4. Change `workspace.createTask` to: - - create or resolve the project +4. 
Change `organization.createTask` to: + - create or resolve the repository - create the task actor - call `task.initialize(...)` - stop awaiting `task.provision(...)` @@ -51,12 +51,12 @@ That makes a user-facing action depend on queue-backed and provider-backed work ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` - `foundry/packages/backend/src/actors/task/index.ts` - `foundry/packages/backend/src/actors/task/workflow/index.ts` - `foundry/packages/backend/src/actors/task/workflow/init.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` - `foundry/packages/client/src/remote/workbench-client.ts` ## Client Impact diff --git a/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md b/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md index 27afad5..1d31216 100644 --- a/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md +++ b/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md @@ -15,11 +15,11 @@ The frontend polls repo overview repeatedly, so this design multiplies slow work ## Current Code Context -- Workspace overview entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project overview implementation: `foundry/packages/backend/src/actors/project/actions.ts` -- Branch sync poller: `foundry/packages/backend/src/actors/project-branch-sync/index.ts` -- PR sync poller: `foundry/packages/backend/src/actors/project-pr-sync/index.ts` -- Repo overview client polling: `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- Organization overview entry point: 
`foundry/packages/backend/src/actors/organization/actions.ts` +- Repository overview implementation: `foundry/packages/backend/src/actors/repository/actions.ts` +- Branch sync poller: `foundry/packages/backend/src/actors/repository-branch-sync/index.ts` +- PR sync poller: `foundry/packages/backend/src/actors/repository-pr-sync/index.ts` +- Repo overview client polling: `foundry/packages/frontend/src/components/organization-dashboard.tsx` ## Target Contract @@ -30,27 +30,27 @@ The frontend polls repo overview repeatedly, so this design multiplies slow work ## Proposed Fix 1. Remove inline `forceProjectSync()` from `getRepoOverview`. -2. Add freshness fields to the project projection, for example: +2. Add freshness fields to the repository projection, for example: - `branchSyncAt` - `prSyncAt` - `branchSyncStatus` - `prSyncStatus` 3. Let the existing polling actors own cache refresh. -4. If the client needs a manual refresh, add a non-blocking command such as `project.requestOverviewRefresh` that: +4. If the client needs a manual refresh, add a non-blocking command such as `repository.requestOverviewRefresh` that: - enqueues refresh work - updates sync status to `queued` or `running` - returns immediately -5. Keep `getRepoOverview` as a pure read over project SQLite state. +5. Keep `getRepoOverview` as a pure read over repository SQLite state. 
## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` -- `foundry/packages/backend/src/actors/project/db/schema.ts` -- `foundry/packages/backend/src/actors/project/db/migrations.ts` -- `foundry/packages/backend/src/actors/project-branch-sync/index.ts` -- `foundry/packages/backend/src/actors/project-pr-sync/index.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` +- `foundry/packages/backend/src/actors/repository/db/schema.ts` +- `foundry/packages/backend/src/actors/repository/db/migrations.ts` +- `foundry/packages/backend/src/actors/repository-branch-sync/index.ts` +- `foundry/packages/backend/src/actors/repository-pr-sync/index.ts` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` ## Client Impact diff --git a/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md b/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md index 2c1738c..9fdd46a 100644 --- a/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md +++ b/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md @@ -10,20 +10,20 @@ These flows depend on repo/network state and can take minutes. They should not h ## Current Code Context -- Workspace repo action entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project repo action implementation: `foundry/packages/backend/src/actors/project/actions.ts` -- Branch/task index state lives in the project actor SQLite DB. 
+- Organization repo action entry point: `foundry/packages/backend/src/actors/organization/actions.ts` +- Repository repo action implementation: `foundry/packages/backend/src/actors/repository/actions.ts` +- Branch/task index state lives in the repository actor SQLite DB. - Current forced sync uses the PR and branch polling actors before and after the action. ## Target Contract - Repo-affecting actions are accepted quickly and run in the background. -- The project actor owns a durable action record with progress and final result. -- Clients observe status via project/task state instead of waiting for a single response. +- The repository actor owns a durable action record with progress and final result. +- Clients observe status via repository/task state instead of waiting for a single response. ## Proposed Fix -1. Introduce a project-level workflow/job model for repo actions, for example: +1. Introduce a repository-level workflow/job model for repo actions, for example: - `sync_repo` - `restack_repo` - `restack_subtree` @@ -49,11 +49,11 @@ These flows depend on repo/network state and can take minutes. They should not h ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` -- `foundry/packages/backend/src/actors/project/db/schema.ts` -- `foundry/packages/backend/src/actors/project/db/migrations.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` +- `foundry/packages/backend/src/actors/repository/db/schema.ts` +- `foundry/packages/backend/src/actors/repository/db/migrations.ts` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` - Any shared types in `foundry/packages/shared/src` ## Client Impact @@ -70,5 +70,5 @@ These flows depend on repo/network state and can take minutes. 
They should not h ## Implementation Notes - Keep validation cheap in the request path; expensive repo inspection belongs in the workflow. -- If job rows are added, decide whether they are project-owned only or also mirrored into history events for UI consumption. +- If job rows are added, decide whether they are repository-owned only or also mirrored into history events for UI consumption. - Fresh-agent check: branch-backed task creation and explicit repo stack actions should use the same background job/status vocabulary where possible. diff --git a/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md b/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md index 9221780..d48e4f0 100644 --- a/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md +++ b/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md @@ -8,7 +8,7 @@ Creating a workbench tab currently provisions the whole task if no active sandbo ## Current Code Context -- Workspace workbench action entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` +- Organization workbench action entry point: `foundry/packages/backend/src/actors/organization/actions.ts` - Task workbench behavior: `foundry/packages/backend/src/actors/task/workbench.ts` - Task provision action: `foundry/packages/backend/src/actors/task/index.ts` - Sandbox session creation path: `foundry/packages/backend/src/actors/sandbox-instance/index.ts` @@ -36,7 +36,7 @@ Creating a workbench tab currently provisions the whole task if no active sandbo ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` - `foundry/packages/backend/src/actors/task/index.ts` - 
`foundry/packages/backend/src/actors/task/db/schema.ts` diff --git a/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md b/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md index 55401a7..07cc0a5 100644 --- a/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md +++ b/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md @@ -17,7 +17,7 @@ The remote workbench client refreshes after each action and on update events, so ## Current Code Context -- Workspace workbench snapshot builder: `foundry/packages/backend/src/actors/workspace/actions.ts` +- Organization workbench snapshot builder: `foundry/packages/backend/src/actors/organization/actions.ts` - Task workbench snapshot builder: `foundry/packages/backend/src/actors/task/workbench.ts` - Sandbox session event persistence: `foundry/packages/backend/src/actors/sandbox-instance/persist.ts` - Remote workbench client refresh loop: `foundry/packages/client/src/remote/workbench-client.ts` @@ -43,7 +43,7 @@ The remote workbench client refreshes after each action and on update events, so ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` - `foundry/packages/backend/src/actors/task/db/schema.ts` - `foundry/packages/backend/src/actors/task/db/migrations.ts` diff --git a/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md b/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md index 50f3b56..dbaf976 100644 --- a/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md +++ b/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md @@ -17,8 +17,8 @@ Authentication and user identity are conflated into a single `appSessions` table ## Current Code Context - 
Custom OAuth flow: `foundry/packages/backend/src/services/app-github.ts` (`buildAuthorizeUrl`, `exchangeCode`, `getViewer`) -- Session + identity management: `foundry/packages/backend/src/actors/workspace/app-shell.ts` (`ensureAppSession`, `updateAppSession`, `initGithubSession`, `syncGithubOrganizations`) -- Session schema: `foundry/packages/backend/src/actors/workspace/db/schema.ts` (`appSessions` table) +- Session + identity management: `foundry/packages/backend/src/actors/organization/app-shell.ts` (`ensureAppSession`, `updateAppSession`, `initGithubSession`, `syncGithubOrganizations`) +- Session schema: `foundry/packages/backend/src/actors/organization/db/schema.ts` (`appSessions` table) - Shared types: `foundry/packages/shared/src/app-shell.ts` (`FoundryUser`, `FoundryAppSnapshot`) - HTTP routes: `foundry/packages/backend/src/index.ts` (`resolveSessionId`, `/v1/auth/github/*`, all `/v1/app/*` routes) - Frontend session persistence: `foundry/packages/client/src/backend-client.ts` (`persistAppSessionId`, `x-foundry-session` header, `foundrySession` URL param extraction) @@ -41,7 +41,7 @@ Authentication and user identity are conflated into a single `appSessions` table - BetterAuth uses a custom adapter that routes all DB operations through RivetKit actors. - Each user has their own actor. BetterAuth's `user`, `session`, and `account` tables live in the per-user actor's SQLite via `c.db`. - The adapter resolves which actor to target based on the primary key BetterAuth passes for each operation (user ID, session ID, account ID). -- A lightweight **session index** on the app-shell workspace actor maps session tokens → user actor identity, so inbound requests can be routed to the correct user actor without knowing the user ID upfront. +- A lightweight **session index** on the app-shell organization actor maps session tokens → user actor identity, so inbound requests can be routed to the correct user actor without knowing the user ID upfront. 
### Canonical user record @@ -70,9 +70,9 @@ BetterAuth expects a single database. Foundry uses per-actor SQLite — each act When an HTTP request arrives, the backend has a session token but doesn't know the user ID yet. BetterAuth calls adapter methods like `findSession(sessionId)` to resolve this. But which actor holds that session row? -**Solution: session index on the app-shell workspace actor.** +**Solution: session index on the app-shell organization actor.** -The app-shell workspace actor (which already handles auth routing) maintains a lightweight index table: +The app-shell organization actor (which already handles auth routing) maintains a lightweight index table: ``` sessionIndex @@ -83,7 +83,7 @@ sessionIndex The adapter flow for session lookup: 1. BetterAuth calls `findSession(sessionId)`. -2. Adapter queries `sessionIndex` on the workspace actor to resolve `userActorKey`. +2. Adapter queries `sessionIndex` on the organization actor to resolve `userActorKey`. 3. Adapter gets the user actor handle and queries BetterAuth's `session` table in that actor's `c.db`. The adapter flow for user creation (OAuth callback): @@ -91,12 +91,12 @@ The adapter flow for user creation (OAuth callback): 2. Adapter resolves the GitHub numeric ID from the user data. 3. Adapter creates/gets the user actor keyed by GitHub ID. 4. Adapter inserts into BetterAuth's `user` table in that actor's `c.db`. -5. When `createSession` follows, adapter writes to the user actor's `session` table AND inserts into the workspace actor's `sessionIndex`. +5. When `createSession` follows, adapter writes to the user actor's `session` table AND inserts into the organization actor's `sessionIndex`. 
### User actor shape ```text -UserActor (key: ["ws", workspaceId, "user", githubNumericId]) +UserActor (key: ["ws", organizationId, "user", githubNumericId]) ├── BetterAuth tables: user, session, account (managed by BetterAuth schema) ├── userProfiles (app-specific: eligibleOrganizationIds, starterRepoStatus, roleLabel) └── sessionState (app-specific: activeOrganizationId per session) @@ -127,15 +127,15 @@ The adapter must inspect `model` and `where` to determine the target actor: | Model | Routing strategy | |-------|-----------------| | `user` (by id) | User actor key derived directly from user ID | -| `user` (by email) | `emailIndex` on workspace actor → user actor key | -| `session` (by token) | `sessionIndex` on workspace actor → user actor key | -| `session` (by id) | `sessionIndex` on workspace actor → user actor key | +| `user` (by email) | `emailIndex` on organization actor → user actor key | +| `session` (by token) | `sessionIndex` on organization actor → user actor key | +| `session` (by id) | `sessionIndex` on organization actor → user actor key | | `session` (by userId) | User actor key derived directly from userId | | `account` | Always has `userId` in where or data → user actor key | -| `verification` | Workspace actor (not user-scoped — used for email verification, password reset) | +| `verification` | Organization actor (not user-scoped — used for email verification, password reset) | -On `create` for `session` model: write to user actor's `session` table AND insert into workspace actor's `sessionIndex`. -On `delete` for `session` model: delete from user actor's `session` table AND remove from workspace actor's `sessionIndex`. +On `create` for `session` model: write to user actor's `session` table AND insert into organization actor's `sessionIndex`. +On `delete` for `session` model: delete from user actor's `session` table AND remove from organization actor's `sessionIndex`. 
#### Adapter construction @@ -188,14 +188,14 @@ session: { #### BetterAuth core tables -Four tables, all in the per-user actor's SQLite (except `verification` which goes on workspace actor): +Four tables, all in the per-user actor's SQLite (except `verification` which goes on organization actor): **`user`**: `id`, `name`, `email`, `emailVerified`, `image`, `createdAt`, `updatedAt` **`session`**: `id`, `token`, `userId`, `expiresAt`, `ipAddress?`, `userAgent?`, `createdAt`, `updatedAt` **`account`**: `id`, `userId`, `accountId` (GitHub numeric ID), `providerId` ("github"), `accessToken?`, `refreshToken?`, `scope?`, `createdAt`, `updatedAt` **`verification`**: `id`, `identifier`, `value`, `expiresAt`, `createdAt`, `updatedAt` -For `findUserByEmail`, a secondary index (email → user actor key) is needed on the workspace actor alongside `sessionIndex`. +For `findUserByEmail`, a secondary index (email → user actor key) is needed on the organization actor alongside `sessionIndex`. ## Implementation Plan @@ -210,12 +210,12 @@ Research confirms: 1. **Prototype the adapter + user actor end-to-end** — wire up `createAdapterFactory` with a minimal actor-routed implementation. Confirm that BetterAuth's GitHub OAuth flow completes successfully with user/session/account records landing in the correct per-user actor's SQLite. 2. **Verify `findOne` for session model** — confirm the `where` clause BetterAuth passes for session lookup includes the `token` field (not just `id`), so the adapter can route via `sessionIndex` keyed by token. -3. **Measure cookie-cached vs uncached request latency** — confirm that with cookie caching enabled, the adapter is not called on every request, and that the uncached fallback (workspace actor index → user actor → session table) is acceptable. +3. 
**Measure cookie-cached vs uncached request latency** — confirm that with cookie caching enabled, the adapter is not called on every request, and that the uncached fallback (organization actor index → user actor → session table) is acceptable. ### Phase 1: User actor + adapter infrastructure (no behavior change) 1. **Install `better-auth` package** in `packages/backend`. -2. **Define `UserActor`** with actor key `["ws", workspaceId, "user", githubNumericId]`. Include BetterAuth's required tables (`user`, `session`, `account`) plus app-specific tables in its schema. +2. **Define `UserActor`** with actor key `["ws", organizationId, "user", githubNumericId]`. Include BetterAuth's required tables (`user`, `session`, `account`) plus app-specific tables in its schema. 3. **Create `userProfiles` table** in user actor schema: ``` userProfiles @@ -237,7 +237,7 @@ Research confirms: ├── createdAt (integer) ├── updatedAt (integer) ``` -5. **Create `sessionIndex` and `emailIndex` tables** on the app-shell workspace actor: +5. **Create `sessionIndex` and `emailIndex` tables** on the app-shell organization actor: ``` sessionIndex ├── sessionId (text, PK) @@ -256,7 +256,7 @@ Research confirms: ### Phase 2: Migrate OAuth flow to BetterAuth 1. **Replace `startAppGithubAuth`** — delegate to BetterAuth's GitHub OAuth initiation instead of hand-rolling `buildAuthorizeUrl` + `oauthState` + `oauthStateExpiresAt`. -2. **Replace `completeAppGithubAuth`** — delegate to BetterAuth's callback handler. BetterAuth creates/updates the user record in the user actor and creates a signed session. The adapter writes to `sessionIndex` on the workspace actor. +2. **Replace `completeAppGithubAuth`** — delegate to BetterAuth's callback handler. BetterAuth creates/updates the user record in the user actor and creates a signed session. The adapter writes to `sessionIndex` on the organization actor. 3. 
**After BetterAuth callback completes**, populate `userProfiles` in the user actor with app-specific fields and enqueue the slow org sync (same background workflow pattern as today). 4. **Replace `signOutApp`** — delegate to BetterAuth session invalidation. Adapter removes entry from `sessionIndex`. 5. **Update `resolveSessionId`** in `index.ts` — validate the session via BetterAuth (which routes through the adapter → `sessionIndex` → user actor). BetterAuth verifies the signature and checks expiration. @@ -288,18 +288,18 @@ Research confirms: ## Constraints - **Actor-routed adapter.** BetterAuth does not natively support per-user actor databases. The custom adapter must route every DB operation to the correct actor. This adds a layer of indirection and latency (actor handle resolution + message) on adapter calls. -- **Session index cost is mitigated by cookie caching.** With `cookieCache` enabled, BetterAuth validates sessions from a signed cookie on most requests — the adapter (and thus the `sessionIndex` lookup + user actor round-trip) is only called when the cache expires or on writes. Without caching, every authenticated request would hit the workspace actor's `sessionIndex` table then the user actor. -- **Two-actor write on session create/destroy.** Creating or destroying a session requires writing to both the user actor (BetterAuth's `session` table) and the workspace actor (`sessionIndex`). These must be consistent — if the user actor write succeeds but the index write fails, the session exists but is unreachable. +- **Session index cost is mitigated by cookie caching.** With `cookieCache` enabled, BetterAuth validates sessions from a signed cookie on most requests — the adapter (and thus the `sessionIndex` lookup + user actor round-trip) is only called when the cache expires or on writes. Without caching, every authenticated request would hit the organization actor's `sessionIndex` table then the user actor. 
+- **Two-actor write on session create/destroy.** Creating or destroying a session requires writing to both the user actor (BetterAuth's `session` table) and the organization actor (`sessionIndex`). These must be consistent — if the user actor write succeeds but the index write fails, the session exists but is unreachable. - **Background org sync pattern must be preserved.** The fast-path/slow-path split (`initGithubSession` returns immediately, `syncGithubOrganizations` runs in workflow queue) is critical for avoiding proxy timeout retries. BetterAuth handles the OAuth exchange, but the org sync stays as a background workflow. - **`GitHubAppClient` is still needed.** BetterAuth replaces the OAuth user-auth flow, but installation tokens, webhook verification, repo listing, and org listing are GitHub App operations that BetterAuth does not cover. - **User ID migration.** Changing user IDs from `user-${slugify(login)}` to GitHub numeric IDs affects `organizationMembers`, `seatAssignments`, and any cross-actor references to user IDs. Existing data needs a migration path. -- **`findUserByEmail` requires a secondary index.** BetterAuth sometimes looks up users by email (e.g., account linking). An `emailIndex` table on the workspace actor is needed. This must be kept in sync with the user actor's email field. +- **`findUserByEmail` requires a secondary index.** BetterAuth sometimes looks up users by email (e.g., account linking). An `emailIndex` table on the organization actor is needed. This must be kept in sync with the user actor's email field. ## Risk Assessment - **Adapter call context — RESOLVED.** Research confirms BetterAuth adapter methods are plain async functions with no request context dependency. The adapter closes over the RivetKit registry at init time and resolves actor handles on demand. No ambient `c` context needed. 
- **Hot-path latency — MITIGATED.** Cookie caching (`cookieCache` with `strategy: "compact"`) means most authenticated requests validate the session from a signed cookie without calling the adapter at all. The adapter (and thus the actor round-trip) is only hit when the cache expires (configurable, e.g., every 5 minutes) or on writes. This makes the session index + user actor lookup acceptable. -- **Two-actor consistency.** Session create/destroy touches two actors (user actor + workspace index). If either write fails, the system is in an inconsistent state. Recommended: write index first, then user actor. A dangling index entry pointing to a nonexistent session is benign — BetterAuth treats it as "session not found" and the user just re-authenticates. +- **Two-actor consistency.** Session create/destroy touches two actors (user actor + organization index). If either write fails, the system is in an inconsistent state. Recommended: write index first, then user actor. A dangling index entry pointing to a nonexistent session is benign — BetterAuth treats it as "session not found" and the user just re-authenticates. - **Cookie vs header auth.** BetterAuth defaults to HTTP-only cookies (`better-auth.session_token`). The current system uses a custom `x-foundry-session` header with `localStorage`. BetterAuth supports `bearer` token mode for programmatic clients via its `bearer` plugin. Enable both for browser + API access. - **Dev bootstrap flow.** `bootstrapAppGithubSession` bypasses the normal OAuth flow for local development. BetterAuth supports programmatic session creation via its internal adapter — the dev path can call the adapter's `create` method directly for the `session` and `account` models. - **Actor lifecycle for users.** User actors are long-lived but low-traffic. RivetKit will idle/unload them. With cookie caching, cold-start only happens when the cache expires — not on every request. Acceptable. 
diff --git a/foundry/research/specs/async-action-fixes/README.md b/foundry/research/specs/async-action-fixes/README.md index 1dae650..a26fd0e 100644 --- a/foundry/research/specs/async-action-fixes/README.md +++ b/foundry/research/specs/async-action-fixes/README.md @@ -19,7 +19,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: - Backend actor entry points live under `foundry/packages/backend/src/actors`. - Provider-backed long-running work lives under `foundry/packages/backend/src/providers`. - The main UI consumers are: - - `foundry/packages/frontend/src/components/workspace-dashboard.tsx` + - `foundry/packages/frontend/src/components/organization-dashboard.tsx` - `foundry/packages/frontend/src/components/mock-layout.tsx` - `foundry/packages/client/src/remote/workbench-client.ts` - Existing non-blocking examples already exist in app-shell GitHub auth/import flows. Use those as the reference pattern for request returns plus background completion. @@ -32,7 +32,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: 4. `06-daytona-provisioning-staged-background-flow.md` 5. App shell realtime subscription work from `00-end-to-end-async-realtime-plan.md` 6. `02-repo-overview-from-cached-projection.md` -7. Workspace summary projection work from `00-end-to-end-async-realtime-plan.md` +7. Organization summary projection work from `00-end-to-end-async-realtime-plan.md` 8. `04-workbench-session-creation-without-inline-provisioning.md` 9. `05-workbench-snapshot-from-derived-state.md` 10. Task-detail direct subscription work from `00-end-to-end-async-realtime-plan.md` @@ -42,7 +42,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: - Runtime hardening and the first async workflow items remove the highest-risk correctness and timeout issues first. - App shell realtime is a smaller migration than the workbench and removes the current polling loop early. 
-- Workspace summary and task-detail subscription work are easier once long-running mutations already report durable background state. +- Organization summary and task-detail subscription work are easier once long-running mutations already report durable background state. - Auth simplification is important, but it should not block the snapshot/polling/runtime fixes. ## Fresh Agent Checklist diff --git a/foundry/research/specs/frontend.md b/foundry/research/specs/frontend.md index 2eb4ce5..6c384ae 100644 --- a/foundry/research/specs/frontend.md +++ b/foundry/research/specs/frontend.md @@ -24,8 +24,8 @@ be thorough and careful with your impelmentaiton. this is going to be the ground - left sidebar is similar to the hf switch ui: - list each repo - under each repo, show all of the tasks - - you should see all tasks for the entire workspace here grouped by repo -- the main content area shows the current workspace + - you should see all tasks for the entire organization here grouped by repo +- the main content area shows the current organization - there is a main agent session for the main agent thatn's making the change, so show this by default - build a ui for interacting with sessions - see ~/sandbox-agent/frontend/packages/inspector/ for reference ui diff --git a/foundry/research/specs/github-data-actor.md b/foundry/research/specs/github-data-actor.md index d3af6ab..75a71a1 100644 --- a/foundry/research/specs/github-data-actor.md +++ b/foundry/research/specs/github-data-actor.md @@ -4,7 +4,7 @@ Replace the per-repo polling PR sync actor (`ProjectPrSyncActor`) and per-repo PR cache (`prCache` table) with a single organization-scoped `github-state` actor that owns all GitHub data (repos, PRs, members). All GitHub state updates flow exclusively through webhooks, with a one-shot full sync on initial connection. Manual reload actions are exposed per-entity (org, repo, PR) for recovery from missed webhooks. 
-Open PRs are surfaced in the left sidebar alongside tasks via a unified workspace interest topic, with lazy task/sandbox creation when a user clicks on a PR. +Open PRs are surfaced in the left sidebar alongside tasks via a unified organization subscription topic, with lazy task/sandbox creation when a user clicks on a PR. ## Reference Implementation @@ -18,7 +18,7 @@ Use `git show 0aca2c7:` to read the reference files. Adapt (don't copy bli ## Constraints -1. **No polling.** Delete `ProjectPrSyncActor` (`actors/project-pr-sync/`), all references to it in handles/keys/index, and the `prCache` table in `ProjectActor`'s DB schema. Remove `prSyncStatus`/`prSyncAt` from `getRepoOverview`. +1. **No polling.** Delete `ProjectPrSyncActor` (`actors/repository-pr-sync/`), all references to it in handles/keys/index, and the `prCache` table in `RepositoryActor`'s DB schema. Remove `prSyncStatus`/`prSyncAt` from `getRepoOverview`. 2. **Keep `ProjectBranchSyncActor`.** This polls the local git clone (not GitHub API) and is the sandbox git status mechanism. It stays. 3. **Webhooks are the sole live update path.** The only GitHub API calls happen during: - Initial full sync on org connection/installation @@ -72,16 +72,16 @@ Replace the current TODO at `app-shell.ts:1521` with dispatch logic adapted from When `github-state` receives a PR update (webhook or manual reload), it should: 1. Update its own `github_pull_requests` table -2. Call `notifyOrganizationUpdated()` → which broadcasts `workspaceUpdated` to connected clients -3. If the PR branch matches an existing task's branch, update that task's `pullRequest` summary in the workspace actor +2. Call `notifyOrganizationUpdated()` → which broadcasts `organizationUpdated` to connected clients +3. 
If the PR branch matches an existing task's branch, update that task's `pullRequest` summary in the organization actor -### Workspace Summary Changes +### Organization Summary Changes -Extend `WorkspaceSummarySnapshot` to include open PRs: +Extend `OrganizationSummarySnapshot` to include open PRs: ```typescript -export interface WorkspaceSummarySnapshot { - workspaceId: string; +export interface OrganizationSummarySnapshot { + organizationId: string; repos: WorkbenchRepoSummary[]; taskSummaries: WorkbenchTaskSummary[]; openPullRequests: WorkbenchOpenPrSummary[]; // NEW @@ -103,13 +103,13 @@ export interface WorkbenchOpenPrSummary { } ``` -The workspace actor fetches open PRs from the `github-state` actor when building the summary snapshot. PRs that already have an associated task (matched by branch name) should be excluded from `openPullRequests` (they already appear in `taskSummaries` with their `pullRequest` field populated). +The organization actor fetches open PRs from the `github-state` actor when building the summary snapshot. PRs that already have an associated task (matched by branch name) should be excluded from `openPullRequests` (they already appear in `taskSummaries` with their `pullRequest` field populated). ### Interest Manager -The `workspace` interest topic already returns `WorkspaceSummarySnapshot`. Adding `openPullRequests` to that type means the sidebar automatically gets PR data without a new topic. +The `organization` subscription topic already returns `OrganizationSummarySnapshot`. Adding `openPullRequests` to that type means the sidebar automatically gets PR data without a new topic. 
-`workspaceUpdated` events should include a new variant for PR changes: +`organizationUpdated` events should include a new variant for PR changes: ```typescript { type: "pullRequestUpdated", pullRequest: WorkbenchOpenPrSummary } { type: "pullRequestRemoved", prId: string } @@ -117,7 +117,7 @@ The `workspace` interest topic already returns `WorkspaceSummarySnapshot`. Addin ### Sidebar Changes -The left sidebar currently renders `projects: ProjectSection[]` where each project has `tasks: Task[]`. Extend this to include open PRs as lightweight entries within each project section: +The left sidebar currently renders `repositories: RepositorySection[]` where each repository has `tasks: Task[]`. Extend this to include open PRs as lightweight entries within each repository section: - Open PRs appear in the same list as tasks, sorted by `updatedAtMs` - PRs should be visually distinct: show PR icon instead of task indicator, display `#number` and author @@ -134,7 +134,7 @@ Add a "three dots" menu button in the top-right of the sidebar header. Dropdown - **Reload all PRs** — calls `githubState.fullSync({ force: true })` (convenience shortcut) For per-repo and per-PR reload, add context menu options: -- Right-click a project header → "Reload repository" +- Right-click a repository header → "Reload repository" - Right-click a PR entry → "Reload pull request" These call the corresponding `reloadRepository`/`reloadPullRequest` actions on the `github-state` actor. @@ -143,27 +143,27 @@ These call the corresponding `reloadRepository`/`reloadPullRequest` actions on t Files/code to remove: -1. `foundry/packages/backend/src/actors/project-pr-sync/` — entire directory -2. `foundry/packages/backend/src/actors/project/db/schema.ts` — `prCache` table -3. `foundry/packages/backend/src/actors/project/actions.ts` — `applyPrSyncResultMutation`, `getPullRequestForBranch` (moves to github-state), `prSyncStatus`/`prSyncAt` from `getRepoOverview` +1. 
`foundry/packages/backend/src/actors/repository-pr-sync/` — entire directory +2. `foundry/packages/backend/src/actors/repository/db/schema.ts` — `prCache` table +3. `foundry/packages/backend/src/actors/repository/actions.ts` — `applyPrSyncResultMutation`, `getPullRequestForBranch` (moves to github-state), `prSyncStatus`/`prSyncAt` from `getRepoOverview` 4. `foundry/packages/backend/src/actors/handles.ts` — `getOrCreateProjectPrSync`, `selfProjectPrSync` 5. `foundry/packages/backend/src/actors/keys.ts` — any PR sync key helper -6. `foundry/packages/backend/src/actors/index.ts` — `projectPrSync` import and registration -7. All call sites in `ProjectActor` that spawn or call the PR sync actor (`initProject`, `refreshProject`) +6. `foundry/packages/backend/src/actors/index.ts` — `repositoryPrSync` import and registration +7. All call sites in `RepositoryActor` that spawn or call the PR sync actor (`initProject`, `refreshProject`) ## Migration Path -The `prCache` table in `ProjectActor`'s DB can simply be dropped — no data migration needed since the `github-state` actor will re-fetch everything on its first `fullSync`. Existing task `pullRequest` fields are populated from the github-state actor going forward. +The `prCache` table in `RepositoryActor`'s DB can simply be dropped — no data migration needed since the `github-state` actor will re-fetch everything on its first `fullSync`. Existing task `pullRequest` fields are populated from the github-state actor going forward. ## Implementation Order 1. Create `github-state` actor (adapt from checkpoint `0aca2c7`) 2. Wire up actor in registry, handles, keys 3. Implement webhook dispatch in app-shell (replace TODO) -4. Delete `ProjectPrSyncActor` and `prCache` from project actor +4. Delete `ProjectPrSyncActor` and `prCache` from repository actor 5. Add manual reload actions to github-state -6. Extend `WorkspaceSummarySnapshot` with `openPullRequests` -7. Wire through interest manager + workspace events +6. 
Extend `OrganizationSummarySnapshot` with `openPullRequests` +7. Wire through subscription manager + organization events 8. Update sidebar to render open PRs 9. Add three-dots menu with reload options 10. Update task creation flow for lazy PR→task conversion diff --git a/foundry/research/specs/rivetkit-opentui-migration-plan.md b/foundry/research/specs/rivetkit-opentui-migration-plan.md index d078c9a..78acccc 100644 --- a/foundry/research/specs/rivetkit-opentui-migration-plan.md +++ b/foundry/research/specs/rivetkit-opentui-migration-plan.md @@ -6,19 +6,19 @@ Date: 2026-02-08 ## Locked Decisions 1. Entire rewrite is TypeScript. All Rust code will be deleted at cutover. -2. Repo stays a single monorepo, managed with `pnpm` workspaces + Turborepo. +2. Repo stays a single monorepo, managed with `pnpm` workspaces + Turborepo. 3. `core` package is renamed to `shared`. 4. `integrations` and `providers` live inside the backend package (not top-level packages). 5. Rivet-backed state uses SQLite + Drizzle only. 6. RivetKit dependencies come from local `../rivet` builds only; no published npm packages. -7. Everything is workspace-scoped. Workspace is configurable from CLI. -8. `ControlPlaneActor` is renamed to `WorkspaceActor` (workspace coordinator). -9. Every actor key is prefixed by workspace. -10. `--workspace` is optional; commands resolve workspace via flag -> config default -> `default`. +7. Everything is organization-scoped. Organization is configurable from CLI. +8. `ControlPlaneActor` is renamed to `OrganizationActor` (organization coordinator). +9. Every actor key is prefixed by organization. +10. `--organization` is optional; commands resolve organization via flag -> config default -> `default`. 11. RivetKit local dependency wiring is `link:`-based. 12. Keep the existing config file path (`~/.config/foundry/config.toml`) and evolve keys in place. 13. `.agents` and skill files are in scope for migration updates. -14. 
Parent orchestration actors (`workspace`, `project`, `task`) use command-only loops with no timeout. +14. Parent orchestration actors (`organization`, `repository`, `task`) use command-only loops with no timeout. 15. Periodic syncing/polling runs in dedicated child actors, each with a single timeout cadence. 16. For each actor, define the main loop and exactly what data it mutates; keep single-writer ownership strict. @@ -38,10 +38,10 @@ The core architecture changes from "worktree-per-task" to "provider-selected san 1. Rust binaries/backend removed. 2. Existing IPC replaced by new TypeScript transport. -3. Configuration schema changes for workspace selection and sandbox provider defaults. -4. Runtime model changes from global control plane to workspace coordinator actor. -5. Database schema migrates to workspace + provider + sandbox identity model. -6. Command options evolve to include workspace and provider selection. +3. Configuration schema changes for organization selection and sandbox provider defaults. +4. Runtime model changes from global control plane to organization coordinator actor. +5. Database schema migrates to organization + provider + sandbox identity model. +6. Command options evolve to include organization and provider selection. 
## Monorepo and Build Tooling @@ -49,7 +49,7 @@ Root tooling is standardized: - `pnpm-workspace.yaml` - `turbo.json` -- workspace scripts through `pnpm` + `turbo run ...` +- organization scripts through `pnpm` + `turbo run ...` Target package layout: @@ -59,13 +59,13 @@ packages/ backend/ src/ actors/ - workspace.ts - project.ts + organization.ts + repository.ts task.ts sandbox-instance.ts history.ts - project-pr-sync.ts - project-branch-sync.ts + repository-pr-sync.ts + repository-branch-sync.ts task-status-sync.ts keys.ts events.ts @@ -88,13 +88,13 @@ packages/ server.ts types.ts config/ - workspace.ts + organization.ts backend.ts cli/ # hf command surface src/ commands/ client/ # backend transport client - workspace/ # workspace selection resolver + organization/ # organization selection resolver tui/ # OpenTUI app src/ app/ @@ -111,13 +111,13 @@ CLI and TUI are separate packages in the same monorepo, not separate repositorie Backend actor files and responsibilities: -1. `packages/backend/src/actors/workspace.ts` -- `WorkspaceActor` implementation. -- Provider profile resolution and workspace-level coordination. -- Spawns/routes to `ProjectActor` handles. +1. `packages/backend/src/actors/organization.ts` +- `OrganizationActor` implementation. +- Provider profile resolution and organization-level coordination. +- Spawns/routes to `RepositoryActor` handles. -2. `packages/backend/src/actors/project.ts` -- `ProjectActor` implementation. +2. `packages/backend/src/actors/repository.ts` +- `RepositoryActor` implementation. - Branch snapshot refresh, PR cache orchestration, stream publication. - Routes task actions to `TaskActor`. @@ -134,7 +134,7 @@ Backend actor files and responsibilities: - Writes workflow events to SQLite via Drizzle. 6. `packages/backend/src/actors/keys.ts` -- Workspace-prefixed actor key builders/parsers. +- Organization-prefixed actor key builders/parsers. 7. 
`packages/backend/src/actors/events.ts` - Internal actor event envelopes and stream payload types. @@ -145,13 +145,13 @@ Backend actor files and responsibilities: 9. `packages/backend/src/actors/index.ts` - Actor exports and composition wiring. -10. `packages/backend/src/actors/project-pr-sync.ts` +10. `packages/backend/src/actors/repository-pr-sync.ts` - Read-only PR polling loop (single timeout cadence). -- Sends sync results back to `ProjectActor`. +- Sends sync results back to `RepositoryActor`. -11. `packages/backend/src/actors/project-branch-sync.ts` +11. `packages/backend/src/actors/repository-branch-sync.ts` - Read-only branch snapshot polling loop (single timeout cadence). -- Sends sync results back to `ProjectActor`. +- Sends sync results back to `RepositoryActor`. 12. `packages/backend/src/actors/task-status-sync.ts` - Read-only session/sandbox status polling loop (single timeout cadence). @@ -169,17 +169,17 @@ pnpm build -F rivetkit 2. Consume via local `link:` dependencies to built artifacts. 3. Keep dependency wiring deterministic and documented in repo scripts. -## Workspace Model +## Organization Model -Every command executes against a resolved workspace context. +Every command executes against a resolved organization context. -Workspace selection: +Organization selection: -1. CLI flag: `--workspace ` -2. Config default workspace +1. CLI flag: `--organization ` +2. Config default organization 3. Fallback to `default` -Workspace controls: +Organization controls: 1. provider profile defaults 2. sandbox policy @@ -188,45 +188,45 @@ Workspace controls: ## New Actor Implementation Overview -RivetKit registry actor keys are workspace-prefixed: +RivetKit registry actor keys are organization-prefixed: -1. `WorkspaceActor` (workspace coordinator) -- Key: `["ws", workspaceId]` -- Owns workspace config/runtime coordination, provider registry, workspace health. -- Resolves provider defaults and workspace-level policies. +1. 
`OrganizationActor` (organization coordinator) +- Key: `["ws", organizationId]` +- Owns organization config/runtime coordination, provider registry, organization health. +- Resolves provider defaults and organization-level policies. -2. `ProjectActor` -- Key: `["ws", workspaceId, "project", repoId]` +2. `RepositoryActor` +- Key: `["ws", organizationId, "repository", repoId]` - Owns repo snapshot cache and PR cache refresh orchestration. - Routes branch/task commands to task actors. -- Streams project updates to CLI/TUI subscribers. +- Streams repository updates to CLI/TUI subscribers. 3. `TaskActor` -- Key: `["ws", workspaceId, "project", repoId, "task", taskId]` +- Key: `["ws", organizationId, "repository", repoId, "task", taskId]` - Owns task metadata/runtime state. - Creates/resumes sandbox + session through provider adapter. - Handles attach/push/sync/merge/archive/kill and post-idle automation. 4. `SandboxInstanceActor` (optional but recommended) -- Key: `["ws", workspaceId, "provider", providerId, "sandbox", sandboxId]` +- Key: `["ws", organizationId, "provider", providerId, "sandbox", sandboxId]` - Owns sandbox lifecycle, heartbeat, endpoint readiness, recovery. 5. `HistoryActor` -- Key: `["ws", workspaceId, "project", repoId, "history"]` +- Key: `["ws", organizationId, "repository", repoId, "history"]` - Owns `events` writes and workflow timeline completeness. 6. `ProjectPrSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "pr-sync"]` -- Polls PR state on interval and emits results to `ProjectActor`. +- Key: `["ws", organizationId, "repository", repoId, "pr-sync"]` +- Polls PR state on interval and emits results to `RepositoryActor`. - Does not write DB directly. 7. `ProjectBranchSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "branch-sync"]` -- Polls branch/worktree state on interval and emits results to `ProjectActor`. 
+- Key: `["ws", organizationId, "repository", repoId, "branch-sync"]` +- Polls branch/worktree state on interval and emits results to `RepositoryActor`. - Does not write DB directly. 8. `TaskStatusSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "task", taskId, "status-sync"]` +- Key: `["ws", organizationId, "repository", repoId, "task", taskId, "status-sync"]` - Polls agent/session/sandbox health on interval and emits results to `TaskActor`. - Does not write DB directly. @@ -236,10 +236,10 @@ Ownership rule: each table/row has one actor writer. Always define actor run-loop + mutated state together: -1. `WorkspaceActor` -- Mutates: `workspaces`, `workspace_provider_profiles`. +1. `OrganizationActor` +- Mutates: `organizations`, `workspace_provider_profiles`. -2. `ProjectActor` +2. `RepositoryActor` - Mutates: `repos`, `branches`, `pr_cache` (applies child poller results). 3. `TaskActor` @@ -251,30 +251,30 @@ Always define actor run-loop + mutated state together: 5. `HistoryActor` - Mutates: `events`. -6. Child sync actors (`project-pr-sync`, `project-branch-sync`, `task-status-sync`) +6. Child sync actors (`repository-pr-sync`, `repository-branch-sync`, `task-status-sync`) - Mutates: none (read-only pollers; publish result messages only). ## Run Loop Patterns (Required) Parent orchestration actors: no timeout, command-only queue loops. 
-### `WorkspaceActor` (no timeout) +### `OrganizationActor` (no timeout) ```ts run: async (c) => { while (true) { - const msg = await c.queue.next("workspace.command"); - await handleWorkspaceCommand(c, msg); // writes workspace-owned tables only + const msg = await c.queue.next("organization.command"); + await handleOrganizationCommand(c, msg); // writes organization-owned tables only } }; ``` -### `ProjectActor` (no timeout) +### `RepositoryActor` (no timeout) ```ts run: async (c) => { while (true) { - const msg = await c.queue.next("project.command"); + const msg = await c.queue.next("repository.command"); await handleProjectCommand(c, msg); // includes applying sync results to branches/pr_cache } }; @@ -321,10 +321,10 @@ Child sync actors: one timeout each, one cadence each. run: async (c) => { const intervalMs = 30_000; while (true) { - const msg = await c.queue.next("project.pr_sync.command", { timeout: intervalMs }); + const msg = await c.queue.next("repository.pr_sync.command", { timeout: intervalMs }); if (!msg) { const result = await pollPrState(); - await sendToProject({ name: "project.pr_sync.result", result }); + await sendToProject({ name: "repository.pr_sync.result", result }); continue; } await handlePrSyncControl(c, msg); // force/stop/update-interval @@ -338,10 +338,10 @@ run: async (c) => { run: async (c) => { const intervalMs = 5_000; while (true) { - const msg = await c.queue.next("project.branch_sync.command", { timeout: intervalMs }); + const msg = await c.queue.next("repository.branch_sync.command", { timeout: intervalMs }); if (!msg) { const result = await pollBranchState(); - await sendToProject({ name: "project.branch_sync.result", result }); + await sendToProject({ name: "repository.branch_sync.result", result }); continue; } await handleBranchSyncControl(c, msg); @@ -368,7 +368,7 @@ run: async (c) => { ## Sandbox Provider Interface -Provider contract lives under `packages/backend/src/providers/provider-api` and is consumed by 
workspace/project/task actors. +Provider contract lives under `packages/backend/src/providers/provider-api` and is consumed by organization/repository/task actors. ```ts interface SandboxProvider { @@ -398,26 +398,26 @@ Initial providers: - Boots/ensures Sandbox Agent inside sandbox. - Returns endpoint/token for session operations. -## Command Surface (Workspace + Provider Aware) +## Command Surface (Organization + Provider Aware) -1. `hf create ... --workspace --provider ` -2. `hf switch --workspace [target]` -3. `hf attach --workspace [task]` -4. `hf list --workspace ` -5. `hf kill|archive|merge|push|sync --workspace ...` -6. `hf workspace use ` to set default workspace +1. `hf create ... --organization --provider ` +2. `hf switch --organization [target]` +3. `hf attach --organization [task]` +4. `hf list --organization ` +5. `hf kill|archive|merge|push|sync --organization ...` +6. `hf organization use ` to set default organization List/TUI include provider and sandbox health metadata. -`--workspace` remains optional; omitted values use the standard resolution order. +`--organization` remains optional; omitted values use the standard resolution order. ## Data Model v2 (SQLite + Drizzle) All persistent state is SQLite via Drizzle schema + migrations. -Tables (workspace-scoped): +Tables (organization-scoped): -1. `workspaces` +1. `organizations` 2. `workspace_provider_profiles` 3. `repos` (`workspace_id`, `repo_id`, ...) 4. `branches` (`workspace_id`, `repo_id`, ...) @@ -433,10 +433,10 @@ Migration approach: one-way migration from existing schema during TS backend boo 1. TypeScript backend exposes local control API (socket or localhost HTTP). 2. CLI/TUI are thin clients; all mutations go through backend actors. -3. OpenTUI subscribes to project streams from workspace-scoped project actors. -4. Workspace is required context on all backend mutation requests. +3. OpenTUI subscribes to repository streams from organization-scoped repository actors. +4. 
Organization is required context on all backend mutation requests. -CLI/TUI are responsible for resolving workspace context before calling backend mutations. +CLI/TUI are responsible for resolving organization context before calling backend mutations. ## CLI + TUI Packaging @@ -451,10 +451,10 @@ The package still calls the same backend API and shares contracts from `packages ## Implementation Phases -## Phase 0: Contracts and Workspace Spec +## Phase 0: Contracts and Organization Spec -1. Freeze workspace model, provider contract, and actor ownership map. -2. Freeze command flags for workspace + provider selection. +1. Freeze organization model, provider contract, and actor ownership map. +2. Freeze command flags for organization + provider selection. 3. Define Drizzle schema draft and migration plan. Exit criteria: @@ -462,7 +462,7 @@ Exit criteria: ## Phase 1: TypeScript Monorepo Bootstrap -1. Add `pnpm` workspace + Turborepo pipeline. +1. Add `pnpm` workspace + Turborepo pipeline. 2. Create `shared`, `backend`, and `cli` packages (with TUI integrated into CLI). 3. Add strict TypeScript config and CI checks. @@ -473,10 +473,10 @@ 1. Wire local RivetKit dependency from `../rivet`. 2. Add SQLite + Drizzle migrations and query layer. -3. Implement actor registry with workspace-prefixed keys. +3. Implement actor registry with organization-prefixed keys. Exit criteria: -- Backend boot + workspace actor health checks pass. +- Backend boot + organization actor health checks pass. ## Phase 3: Provider Layer in Backend @@ -487,9 +487,9 @@ Exit criteria: - `create/list/switch/attach/push/sync/kill` pass on worktree provider. -## Phase 4: Workspace/Task Lifecycle +## Phase 4: Organization/Task Lifecycle -1. Implement organization coordinator flows. +1. Implement organization coordinator flows. 2. Implement TaskActor full lifecycle + post-idle automation. 3. Implement history events and PR/CI/review change tracking.
@@ -509,7 +509,7 @@ Exit criteria: 1. Build interactive list/switch UI in OpenTUI. 2. Implement key actions (attach/open PR/archive/merge/sync). -3. Add workspace switcher UX and provider/sandbox indicators. +3. Add organization switcher UX and provider/sandbox indicators. Exit criteria: - TUI parity and responsive streaming updates. @@ -534,7 +534,7 @@ Exit criteria: 2. Integration tests - backend + sqlite + provider fakes -- workspace isolation boundaries +- organization isolation boundaries - session recovery and restart handling 3. E2E tests diff --git a/foundry/scripts/build-test-image.sh b/foundry/scripts/build-test-image.sh index 284c8bc..a8cae9b 100755 --- a/foundry/scripts/build-test-image.sh +++ b/foundry/scripts/build-test-image.sh @@ -2,7 +2,7 @@ set -euo pipefail echo "Docker integration test image is not part of the TypeScript migration baseline." -echo "Use workspace tests instead:" +echo "Use monorepo tests instead:" echo " pnpm -w typecheck" echo " pnpm -w build" echo " pnpm -w test" diff --git a/foundry/scripts/data/rivet-dev.json b/foundry/scripts/data/rivet-dev.json index 2b1b6f0..3534cac 100644 --- a/foundry/scripts/data/rivet-dev.json +++ b/foundry/scripts/data/rivet-dev.json @@ -1060,7 +1060,7 @@ }, { "number": 222, - "title": "Recover wellington workspace state", + "title": "Recover wellington organization state", "state": "open", "draft": false, "headRefName": "recovery/wellington-20260309", @@ -1070,7 +1070,7 @@ }, { "number": 220, - "title": "Recover lisbon workspace state", + "title": "Recover lisbon organization state", "state": "open", "draft": false, "headRefName": "recovery/lisbon-20260309", @@ -1080,7 +1080,7 @@ }, { "number": 219, - "title": "Recover karachi-v2 workspace state", + "title": "Recover karachi-v2 organization state", "state": "open", "draft": false, "headRefName": "recovery/karachi-v2-20260309", @@ -1090,7 +1090,7 @@ }, { "number": 218, - "title": "Recover hamburg workspace state", + "title": "Recover hamburg 
organization state", "state": "open", "draft": false, "headRefName": "recovery/hamburg-20260309", @@ -1100,7 +1100,7 @@ }, { "number": 217, - "title": "Recover geneva workspace state", + "title": "Recover geneva organization state", "state": "open", "draft": false, "headRefName": "recovery/geneva-20260309", @@ -1110,7 +1110,7 @@ }, { "number": 216, - "title": "Recover edinburgh workspace state", + "title": "Recover edinburgh organization state", "state": "open", "draft": false, "headRefName": "recovery/edinburgh-20260309", diff --git a/foundry/scripts/pull-org-data.ts b/foundry/scripts/pull-org-data.ts index 1759cad..3580baa 100644 --- a/foundry/scripts/pull-org-data.ts +++ b/foundry/scripts/pull-org-data.ts @@ -2,8 +2,8 @@ /** * Pull public GitHub organization data into a JSON fixture file. * - * This script mirrors the sync logic in the backend workspace actor - * (see: packages/backend/src/actors/workspace/app-shell.ts — syncGithubOrganizations + * This script mirrors the sync logic in the backend organization actor + * (see: packages/backend/src/actors/organization/app-shell.ts — syncGithubOrganizations * and syncGithubOrganizationRepos). Keep the two in sync: when the backend * sync workflow changes what data it fetches or how it structures organizations, * update this script to match. @@ -205,8 +205,8 @@ async function pullOrgData(orgLogin: string): Promise { console.log(` ${members.length} public members`); // 4. Fetch open PRs across all public repos - // Backend equivalent: ProjectPrSyncActor polls GitHub for open PRs per repo - // and stores them in the pr_cache table on the project actor + // Backend equivalent: open PR metadata is pulled from GitHub and merged into + // the organization/repository projections used by the UI. const openPullRequests: OrgFixturePullRequest[] = []; for (const repo of repos) { const rawPrs = await githubPaginate<{