diff --git a/.agents/skills/agent-browser/SKILL.md b/.agents/skills/agent-browser/SKILL.md index ab3ea3c..680828d 100644 --- a/.agents/skills/agent-browser/SKILL.md +++ b/.agents/skills/agent-browser/SKILL.md @@ -1,265 +1,24 @@ --- name: agent-browser -description: Automates browser interactions for web testing, form filling, screenshots, and data extraction. Use when the user needs to navigate websites, interact with web pages, fill forms, take screenshots, test web applications, or extract information from web pages. -allowed-tools: Bash(agent-browser:*) +description: Browser automation CLI for AI agents. Use when the user needs to interact with websites, including navigating pages, filling forms, clicking buttons, taking screenshots, extracting data, testing web apps, or automating any browser task. Triggers include requests to "open a website", "fill out a form", "click a button", "take a screenshot", "scrape data from a page", "test this web app", "login to a site", "automate browser actions", or any task requiring programmatic web interaction. +allowed-tools: Bash(npx agent-browser:*), Bash(agent-browser:*) --- # Browser Automation with agent-browser -## Quick start +## Core Workflow -```bash -agent-browser open # Navigate to page -agent-browser snapshot -i # Get interactive elements with refs -agent-browser click @e1 # Click element by ref -agent-browser fill @e2 "text" # Fill input by ref -agent-browser close # Close browser -``` +Every browser automation follows this pattern: -## Core workflow - -1. Navigate: `agent-browser open ` -2. Snapshot: `agent-browser snapshot -i` (returns elements with refs like `@e1`, `@e2`) -3. Interact using refs from the snapshot -4. 
Re-snapshot after navigation or significant DOM changes - -## Commands - -### Navigation - -```bash -agent-browser open # Navigate to URL (aliases: goto, navigate) - # Supports: https://, http://, file://, about:, data:// - # Auto-prepends https:// if no protocol given -agent-browser back # Go back -agent-browser forward # Go forward -agent-browser reload # Reload page -agent-browser close # Close browser (aliases: quit, exit) -agent-browser connect 9222 # Connect to browser via CDP port -``` - -### Snapshot (page analysis) - -```bash -agent-browser snapshot # Full accessibility tree -agent-browser snapshot -i # Interactive elements only (recommended) -agent-browser snapshot -c # Compact output -agent-browser snapshot -d 3 # Limit depth to 3 -agent-browser snapshot -s "#main" # Scope to CSS selector -``` - -### Interactions (use @refs from snapshot) - -```bash -agent-browser click @e1 # Click -agent-browser dblclick @e1 # Double-click -agent-browser focus @e1 # Focus element -agent-browser fill @e2 "text" # Clear and type -agent-browser type @e2 "text" # Type without clearing -agent-browser press Enter # Press key (alias: key) -agent-browser press Control+a # Key combination -agent-browser keydown Shift # Hold key down -agent-browser keyup Shift # Release key -agent-browser hover @e1 # Hover -agent-browser check @e1 # Check checkbox -agent-browser uncheck @e1 # Uncheck checkbox -agent-browser select @e1 "value" # Select dropdown option -agent-browser select @e1 "a" "b" # Select multiple options -agent-browser scroll down 500 # Scroll page (default: down 300px) -agent-browser scrollintoview @e1 # Scroll element into view (alias: scrollinto) -agent-browser drag @e1 @e2 # Drag and drop -agent-browser upload @e1 file.pdf # Upload files -``` - -### Get information - -```bash -agent-browser get text @e1 # Get element text -agent-browser get html @e1 # Get innerHTML -agent-browser get value @e1 # Get input value -agent-browser get attr @e1 href # Get attribute 
-agent-browser get title # Get page title -agent-browser get url # Get current URL -agent-browser get count ".item" # Count matching elements -agent-browser get box @e1 # Get bounding box -agent-browser get styles @e1 # Get computed styles (font, color, bg, etc.) -``` - -### Check state - -```bash -agent-browser is visible @e1 # Check if visible -agent-browser is enabled @e1 # Check if enabled -agent-browser is checked @e1 # Check if checked -``` - -### Screenshots & PDF - -```bash -agent-browser screenshot # Save to a temporary directory -agent-browser screenshot path.png # Save to a specific path -agent-browser screenshot --full # Full page -agent-browser pdf output.pdf # Save as PDF -``` - -### Video recording - -```bash -agent-browser record start ./demo.webm # Start recording (uses current URL + state) -agent-browser click @e1 # Perform actions -agent-browser record stop # Stop and save video -agent-browser record restart ./take2.webm # Stop current + start new recording -``` - -Recording creates a fresh context but preserves cookies/storage from your session. If no URL is provided, it -automatically returns to your current page. For smooth demos, explore first, then start recording. 
- -### Wait - -```bash -agent-browser wait @e1 # Wait for element -agent-browser wait 2000 # Wait milliseconds -agent-browser wait --text "Success" # Wait for text (or -t) -agent-browser wait --url "**/dashboard" # Wait for URL pattern (or -u) -agent-browser wait --load networkidle # Wait for network idle (or -l) -agent-browser wait --fn "window.ready" # Wait for JS condition (or -f) -``` - -### Mouse control - -```bash -agent-browser mouse move 100 200 # Move mouse -agent-browser mouse down left # Press button -agent-browser mouse up left # Release button -agent-browser mouse wheel 100 # Scroll wheel -``` - -### Semantic locators (alternative to refs) - -```bash -agent-browser find role button click --name "Submit" -agent-browser find text "Sign In" click -agent-browser find text "Sign In" click --exact # Exact match only -agent-browser find label "Email" fill "user@test.com" -agent-browser find placeholder "Search" type "query" -agent-browser find alt "Logo" click -agent-browser find title "Close" click -agent-browser find testid "submit-btn" click -agent-browser find first ".item" click -agent-browser find last ".item" click -agent-browser find nth 2 "a" hover -``` - -### Browser settings - -```bash -agent-browser set viewport 1920 1080 # Set viewport size -agent-browser set device "iPhone 14" # Emulate device -agent-browser set geo 37.7749 -122.4194 # Set geolocation (alias: geolocation) -agent-browser set offline on # Toggle offline mode -agent-browser set headers '{"X-Key":"v"}' # Extra HTTP headers -agent-browser set credentials user pass # HTTP basic auth (alias: auth) -agent-browser set media dark # Emulate color scheme -agent-browser set media light reduced-motion # Light mode + reduced motion -``` - -### Cookies & Storage - -```bash -agent-browser cookies # Get all cookies -agent-browser cookies set name value # Set cookie -agent-browser cookies clear # Clear cookies -agent-browser storage local # Get all localStorage -agent-browser storage local key # 
Get specific key -agent-browser storage local set k v # Set value -agent-browser storage local clear # Clear all -``` - -### Network - -```bash -agent-browser network route # Intercept requests -agent-browser network route --abort # Block requests -agent-browser network route --body '{}' # Mock response -agent-browser network unroute [url] # Remove routes -agent-browser network requests # View tracked requests -agent-browser network requests --filter api # Filter requests -``` - -### Tabs & Windows - -```bash -agent-browser tab # List tabs -agent-browser tab new [url] # New tab -agent-browser tab 2 # Switch to tab by index -agent-browser tab close # Close current tab -agent-browser tab close 2 # Close tab by index -agent-browser window new # New window -``` - -### Frames - -```bash -agent-browser frame "#iframe" # Switch to iframe -agent-browser frame main # Back to main frame -``` - -### Dialogs - -```bash -agent-browser dialog accept [text] # Accept dialog -agent-browser dialog dismiss # Dismiss dialog -``` - -### JavaScript - -```bash -agent-browser eval "document.title" # Run JavaScript -``` - -## Global options - -```bash -agent-browser --session ... # Isolated browser session -agent-browser --json ... # JSON output for parsing -agent-browser --headed ... # Show browser window (not headless) -agent-browser --full ... # Full page screenshot (-f) -agent-browser --cdp ... # Connect via Chrome DevTools Protocol -agent-browser -p ... # Cloud browser provider (--provider) -agent-browser --proxy ... # Use proxy server -agent-browser --headers ... # HTTP headers scoped to URL's origin -agent-browser --executable-path

# Custom browser executable -agent-browser --extension ... # Load browser extension (repeatable) -agent-browser --help # Show help (-h) -agent-browser --version # Show version (-V) -agent-browser --help # Show detailed help for a command -``` - -### Proxy support - -```bash -agent-browser --proxy http://proxy.com:8080 open example.com -agent-browser --proxy http://user:pass@proxy.com:8080 open example.com -agent-browser --proxy socks5://proxy.com:1080 open example.com -``` - -## Environment variables - -```bash -AGENT_BROWSER_SESSION="mysession" # Default session name -AGENT_BROWSER_EXECUTABLE_PATH="/path/chrome" # Custom browser path -AGENT_BROWSER_EXTENSIONS="/ext1,/ext2" # Comma-separated extension paths -AGENT_BROWSER_PROVIDER="your-cloud-browser-provider" # Cloud browser provider (select browseruse or browserbase) -AGENT_BROWSER_STREAM_PORT="9223" # WebSocket streaming port -AGENT_BROWSER_HOME="/path/to/agent-browser" # Custom install location (for daemon.js) -``` - -## Example: Form submission +1. **Navigate**: `agent-browser open ` +2. **Snapshot**: `agent-browser snapshot -i` (get element refs like `@e1`, `@e2`) +3. **Interact**: Use refs to click, fill, select +4. **Re-snapshot**: After navigation or DOM changes, get fresh refs ```bash agent-browser open https://example.com/form agent-browser snapshot -i -# Output shows: textbox "Email" [ref=e1], textbox "Password" [ref=e2], button "Submit" [ref=e3] +# Output: @e1 [input type="email"], @e2 [input type="password"], @e3 [button] "Submit" agent-browser fill @e1 "user@example.com" agent-browser fill @e2 "password123" @@ -268,72 +27,504 @@ agent-browser wait --load networkidle agent-browser snapshot -i # Check result ``` -## Example: Authentication with saved state +## Command Chaining + +Commands can be chained with `&&` in a single shell invocation. The browser persists between commands via a background daemon, so chaining is safe and more efficient than separate calls. 
```bash -# Login once +# Chain open + wait + snapshot in one call +agent-browser open https://example.com && agent-browser wait --load networkidle && agent-browser snapshot -i + +# Chain multiple interactions +agent-browser fill @e1 "user@example.com" && agent-browser fill @e2 "password123" && agent-browser click @e3 + +# Navigate and capture +agent-browser open https://example.com && agent-browser wait --load networkidle && agent-browser screenshot page.png +``` + +**When to chain:** Use `&&` when you don't need to read the output of an intermediate command before proceeding (e.g., open + wait + screenshot). Run commands separately when you need to parse the output first (e.g., snapshot to discover refs, then interact using those refs). + +## Essential Commands + +```bash +# Navigation +agent-browser open # Navigate (aliases: goto, navigate) +agent-browser close # Close browser + +# Snapshot +agent-browser snapshot -i # Interactive elements with refs (recommended) +agent-browser snapshot -i -C # Include cursor-interactive elements (divs with onclick, cursor:pointer) +agent-browser snapshot -s "#selector" # Scope to CSS selector + +# Interaction (use @refs from snapshot) +agent-browser click @e1 # Click element +agent-browser click @e1 --new-tab # Click and open in new tab +agent-browser fill @e2 "text" # Clear and type text +agent-browser type @e2 "text" # Type without clearing +agent-browser select @e1 "option" # Select dropdown option +agent-browser check @e1 # Check checkbox +agent-browser press Enter # Press key +agent-browser keyboard type "text" # Type at current focus (no selector) +agent-browser keyboard inserttext "text" # Insert without key events +agent-browser scroll down 500 # Scroll page +agent-browser scroll down 500 --selector "div.content" # Scroll within a specific container + +# Get information +agent-browser get text @e1 # Get element text +agent-browser get url # Get current URL +agent-browser get title # Get page title + +# Wait 
+agent-browser wait @e1 # Wait for element +agent-browser wait --load networkidle # Wait for network idle +agent-browser wait --url "**/page" # Wait for URL pattern +agent-browser wait 2000 # Wait milliseconds + +# Downloads +agent-browser download @e1 ./file.pdf # Click element to trigger download +agent-browser wait --download ./output.zip # Wait for any download to complete +agent-browser --download-path ./downloads open # Set default download directory + +# Capture +agent-browser screenshot # Screenshot to temp dir +agent-browser screenshot --full # Full page screenshot +agent-browser screenshot --annotate # Annotated screenshot with numbered element labels +agent-browser pdf output.pdf # Save as PDF + +# Diff (compare page states) +agent-browser diff snapshot # Compare current vs last snapshot +agent-browser diff snapshot --baseline before.txt # Compare current vs saved file +agent-browser diff screenshot --baseline before.png # Visual pixel diff +agent-browser diff url # Compare two pages +agent-browser diff url --wait-until networkidle # Custom wait strategy +agent-browser diff url --selector "#main" # Scope to element +``` + +## Common Patterns + +### Form Submission + +```bash +agent-browser open https://example.com/signup +agent-browser snapshot -i +agent-browser fill @e1 "Jane Doe" +agent-browser fill @e2 "jane@example.com" +agent-browser select @e3 "California" +agent-browser check @e4 +agent-browser click @e5 +agent-browser wait --load networkidle +``` + +### Authentication with Auth Vault (Recommended) + +```bash +# Save credentials once (encrypted with AGENT_BROWSER_ENCRYPTION_KEY) +# Recommended: pipe password via stdin to avoid shell history exposure +echo "pass" | agent-browser auth save github --url https://github.com/login --username user --password-stdin + +# Login using saved profile (LLM never sees password) +agent-browser auth login github + +# List/show/delete profiles +agent-browser auth list +agent-browser auth show github +agent-browser 
auth delete github +``` + +### Authentication with State Persistence + +```bash +# Login once and save state agent-browser open https://app.example.com/login agent-browser snapshot -i -agent-browser fill @e1 "username" -agent-browser fill @e2 "password" +agent-browser fill @e1 "$USERNAME" +agent-browser fill @e2 "$PASSWORD" agent-browser click @e3 agent-browser wait --url "**/dashboard" agent-browser state save auth.json -# Later sessions: load saved state +# Reuse in future sessions agent-browser state load auth.json agent-browser open https://app.example.com/dashboard ``` -## Sessions (parallel browsers) +### Session Persistence ```bash -agent-browser --session test1 open site-a.com -agent-browser --session test2 open site-b.com -agent-browser session list +# Auto-save/restore cookies and localStorage across browser restarts +agent-browser --session-name myapp open https://app.example.com/login +# ... login flow ... +agent-browser close # State auto-saved to ~/.agent-browser/sessions/ + +# Next time, state is auto-loaded +agent-browser --session-name myapp open https://app.example.com/dashboard + +# Encrypt state at rest +export AGENT_BROWSER_ENCRYPTION_KEY=$(openssl rand -hex 32) +agent-browser --session-name secure open https://app.example.com + +# Manage saved states +agent-browser state list +agent-browser state show myapp-default.json +agent-browser state clear myapp +agent-browser state clean --older-than 7 ``` -## JSON output (for parsing) - -Add `--json` for machine-readable output: +### Data Extraction ```bash +agent-browser open https://example.com/products +agent-browser snapshot -i +agent-browser get text @e5 # Get specific element text +agent-browser get text body > page.txt # Get all page text + +# JSON output for parsing agent-browser snapshot -i --json agent-browser get text @e1 --json ``` -## Debugging +### Parallel Sessions ```bash -agent-browser --headed open example.com # Show browser window -agent-browser --cdp 9222 snapshot # Connect via CDP 
port -agent-browser connect 9222 # Alternative: connect command -agent-browser console # View console messages -agent-browser console --clear # Clear console -agent-browser errors # View page errors -agent-browser errors --clear # Clear errors -agent-browser highlight @e1 # Highlight element -agent-browser trace start # Start recording trace -agent-browser trace stop trace.zip # Stop and save trace -agent-browser record start ./debug.webm # Record video from current page -agent-browser record stop # Save recording +agent-browser --session site1 open https://site-a.com +agent-browser --session site2 open https://site-b.com + +agent-browser --session site1 snapshot -i +agent-browser --session site2 snapshot -i + +agent-browser session list ``` -## Deep-dive documentation +### Connect to Existing Chrome -For detailed patterns and best practices, see: +```bash +# Auto-discover running Chrome with remote debugging enabled +agent-browser --auto-connect open https://example.com +agent-browser --auto-connect snapshot -| Reference | Description | +# Or with explicit CDP port +agent-browser --cdp 9222 snapshot +``` + +### Color Scheme (Dark Mode) + +```bash +# Persistent dark mode via flag (applies to all pages and new tabs) +agent-browser --color-scheme dark open https://example.com + +# Or via environment variable +AGENT_BROWSER_COLOR_SCHEME=dark agent-browser open https://example.com + +# Or set during session (persists for subsequent commands) +agent-browser set media dark +``` + +### Visual Browser (Debugging) + +```bash +agent-browser --headed open https://example.com +agent-browser highlight @e1 # Highlight element +agent-browser record start demo.webm # Record session +agent-browser profiler start # Start Chrome DevTools profiling +agent-browser profiler stop trace.json # Stop and save profile (path optional) +``` + +Use `AGENT_BROWSER_HEADED=1` to enable headed mode via environment variable. Browser extensions work in both headed and headless mode. 
+ +### Local Files (PDFs, HTML) + +```bash +# Open local files with file:// URLs +agent-browser --allow-file-access open file:///path/to/document.pdf +agent-browser --allow-file-access open file:///path/to/page.html +agent-browser screenshot output.png +``` + +### iOS Simulator (Mobile Safari) + +```bash +# List available iOS simulators +agent-browser device list + +# Launch Safari on a specific device +agent-browser -p ios --device "iPhone 16 Pro" open https://example.com + +# Same workflow as desktop - snapshot, interact, re-snapshot +agent-browser -p ios snapshot -i +agent-browser -p ios tap @e1 # Tap (alias for click) +agent-browser -p ios fill @e2 "text" +agent-browser -p ios swipe up # Mobile-specific gesture + +# Take screenshot +agent-browser -p ios screenshot mobile.png + +# Close session (shuts down simulator) +agent-browser -p ios close +``` + +**Requirements:** macOS with Xcode, Appium (`npm install -g appium && appium driver install xcuitest`) + +**Real devices:** Works with physical iOS devices if pre-configured. Use `--device ""` where UDID is from `xcrun xctrace list devices`. + +## Security + +All security features are opt-in. By default, agent-browser imposes no restrictions on navigation, actions, or output. + +### Content Boundaries (Recommended for AI Agents) + +Enable `--content-boundaries` to wrap page-sourced output in markers that help LLMs distinguish tool output from untrusted page content: + +```bash +export AGENT_BROWSER_CONTENT_BOUNDARIES=1 +agent-browser snapshot +# Output: +# --- AGENT_BROWSER_PAGE_CONTENT nonce= origin=https://example.com --- +# [accessibility tree] +# --- END_AGENT_BROWSER_PAGE_CONTENT nonce= --- +``` + +### Domain Allowlist + +Restrict navigation to trusted domains. Wildcards like `*.example.com` also match the bare domain `example.com`. Sub-resource requests, WebSocket, and EventSource connections to non-allowed domains are also blocked. 
Include CDN domains your target pages depend on: + +```bash +export AGENT_BROWSER_ALLOWED_DOMAINS="example.com,*.example.com" +agent-browser open https://example.com # OK +agent-browser open https://malicious.com # Blocked +``` + +### Action Policy + +Use a policy file to gate destructive actions: + +```bash +export AGENT_BROWSER_ACTION_POLICY=./policy.json +``` + +Example `policy.json`: +```json +{"default": "deny", "allow": ["navigate", "snapshot", "click", "scroll", "wait", "get"]} +``` + +Auth vault operations (`auth login`, etc.) bypass action policy but domain allowlist still applies. + +### Output Limits + +Prevent context flooding from large pages: + +```bash +export AGENT_BROWSER_MAX_OUTPUT=50000 +``` + +## Diffing (Verifying Changes) + +Use `diff snapshot` after performing an action to verify it had the intended effect. This compares the current accessibility tree against the last snapshot taken in the session. + +```bash +# Typical workflow: snapshot -> action -> diff +agent-browser snapshot -i # Take baseline snapshot +agent-browser click @e2 # Perform action +agent-browser diff snapshot # See what changed (auto-compares to last snapshot) +``` + +For visual regression testing or monitoring: + +```bash +# Save a baseline screenshot, then compare later +agent-browser screenshot baseline.png +# ... time passes or changes are made ... +agent-browser diff screenshot --baseline baseline.png + +# Compare staging vs production +agent-browser diff url https://staging.example.com https://prod.example.com --screenshot +``` + +`diff snapshot` output uses `+` for additions and `-` for removals, similar to git diff. `diff screenshot` produces a diff image with changed pixels highlighted in red, plus a mismatch percentage. + +## Timeouts and Slow Pages + +The default Playwright timeout is 25 seconds for local browsers. This can be overridden with the `AGENT_BROWSER_DEFAULT_TIMEOUT` environment variable (value in milliseconds). 
For slow websites or large pages, use explicit waits instead of relying on the default timeout: + +```bash +# Wait for network activity to settle (best for slow pages) +agent-browser wait --load networkidle + +# Wait for a specific element to appear +agent-browser wait "#content" +agent-browser wait @e1 + +# Wait for a specific URL pattern (useful after redirects) +agent-browser wait --url "**/dashboard" + +# Wait for a JavaScript condition +agent-browser wait --fn "document.readyState === 'complete'" + +# Wait a fixed duration (milliseconds) as a last resort +agent-browser wait 5000 +``` + +When dealing with consistently slow websites, use `wait --load networkidle` after `open` to ensure the page is fully loaded before taking a snapshot. If a specific element is slow to render, wait for it directly with `wait ` or `wait @ref`. + +## Session Management and Cleanup + +When running multiple agents or automations concurrently, always use named sessions to avoid conflicts: + +```bash +# Each agent gets its own isolated session +agent-browser --session agent1 open site-a.com +agent-browser --session agent2 open site-b.com + +# Check active sessions +agent-browser session list +``` + +Always close your browser session when done to avoid leaked processes: + +```bash +agent-browser close # Close default session +agent-browser --session agent1 close # Close specific session +``` + +If a previous session was not closed properly, the daemon may still be running. Use `agent-browser close` to clean it up before starting new work. + +## Ref Lifecycle (Important) + +Refs (`@e1`, `@e2`, etc.) are invalidated when the page changes. 
Always re-snapshot after: + +- Clicking links or buttons that navigate +- Form submissions +- Dynamic content loading (dropdowns, modals) + +```bash +agent-browser click @e5 # Navigates to new page +agent-browser snapshot -i # MUST re-snapshot +agent-browser click @e1 # Use new refs +``` + +## Annotated Screenshots (Vision Mode) + +Use `--annotate` to take a screenshot with numbered labels overlaid on interactive elements. Each label `[N]` maps to ref `@eN`. This also caches refs, so you can interact with elements immediately without a separate snapshot. + +```bash +agent-browser screenshot --annotate +# Output includes the image path and a legend: +# [1] @e1 button "Submit" +# [2] @e2 link "Home" +# [3] @e3 textbox "Email" +agent-browser click @e2 # Click using ref from annotated screenshot +``` + +Use annotated screenshots when: +- The page has unlabeled icon buttons or visual-only elements +- You need to verify visual layout or styling +- Canvas or chart elements are present (invisible to text snapshots) +- You need spatial reasoning about element positions + +## Semantic Locators (Alternative to Refs) + +When refs are unavailable or unreliable, use semantic locators: + +```bash +agent-browser find text "Sign In" click +agent-browser find label "Email" fill "user@test.com" +agent-browser find role button click --name "Submit" +agent-browser find placeholder "Search" type "query" +agent-browser find testid "submit-btn" click +``` + +## JavaScript Evaluation (eval) + +Use `eval` to run JavaScript in the browser context. **Shell quoting can corrupt complex expressions** -- use `--stdin` or `-b` to avoid issues. 
+ +```bash +# Simple expressions work with regular quoting +agent-browser eval 'document.title' +agent-browser eval 'document.querySelectorAll("img").length' + +# Complex JS: use --stdin with heredoc (RECOMMENDED) +agent-browser eval --stdin <<'EVALEOF' +JSON.stringify( + Array.from(document.querySelectorAll("img")) + .filter(i => !i.alt) + .map(i => ({ src: i.src.split("/").pop(), width: i.width })) +) +EVALEOF + +# Alternative: base64 encoding (avoids all shell escaping issues) +agent-browser eval -b "$(echo -n 'Array.from(document.querySelectorAll("a")).map(a => a.href)' | base64)" +``` + +**Why this matters:** When the shell processes your command, inner double quotes, `!` characters (history expansion), backticks, and `$()` can all corrupt the JavaScript before it reaches agent-browser. The `--stdin` and `-b` flags bypass shell interpretation entirely. + +**Rules of thumb:** +- Single-line, no nested quotes -> regular `eval 'expression'` with single quotes is fine +- Nested quotes, arrow functions, template literals, or multiline -> use `eval --stdin <<'EVALEOF'` +- Programmatic/generated scripts -> use `eval -b` with base64 + +## Configuration File + +Create `agent-browser.json` in the project root for persistent settings: + +```json +{ + "headed": true, + "proxy": "http://localhost:8080", + "profile": "./browser-data" +} +``` + +Priority (lowest to highest): `~/.agent-browser/config.json` < `./agent-browser.json` < env vars < CLI flags. Use `--config ` or `AGENT_BROWSER_CONFIG` env var for a custom config file (exits with error if missing/invalid). All CLI options map to camelCase keys (e.g., `--executable-path` -> `"executablePath"`). Boolean flags accept `true`/`false` values (e.g., `--headed false` overrides config). Extensions from user and project configs are merged, not replaced. 
+ +## Deep-Dive Documentation + +| Reference | When to Use | |-----------|-------------| +| [references/commands.md](references/commands.md) | Full command reference with all options | | [references/snapshot-refs.md](references/snapshot-refs.md) | Ref lifecycle, invalidation rules, troubleshooting | | [references/session-management.md](references/session-management.md) | Parallel sessions, state persistence, concurrent scraping | | [references/authentication.md](references/authentication.md) | Login flows, OAuth, 2FA handling, state reuse | | [references/video-recording.md](references/video-recording.md) | Recording workflows for debugging and documentation | +| [references/profiling.md](references/profiling.md) | Chrome DevTools profiling for performance analysis | | [references/proxy-support.md](references/proxy-support.md) | Proxy configuration, geo-testing, rotating proxies | -## Ready-to-use templates +## Experimental: Native Mode -Executable workflow scripts for common patterns: +agent-browser has an experimental native Rust daemon that communicates with Chrome directly via CDP, bypassing Node.js and Playwright entirely. It is opt-in and not recommended for production use yet. + +```bash +# Enable via flag +agent-browser --native open example.com + +# Enable via environment variable (avoids passing --native every time) +export AGENT_BROWSER_NATIVE=1 +agent-browser open example.com +``` + +The native daemon supports Chromium and Safari (via WebDriver). Firefox and WebKit are not yet supported. All core commands (navigate, snapshot, click, fill, screenshot, cookies, storage, tabs, eval, etc.) work identically in native mode. Use `agent-browser close` before switching between native and default mode within the same session. + +## Browser Engine Selection + +Use `--engine` to choose a local browser engine. The default is `chrome`. 
+ +```bash +# Use Lightpanda (fast headless browser, requires separate install) +agent-browser --engine lightpanda open example.com + +# Via environment variable +export AGENT_BROWSER_ENGINE=lightpanda +agent-browser open example.com + +# With custom binary path +agent-browser --engine lightpanda --executable-path /path/to/lightpanda open example.com +``` + +Supported engines: +- `chrome` (default) -- Chrome/Chromium via CDP +- `lightpanda` -- Lightpanda headless browser via CDP (10x faster, 10x less memory than Chrome) + +Lightpanda does not support `--extension`, `--profile`, `--state`, or `--allow-file-access`. Install Lightpanda from https://lightpanda.io/docs/open-source/installation. + +## Ready-to-Use Templates | Template | Description | |----------|-------------| @@ -341,16 +532,8 @@ Executable workflow scripts for common patterns: | [templates/authenticated-session.sh](templates/authenticated-session.sh) | Login once, reuse state | | [templates/capture-workflow.sh](templates/capture-workflow.sh) | Content extraction with screenshots | -Usage: ```bash ./templates/form-automation.sh https://example.com/form ./templates/authenticated-session.sh https://app.example.com/login ./templates/capture-workflow.sh https://example.com ./output ``` - -## HTTPS Certificate Errors - -For sites with self-signed or invalid certificates: -```bash -agent-browser open https://localhost:8443 --ignore-https-errors -``` diff --git a/.agents/skills/agent-browser/references/authentication.md b/.agents/skills/agent-browser/references/authentication.md index 5d801f6..12ef5e4 100644 --- a/.agents/skills/agent-browser/references/authentication.md +++ b/.agents/skills/agent-browser/references/authentication.md @@ -1,6 +1,20 @@ # Authentication Patterns -Patterns for handling login flows, session persistence, and authenticated browsing. +Login flows, session persistence, OAuth, 2FA, and authenticated browsing. 
+ +**Related**: [session-management.md](session-management.md) for state persistence details, [SKILL.md](../SKILL.md) for quick start. + +## Contents + +- [Basic Login Flow](#basic-login-flow) +- [Saving Authentication State](#saving-authentication-state) +- [Restoring Authentication](#restoring-authentication) +- [OAuth / SSO Flows](#oauth--sso-flows) +- [Two-Factor Authentication](#two-factor-authentication) +- [HTTP Basic Auth](#http-basic-auth) +- [Cookie-Based Auth](#cookie-based-auth) +- [Token Refresh Handling](#token-refresh-handling) +- [Security Best Practices](#security-best-practices) ## Basic Login Flow diff --git a/.agents/skills/agent-browser/references/proxy-support.md b/.agents/skills/agent-browser/references/proxy-support.md index 05fcec2..e86a8fe 100644 --- a/.agents/skills/agent-browser/references/proxy-support.md +++ b/.agents/skills/agent-browser/references/proxy-support.md @@ -1,13 +1,29 @@ # Proxy Support -Configure proxy servers for browser automation, useful for geo-testing, rate limiting avoidance, and corporate environments. +Proxy configuration for geo-testing, rate limiting avoidance, and corporate environments. + +**Related**: [commands.md](commands.md) for global options, [SKILL.md](../SKILL.md) for quick start. 
+ +## Contents + +- [Basic Proxy Configuration](#basic-proxy-configuration) +- [Authenticated Proxy](#authenticated-proxy) +- [SOCKS Proxy](#socks-proxy) +- [Proxy Bypass](#proxy-bypass) +- [Common Use Cases](#common-use-cases) +- [Verifying Proxy Connection](#verifying-proxy-connection) +- [Troubleshooting](#troubleshooting) +- [Best Practices](#best-practices) ## Basic Proxy Configuration -Set proxy via environment variable before starting: +Use the `--proxy` flag or set proxy via environment variable: ```bash -# HTTP proxy +# Via CLI flag +agent-browser --proxy "http://proxy.example.com:8080" open https://example.com + +# Via environment variable export HTTP_PROXY="http://proxy.example.com:8080" agent-browser open https://example.com @@ -45,10 +61,13 @@ agent-browser open https://example.com ## Proxy Bypass -Skip proxy for specific domains: +Skip proxy for specific domains using `--proxy-bypass` or `NO_PROXY`: ```bash -# Bypass proxy for local addresses +# Via CLI flag +agent-browser --proxy "http://proxy.example.com:8080" --proxy-bypass "localhost,*.internal.com" open https://example.com + +# Via environment variable export NO_PROXY="localhost,127.0.0.1,.internal.company.com" agent-browser open https://internal.company.com # Direct connection agent-browser open https://external.com # Via proxy diff --git a/.agents/skills/agent-browser/references/session-management.md b/.agents/skills/agent-browser/references/session-management.md index cfc3362..bb5312d 100644 --- a/.agents/skills/agent-browser/references/session-management.md +++ b/.agents/skills/agent-browser/references/session-management.md @@ -1,6 +1,18 @@ # Session Management -Run multiple isolated browser sessions concurrently with state persistence. +Multiple isolated browser sessions with state persistence and concurrent browsing. + +**Related**: [authentication.md](authentication.md) for login patterns, [SKILL.md](../SKILL.md) for quick start. 
+ +## Contents + +- [Named Sessions](#named-sessions) +- [Session Isolation Properties](#session-isolation-properties) +- [Session State Persistence](#session-state-persistence) +- [Common Patterns](#common-patterns) +- [Default Session](#default-session) +- [Session Cleanup](#session-cleanup) +- [Best Practices](#best-practices) ## Named Sessions diff --git a/.agents/skills/agent-browser/references/snapshot-refs.md b/.agents/skills/agent-browser/references/snapshot-refs.md index 0b17a4d..c5868d5 100644 --- a/.agents/skills/agent-browser/references/snapshot-refs.md +++ b/.agents/skills/agent-browser/references/snapshot-refs.md @@ -1,21 +1,29 @@ -# Snapshot + Refs Workflow +# Snapshot and Refs -The core innovation of agent-browser: compact element references that reduce context usage dramatically for AI agents. +Compact element references that reduce context usage dramatically for AI agents. -## How It Works +**Related**: [commands.md](commands.md) for full command reference, [SKILL.md](../SKILL.md) for quick start. 
-### The Problem -Traditional browser automation sends full DOM to AI agents: +## Contents + +- [How Refs Work](#how-refs-work) +- [Snapshot Command](#the-snapshot-command) +- [Using Refs](#using-refs) +- [Ref Lifecycle](#ref-lifecycle) +- [Best Practices](#best-practices) +- [Ref Notation Details](#ref-notation-details) +- [Troubleshooting](#troubleshooting) + +## How Refs Work + +Traditional approach: ``` -Full DOM/HTML sent → AI parses → Generates CSS selector → Executes action -~3000-5000 tokens per interaction +Full DOM/HTML → AI parses → CSS selector → Action (~3000-5000 tokens) ``` -### The Solution -agent-browser uses compact snapshots with refs: +agent-browser approach: ``` -Compact snapshot → @refs assigned → Direct ref interaction -~200-400 tokens per interaction +Compact snapshot → @refs assigned → Direct interaction (~200-400 tokens) ``` ## The Snapshot Command @@ -166,8 +174,8 @@ agent-browser snapshot -i ### Element Not Visible in Snapshot ```bash -# Scroll to reveal element -agent-browser scroll --bottom +# Scroll down to reveal element +agent-browser scroll down 1000 agent-browser snapshot -i # Or wait for dynamic content diff --git a/.agents/skills/agent-browser/references/video-recording.md b/.agents/skills/agent-browser/references/video-recording.md index 98e6b0a..e6a9fb4 100644 --- a/.agents/skills/agent-browser/references/video-recording.md +++ b/.agents/skills/agent-browser/references/video-recording.md @@ -1,6 +1,17 @@ # Video Recording -Capture browser automation sessions as video for debugging, documentation, or verification. +Capture browser automation as video for debugging, documentation, or verification. + +**Related**: [commands.md](commands.md) for full command reference, [SKILL.md](../SKILL.md) for quick start. 
+ +## Contents + +- [Basic Recording](#basic-recording) +- [Recording Commands](#recording-commands) +- [Use Cases](#use-cases) +- [Best Practices](#best-practices) +- [Output Format](#output-format) +- [Limitations](#limitations) ## Basic Recording diff --git a/.agents/skills/agent-browser/templates/authenticated-session.sh b/.agents/skills/agent-browser/templates/authenticated-session.sh index e44aaad..b66c928 100755 --- a/.agents/skills/agent-browser/templates/authenticated-session.sh +++ b/.agents/skills/agent-browser/templates/authenticated-session.sh @@ -1,67 +1,81 @@ #!/bin/bash # Template: Authenticated Session Workflow -# Login once, save state, reuse for subsequent runs +# Purpose: Login once, save state, reuse for subsequent runs +# Usage: ./authenticated-session.sh [state-file] # -# Usage: -# ./authenticated-session.sh [state-file] +# RECOMMENDED: Use the auth vault instead of this template: +# echo "" | agent-browser auth save myapp --url --username --password-stdin +# agent-browser auth login myapp +# The auth vault stores credentials securely and the LLM never sees passwords. # -# Setup: -# 1. Run once to see your form structure -# 2. Note the @refs for your fields -# 3. Uncomment LOGIN FLOW section and update refs +# Environment variables: +# APP_USERNAME - Login username/email +# APP_PASSWORD - Login password +# +# Two modes: +# 1. Discovery mode (default): Shows form structure so you can identify refs +# 2. Login mode: Performs actual login after you update the refs +# +# Setup steps: +# 1. Run once to see form structure (discovery mode) +# 2. Update refs in LOGIN FLOW section below +# 3. Set APP_USERNAME and APP_PASSWORD +# 4. 
Delete the DISCOVERY section set -euo pipefail LOGIN_URL="${1:?Usage: $0 [state-file]}" STATE_FILE="${2:-./auth-state.json}" -echo "Authentication workflow for: $LOGIN_URL" +echo "Authentication workflow: $LOGIN_URL" -# ══════════════════════════════════════════════════════════════ -# SAVED STATE: Skip login if we have valid saved state -# ══════════════════════════════════════════════════════════════ +# ================================================================ +# SAVED STATE: Skip login if valid saved state exists +# ================================================================ if [[ -f "$STATE_FILE" ]]; then - echo "Loading saved authentication state..." - agent-browser state load "$STATE_FILE" - agent-browser open "$LOGIN_URL" - agent-browser wait --load networkidle + echo "Loading saved state from $STATE_FILE..." + if agent-browser --state "$STATE_FILE" open "$LOGIN_URL" 2>/dev/null; then + agent-browser wait --load networkidle - CURRENT_URL=$(agent-browser get url) - if [[ "$CURRENT_URL" != *"login"* ]] && [[ "$CURRENT_URL" != *"signin"* ]]; then - echo "Session restored successfully!" - agent-browser snapshot -i - exit 0 + CURRENT_URL=$(agent-browser get url) + if [[ "$CURRENT_URL" != *"login"* ]] && [[ "$CURRENT_URL" != *"signin"* ]]; then + echo "Session restored successfully" + agent-browser snapshot -i + exit 0 + fi + echo "Session expired, performing fresh login..." + agent-browser close 2>/dev/null || true + else + echo "Failed to load state, re-authenticating..." fi - echo "Session expired, performing fresh login..." 
rm -f "$STATE_FILE" fi -# ══════════════════════════════════════════════════════════════ -# DISCOVERY MODE: Show form structure (remove after setup) -# ══════════════════════════════════════════════════════════════ +# ================================================================ +# DISCOVERY MODE: Shows form structure (delete after setup) +# ================================================================ echo "Opening login page..." agent-browser open "$LOGIN_URL" agent-browser wait --load networkidle echo "" -echo "┌─────────────────────────────────────────────────────────┐" -echo "│ LOGIN FORM STRUCTURE │" -echo "├─────────────────────────────────────────────────────────┤" +echo "Login form structure:" +echo "---" agent-browser snapshot -i -echo "└─────────────────────────────────────────────────────────┘" +echo "---" echo "" echo "Next steps:" -echo " 1. Note refs: @e? = username, @e? = password, @e? = submit" -echo " 2. Uncomment LOGIN FLOW section below" -echo " 3. Replace @e1, @e2, @e3 with your refs" +echo " 1. Note the refs: username=@e?, password=@e?, submit=@e?" +echo " 2. Update the LOGIN FLOW section below with your refs" +echo " 3. Set: export APP_USERNAME='...' APP_PASSWORD='...'" echo " 4. 
Delete this DISCOVERY MODE section" echo "" agent-browser close exit 0 -# ══════════════════════════════════════════════════════════════ +# ================================================================ # LOGIN FLOW: Uncomment and customize after discovery -# ══════════════════════════════════════════════════════════════ +# ================================================================ # : "${APP_USERNAME:?Set APP_USERNAME environment variable}" # : "${APP_PASSWORD:?Set APP_PASSWORD environment variable}" # @@ -78,14 +92,14 @@ exit 0 # # Verify login succeeded # FINAL_URL=$(agent-browser get url) # if [[ "$FINAL_URL" == *"login"* ]] || [[ "$FINAL_URL" == *"signin"* ]]; then -# echo "ERROR: Login failed - still on login page" +# echo "Login failed - still on login page" # agent-browser screenshot /tmp/login-failed.png # agent-browser close # exit 1 # fi # # # Save state for future runs -# echo "Saving authentication state to: $STATE_FILE" +# echo "Saving state to $STATE_FILE" # agent-browser state save "$STATE_FILE" -# echo "Login successful!" 
+# echo "Login successful" # agent-browser snapshot -i diff --git a/.agents/skills/agent-browser/templates/capture-workflow.sh b/.agents/skills/agent-browser/templates/capture-workflow.sh index a4eae75..3bc93ad 100755 --- a/.agents/skills/agent-browser/templates/capture-workflow.sh +++ b/.agents/skills/agent-browser/templates/capture-workflow.sh @@ -1,68 +1,69 @@ #!/bin/bash # Template: Content Capture Workflow -# Extract content from web pages with optional authentication +# Purpose: Extract content from web pages (text, screenshots, PDF) +# Usage: ./capture-workflow.sh [output-dir] +# +# Outputs: +# - page-full.png: Full page screenshot +# - page-structure.txt: Page element structure with refs +# - page-text.txt: All text content +# - page.pdf: PDF version +# +# Optional: Load auth state for protected pages set -euo pipefail TARGET_URL="${1:?Usage: $0 [output-dir]}" OUTPUT_DIR="${2:-.}" -echo "Capturing content from: $TARGET_URL" +echo "Capturing: $TARGET_URL" mkdir -p "$OUTPUT_DIR" -# Optional: Load authentication state if needed +# Optional: Load authentication state # if [[ -f "./auth-state.json" ]]; then +# echo "Loading authentication state..." 
# agent-browser state load "./auth-state.json" # fi -# Navigate to target page +# Navigate to target agent-browser open "$TARGET_URL" agent-browser wait --load networkidle -# Get page metadata -echo "Page title: $(agent-browser get title)" -echo "Page URL: $(agent-browser get url)" +# Get metadata +TITLE=$(agent-browser get title) +URL=$(agent-browser get url) +echo "Title: $TITLE" +echo "URL: $URL" # Capture full page screenshot agent-browser screenshot --full "$OUTPUT_DIR/page-full.png" -echo "Screenshot saved: $OUTPUT_DIR/page-full.png" +echo "Saved: $OUTPUT_DIR/page-full.png" -# Get page structure +# Get page structure with refs agent-browser snapshot -i > "$OUTPUT_DIR/page-structure.txt" -echo "Structure saved: $OUTPUT_DIR/page-structure.txt" +echo "Saved: $OUTPUT_DIR/page-structure.txt" -# Extract main content -# Adjust selector based on target site structure -# agent-browser get text @e1 > "$OUTPUT_DIR/main-content.txt" - -# Extract specific elements (uncomment as needed) -# agent-browser get text "article" > "$OUTPUT_DIR/article.txt" -# agent-browser get text "main" > "$OUTPUT_DIR/main.txt" -# agent-browser get text ".content" > "$OUTPUT_DIR/content.txt" - -# Get full page text +# Extract all text content agent-browser get text body > "$OUTPUT_DIR/page-text.txt" -echo "Text content saved: $OUTPUT_DIR/page-text.txt" +echo "Saved: $OUTPUT_DIR/page-text.txt" -# Optional: Save as PDF +# Save as PDF agent-browser pdf "$OUTPUT_DIR/page.pdf" -echo "PDF saved: $OUTPUT_DIR/page.pdf" +echo "Saved: $OUTPUT_DIR/page.pdf" -# Optional: Capture with scrolling for infinite scroll pages -# scroll_and_capture() { -# local count=0 -# while [[ $count -lt 5 ]]; do -# agent-browser scroll down 1000 -# agent-browser wait 1000 -# ((count++)) -# done -# agent-browser screenshot --full "$OUTPUT_DIR/page-scrolled.png" -# } -# scroll_and_capture +# Optional: Extract specific elements using refs from structure +# agent-browser get text @e5 > "$OUTPUT_DIR/main-content.txt" + +# 
Optional: Handle infinite scroll pages +# for i in {1..5}; do +# agent-browser scroll down 1000 +# agent-browser wait 1000 +# done +# agent-browser screenshot --full "$OUTPUT_DIR/page-scrolled.png" # Cleanup agent-browser close echo "" -echo "Capture complete! Files saved to: $OUTPUT_DIR" +echo "Capture complete:" ls -la "$OUTPUT_DIR" diff --git a/.agents/skills/agent-browser/templates/form-automation.sh b/.agents/skills/agent-browser/templates/form-automation.sh index 02a7c81..6784fcd 100755 --- a/.agents/skills/agent-browser/templates/form-automation.sh +++ b/.agents/skills/agent-browser/templates/form-automation.sh @@ -1,64 +1,62 @@ #!/bin/bash # Template: Form Automation Workflow -# Fills and submits web forms with validation +# Purpose: Fill and submit web forms with validation +# Usage: ./form-automation.sh +# +# This template demonstrates the snapshot-interact-verify pattern: +# 1. Navigate to form +# 2. Snapshot to get element refs +# 3. Fill fields using refs +# 4. Submit and verify result +# +# Customize: Update the refs (@e1, @e2, etc.) based on your form's snapshot output set -euo pipefail FORM_URL="${1:?Usage: $0 }" -echo "Automating form at: $FORM_URL" +echo "Form automation: $FORM_URL" -# Navigate to form page +# Step 1: Navigate to form agent-browser open "$FORM_URL" agent-browser wait --load networkidle -# Get interactive snapshot to identify form fields -echo "Analyzing form structure..." 
+# Step 2: Snapshot to discover form elements +echo "" +echo "Form structure:" agent-browser snapshot -i -# Example: Fill common form fields -# Uncomment and modify refs based on snapshot output +# Step 3: Fill form fields (customize these refs based on snapshot output) +# +# Common field types: +# agent-browser fill @e1 "John Doe" # Text input +# agent-browser fill @e2 "user@example.com" # Email input +# agent-browser fill @e3 "SecureP@ss123" # Password input +# agent-browser select @e4 "Option Value" # Dropdown +# agent-browser check @e5 # Checkbox +# agent-browser click @e6 # Radio button +# agent-browser fill @e7 "Multi-line text" # Textarea +# agent-browser upload @e8 /path/to/file.pdf # File upload +# +# Uncomment and modify: +# agent-browser fill @e1 "Test User" +# agent-browser fill @e2 "test@example.com" +# agent-browser click @e3 # Submit button -# Text inputs -# agent-browser fill @e1 "John Doe" # Name field -# agent-browser fill @e2 "user@example.com" # Email field -# agent-browser fill @e3 "+1-555-123-4567" # Phone field - -# Password fields -# agent-browser fill @e4 "SecureP@ssw0rd!" 
- -# Dropdowns -# agent-browser select @e5 "Option Value" - -# Checkboxes -# agent-browser check @e6 # Check -# agent-browser uncheck @e7 # Uncheck - -# Radio buttons -# agent-browser click @e8 # Select radio option - -# Text areas -# agent-browser fill @e9 "Multi-line text content here" - -# File uploads -# agent-browser upload @e10 /path/to/file.pdf - -# Submit form -# agent-browser click @e11 # Submit button - -# Wait for response +# Step 4: Wait for submission # agent-browser wait --load networkidle -# agent-browser wait --url "**/success" # Or wait for redirect +# agent-browser wait --url "**/success" # Or wait for redirect -# Verify submission -echo "Form submission result:" +# Step 5: Verify result +echo "" +echo "Result:" agent-browser get url agent-browser snapshot -i -# Take screenshot of result +# Optional: Capture evidence agent-browser screenshot /tmp/form-result.png +echo "Screenshot saved: /tmp/form-result.png" # Cleanup agent-browser close - -echo "Form automation complete" +echo "Done" diff --git a/.claude/commands/post-release-testing.md b/.claude/commands/post-release-testing.md index 3622dad..10cf6ff 100644 --- a/.claude/commands/post-release-testing.md +++ b/.claude/commands/post-release-testing.md @@ -43,7 +43,7 @@ Manually verify the install script works in a fresh environment: ```bash docker run --rm alpine:latest sh -c " apk add --no-cache curl ca-certificates libstdc++ libgcc bash && - curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh && + curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh && sandbox-agent --version " ``` diff --git a/.claude/commands/release.md b/.claude/commands/release.md new file mode 100644 index 0000000..487c019 --- /dev/null +++ b/.claude/commands/release.md @@ -0,0 +1,165 @@ +# Release Agent + +You are a release agent for the Gigacode project (sandbox-agent). 
Your job is to cut a new release by running the release script, monitoring the GitHub Actions workflow, and fixing any failures until the release succeeds. + +## Step 1: Gather Release Information + +Ask the user what type of release they want to cut: + +- **patch** - Bug fixes (e.g., 0.1.8 -> 0.1.9) +- **minor** - New features (e.g., 0.1.8 -> 0.2.0) +- **major** - Breaking changes (e.g., 0.1.8 -> 1.0.0) +- **rc** - Release candidate (e.g., 0.2.0-rc.1) + +For **rc** releases, also ask: +1. What base version the RC is for (e.g., 0.2.0). If the user doesn't specify, determine it by bumping the minor version from the current version. +2. What RC number (e.g., 1, 2, 3). If the user doesn't specify, check existing git tags to auto-determine the next RC number: + +```bash +git tag -l "v-rc.*" | sort -V +``` + +If no prior RC tags exist for that base version, use `rc.1`. Otherwise, increment the highest existing RC number. + +The final RC version string is `-rc.` (e.g., `0.2.0-rc.1`). + +## Step 2: Confirm Release Details + +Before proceeding, display the release details to the user and ask for explicit confirmation: + +- Current version (read from `Cargo.toml` workspace.package.version) +- New version +- Current branch +- Whether it will be tagged as "latest" (RC releases are never tagged as latest) + +Do NOT proceed without user confirmation. + +## Step 3: Run the Release Script (Setup Local) + +The release script handles version bumping, local checks, committing, pushing, and triggering the workflow. + +For **major**, **minor**, or **patch** releases: + +```bash +echo "yes" | ./scripts/release/main.ts -- --phase setup-local +``` + +For **rc** releases (using explicit version): + +```bash +echo "yes" | ./scripts/release/main.ts --version --phase setup-local +``` + +Where `` is `major`, `minor`, or `patch`, and `` is the full RC version string like `0.2.0-rc.1`. + +The `--phase setup-local` runs these steps in order: +1. 
Confirms release details (interactive prompt - piping "yes" handles this) +2. Updates version in all files (Cargo.toml, package.json files) +3. Runs local checks (cargo check, cargo fmt, pnpm typecheck) +4. Git commits with message `chore(release): update version to X.Y.Z` +5. Git pushes +6. Triggers the GitHub Actions workflow + +If local checks fail at step 3, fix the issues in the codebase, then re-run using `--only-steps` to avoid re-running already-completed steps: + +```bash +echo "yes" | ./scripts/release/main.ts --version --only-steps run-local-checks,git-commit,git-push,trigger-workflow +``` + +## Step 4: Monitor the GitHub Actions Workflow + +After the workflow is triggered, wait 5 seconds for it to register, then begin polling. + +### Find the workflow run + +```bash +gh run list --workflow=release.yaml --limit=1 --json databaseId,status,conclusion,createdAt,url +``` + +Verify the run was created recently (within the last 2 minutes) to confirm you are monitoring the correct run. Save the `databaseId` as the run ID. + +### Poll for completion + +Poll every 15 seconds using: + +```bash +gh run view --json status,conclusion +``` + +Report progress to the user periodically (every ~60 seconds or when status changes). The status values are: +- `queued` / `in_progress` / `waiting` - Still running, keep polling +- `completed` - Done, check `conclusion` + +When `status` is `completed`, check `conclusion`: +- `success` - Release succeeded! Proceed to Step 6. +- `failure` - Proceed to Step 5. +- `cancelled` - Inform the user and stop. + +## Step 5: Handle Workflow Failures + +If the workflow fails: + +### 5a. Get failure logs + +```bash +gh run view --log-failed +``` + +### 5b. Analyze the error + +Read the failure logs carefully. 
Common failure categories: +- **Build failures** (cargo build, TypeScript compilation) - Fix the code +- **Formatting issues** (cargo fmt) - Run `cargo fmt` and commit +- **Test failures** - Fix the failing tests +- **Publishing failures** (crates.io, npm) - These may be transient; check if retry will help +- **Docker build failures** - Check Dockerfile or build script issues +- **Infrastructure/transient failures** (network timeouts, rate limits) - Just re-trigger without code changes + +### 5c. Fix and re-push + +If a code fix is needed: +1. Make the fix in the codebase +2. Amend the release commit (since the release version commit is the most recent): + +```bash +git add -A +git commit --amend --no-edit +git push --force-with-lease +``` + +IMPORTANT: Use `--force-with-lease` (not `--force`) for safety. Amend the commit rather than creating a new one so the release stays as a single version-bump commit. + +3. Re-trigger the workflow: + +```bash +gh workflow run .github/workflows/release.yaml \ + -f version= \ + -f latest= \ + --ref +``` + +Where `` is the current branch (usually `main`). Set `latest` to `false` for RC releases, `true` for stable releases that are newer than the current latest tag. + +4. Return to Step 4 to monitor the new run. + +If no code fix is needed (transient failure), skip straight to re-triggering the workflow (step 3 above). + +### 5d. Retry limit + +If the workflow has failed **5 times**, stop and report all errors to the user. Ask whether they want to continue retrying or abort the release. Do not retry infinitely. + +## Step 6: Report Success + +When the workflow completes successfully: +1. Print the GitHub Actions run URL +2. Print the new version number +3. Suggest running post-release testing: "Run `/project:post-release-testing` to verify the release works correctly." + +## Important Notes + +- The product name is "Gigacode" (capital G, lowercase c). The CLI binary is `gigacode` (lowercase). 
+- Do not include co-authors in any commit messages. +- Use conventional commits style (e.g., `chore(release): update version to X.Y.Z`). +- Keep commit messages to a single line. +- The release script requires `tsx` to run (it's a TypeScript file with a shebang). +- Always work on the current branch. Releases are typically cut from `main`. diff --git a/.dockerignore b/.dockerignore index 96880e9..4ba2cf3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,16 +4,20 @@ dist/ build/ # Dependencies -node_modules/ +**/node_modules/ # Cache .cache/ .turbo/ +**/.turbo/ *.tsbuildinfo +.pnpm-store/ +coverage/ # Environment .env .env.* +.foundry/ # IDE .idea/ @@ -24,3 +28,7 @@ node_modules/ # Git .git/ + +# Tests +**/test/ +**/tests/ diff --git a/.env.development.example b/.env.development.example new file mode 100644 index 0000000..0ae0f58 --- /dev/null +++ b/.env.development.example @@ -0,0 +1,34 @@ +# Foundry local development environment. +# Copy ~/misc/the-foundry.env to .env in the repo root to populate secrets. +# .env is gitignored — never commit it. The source of truth is ~/misc/the-foundry.env. +# +# Docker Compose (just foundry-dev) and the justfile (set dotenv-load := true) +# both read .env automatically. + +APP_URL=http://localhost:4173 +BETTER_AUTH_URL=http://localhost:4173 +BETTER_AUTH_SECRET=sandbox-agent-foundry-development-only-change-me +GITHUB_REDIRECT_URI=http://localhost:4173/v1/auth/callback/github + +# Fill these in when enabling live GitHub OAuth. +GITHUB_CLIENT_ID= +GITHUB_CLIENT_SECRET= + +# Fill these in when enabling GitHub App-backed org installation and repo import. +GITHUB_APP_ID= +GITHUB_APP_CLIENT_ID= +GITHUB_APP_CLIENT_SECRET= +# Store PEM material as a quoted single-line value with \n escapes. +GITHUB_APP_PRIVATE_KEY= +# Webhook secret for verifying GitHub webhook payloads. +# Use smee.io for local development: https://smee.io/new +GITHUB_WEBHOOK_SECRET= +# Required for local GitHub webhook forwarding in compose.dev. 
+SMEE_URL= +SMEE_TARGET=http://backend:7741/v1/webhooks/github + +# Fill these in when enabling live Stripe billing. +STRIPE_SECRET_KEY= +STRIPE_PUBLISHABLE_KEY= +STRIPE_WEBHOOK_SECRET= +STRIPE_PRICE_TEAM= diff --git a/.github/media/gigacode-header.jpeg b/.github/media/gigacode-header.jpeg new file mode 100644 index 0000000..4708249 Binary files /dev/null and b/.github/media/gigacode-header.jpeg differ diff --git a/.github/media/inspector.png b/.github/media/inspector.png index 1c16ed2..02e588e 100644 Binary files a/.github/media/inspector.png and b/.github/media/inspector.png differ diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 638ff8e..85f828d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,6 +11,8 @@ jobs: runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy @@ -21,5 +23,43 @@ jobs: node-version: 20 cache: pnpm - run: pnpm install + - name: Run formatter hooks + shell: bash + run: | + if [ "${{ github.event_name }}" = "pull_request" ]; then + git fetch origin "${{ github.base_ref }}" --depth=1 + diff_range="origin/${{ github.base_ref }}...HEAD" + elif [ "${{ github.event_name }}" = "push" ] && [ "${{ github.event.before }}" != "0000000000000000000000000000000000000000" ]; then + diff_range="${{ github.event.before }}...${{ github.sha }}" + else + diff_range="HEAD^...HEAD" + fi + + mapfile -t changed_files < <( + git diff --name-only --diff-filter=ACMR "$diff_range" \ + | grep -E '\.(cjs|cts|js|jsx|json|jsonc|mjs|mts|rs|ts|tsx)$' \ + || true + ) + + if [ ${#changed_files[@]} -eq 0 ]; then + echo "No formatter-managed files changed." 
+ exit 0 + fi + + args=() + for file in "${changed_files[@]}"; do + args+=(--file "$file") + done + + pnpm exec lefthook run pre-commit --no-stage-fixed --fail-on-changes "${args[@]}" + - run: npm install -g tsx - name: Run checks - run: ./scripts/release/main.ts --version 0.0.0 --check + run: ./scripts/release/main.ts --version 0.0.0 --only-steps run-ci-checks + - name: Run ACP v1 server tests + run: | + cargo test -p sandbox-agent-agent-management + cargo test -p sandbox-agent --test v1_api + cargo test -p sandbox-agent --test v1_agent_process_matrix + cargo test -p sandbox-agent --lib + - name: Run SDK tests + run: pnpm --dir sdks/typescript test diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 76c5b31..34fb64a 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -89,6 +89,7 @@ jobs: needs: [setup] if: ${{ !inputs.reuse_engine_version }} strategy: + fail-fast: false matrix: include: - platform: linux @@ -96,6 +97,11 @@ jobs: target: x86_64-unknown-linux-musl binary_ext: "" arch: x86_64 + - platform: linux + runner: depot-ubuntu-24.04-arm-8 + target: aarch64-unknown-linux-musl + binary_ext: "" + arch: aarch64 - platform: windows runner: depot-ubuntu-24.04-8 target: x86_64-pc-windows-gnu @@ -141,12 +147,13 @@ jobs: sudo apt-get install -y unzip curl # Install AWS CLI - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" - unzip awscliv2.zip + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscli.zip" + unzip awscli.zip sudo ./aws/install --update COMMIT_SHA_SHORT="${GITHUB_SHA::7}" BINARY_PATH="dist/sandbox-agent-${{ matrix.target }}${{ matrix.binary_ext }}" + GIGACODE_PATH="dist/gigacode-${{ matrix.target }}${{ matrix.binary_ext }}" # Must specify --checksum-algorithm for compatibility with R2 aws s3 cp \ @@ -156,19 +163,37 @@ jobs: --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \ --checksum-algorithm CRC32 + aws s3 
cp \ + "${GIGACODE_PATH}" \ + "s3://rivet-releases/sandbox-agent/${COMMIT_SHA_SHORT}/binaries/gigacode-${{ matrix.target }}${{ matrix.binary_ext }}" \ + --region auto \ + --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \ + --checksum-algorithm CRC32 + docker: name: "Build & Push Docker Images" needs: [setup] if: ${{ !inputs.reuse_engine_version }} strategy: + fail-fast: false matrix: include: - platform: linux/arm64 runner: depot-ubuntu-24.04-arm-8 - arch_suffix: -arm64 + tag_suffix: -arm64 + dockerfile: docker/runtime/Dockerfile - platform: linux/amd64 runner: depot-ubuntu-24.04-8 - arch_suffix: -amd64 + tag_suffix: -amd64 + dockerfile: docker/runtime/Dockerfile + - platform: linux/arm64 + runner: depot-ubuntu-24.04-arm-8 + tag_suffix: -full-arm64 + dockerfile: docker/runtime/Dockerfile.full + - platform: linux/amd64 + runner: depot-ubuntu-24.04-8 + tag_suffix: -full-amd64 + dockerfile: docker/runtime/Dockerfile.full runs-on: ${{ matrix.runner }} steps: - uses: actions/checkout@v4 @@ -190,8 +215,8 @@ jobs: with: context: . 
push: true - tags: rivetdev/sandbox-agent:${{ steps.vars.outputs.sha_short }}${{ matrix.arch_suffix }} - file: docker/runtime/Dockerfile + tags: rivetdev/sandbox-agent:${{ steps.vars.outputs.sha_short }}${{ matrix.tag_suffix }} + file: ${{ matrix.dockerfile }} platforms: ${{ matrix.platform }} build-args: | TARGETARCH=${{ contains(matrix.platform, 'arm64') && 'arm64' || 'amd64' }} diff --git a/.github/workflows/skill-generator.yml b/.github/workflows/skill-generator.yml index 0f220f8..f9a81ec 100644 --- a/.github/workflows/skill-generator.yml +++ b/.github/workflows/skill-generator.yml @@ -20,17 +20,25 @@ jobs: - name: Sync to skills repo env: - SKILLS_REPO_TOKEN: ${{ secrets.RIVET_GITHUB_PAT }} + GH_TOKEN: ${{ secrets.RIVET_GITHUB_PAT }} run: | - if [ -z "$SKILLS_REPO_TOKEN" ]; then - echo "SKILLS_REPO_TOKEN is not set" >&2 + if [ -z "$GH_TOKEN" ]; then + echo "::error::RIVET_GITHUB_PAT secret is not set" + exit 1 + fi + + # Validate token before proceeding + if ! gh auth status 2>/dev/null; then + echo "::error::RIVET_GITHUB_PAT is invalid or expired. 
Rotate the token at https://github.com/settings/tokens" exit 1 fi git config --global user.name "github-actions[bot]" git config --global user.email "github-actions[bot]@users.noreply.github.com" - git clone "https://x-access-token:${SKILLS_REPO_TOKEN}@github.com/rivet-dev/skills.git" /tmp/rivet-skills + # Clone public repo, configure auth via gh credential helper + gh auth setup-git + git clone https://github.com/rivet-dev/skills.git /tmp/rivet-skills mkdir -p /tmp/rivet-skills/skills/sandbox-agent rm -rf /tmp/rivet-skills/skills/sandbox-agent/* diff --git a/.gitignore b/.gitignore index e983e76..de4d863 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,9 @@ yarn.lock .astro/ *.tsbuildinfo .turbo/ +**/.turbo/ +.pnpm-store/ +coverage/ # Environment .env @@ -40,5 +43,20 @@ npm-debug.log* Cargo.lock **/*.rs.bk +# Agent runtime directories +.agents/ +.claude/ +.opencode/ + +# Example temp files +.tmp-upload/ +*.db +.foundry/ + # CLI binaries (downloaded during npm publish) sdks/cli/platforms/*/bin/ + +# Foundry desktop app build artifacts +foundry/packages/desktop/frontend-dist/ +foundry/packages/desktop/src-tauri/sidecars/ +.context/ diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 0000000..7bae219 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "everything": { + "args": ["@modelcontextprotocol/server-everything"], + "command": "npx" + } + } +} diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000..f301fed --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +auto-install-peers=false diff --git a/CLAUDE.md b/CLAUDE.md index dfcafd2..248f075 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,75 +1,80 @@ # Instructions -## SDK Modes +## Naming and Ownership -There are two ways to work with the SDKs: +- This repository/product is **Sandbox Agent**. +- **Gigacode** is a separate user-facing UI/client, not the server product name. 
+- Gigacode integrates with Sandbox Agent via the OpenCode-compatible surface (`/opencode/*`) when that compatibility layer is enabled. +- Canonical extension namespace/domain string is `sandboxagent.dev` (no hyphen). +- Canonical custom ACP extension method prefix is `_sandboxagent/...` (no hyphen). -- **Embedded**: Spawns the `sandbox-agent` server as a subprocess on a unique port and communicates with it locally. Useful for local development or when running the SDK and agent in the same environment. -- **Server**: Connects to a remotely running `sandbox-agent` server. The server is typically running inside a sandbox (e.g., Docker, E2B, Daytona, Vercel Sandboxes) and the SDK connects to it over HTTP. +## Docs Terminology -## Agent Schemas +- Never mention "ACP" in user-facing docs (`docs/**/*.mdx`) except in docs that are specifically about ACP itself (e.g. `docs/acp-http-client.mdx`). +- Never expose underlying protocol method names (e.g. `session/request_permission`, `session/create`, `_sandboxagent/session/detach`) in non-ACP docs. Describe the behavior in user-facing terms instead. +- Do not describe the underlying protocol implementation in docs. Only document the SDK surface (methods, types, options). ACP protocol details belong exclusively in ACP-specific pages. +- Do not use em dashes (`—`) in docs. Use commas, periods, or parentheses instead. -Agent schemas (Claude Code, Codex, OpenCode, Amp) are available for reference in `resources/agent-schemas/artifacts/json-schema/`. 
+### Docs Source Of Truth (HTTP/CLI) -Extraction methods: -- **Claude**: Uses `claude --output-format json --json-schema` CLI command -- **Codex**: Uses `codex app-server generate-json-schema` CLI command -- **OpenCode**: Fetches from GitHub OpenAPI spec -- **Amp**: Scrapes from `https://ampcode.com/manual/appendix?preview#message-schema` +- For HTTP/CLI docs/examples, source of truth is: + - `server/packages/sandbox-agent/src/router.rs` + - `server/packages/sandbox-agent/src/cli.rs` +- Keep docs aligned to implemented endpoints/commands only (for example ACP under `/v1/acp`, not legacy session REST APIs). -All extractors have fallback schemas for when CLI/URL is unavailable. +## Change Tracking -Research on how different agents operate (CLI flags, streaming formats, HITL patterns, etc.) is in `research/agents/`. When adding or making changes to agent docs, follow the same structure as existing files. +- If the user asks to "push" changes, treat that as permission to commit and push all current workspace changes, not a hand-picked subset, unless the user explicitly scopes the push. +- Keep CLI subcommands and HTTP endpoints in sync. +- Update `docs/cli.mdx` when CLI behavior changes. +- Regenerate `docs/openapi.json` when HTTP contracts change. +- Keep `docs/inspector.mdx` and `docs/sdks/typescript.mdx` aligned with implementation. +- Append blockers/decisions to `research/acp/friction.md` during ACP work. +- `docs/agent-capabilities.mdx` lists models/modes/thought levels per agent. Update it when adding a new agent or changing `fallback_config_options`. If its "Last updated" date is >2 weeks old, re-run `cd scripts/agent-configs && npx tsx dump.ts` and update the doc to match. Source data: `scripts/agent-configs/resources/*.json` and hardcoded entries in `server/packages/sandbox-agent/src/router/support.rs` (`fallback_config_options`). +- Some agent models are gated by subscription (e.g. Claude `opus`). 
The live report only shows models available to the current credentials. The static doc and JSON resource files should list all known models regardless of subscription tier. -Universal schema guidance: -- The universal schema should cover the full feature set of all agents. -- Conversions must be best-effort overlap without being lossy; preserve raw payloads when needed. -- **The mock agent acts as the reference implementation** for correct event behavior. Real agents should use synthetic events to match the mock agent's event patterns (e.g., emitting both daemon synthetic and agent native `session.started` events, proper `item.started` → `item.delta` → `item.completed` sequences). +## Docker Test Image -## Spec Tracking +- Docker-backed Rust and TypeScript tests build `docker/test-agent/Dockerfile` directly in-process and cache the image tag only in memory (`OnceLock` in Rust, module-level variable in TypeScript). +- Do not add cross-process image-build scripts unless there is a concrete need for them. -- Keep CLI subcommands in sync with every HTTP endpoint. -- Update `CLAUDE.md` to keep CLI endpoints in sync with HTTP API changes. -- When adding or modifying CLI commands, update `docs/cli.mdx` to reflect the changes. -- When changing the HTTP API, update the TypeScript SDK and CLI together. -- Do not make breaking changes to API endpoints. -- When changing API routes, ensure the HTTP/SSE test suite has full coverage of every route. -- When agent schema changes, ensure API tests cover the new schema and event shapes end-to-end. -- When the universal schema changes, update mock-agent events to cover the new fields or event types. -- Update `docs/conversion.md` whenever agent-native schema terms, synthetic events, identifier mappings, or conversion logic change. -- Never use synthetic data or mocked responses in tests. -- Never manually write agent types; always use generated types in `resources/agent-schemas/`. If types are broken, fix the generated types. 
-- The universal schema must provide consistent behavior across providers; avoid requiring frontend/client logic to special-case agents. -- The UI must reflect every field in AgentCapabilities; keep it in sync with `docs/session-transcript-schema.mdx` and `agent_capabilities_for`. -- When parsing agent data, if something is unexpected or does not match the schema, bail out and surface the error rather than trying to continue with partial parsing. -- When defining the universal schema, choose the option most compatible with native agent APIs, and add synthetics to fill gaps for other agents. -- Use `docs/session-transcript-schema.mdx` as the source of truth for schema terminology and keep it updated alongside schema changes. -- On parse failures, emit an `agent.unparsed` event (source=daemon, synthetic=true) and treat it as a test failure. Preserve raw payloads when `include_raw=true`. -- Track subagent support in `docs/conversion.md`. For now, normalize subagent activity into normal message/tool flow, but revisit explicit subagent modeling later. -- Keep the FAQ in `README.md` and `frontend/packages/website/src/components/FAQ.tsx` in sync. When adding or modifying FAQ entries, update both files. +## Common Software Sync -### CLI ⇄ HTTP endpoint map (keep in sync) +- These three files must stay in sync: + - `docs/common-software.mdx` (user-facing documentation) + - `docker/test-common-software/Dockerfile` (packages installed in the test image) + - `server/packages/sandbox-agent/tests/common_software.rs` (test assertions) +- When adding or removing software from `docs/common-software.mdx`, also add/remove the corresponding `apt-get install` line in the Dockerfile and add/remove the test in `common_software.rs`. +- Run `cargo test -p sandbox-agent --test common_software` to verify. 
-- `sandbox-agent api agents list` ↔ `GET /v1/agents` -- `sandbox-agent api agents install` ↔ `POST /v1/agents/{agent}/install` -- `sandbox-agent api agents modes` ↔ `GET /v1/agents/{agent}/modes` -- `sandbox-agent api sessions list` ↔ `GET /v1/sessions` -- `sandbox-agent api sessions create` ↔ `POST /v1/sessions/{sessionId}` -- `sandbox-agent api sessions send-message` ↔ `POST /v1/sessions/{sessionId}/messages` -- `sandbox-agent api sessions send-message-stream` ↔ `POST /v1/sessions/{sessionId}/messages/stream` -- `sandbox-agent api sessions terminate` ↔ `POST /v1/sessions/{sessionId}/terminate` -- `sandbox-agent api sessions events` / `get-messages` ↔ `GET /v1/sessions/{sessionId}/events` -- `sandbox-agent api sessions events-sse` ↔ `GET /v1/sessions/{sessionId}/events/sse` -- `sandbox-agent api sessions reply-question` ↔ `POST /v1/sessions/{sessionId}/questions/{questionId}/reply` -- `sandbox-agent api sessions reject-question` ↔ `POST /v1/sessions/{sessionId}/questions/{questionId}/reject` -- `sandbox-agent api sessions reply-permission` ↔ `POST /v1/sessions/{sessionId}/permissions/{permissionId}/reply` +## Install Version References -## Post-Release Testing - -After cutting a release, verify the release works correctly. Run `/project:post-release-testing` to execute the testing agent. - -## Git Commits - -- Do not include any co-authors in commit messages (no `Co-Authored-By` lines) -- Use conventional commits style (e.g., `feat:`, `fix:`, `docs:`, `chore:`, `refactor:`) -- Keep commit messages to a single line +- Channel policy: + - Sandbox Agent install/version references use a pinned minor channel `0.N.x` (for curl URLs and `sandbox-agent` / `@sandbox-agent/cli` npm/bun installs). + - Gigacode install/version references use `latest` (for `@sandbox-agent/gigacode` install/run commands and `gigacode-install.*` release promotion). 
+  - Release promotion policy: a release marked `latest` must still update the `latest` channel; when a release is `latest`, Sandbox Agent must also be promoted to the matching minor channel `0.N.x`.
+- Keep every install-version reference below in sync whenever versions/channels change:
+  - `README.md`
+  - `docs/acp-http-client.mdx`
+  - `docs/cli.mdx`
+  - `docs/quickstart.mdx`
+  - `docs/sdk-overview.mdx`
+  - `docs/react-components.mdx`
+  - `docs/session-persistence.mdx`
+  - `docs/deploy/local.mdx`
+  - `docs/deploy/cloudflare.mdx`
+  - `docs/deploy/vercel.mdx`
+  - `docs/deploy/daytona.mdx`
+  - `docs/deploy/e2b.mdx`
+  - `docs/deploy/docker.mdx`
+  - `frontend/packages/website/src/components/GetStarted.tsx`
+  - `.claude/commands/post-release-testing.md`
+  - `examples/cloudflare/Dockerfile`
+  - `examples/daytona/src/index.ts`
+  - `examples/shared/src/docker.ts`
+  - `examples/docker/src/index.ts`
+  - `examples/e2b/src/index.ts`
+  - `examples/vercel/src/index.ts`
+  - `scripts/release/main.ts`
+  - `scripts/release/promote-artifacts.ts`
+  - `scripts/release/sdk.ts`
diff --git a/Cargo.toml b/Cargo.toml
index a55bdf4..0fc4dc8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,23 +1,25 @@
 [workspace]
 resolver = "2"
-members = ["server/packages/*"]
+members = ["server/packages/*", "gigacode"]
+exclude = ["factory/packages/desktop/src-tauri", "foundry/packages/desktop/src-tauri"]
 [workspace.package]
-version = "0.1.4-rc.7"
+version = "0.4.2"
 edition = "2021"
 authors = [ "Rivet Gaming, LLC " ]
 license = "Apache-2.0"
 repository = "https://github.com/rivet-dev/sandbox-agent"
-description = "Universal API for automatic coding agents in sandboxes. Supprots Claude Code, Codex, OpenCode, and Amp."
+description = "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp."
[workspace.dependencies] # Internal crates -sandbox-agent = { version = "0.1.4-rc.7", path = "server/packages/sandbox-agent" } -sandbox-agent-error = { version = "0.1.4-rc.7", path = "server/packages/error" } -sandbox-agent-agent-management = { version = "0.1.4-rc.7", path = "server/packages/agent-management" } -sandbox-agent-agent-credentials = { version = "0.1.4-rc.7", path = "server/packages/agent-credentials" } -sandbox-agent-universal-agent-schema = { version = "0.1.4-rc.7", path = "server/packages/universal-agent-schema" } -sandbox-agent-extracted-agent-schemas = { version = "0.1.4-rc.7", path = "server/packages/extracted-agent-schemas" } +sandbox-agent = { version = "0.4.2", path = "server/packages/sandbox-agent" } +sandbox-agent-error = { version = "0.4.2", path = "server/packages/error" } +sandbox-agent-agent-management = { version = "0.4.2", path = "server/packages/agent-management" } +sandbox-agent-agent-credentials = { version = "0.4.2", path = "server/packages/agent-credentials" } +sandbox-agent-opencode-adapter = { version = "0.4.2", path = "server/packages/opencode-adapter" } +sandbox-agent-opencode-server-manager = { version = "0.4.2", path = "server/packages/opencode-server-manager" } +acp-http-adapter = { version = "0.4.2", path = "server/packages/acp-http-adapter" } # Serialization serde = { version = "1.0", features = ["derive"] } @@ -31,7 +33,7 @@ schemars = "0.8" utoipa = { version = "4.2", features = ["axum_extras"] } # Web framework -axum = "0.7" +axum = { version = "0.7", features = ["ws"] } tower = { version = "0.5", features = ["util"] } tower-http = { version = "0.5", features = ["cors", "trace"] } @@ -68,6 +70,8 @@ zip = { version = "0.6", default-features = false, features = ["deflate"] } url = "2.5" regress = "0.10" include_dir = "0.7" +base64 = "0.22" +toml_edit = "0.22" # Code generation (build deps) typify = "0.4" diff --git a/README.md b/README.md index 1f90841..cf9b933 100644 --- a/README.md +++ b/README.md @@ -5,13 +5,17 @@

Run Coding Agents in Sandboxes. Control Them Over HTTP.

- A server that runs inside your sandbox. Your app connects remotely to control Claude Code, Codex, OpenCode, or Amp — streaming events, handling permissions, managing sessions. + A server that runs inside your sandbox. Your app connects remotely to control Claude Code, Codex, OpenCode, Cursor, Amp, or Pi — streaming events, handling permissions, managing sessions.

DocumentationAPI ReferenceDiscord

+

+ Experimental: Gigacode — use OpenCode's TUI with any coding agent. +

+ ## Why Sandbox Agent? Running coding agents remotely is hard. Existing SDKs assume local execution, SSH breaks TTY handling and streaming, and every agent has a different API. Building from scratch means reimplementing everything for each coding agent. @@ -20,20 +24,18 @@ Sandbox Agent solves three problems: 1. **Coding agents need sandboxes** — You can't let AI execute arbitrary code on your production servers. Coding agents need isolated environments, but existing SDKs assume local execution. Sandbox Agent is a server that runs inside the sandbox and exposes HTTP/SSE. -2. **Every coding agent is different** — Claude Code, Codex, OpenCode, and Amp each have proprietary APIs, event formats, and behaviors. Swapping agents means rewriting your integration. Sandbox Agent provides one HTTP API — write your code once, swap agents with a config change. +2. **Every coding agent is different** — Claude Code, Codex, OpenCode, Cursor, Amp, and Pi each have proprietary APIs, event formats, and behaviors. Swapping agents means rewriting your integration. Sandbox Agent provides one HTTP API — write your code once, swap agents with a config change. 3. **Sessions are ephemeral** — Agent transcripts live in the sandbox. When the process ends, you lose everything. Sandbox Agent streams events in a universal schema to your storage. Persist to Postgres, ClickHouse, or [Rivet](https://rivet.dev). Replay later, audit everything. 
## Features -- **Universal Agent API**: Single interface to control Claude Code, Codex, OpenCode, and Amp with full feature coverage -- **Streaming Events**: Real-time SSE stream of everything the agent does — tool calls, permission requests, file edits, and more -- **Universal Session Schema**: [Standardized schema](https://sandboxagent.dev/docs/session-transcript-schema) that normalizes all agent event formats for storage and replay -- **Human-in-the-Loop**: Approve or deny tool executions and answer agent questions remotely over HTTP -- **Automatic Agent Installation**: Agents are installed on-demand when first used — no setup required +- **Universal Agent API**: Single interface to control Claude Code, Codex, OpenCode, Cursor, Amp, and Pi with full feature coverage +- **Universal Session Schema**: Standardized schema that normalizes all agent event formats for storage and replay - **Runs Inside Any Sandbox**: Lightweight static Rust binary. One curl command to install inside E2B, Daytona, Vercel Sandboxes, or Docker - **Server or SDK Mode**: Run as an HTTP server or embed with the TypeScript SDK - **OpenAPI Spec**: [Well documented](https://sandboxagent.dev/docs/api-reference) and easy to integrate from any language +- **OpenCode SDK & UI Support** *(Experimental)*: [Connect OpenCode CLI, SDK, or web UI](https://sandboxagent.dev/docs/opencode-compatibility) to control agents through familiar OpenCode tooling ## Architecture @@ -63,10 +65,14 @@ Choose the installation method that works best for your use case. Install skill with: -``` +```bash npx skills add rivet-dev/skills -s sandbox-agent ``` +```bash +bunx skills add rivet-dev/skills -s sandbox-agent +``` + ### TypeScript SDK Import the SDK directly into your Node or browser application. Full type safety and streaming support. @@ -74,7 +80,13 @@ Import the SDK directly into your Node or browser application. 
Full type safety **Install** ```bash -npm install sandbox-agent +npm install sandbox-agent@0.4.x +``` + +```bash +bun add sandbox-agent@0.4.x +# Optional: allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). +bun pm trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 ``` **Setup** @@ -106,7 +118,6 @@ const agents = await client.listAgents(); await client.createSession("demo", { agent: "codex", agentMode: "default", - permissionMode: "plan", }); await client.postMessage("demo", { message: "Hello from the SDK." }); @@ -116,7 +127,7 @@ for await (const event of client.streamEvents("demo", { offset: 0 })) { } ``` -[SDK documentation](https://sandboxagent.dev/docs/sdks/typescript) — [Building a Chat UI](https://sandboxagent.dev/docs/building-chat-ui) — [Managing Sessions](https://sandboxagent.dev/docs/manage-sessions) +[SDK documentation](https://sandboxagent.dev/docs/sdks/typescript) — [Managing Sessions](https://sandboxagent.dev/docs/manage-sessions) ### HTTP Server @@ -124,7 +135,7 @@ Run as an HTTP server and connect from any language. 
Deploy to E2B, Daytona, Ver ```bash # Install it -curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh +curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh # Run it sandbox-agent server --token "$SANDBOX_TOKEN" --host 127.0.0.1 --port 2468 ``` @@ -132,10 +143,7 @@ sandbox-agent server --token "$SANDBOX_TOKEN" --host 127.0.0.1 --port 2468 Optional: preinstall agent binaries (no server required; they will be installed lazily on first use if you skip this): ```bash -sandbox-agent install-agent claude -sandbox-agent install-agent codex -sandbox-agent install-agent opencode -sandbox-agent install-agent amp +sandbox-agent install-agent --all ``` To disable auth locally: @@ -151,7 +159,13 @@ sandbox-agent server --no-token --host 127.0.0.1 --port 2468 Install the CLI wrapper (optional but convenient): ```bash -npm install -g @sandbox-agent/cli +npm install -g @sandbox-agent/cli@0.4.x +``` + +```bash +# Allow Bun to run postinstall scripts for native binaries. 
+bun add -g @sandbox-agent/cli@0.4.x +bun pm -g trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 ``` Create a session and send a message: @@ -165,7 +179,11 @@ sandbox-agent api sessions send-message-stream my-session --message "Hello" --en You can also use npx like: ```bash -npx sandbox-agent --help +npx @sandbox-agent/cli@0.4.x --help +``` + +```bash +bunx @sandbox-agent/cli@0.4.x --help ``` [CLI documentation](https://sandboxagent.dev/docs/cli) @@ -182,10 +200,6 @@ Debug sessions and events with the built-in Inspector UI (e.g., `http://localhos [Explore API](https://sandboxagent.dev/docs/api-reference) — [View Specification](https://github.com/rivet-dev/sandbox-agent/blob/main/docs/openapi.json) -### Session Transcript Schema - -All events follow a [session transcript schema](https://sandboxagent.dev/docs/session-transcript-schema) that normalizes differences between agents. - ### Tip: Extract credentials Often you need to use your personal API tokens to test agents on sandboxes: @@ -196,18 +210,6 @@ sandbox-agent credentials extract-env --export This prints environment variables for your OpenAI/Anthropic/etc API keys to test with Sandbox Agent SDK. -## Integrations - -Works with your stack: - -| Sandbox Providers | AI Platforms | Infrastructure | Storage | -|---|---|---|---| -| [Daytona](https://sandboxagent.dev/docs/deploy/daytona) | Anthropic | Docker | Postgres | -| [E2B](https://sandboxagent.dev/docs/deploy/e2b) | OpenAI | Fly.io | ClickHouse | -| [Vercel Sandboxes](https://sandboxagent.dev/docs/deploy) | [AI SDK](https://ai-sdk.dev) | AWS Nitro | [Rivet](https://rivet.dev) | - -Want support for another agent or sandbox provider? [Open an issue](https://github.com/rivet-dev/sandbox-agent/issues/new) to request it. - ## FAQ
@@ -219,7 +221,7 @@ No, they're complementary. AI SDK is for building chat interfaces and calling LL
Which coding agents are supported? -Claude Code, Codex, OpenCode, and Amp. The SDK normalizes their APIs so you can swap between them without changing your code. +Claude Code, Codex, OpenCode, Cursor, Amp, and Pi. The SDK normalizes their APIs so you can swap between them without changing your code.
@@ -243,7 +245,7 @@ The server is a single Rust binary that runs anywhere with a curl install. If yo
Can I use this with my personal API keys? -Yes. Use `sandbox-agent credentials extract-env` to extract API keys from your local agent configs (Claude Code, Codex, OpenCode, Amp) and pass them to the sandbox environment. +Yes. Use `sandbox-agent credentials extract-env` to extract API keys from your local agent configs (Claude Code, Codex, OpenCode, Amp, Pi) and pass them to the sandbox environment.
@@ -275,7 +277,7 @@ Coding agents expect interactive terminals with proper TTY handling. SSH with pi - **Storage of sessions on disk**: Sessions are already stored by the respective coding agents on disk. It's assumed that the consumer is streaming data from this machine to an external storage, such as Postgres, ClickHouse, or Rivet. - **Direct LLM wrappers**: Use the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction) if you want to implement your own agent from scratch. - **Git Repo Management**: Just use git commands or the features provided by your sandbox provider of choice. -- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this project with sandbox providers. +- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this repository with sandbox providers. ## Roadmap diff --git a/biome.json b/biome.json new file mode 100644 index 0000000..4a8bd54 --- /dev/null +++ b/biome.json @@ -0,0 +1,7 @@ +{ + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", + "formatter": { + "indentStyle": "space", + "lineWidth": 160 + } +} diff --git a/docker/inspector-dev/Dockerfile b/docker/inspector-dev/Dockerfile new file mode 100644 index 0000000..b55923f --- /dev/null +++ b/docker/inspector-dev/Dockerfile @@ -0,0 +1,7 @@ +FROM node:22-bookworm-slim + +RUN npm install -g pnpm@10.28.2 + +WORKDIR /app + +CMD ["bash", "-lc", "pnpm install --filter @sandbox-agent/inspector... 
&& cd frontend/packages/inspector && exec pnpm vite --host 0.0.0.0 --port 5173"] diff --git a/docker/release/build.sh b/docker/release/build.sh index a9e42e4..2f5204e 100755 --- a/docker/release/build.sh +++ b/docker/release/build.sh @@ -17,24 +17,35 @@ case $TARGET in DOCKERFILE="linux-x86_64.Dockerfile" TARGET_STAGE="x86_64-builder" BINARY="sandbox-agent-$TARGET" + GIGACODE="gigacode-$TARGET" + ;; + aarch64-unknown-linux-musl) + echo "Building for Linux aarch64 musl" + DOCKERFILE="linux-aarch64.Dockerfile" + TARGET_STAGE="aarch64-builder" + BINARY="sandbox-agent-$TARGET" + GIGACODE="gigacode-$TARGET" ;; x86_64-pc-windows-gnu) echo "Building for Windows x86_64" DOCKERFILE="windows.Dockerfile" TARGET_STAGE="" BINARY="sandbox-agent-$TARGET.exe" + GIGACODE="gigacode-$TARGET.exe" ;; x86_64-apple-darwin) echo "Building for macOS x86_64" DOCKERFILE="macos-x86_64.Dockerfile" TARGET_STAGE="x86_64-builder" BINARY="sandbox-agent-$TARGET" + GIGACODE="gigacode-$TARGET" ;; aarch64-apple-darwin) echo "Building for macOS aarch64" DOCKERFILE="macos-aarch64.Dockerfile" TARGET_STAGE="aarch64-builder" BINARY="sandbox-agent-$TARGET" + GIGACODE="gigacode-$TARGET" ;; *) echo "Unsupported target: $TARGET" @@ -53,10 +64,13 @@ CONTAINER_ID=$(docker create "sandbox-agent-builder-$TARGET") mkdir -p dist docker cp "$CONTAINER_ID:/artifacts/$BINARY" "dist/" +docker cp "$CONTAINER_ID:/artifacts/$GIGACODE" "dist/" docker rm "$CONTAINER_ID" if [[ "$BINARY" != *.exe ]]; then chmod +x "dist/$BINARY" + chmod +x "dist/$GIGACODE" fi echo "Binary saved to: dist/$BINARY" +echo "Binary saved to: dist/$GIGACODE" diff --git a/docker/release/linux-aarch64.Dockerfile b/docker/release/linux-aarch64.Dockerfile new file mode 100644 index 0000000..d5ff208 --- /dev/null +++ b/docker/release/linux-aarch64.Dockerfile @@ -0,0 +1,81 @@ +# syntax=docker/dockerfile:1.10.0 + +# Build inspector frontend +FROM node:22-alpine AS inspector-build +WORKDIR /app +RUN npm install -g pnpm + +# Copy package files for workspaces 
+COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ +COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ +COPY sdks/typescript/package.json ./sdks/typescript/ + +# Install dependencies +RUN pnpm install --filter @sandbox-agent/inspector... + +# Copy SDK source (with pre-generated types from docs/openapi.json) +COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react +COPY sdks/typescript ./sdks/typescript + +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup +RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup + +# Copy inspector source and build +COPY frontend/packages/inspector ./frontend/packages/inspector +RUN cd frontend/packages/inspector && pnpm exec vite build + +# Use Alpine with native musl for ARM64 builds (runs natively on ARM64 runner) +FROM rust:1.88-alpine AS aarch64-builder + +# Accept version as build arg +ARG SANDBOX_AGENT_VERSION +ENV SANDBOX_AGENT_VERSION=${SANDBOX_AGENT_VERSION} + +# Install dependencies +RUN apk add --no-cache \ + musl-dev \ + clang \ + llvm-dev \ + openssl-dev \ + openssl-libs-static \ + pkgconfig \ + git \ + curl \ + build-base + +# Add musl target +RUN rustup target add aarch64-unknown-linux-musl + +# Set environment variables for native musl build +ENV CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true \ + RUSTFLAGS="-C target-feature=+crt-static" + +WORKDIR /build + +# Copy the source code +COPY . . 
+ +# Copy pre-built inspector frontend +COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist + +# Build for Linux with musl (static binary) - aarch64 +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-agent -p gigacode --release --target aarch64-unknown-linux-musl && \ + mkdir -p /artifacts && \ + cp target/aarch64-unknown-linux-musl/release/sandbox-agent /artifacts/sandbox-agent-aarch64-unknown-linux-musl && \ + cp target/aarch64-unknown-linux-musl/release/gigacode /artifacts/gigacode-aarch64-unknown-linux-musl + +# Default command to show help +CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/linux-x86_64.Dockerfile b/docker/release/linux-x86_64.Dockerfile index d707d20..1c41711 100644 --- a/docker/release/linux-x86_64.Dockerfile +++ b/docker/release/linux-x86_64.Dockerfile @@ -8,6 +8,9 @@ RUN npm install -g pnpm # Copy package files for workspaces COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ # Install dependencies @@ -15,10 +18,16 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
# Copy SDK source (with pre-generated types from docs/openapi.json) COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build SDK (just tsup, skip generate since types are pre-generated) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build COPY frontend/packages/inspector ./frontend/packages/inspector @@ -97,9 +106,10 @@ COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/pac RUN --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ --mount=type=cache,target=/build/target \ - cargo build -p sandbox-agent --release --target x86_64-unknown-linux-musl && \ + cargo build -p sandbox-agent -p gigacode --release --target x86_64-unknown-linux-musl && \ mkdir -p /artifacts && \ - cp target/x86_64-unknown-linux-musl/release/sandbox-agent /artifacts/sandbox-agent-x86_64-unknown-linux-musl + cp target/x86_64-unknown-linux-musl/release/sandbox-agent /artifacts/sandbox-agent-x86_64-unknown-linux-musl && \ + cp target/x86_64-unknown-linux-musl/release/gigacode /artifacts/gigacode-x86_64-unknown-linux-musl # Default command to show help CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/macos-aarch64.Dockerfile b/docker/release/macos-aarch64.Dockerfile index dcc9466..5d918b2 100644 --- a/docker/release/macos-aarch64.Dockerfile +++ b/docker/release/macos-aarch64.Dockerfile @@ -8,6 +8,9 @@ RUN npm install -g pnpm # Copy package files for workspaces COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json 
./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ # Install dependencies @@ -15,10 +18,16 @@ RUN pnpm install --filter @sandbox-agent/inspector... # Copy SDK source (with pre-generated types from docs/openapi.json) COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build SDK (just tsup, skip generate since types are pre-generated) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build COPY frontend/packages/inspector ./frontend/packages/inspector @@ -95,9 +104,10 @@ COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/pac RUN --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ --mount=type=cache,target=/build/target \ - cargo build -p sandbox-agent --release --target aarch64-apple-darwin && \ + cargo build -p sandbox-agent -p gigacode --release --target aarch64-apple-darwin && \ mkdir -p /artifacts && \ - cp target/aarch64-apple-darwin/release/sandbox-agent /artifacts/sandbox-agent-aarch64-apple-darwin + cp target/aarch64-apple-darwin/release/sandbox-agent /artifacts/sandbox-agent-aarch64-apple-darwin && \ + cp target/aarch64-apple-darwin/release/gigacode /artifacts/gigacode-aarch64-apple-darwin # Default command to show help CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/macos-x86_64.Dockerfile b/docker/release/macos-x86_64.Dockerfile index 62d7c90..9b52aa6 100644 --- a/docker/release/macos-x86_64.Dockerfile +++ b/docker/release/macos-x86_64.Dockerfile @@ -8,6 +8,9 @@ RUN npm 
install -g pnpm # Copy package files for workspaces COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ # Install dependencies @@ -15,10 +18,16 @@ RUN pnpm install --filter @sandbox-agent/inspector... # Copy SDK source (with pre-generated types from docs/openapi.json) COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build SDK (just tsup, skip generate since types are pre-generated) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build COPY frontend/packages/inspector ./frontend/packages/inspector @@ -95,9 +104,10 @@ COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/pac RUN --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ --mount=type=cache,target=/build/target \ - cargo build -p sandbox-agent --release --target x86_64-apple-darwin && \ + cargo build -p sandbox-agent -p gigacode --release --target x86_64-apple-darwin && \ mkdir -p /artifacts && \ - cp target/x86_64-apple-darwin/release/sandbox-agent /artifacts/sandbox-agent-x86_64-apple-darwin + cp target/x86_64-apple-darwin/release/sandbox-agent /artifacts/sandbox-agent-x86_64-apple-darwin && \ + cp target/x86_64-apple-darwin/release/gigacode /artifacts/gigacode-x86_64-apple-darwin # Default command to show help CMD ["ls", "-la", "/artifacts"] diff --git 
a/docker/release/windows.Dockerfile b/docker/release/windows.Dockerfile index 38fb3c5..92067db 100644 --- a/docker/release/windows.Dockerfile +++ b/docker/release/windows.Dockerfile @@ -8,6 +8,9 @@ RUN npm install -g pnpm # Copy package files for workspaces COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ # Install dependencies @@ -15,10 +18,16 @@ RUN pnpm install --filter @sandbox-agent/inspector... # Copy SDK source (with pre-generated types from docs/openapi.json) COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build SDK (just tsup, skip generate since types are pre-generated) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build COPY frontend/packages/inspector ./frontend/packages/inspector @@ -81,9 +90,10 @@ COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/pac RUN --mount=type=cache,target=/usr/local/cargo/registry \ --mount=type=cache,target=/usr/local/cargo/git \ --mount=type=cache,target=/build/target \ - cargo build -p sandbox-agent --release --target x86_64-pc-windows-gnu && \ + cargo build -p sandbox-agent -p gigacode --release --target x86_64-pc-windows-gnu && \ mkdir -p /artifacts && \ - cp target/x86_64-pc-windows-gnu/release/sandbox-agent.exe /artifacts/sandbox-agent-x86_64-pc-windows-gnu.exe + cp target/x86_64-pc-windows-gnu/release/sandbox-agent.exe 
/artifacts/sandbox-agent-x86_64-pc-windows-gnu.exe && \ + cp target/x86_64-pc-windows-gnu/release/gigacode.exe /artifacts/gigacode-x86_64-pc-windows-gnu.exe # Default command to show help CMD ["ls", "-la", "/artifacts"] diff --git a/docker/runtime/Dockerfile b/docker/runtime/Dockerfile index 520aa79..85473be 100644 --- a/docker/runtime/Dockerfile +++ b/docker/runtime/Dockerfile @@ -10,6 +10,9 @@ RUN npm install -g pnpm # Copy package files for workspaces COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ # Install dependencies @@ -17,10 +20,16 @@ RUN pnpm install --filter @sandbox-agent/inspector... # Copy SDK source (with pre-generated types from docs/openapi.json) COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build SDK (just tsup, skip generate since types are pre-generated) +# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build COPY frontend/packages/inspector ./frontend/packages/inspector @@ -140,7 +149,8 @@ FROM debian:bookworm-slim RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ - git && \ + git \ + ffmpeg && \ rm -rf /var/lib/apt/lists/* # Copy the binary from builder @@ -155,4 +165,4 @@ WORKDIR /home/sandbox EXPOSE 2468 ENTRYPOINT ["sandbox-agent"] -CMD ["--host", "0.0.0.0", "--port", "2468"] +CMD ["server", "--host", "0.0.0.0", "--port", "2468"] 
diff --git a/docker/runtime/Dockerfile.full b/docker/runtime/Dockerfile.full new file mode 100644 index 0000000..9ab4c0d --- /dev/null +++ b/docker/runtime/Dockerfile.full @@ -0,0 +1,159 @@ +# syntax=docker/dockerfile:1.10.0 + +# ============================================================================ +# Build inspector frontend +# ============================================================================ +FROM node:22-alpine AS inspector-build +WORKDIR /app +RUN npm install -g pnpm + +COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ +COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ +COPY sdks/typescript/package.json ./sdks/typescript/ + +RUN pnpm install --filter @sandbox-agent/inspector... + +COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react +COPY sdks/typescript ./sdks/typescript + +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup +RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup + +COPY frontend/packages/inspector ./frontend/packages/inspector +RUN cd frontend/packages/inspector && pnpm exec vite build + +# ============================================================================ +# AMD64 Builder - Uses cross-tools musl toolchain +# ============================================================================ +FROM --platform=linux/amd64 rust:1.88.0 AS builder-amd64 + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && apt-get install -y \ + musl-tools \ + musl-dev \ + llvm-14-dev \ + libclang-14-dev \ + clang-14 \ + libssl-dev \ + pkg-config \ + ca-certificates \ + g++ \ + g++-multilib \ + git \ + curl \ + wget && \ + rm -rf /var/lib/apt/lists/* + +RUN wget -q 
https://github.com/cross-tools/musl-cross/releases/latest/download/x86_64-unknown-linux-musl.tar.xz && \ + tar -xf x86_64-unknown-linux-musl.tar.xz -C /opt/ && \ + rm x86_64-unknown-linux-musl.tar.xz && \ + rustup target add x86_64-unknown-linux-musl + +ENV PATH="/opt/x86_64-unknown-linux-musl/bin:$PATH" \ + LIBCLANG_PATH=/usr/lib/llvm-14/lib \ + CLANG_PATH=/usr/bin/clang-14 \ + CC_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-gcc \ + CXX_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-g++ \ + AR_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-ar \ + CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=x86_64-unknown-linux-musl-gcc \ + CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true + +ENV SSL_VER=1.1.1w +RUN wget https://www.openssl.org/source/openssl-$SSL_VER.tar.gz && \ + tar -xzf openssl-$SSL_VER.tar.gz && \ + cd openssl-$SSL_VER && \ + ./Configure no-shared no-async --prefix=/musl --openssldir=/musl/ssl linux-x86_64 && \ + make -j$(nproc) && \ + make install_sw && \ + cd .. && \ + rm -rf openssl-$SSL_VER* + +ENV OPENSSL_DIR=/musl \ + OPENSSL_INCLUDE_DIR=/musl/include \ + OPENSSL_LIB_DIR=/musl/lib \ + PKG_CONFIG_ALLOW_CROSS=1 \ + RUSTFLAGS="-C target-feature=+crt-static -C link-arg=-static-libgcc" + +WORKDIR /build +COPY . . 
+ +COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-agent --release --target x86_64-unknown-linux-musl && \ + cp target/x86_64-unknown-linux-musl/release/sandbox-agent /sandbox-agent + +# ============================================================================ +# ARM64 Builder - Uses Alpine with native musl +# ============================================================================ +FROM --platform=linux/arm64 rust:1.88-alpine AS builder-arm64 + +RUN apk add --no-cache \ + musl-dev \ + clang \ + llvm-dev \ + openssl-dev \ + openssl-libs-static \ + pkgconfig \ + git \ + curl \ + build-base + +RUN rustup target add aarch64-unknown-linux-musl + +ENV CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true \ + RUSTFLAGS="-C target-feature=+crt-static" + +WORKDIR /build +COPY . . 
+ +COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-agent --release --target aarch64-unknown-linux-musl && \ + cp target/aarch64-unknown-linux-musl/release/sandbox-agent /sandbox-agent + +# ============================================================================ +# Select the appropriate builder based on target architecture +# ============================================================================ +ARG TARGETARCH +FROM builder-${TARGETARCH} AS builder + +# Runtime stage - full image with all supported agents preinstalled +FROM node:22-bookworm-slim + +RUN apt-get update && apt-get install -y \ + bash \ + ca-certificates \ + curl \ + git && \ + rm -rf /var/lib/apt/lists/* + +COPY --from=builder /sandbox-agent /usr/local/bin/sandbox-agent +RUN chmod +x /usr/local/bin/sandbox-agent + +RUN useradd -m -s /bin/bash sandbox +USER sandbox +WORKDIR /home/sandbox + +RUN sandbox-agent install-agent --all + +EXPOSE 2468 + +ENTRYPOINT ["sandbox-agent"] +CMD ["server", "--host", "0.0.0.0", "--port", "2468"] diff --git a/docker/test-agent/Dockerfile b/docker/test-agent/Dockerfile new file mode 100644 index 0000000..67888b3 --- /dev/null +++ b/docker/test-agent/Dockerfile @@ -0,0 +1,61 @@ +FROM rust:1.88.0-bookworm AS builder +WORKDIR /build + +COPY Cargo.toml Cargo.lock ./ +COPY server/ ./server/ +COPY gigacode/ ./gigacode/ +COPY resources/agent-schemas/artifacts/ ./resources/agent-schemas/artifacts/ +COPY scripts/agent-configs/ ./scripts/agent-configs/ +COPY scripts/audit-acp-deps/ ./scripts/audit-acp-deps/ + +ENV SANDBOX_AGENT_SKIP_INSPECTOR=1 + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-agent --release && 
\ + cp target/release/sandbox-agent /sandbox-agent + +# Extract neko binary from the official image for WebRTC desktop streaming. +# Using neko v3 base image from GHCR which provides multi-arch support (amd64, arm64). +# Pinned by digest to prevent breaking changes from upstream. +# Reference client: https://github.com/demodesk/neko-client/blob/37f93eae6bd55b333c94bd009d7f2b079075a026/src/component/internal/webrtc.ts +FROM ghcr.io/m1k1o/neko/base@sha256:0c384afa56268aaa2d5570211d284763d0840dcdd1a7d9a24be3081d94d3dfce AS neko-base + +FROM node:22-bookworm-slim +RUN apt-get update -qq && \ + apt-get install -y -qq --no-install-recommends \ + ca-certificates \ + bash \ + libstdc++6 \ + xvfb \ + openbox \ + xdotool \ + imagemagick \ + ffmpeg \ + gstreamer1.0-tools \ + gstreamer1.0-plugins-base \ + gstreamer1.0-plugins-good \ + gstreamer1.0-plugins-bad \ + gstreamer1.0-plugins-ugly \ + gstreamer1.0-nice \ + gstreamer1.0-x \ + gstreamer1.0-pulseaudio \ + libxcvt0 \ + x11-xserver-utils \ + dbus-x11 \ + xauth \ + fonts-dejavu-core \ + xterm \ + > /dev/null 2>&1 && \ + rm -rf /var/lib/apt/lists/* + +COPY --from=builder /sandbox-agent /usr/local/bin/sandbox-agent +COPY --from=neko-base /usr/bin/neko /usr/local/bin/neko + +EXPOSE 3000 +# Expose UDP port range for WebRTC media transport +EXPOSE 59050-59070/udp + +ENTRYPOINT ["/usr/local/bin/sandbox-agent"] +CMD ["server", "--host", "0.0.0.0", "--port", "3000", "--no-token"] diff --git a/docker/test-common-software/Dockerfile b/docker/test-common-software/Dockerfile new file mode 100644 index 0000000..7a03abc --- /dev/null +++ b/docker/test-common-software/Dockerfile @@ -0,0 +1,37 @@ +# Extends the base test-agent image with common software pre-installed. +# Used by the common_software integration test to verify that all documented +# software in docs/common-software.mdx works correctly inside the sandbox. 
+# +# KEEP IN SYNC with docs/common-software.mdx + +ARG BASE_IMAGE=sandbox-agent-test:dev +FROM ${BASE_IMAGE} + +USER root + +RUN apt-get update -qq && \ + apt-get install -y -qq --no-install-recommends \ + # Browsers + chromium \ + firefox-esr \ + # Languages + python3 python3-pip python3-venv \ + default-jdk \ + ruby-full \ + # Databases + sqlite3 \ + redis-server \ + # Build tools + build-essential cmake pkg-config \ + # CLI tools + git jq tmux \ + # Media and graphics + imagemagick \ + poppler-utils \ + # Desktop apps + gimp \ + > /dev/null 2>&1 && \ + rm -rf /var/lib/apt/lists/* + +ENTRYPOINT ["/usr/local/bin/sandbox-agent"] +CMD ["server", "--host", "0.0.0.0", "--port", "3000", "--no-token"] diff --git a/docs/agent-sessions.mdx b/docs/agent-sessions.mdx new file mode 100644 index 0000000..0154537 --- /dev/null +++ b/docs/agent-sessions.mdx @@ -0,0 +1,268 @@ +--- +title: "Agent Sessions" +description: "Create sessions, prompt agents, and inspect event history." +sidebarTitle: "Sessions" +icon: "comments" +--- + +Sessions are the unit of interaction with an agent. Create one session per task, send prompts, and consume event history. + +For SDK-based flows, sessions can be restored after runtime/session loss when persistence is enabled. +See [Session Restoration](/session-restoration). + +## Create a session + +```ts +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +const session = await sdk.createSession({ + agent: "codex", + cwd: "/", +}); + +console.log(session.id, session.agentSessionId); +``` + +## Send a prompt + +```ts +const response = await session.prompt([ + { type: "text", text: "Summarize the repository structure." 
}, +]); + +console.log(response.stopReason); +``` + +## Subscribe to live events + +```ts +const unsubscribe = session.onEvent((event) => { + console.log(event.eventIndex, event.sender, event.payload); +}); + +await session.prompt([ + { type: "text", text: "Explain the main entrypoints." }, +]); + +unsubscribe(); +``` + +### Event types + +Each event's `payload` contains a session update. The `sessionUpdate` field identifies the type. + + + +Streamed text or content from the agent's response. + +```json +{ + "sessionUpdate": "agent_message_chunk", + "content": { "type": "text", "text": "Here's how the repository is structured..." } +} +``` + + + +Internal reasoning from the agent (chain-of-thought / extended thinking). + +```json +{ + "sessionUpdate": "agent_thought_chunk", + "content": { "type": "text", "text": "I should start by looking at the project structure..." } +} +``` + + + +Echo of the user's prompt being processed. + +```json +{ + "sessionUpdate": "user_message_chunk", + "content": { "type": "text", "text": "Summarize the repository structure." } +} +``` + + + +The agent invoked a tool (file edit, terminal command, etc.). + +```json +{ + "sessionUpdate": "tool_call", + "toolCallId": "tc_abc123", + "title": "Read file", + "status": "in_progress", + "rawInput": { "path": "/src/index.ts" } +} +``` + + + +Progress or result update for an in-progress tool call. + +```json +{ + "sessionUpdate": "tool_call_update", + "toolCallId": "tc_abc123", + "status": "completed", + "content": [{ "type": "text", "text": "import express from 'express';\n..." }] +} +``` + + + +The agent's execution plan for the current task. + +```json +{ + "sessionUpdate": "plan", + "entries": [ + { "content": "Read the project structure", "status": "completed" }, + { "content": "Identify main entrypoints", "status": "in_progress" }, + { "content": "Write summary", "status": "pending" } + ] +} +``` + + + +Token usage metrics for the current turn. 
+ +```json +{ + "sessionUpdate": "usage_update" +} +``` + + + +Session metadata changed (e.g. agent-generated title). + +```json +{ + "sessionUpdate": "session_info_update", + "title": "Repository structure analysis" +} +``` + + + +## Fetch persisted event history + +```ts +const page = await sdk.getEvents({ + sessionId: session.id, + limit: 50, +}); + +for (const event of page.items) { + console.log(event.id, event.createdAt, event.sender); +} +``` + +## List and load sessions + +```ts +const sessions = await sdk.listSessions({ limit: 20 }); + +for (const item of sessions.items) { + console.log(item.id, item.agent, item.createdAt); +} + +if (sessions.items.length > 0) { + const loaded = await sdk.resumeSession(sessions.items[0]!.id); + await loaded.prompt([{ type: "text", text: "Continue." }]); +} +``` + +## Configure model, mode, and thought level + +Set the model, mode, or thought level on a session at creation time or after: + +```ts +// At creation time +const session = await sdk.createSession({ + agent: "codex", + model: "gpt-5.3-codex", + mode: "auto", + thoughtLevel: "high", +}); +``` + +```ts +// After creation +await session.setModel("gpt-5.2-codex"); +await session.setMode("full-access"); +await session.setThoughtLevel("medium"); +``` + +Query available modes: + +```ts +const modes = await session.getModes(); +console.log(modes?.currentModeId, modes?.availableModes); +``` + +### Advanced config options + +For config options beyond model, mode, and thought level, use `getConfigOptions` to discover what the agent supports and `setConfigOption` to set any option by ID: + +```ts +const options = await session.getConfigOptions(); +for (const opt of options) { + console.log(opt.id, opt.category, opt.type); +} +``` + +```ts +await session.setConfigOption("some-agent-option", "value"); +``` + +## Handle permission requests + +For agents that request tool-use permissions, register a permission listener and reply with `once`, `always`, or `reject`: + +```ts +const 
session = await sdk.createSession({ + agent: "claude", + mode: "default", +}); + +session.onPermissionRequest((request) => { + console.log(request.toolCall.title, request.availableReplies); + void session.respondPermission(request.id, "once"); +}); + +await session.prompt([ + { type: "text", text: "Create ./permission-example.txt with the text hello." }, +]); +``` + + +### Auto-approving permissions + +To auto-approve all permission requests, respond with `"once"` or `"always"` in your listener: + +```ts +session.onPermissionRequest((request) => { + void session.respondPermission(request.id, "always"); +}); +``` + +See `examples/permissions/src/index.ts` for a complete permissions example that works with Claude and Codex. + + +Some agents like Claude allow configuring permission behavior through modes (e.g. `bypassPermissions`, `acceptEdits`). We recommend leaving the mode as `default` and handling permission decisions explicitly in `onPermissionRequest` instead. + + +## Destroy a session + +```ts +await sdk.destroySession(session.id); +``` diff --git a/docs/agents/amp.mdx b/docs/agents/amp.mdx new file mode 100644 index 0000000..f94e97d --- /dev/null +++ b/docs/agents/amp.mdx @@ -0,0 +1,20 @@ +--- +title: "Amp" +description: "Use Amp as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "amp", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `amp-default` | +| **Modes** | `default`, `bypass` | +| **Thought levels** | Unsupported | diff --git a/docs/agents/claude.mdx b/docs/agents/claude.mdx new file mode 100644 index 0000000..2e4fd43 --- /dev/null +++ b/docs/agents/claude.mdx @@ -0,0 +1,49 @@ +--- +title: "Claude" +description: "Use Claude Code as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "claude", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `default`, `sonnet`, `opus`, `haiku` | +| **Modes** | `default`, `acceptEdits`, `plan`, `dontAsk`, `bypassPermissions` | +| **Thought levels** | Unsupported | + +## Configuring effort level + +Claude does not support changing effort level after a session starts. Configure it in the filesystem before creating the session. + +```ts +import { mkdir, writeFile } from "node:fs/promises"; +import path from "node:path"; + +const cwd = "/path/to/workspace"; +await mkdir(path.join(cwd, ".claude"), { recursive: true }); +await writeFile( + path.join(cwd, ".claude", "settings.json"), + JSON.stringify({ effortLevel: "high" }, null, 2), +); + +const session = await client.createSession({ + agent: "claude", + cwd, +}); +``` + + + +1. `~/.claude/settings.json` +2. `/.claude/settings.json` +3. `/.claude/settings.local.json` + + diff --git a/docs/agents/codex.mdx b/docs/agents/codex.mdx new file mode 100644 index 0000000..d359beb --- /dev/null +++ b/docs/agents/codex.mdx @@ -0,0 +1,20 @@ +--- +title: "Codex" +description: "Use OpenAI Codex as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "codex", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `gpt-5.3-codex` (default), `gpt-5.3-codex-spark`, `gpt-5.2-codex`, `gpt-5.1-codex-max`, `gpt-5.2`, `gpt-5.1-codex-mini` | +| **Modes** | `read-only` (default), `auto`, `full-access` | +| **Thought levels** | `low`, `medium`, `high` (default), `xhigh` | diff --git a/docs/agents/cursor.mdx b/docs/agents/cursor.mdx new file mode 100644 index 0000000..0905baa --- /dev/null +++ b/docs/agents/cursor.mdx @@ -0,0 +1,34 @@ +--- +title: "Cursor" +description: "Use Cursor as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "cursor", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | See below | +| **Modes** | Unsupported | +| **Thought levels** | Unsupported | + + + +| Group | Models | +|-------|--------| +| **Auto** | `auto` | +| **Composer** | `composer-1.5`, `composer-1` | +| **GPT-5.3 Codex** | `gpt-5.3-codex`, `gpt-5.3-codex-low`, `gpt-5.3-codex-high`, `gpt-5.3-codex-xhigh`, `gpt-5.3-codex-fast`, `gpt-5.3-codex-low-fast`, `gpt-5.3-codex-high-fast`, `gpt-5.3-codex-xhigh-fast` | +| **GPT-5.2** | `gpt-5.2`, `gpt-5.2-high`, `gpt-5.2-codex`, `gpt-5.2-codex-low`, `gpt-5.2-codex-high`, `gpt-5.2-codex-xhigh`, `gpt-5.2-codex-fast`, `gpt-5.2-codex-low-fast`, `gpt-5.2-codex-high-fast`, `gpt-5.2-codex-xhigh-fast` | +| **GPT-5.1** | `gpt-5.1-high`, `gpt-5.1-codex-max`, `gpt-5.1-codex-max-high` | +| **Claude** | `opus-4.6-thinking` (default), `opus-4.6`, `opus-4.5`, `opus-4.5-thinking`, `sonnet-4.5`, `sonnet-4.5-thinking` | +| **Other** | `gemini-3-pro`, `gemini-3-flash`, `grok` | + + diff --git a/docs/agents/opencode.mdx b/docs/agents/opencode.mdx new file mode 100644 index 0000000..db7b640 --- /dev/null +++ b/docs/agents/opencode.mdx @@ -0,0 +1,31 @@ +--- +title: "OpenCode" +description: "Use OpenCode as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "opencode", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | See below | +| **Modes** | `build` (default), `plan` | +| **Thought levels** | Unsupported | + + + +| Provider | Models | +|----------|--------| +| **Anthropic** | `anthropic/claude-3-5-haiku-20241022`, `anthropic/claude-3-5-haiku-latest`, `anthropic/claude-3-5-sonnet-20240620`, `anthropic/claude-3-5-sonnet-20241022`, `anthropic/claude-3-7-sonnet-20250219`, `anthropic/claude-3-7-sonnet-latest`, `anthropic/claude-3-haiku-20240307`, `anthropic/claude-3-opus-20240229`, `anthropic/claude-3-sonnet-20240229`, `anthropic/claude-haiku-4-5`, `anthropic/claude-haiku-4-5-20251001`, `anthropic/claude-opus-4-0`, `anthropic/claude-opus-4-1`, `anthropic/claude-opus-4-1-20250805`, `anthropic/claude-opus-4-20250514`, `anthropic/claude-opus-4-5`, `anthropic/claude-opus-4-5-20251101`, `anthropic/claude-opus-4-6`, `anthropic/claude-sonnet-4-0`, `anthropic/claude-sonnet-4-20250514`, `anthropic/claude-sonnet-4-5`, `anthropic/claude-sonnet-4-5-20250929` | +| **OpenAI** | `openai/gpt-5.1-codex`, `openai/gpt-5.1-codex-max`, `openai/gpt-5.1-codex-mini`, `openai/gpt-5.2`, `openai/gpt-5.2-codex`, `openai/gpt-5.3-codex` | +| **Cerebras** | `cerebras/gpt-oss-120b`, `cerebras/qwen-3-235b-a22b-instruct-2507`, `cerebras/zai-glm-4.7` | +| **OpenCode Zen** | `opencode/big-pickle`, `opencode/claude-3-5-haiku`, `opencode/claude-haiku-4-5`, `opencode/claude-opus-4-1`, `opencode/claude-opus-4-5`, `opencode/claude-opus-4-6`, `opencode/claude-sonnet-4`, `opencode/claude-sonnet-4-5`, `opencode/gemini-3-flash`, `opencode/gemini-3-pro` (default), `opencode/glm-4.6`, `opencode/glm-4.7`, `opencode/gpt-5`, `opencode/gpt-5-codex`, `opencode/gpt-5-nano`, `opencode/gpt-5.1`, `opencode/gpt-5.1-codex`, `opencode/gpt-5.1-codex-max`, `opencode/gpt-5.1-codex-mini`, `opencode/gpt-5.2`, `opencode/gpt-5.2-codex`, 
`opencode/kimi-k2`, `opencode/kimi-k2-thinking`, `opencode/kimi-k2.5`, `opencode/kimi-k2.5-free`, `opencode/minimax-m2.1`, `opencode/minimax-m2.1-free`, `opencode/trinity-large-preview-free` | + + diff --git a/docs/agents/pi.mdx b/docs/agents/pi.mdx new file mode 100644 index 0000000..1d56370 --- /dev/null +++ b/docs/agents/pi.mdx @@ -0,0 +1,20 @@ +--- +title: "Pi" +description: "Use Pi as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "pi", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `default` | +| **Modes** | Unsupported | +| **Thought levels** | Unsupported | diff --git a/docs/ai/llms-txt.mdx b/docs/ai/llms-txt.mdx index ad6fa8b..e5aaf8d 100644 --- a/docs/ai/llms-txt.mdx +++ b/docs/ai/llms-txt.mdx @@ -8,8 +8,8 @@ Mintlify publishes `llms.txt` and `llms-full.txt` for this documentation site. Access them at: ``` -https://rivet.dev/docs/llms.txt -https://rivet.dev/docs/llms-full.txt +https://sandboxagent.dev/docs/llms.txt +https://sandboxagent.dev/docs/llms-full.txt ``` If you run a reverse proxy in front of the docs, forward `/llms.txt` and `/llms-full.txt` to Mintlify. diff --git a/docs/ai/skill.mdx b/docs/ai/skill.mdx index bb48c2c..105026c 100644 --- a/docs/ai/skill.mdx +++ b/docs/ai/skill.mdx @@ -8,14 +8,23 @@ Mintlify hosts a `skill.md` file for this documentation site. Access it at: ``` -https://rivet.dev/docs/skill.md +https://sandboxagent.dev/docs/skill.md ``` To add it to an agent using the Skills CLI: -``` -npx skills add rivet-dev/skills -s sandbox-agent -``` + + + ```bash + npx skills add rivet-dev/skills -s sandbox-agent + ``` + + + ```bash + bunx skills add rivet-dev/skills -s sandbox-agent + ``` + + If you run a reverse proxy in front of the docs, make sure `/skill.md` and `/.well-known/skills/*` are forwarded to Mintlify. 
diff --git a/docs/architecture.mdx b/docs/architecture.mdx new file mode 100644 index 0000000..61b4689 --- /dev/null +++ b/docs/architecture.mdx @@ -0,0 +1,63 @@ +--- +title: "Architecture" +description: "How the Sandbox Agent server, SDK, and agent processes fit together." +--- + +Sandbox Agent is a lightweight HTTP server that runs **inside** a sandbox. It: + +- **Agent management**: Installs, spawns, and stops coding agent processes +- **Sessions**: Routes prompts to agents and streams events back in real time +- **Sandbox APIs**: Filesystem, process, and terminal access for the sandbox environment + +## Components + +```mermaid +flowchart LR + CLIENT["Your App"] + + subgraph SANDBOX["Sandbox"] + direction TB + SERVER["Sandbox Agent Server"] + AGENT["Agent Process
(Claude, Codex, etc.)"] + SERVER --> AGENT + end + + CLIENT -->|"SDK (HTTP)"| SERVER +``` + +- **Your app**: Uses the `sandbox-agent` TypeScript SDK to talk to the server over HTTP. +- **Sandbox**: An isolated runtime (local process, Docker, E2B, Daytona, Vercel, Cloudflare). +- **Sandbox Agent server**: A single binary inside the sandbox that manages agent lifecycles, routes prompts, streams events, and exposes filesystem/process/terminal APIs. +- **Agent process**: A coding agent (Claude Code, Codex, etc.) spawned by the server. Each session maps to one agent process. + +## What `SandboxAgent.start()` does + +1. **Provision**: The provider creates a sandbox (starts a container, creates a VM, etc.) +2. **Install**: The Sandbox Agent binary is installed inside the sandbox +3. **Boot**: The server starts listening on an HTTP port +4. **Health check**: The SDK waits for `/v1/health` to respond +5. **Ready**: The SDK returns a connected client + +For the `local` provider, provisioning is a no-op and the server runs as a local subprocess. + +### Server recovery + +If the server process stops, the SDK automatically calls the provider's `ensureServer()` after 3 consecutive health-check failures. Most built-in providers implement this. Custom providers can add `ensureServer(sandboxId)` to their `SandboxProvider` object. + +## Server HTTP API + +See the [HTTP API reference](/api-reference) for the full list of server endpoints. + +## Agent installation + +Agents are installed lazily on first use. To avoid the cold-start delay, pre-install them: + +```bash +sandbox-agent install-agent --all +``` + +The `rivetdev/sandbox-agent:0.4.2-full` Docker image ships with all agents pre-installed. + +## Production-ready agent orchestration + +For production deployments, see [Orchestration Architecture](/orchestration-architecture) for recommended topology, backend requirements, and session persistence patterns. 
diff --git a/docs/attachments.mdx b/docs/attachments.mdx new file mode 100644 index 0000000..8deac05 --- /dev/null +++ b/docs/attachments.mdx @@ -0,0 +1,61 @@ +--- +title: "Attachments" +description: "Upload files into the sandbox and reference them in prompts." +sidebarTitle: "Attachments" +icon: "paperclip" +--- + +Use the filesystem API to upload files, then include file references in prompt content. + + + + + ```ts TypeScript + import { SandboxAgent } from "sandbox-agent"; + import fs from "node:fs"; + + const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + }); + + const buffer = await fs.promises.readFile("./data.csv"); + + const upload = await sdk.writeFsFile( + { path: "./uploads/data.csv" }, + buffer, + ); + + console.log(upload.path); + ``` + + ```bash cURL + curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=./uploads/data.csv" \ + --data-binary @./data.csv + ``` + + + The upload response returns the absolute path. + + + + ```ts TypeScript + const session = await sdk.createSession({ agent: "mock" }); + + await session.prompt([ + { type: "text", text: "Please analyze the attached CSV." }, + { + type: "resource_link", + name: "data.csv", + uri: "file:///home/sandbox/uploads/data.csv", + mimeType: "text/csv", + }, + ]); + ``` + + + +## Notes + +- Use absolute file URIs in `resource_link` blocks. +- If `mimeType` is omitted, the agent/runtime may infer a default. +- Support for non-text resources depends on each agent's prompt capabilities. diff --git a/docs/building-chat-ui.mdx b/docs/building-chat-ui.mdx deleted file mode 100644 index 0f3b559..0000000 --- a/docs/building-chat-ui.mdx +++ /dev/null @@ -1,356 +0,0 @@ ---- -title: "Building a Chat UI" -description: "Build a chat interface using the universal event stream." 
-icon: "comments" ---- - -## Setup - -### List agents - -```ts -const { agents } = await client.listAgents(); - -// Each agent has capabilities that determine what UI to show -const claude = agents.find((a) => a.id === "claude"); -if (claude?.capabilities.permissions) { - // Show permission approval UI -} -if (claude?.capabilities.questions) { - // Show question response UI -} -``` - -### Create a session - -```ts -const sessionId = `session-${crypto.randomUUID()}`; - -await client.createSession(sessionId, { - agent: "claude", - agentMode: "code", // Optional: agent-specific mode - permissionMode: "default", // Optional: "default" | "plan" | "bypass" - model: "claude-sonnet-4", // Optional: model override -}); -``` - -### Send a message - -```ts -await client.postMessage(sessionId, { message: "Hello, world!" }); -``` - -### Stream events - -Three options for receiving events: - -```ts -// Option 1: SSE (recommended for real-time UI) -const stream = client.streamEvents(sessionId, { offset: 0 }); -for await (const event of stream) { - handleEvent(event); -} - -// Option 2: Polling -const { events, hasMore } = await client.getEvents(sessionId, { offset: 0 }); -events.forEach(handleEvent); - -// Option 3: Turn streaming (send + stream in one call) -const stream = client.streamTurn(sessionId, { message: "Hello" }); -for await (const event of stream) { - handleEvent(event); -} -``` - -Use `offset` to track the last seen `sequence` number and resume from where you left off. 
- ---- - -## Handling Events - -### Bare minimum - -Handle these three events to render a basic chat: - -```ts -type ItemState = { - item: UniversalItem; - deltas: string[]; -}; - -const items = new Map(); - -function handleEvent(event: UniversalEvent) { - switch (event.type) { - case "item.started": { - const { item } = event.data as ItemEventData; - items.set(item.item_id, { item, deltas: [] }); - break; - } - - case "item.delta": { - const { item_id, delta } = event.data as ItemDeltaData; - const state = items.get(item_id); - if (state) { - state.deltas.push(delta); - } - break; - } - - case "item.completed": { - const { item } = event.data as ItemEventData; - const state = items.get(item.item_id); - if (state) { - state.item = item; - state.deltas = []; // Clear deltas, use final content - } - break; - } - } -} -``` - -When rendering, show a loading indicator while `item.status === "in_progress"`: - -```ts -function renderItem(state: ItemState) { - const { item, deltas } = state; - const isLoading = item.status === "in_progress"; - - // For streaming text, combine item content with accumulated deltas - const text = item.content - .filter((p) => p.type === "text") - .map((p) => p.text) - .join(""); - const streamedText = text + deltas.join(""); - - return { - content: streamedText, - isLoading, - role: item.role, - kind: item.kind, - }; -} -``` - -### Extra events - -Handle these for a complete implementation: - -```ts -function handleEvent(event: UniversalEvent) { - switch (event.type) { - // ... bare minimum events above ... 
- - case "session.started": { - // Session is ready - break; - } - - case "session.ended": { - const { reason, terminated_by } = event.data as SessionEndedData; - // Disable input, show end reason - // reason: "completed" | "error" | "terminated" - // terminated_by: "agent" | "daemon" - break; - } - - case "error": { - const { message, code } = event.data as ErrorData; - // Display error to user - break; - } - - case "agent.unparsed": { - const { error, location } = event.data as AgentUnparsedData; - // Parsing failure - treat as bug in development - console.error(`Parse error at ${location}: ${error}`); - break; - } - } -} -``` - -### Content parts - -Each item has `content` parts. Render based on `type`: - -```ts -function renderContentPart(part: ContentPart) { - switch (part.type) { - case "text": - return {part.text}; - - case "tool_call": - return ; - - case "tool_result": - return ; - - case "file_ref": - return ; - - case "reasoning": - return {part.text}; - - case "status": - return ; - - case "image": - return ; - } -} -``` - ---- - -## Handling Permissions - -When `permission.requested` arrives, show an approval UI: - -```ts -const pendingPermissions = new Map(); - -function handleEvent(event: UniversalEvent) { - if (event.type === "permission.requested") { - const data = event.data as PermissionEventData; - pendingPermissions.set(data.permission_id, data); - } - - if (event.type === "permission.resolved") { - const data = event.data as PermissionEventData; - pendingPermissions.delete(data.permission_id); - } -} - -// User clicks approve/deny -async function replyPermission(id: string, reply: "once" | "always" | "reject") { - await client.replyPermission(sessionId, id, { reply }); - pendingPermissions.delete(id); -} -``` - -Render permission requests: - -```ts -function PermissionRequest({ data }: { data: PermissionEventData }) { - return ( -
-

Allow: {data.action}

- - - -
- ); -} -``` - ---- - -## Handling Questions - -When `question.requested` arrives, show a selection UI: - -```ts -const pendingQuestions = new Map(); - -function handleEvent(event: UniversalEvent) { - if (event.type === "question.requested") { - const data = event.data as QuestionEventData; - pendingQuestions.set(data.question_id, data); - } - - if (event.type === "question.resolved") { - const data = event.data as QuestionEventData; - pendingQuestions.delete(data.question_id); - } -} - -// User selects answer(s) -async function answerQuestion(id: string, answers: string[][]) { - await client.replyQuestion(sessionId, id, { answers }); - pendingQuestions.delete(id); -} - -async function rejectQuestion(id: string) { - await client.rejectQuestion(sessionId, id); - pendingQuestions.delete(id); -} -``` - -Render question requests: - -```ts -function QuestionRequest({ data }: { data: QuestionEventData }) { - const [selected, setSelected] = useState([]); - - return ( -
-

{data.prompt}

- {data.options.map((option) => ( - - ))} - - -
- ); -} -``` - ---- - -## Testing with Mock Agent - -The `mock` agent lets you test UI behaviors without external credentials: - -```ts -await client.createSession("test-session", { agent: "mock" }); -``` - -Send `help` to see available commands: - -| Command | Tests | -|---------|-------| -| `help` | Lists all commands | -| `demo` | Full UI coverage sequence with markers | -| `markdown` | Streaming markdown rendering | -| `tool` | Tool call + result with file refs | -| `status` | Status item updates | -| `image` | Image content part | -| `permission` | Permission request flow | -| `question` | Question request flow | -| `error` | Error + unparsed events | -| `end` | Session ended event | -| `echo ` | Echo text as assistant message | - -Any unrecognized text is echoed back as an assistant message. - ---- - -## Reference Implementation - -The [Inspector UI](https://github.com/rivet-dev/sandbox-agent/blob/main/frontend/packages/inspector/src/App.tsx) -is a complete reference showing session management, event rendering, and HITL flows. diff --git a/docs/cli.mdx b/docs/cli.mdx index 855bd44..362de49 100644 --- a/docs/cli.mdx +++ b/docs/cli.mdx @@ -1,13 +1,17 @@ --- title: "CLI Reference" -description: "Complete CLI reference for sandbox-agent." +description: "CLI reference for sandbox-agent." sidebarTitle: "CLI" -icon: "terminal" --- -## Server +Global flags (available on all commands): -Start the HTTP server: +- `-t, --token `: require/use bearer auth +- `-n, --no-token`: disable auth + +## server + +Run the HTTP server. 
```bash sandbox-agent server [OPTIONS] @@ -15,45 +19,174 @@ sandbox-agent server [OPTIONS] | Option | Default | Description | |--------|---------|-------------| -| `-t, --token ` | - | Authentication token for all requests | -| `-n, --no-token` | - | Disable authentication (local dev only) | -| `-H, --host ` | `127.0.0.1` | Host to bind to | -| `-p, --port ` | `2468` | Port to bind to | -| `-O, --cors-allow-origin ` | - | CORS origin to allow (repeatable) | -| `-M, --cors-allow-method ` | all | CORS allowed method (repeatable) | -| `-A, --cors-allow-header
` | all | CORS allowed header (repeatable) | -| `-C, --cors-allow-credentials` | - | Enable CORS credentials | -| `--no-telemetry` | - | Disable anonymous telemetry | +| `-H, --host ` | `127.0.0.1` | Host to bind | +| `-p, --port ` | `2468` | Port to bind | +| `-O, --cors-allow-origin ` | - | Allowed CORS origin (repeatable) | +| `-M, --cors-allow-method ` | all | Allowed CORS method (repeatable) | +| `-A, --cors-allow-header
` | all | Allowed CORS header (repeatable) | +| `-C, --cors-allow-credentials` | false | Enable CORS credentials | +| `--no-telemetry` | false | Disable anonymous telemetry | ```bash -sandbox-agent server --token "$TOKEN" --port 3000 +sandbox-agent server --port 3000 ``` ---- +Notes: -## Install Agent (Local) +- Server logs are redirected to files by default. +- Set `SANDBOX_AGENT_LOG_STDOUT=1` to force stdout/stderr logging. +- Use `SANDBOX_AGENT_LOG_DIR` to override log directory. -Install an agent without running the server: +## install + +Install first-party runtime dependencies. + +### install desktop + +Install the Linux desktop runtime packages required by `/v1/desktop/*`. ```bash -sandbox-agent install-agent [OPTIONS] +sandbox-agent install desktop [OPTIONS] ``` | Option | Description | |--------|-------------| -| `-r, --reinstall` | Force reinstall even if already installed | +| `--yes` | Skip the confirmation prompt | +| `--print-only` | Print the package-manager command without executing it | +| `--package-manager ` | Override package-manager detection | +| `--no-fonts` | Skip the default DejaVu font package | + +```bash +sandbox-agent install desktop --yes +sandbox-agent install desktop --print-only +``` + +Notes: + +- Supported on Linux only. +- The command detects `apt`, `dnf`, or `apk`. +- If the host is not already running as root, the command requires `sudo`. + +## install-agent + +Install or reinstall a single agent, or every supported agent with `--all`. 
+ +```bash +sandbox-agent install-agent [] [OPTIONS] +``` + +| Option | Description | +|--------|-------------| +| `--all` | Install every supported agent | +| `-r, --reinstall` | Force reinstall | +| `--agent-version ` | Override agent package version (conflicts with `--all`) | +| `--agent-process-version ` | Override agent process version (conflicts with `--all`) | + +Examples: ```bash sandbox-agent install-agent claude --reinstall +sandbox-agent install-agent --all ``` ---- +### Custom Pi implementation path -## Credentials +If you use a forked/custom `pi` binary with `pi-acp`, you can override what executable gets launched. -### Extract +#### Option 1: explicit command override (recommended) -Extract locally discovered credentials: +Set `PI_ACP_PI_COMMAND` in the environment where `sandbox-agent` runs: + +```bash +PI_ACP_PI_COMMAND=/absolute/path/to/your/pi-fork sandbox-agent server +``` + +This is forwarded to `pi-acp`, which uses it instead of looking up `pi` on `PATH`. + +#### Option 2: PATH override + +Put your custom `pi` first on `PATH` before starting `sandbox-agent`: + +```bash +export PATH="/path/to/custom-pi-dir:$PATH" +sandbox-agent server +``` + +#### Option 3: symlink override + +Point `pi` to your custom binary via symlink in a directory that is early on `PATH`: + +```bash +ln -sf /absolute/path/to/your/pi-fork /usr/local/bin/pi +``` + +Then start `sandbox-agent` normally. + +## opencode (experimental) + +Start/reuse daemon and run `opencode attach` against `/opencode`. + +```bash +sandbox-agent opencode [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host ` | `127.0.0.1` | Daemon host | +| `-p, --port ` | `2468` | Daemon port | +| `--session-title ` | - | Reserved option (currently no-op) | +| `--yolo` | false | OpenCode attach mode flag | + +```bash +sandbox-agent opencode +``` + +## daemon + +Manage the background daemon. 
+ +### daemon start + +```bash +sandbox-agent daemon start [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | +| `--upgrade` | false | Use ensure-running + upgrade behavior | + +```bash +sandbox-agent daemon start +sandbox-agent daemon start --upgrade +``` + +### daemon stop + +```bash +sandbox-agent daemon stop [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | + +### daemon status + +```bash +sandbox-agent daemon status [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | + +## credentials + +### credentials extract ```bash sandbox-agent credentials extract [OPTIONS] @@ -61,20 +194,17 @@ sandbox-agent credentials extract [OPTIONS] | Option | Description | |--------|-------------| -| `-a, --agent <AGENT>` | Filter by agent (`claude`, `codex`, `opencode`, `amp`) | -| `-p, --provider <PROVIDER>` | Filter by provider (`anthropic`, `openai`) | -| `-d, --home-dir <DIR>` | Custom home directory for credential search | -| `-r, --reveal` | Show full credential values (default: redacted) | -| `--no-oauth` | Exclude OAuth credentials | +| `-a, --agent <AGENT>` | Filter by `claude`, `codex`, `opencode`, or `amp` | +| `-p, --provider <PROVIDER>` | Filter by provider | +| `-d, --home-dir <DIR>` | Override home dir | +| `--no-oauth` | Skip OAuth sources | +| `-r, --reveal` | Show full credential values | ```bash sandbox-agent credentials extract --agent claude --reveal -sandbox-agent credentials extract --provider anthropic ``` -### Extract as Environment Variables - -Output credentials as shell environment variables: +### credentials extract-env ```bash sandbox-agent credentials extract-env [OPTIONS] @@ -82,229 
+212,87 @@ sandbox-agent credentials extract-env [OPTIONS] | Option | Description | |--------|-------------| -| `-e, --export` | Prefix each line with `export` | -| `-d, --home-dir <DIR>` | Custom home directory for credential search | -| `--no-oauth` | Exclude OAuth credentials | +| `-e, --export` | Prefix output with `export` | +| `-d, --home-dir <DIR>` | Override home dir | +| `--no-oauth` | Skip OAuth sources | ```bash -# Source directly into shell eval "$(sandbox-agent credentials extract-env --export)" ``` ---- +## api -## API Commands +API subcommands for scripting. -The `sandbox-agent api` subcommand mirrors the HTTP API for scripting without client code. - -All API commands support: +Shared option: | Option | Default | Description | |--------|---------|-------------| -| `-e, --endpoint <URL>` | `http://127.0.0.1:2468` | API endpoint | -| `-t, --token <TOKEN>` | - | Authentication token | +| `-e, --endpoint <URL>` | `http://127.0.0.1:2468` | Target server | ---- +### api agents -### Agents +```bash +sandbox-agent api agents list [--endpoint <URL>] +sandbox-agent api agents report [--endpoint <URL>] +sandbox-agent api agents install <AGENT> [--reinstall] [--endpoint <URL>] +``` -#### List Agents +#### api agents list + +List all agents and their install status. ```bash sandbox-agent api agents list ``` -#### Install Agent +#### api agents report + +Emit a JSON report of available models, modes, and thought levels for every agent, grouped by category. ```bash -sandbox-agent api agents install <AGENT> [OPTIONS] +sandbox-agent api agents report --endpoint http://127.0.0.1:2468 | jq . 
``` -| Option | Description | -|--------|-------------| -| `-r, --reinstall` | Force reinstall | +Example output: + +```json +{ + "generatedAtMs": 1740000000000, + "endpoint": "http://127.0.0.1:2468", + "agents": [ + { + "id": "claude", + "installed": true, + "models": { + "currentValue": "default", + "values": [ + { "value": "default", "name": "Default" }, + { "value": "sonnet", "name": "Sonnet" }, + { "value": "opus", "name": "Opus" }, + { "value": "haiku", "name": "Haiku" } + ] + }, + "modes": { + "currentValue": "default", + "values": [ + { "value": "default", "name": "Default" }, + { "value": "acceptEdits", "name": "Accept Edits" }, + { "value": "plan", "name": "Plan" }, + { "value": "dontAsk", "name": "Don't Ask" }, + { "value": "bypassPermissions", "name": "Bypass Permissions" } + ] + }, + "thoughtLevels": { "values": [] } + } + ] +} +``` + +See individual agent pages (e.g. [Claude](/agents/claude), [Codex](/agents/codex)) for supported models, modes, and thought levels. + +#### api agents install ```bash -sandbox-agent api agents install claude --reinstall +sandbox-agent api agents install codex --reinstall ``` - -#### Get Agent Modes - -```bash -sandbox-agent api agents modes <AGENT> -``` - -```bash -sandbox-agent api agents modes claude -``` - ---- - -### Sessions - -#### List Sessions - -```bash -sandbox-agent api sessions list -``` - -#### Create Session - -```bash -sandbox-agent api sessions create <SESSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-a, --agent <AGENT>` | Agent identifier (required) | -| `-g, --agent-mode <MODE>` | Agent mode | -| `-p, --permission-mode <MODE>` | Permission mode (`default`, `plan`, `bypass`) | -| `-m, --model <MODEL>` | Model override | -| `-v, --variant <VARIANT>` | Model variant | -| `-A, --agent-version <VERSION>` | Agent version | - -```bash -sandbox-agent api sessions create my-session \ - --agent claude \ - --agent-mode code \ - --permission-mode default -``` - -#### Send Message 
- -```bash -sandbox-agent api sessions send-message <SESSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-m, --message <TEXT>` | Message text (required) | - -```bash -sandbox-agent api sessions send-message my-session \ - --message "Summarize the repository" -``` - -#### Send Message (Streaming) - -Send a message and stream the response: - -```bash -sandbox-agent api sessions send-message-stream <SESSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-m, --message <TEXT>` | Message text (required) | -| `--include-raw` | Include raw agent data | - -```bash -sandbox-agent api sessions send-message-stream my-session \ - --message "Help me debug this" -``` - -#### Terminate Session - -```bash -sandbox-agent api sessions terminate <SESSION_ID> -``` - -```bash -sandbox-agent api sessions terminate my-session -``` - -#### Get Events - -Fetch session events: - -```bash -sandbox-agent api sessions events <SESSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-o, --offset <N>` | Event offset | -| `-l, --limit <N>` | Max events to return | -| `--include-raw` | Include raw agent data | - -```bash -sandbox-agent api sessions events my-session --offset 0 --limit 50 -``` - -`get-messages` is an alias for `events`. 
- -#### Stream Events (SSE) - -Stream session events via Server-Sent Events: - -```bash -sandbox-agent api sessions events-sse <SESSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-o, --offset <N>` | Event offset to start from | -| `--include-raw` | Include raw agent data | - -```bash -sandbox-agent api sessions events-sse my-session --offset 0 -``` - -#### Reply to Question - -```bash -sandbox-agent api sessions reply-question <SESSION_ID> <QUESTION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-a, --answers <JSON>` | JSON array of answers (required) | - -```bash -sandbox-agent api sessions reply-question my-session q1 \ - --answers '[["yes"]]' -``` - -#### Reject Question - -```bash -sandbox-agent api sessions reject-question <SESSION_ID> <QUESTION_ID> -``` - -```bash -sandbox-agent api sessions reject-question my-session q1 -``` - -#### Reply to Permission - -```bash -sandbox-agent api sessions reply-permission <SESSION_ID> <PERMISSION_ID> [OPTIONS] -``` - -| Option | Description | -|--------|-------------| -| `-r, --reply <REPLY>` | `once`, `always`, or `reject` (required) | - -```bash -sandbox-agent api sessions reply-permission my-session perm1 --reply once -``` - ---- - -## CLI to HTTP Mapping - -| CLI Command | HTTP Endpoint | -|-------------|---------------| -| `api agents list` | `GET /v1/agents` | -| `api agents install` | `POST /v1/agents/{agent}/install` | -| `api agents modes` | `GET /v1/agents/{agent}/modes` | -| `api sessions list` | `GET /v1/sessions` | -| `api sessions create` | `POST /v1/sessions/{sessionId}` | -| `api sessions send-message` | `POST /v1/sessions/{sessionId}/messages` | -| `api sessions send-message-stream` | `POST /v1/sessions/{sessionId}/messages/stream` | -| `api sessions terminate` | `POST /v1/sessions/{sessionId}/terminate` | -| `api sessions events` | `GET /v1/sessions/{sessionId}/events` | -| `api sessions events-sse` | `GET /v1/sessions/{sessionId}/events/sse` 
| -| `api sessions reply-question` | `POST /v1/sessions/{sessionId}/questions/{questionId}/reply` | -| `api sessions reject-question` | `POST /v1/sessions/{sessionId}/questions/{questionId}/reject` | -| `api sessions reply-permission` | `POST /v1/sessions/{sessionId}/permissions/{permissionId}/reply` | diff --git a/docs/common-software.mdx b/docs/common-software.mdx new file mode 100644 index 0000000..7997a92 --- /dev/null +++ b/docs/common-software.mdx @@ -0,0 +1,560 @@ +--- +title: "Common Software" +description: "Install browsers, languages, databases, and other tools inside the sandbox." +sidebarTitle: "Common Software" +icon: "box-open" +--- + +The sandbox runs a Debian/Ubuntu base image. You can install software with `apt-get` via the [Process API](/processes) or by customizing your Docker image. This page covers commonly needed packages and how to install them. + +## Browsers + +### Chromium + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "chromium", "chromium-sandbox"], +}); + +// Launch headless +await sdk.runProcess({ + command: "chromium", + args: ["--headless", "--no-sandbox", "--disable-gpu", "https://example.com"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","chromium","chromium-sandbox"]}' +``` +</CodeGroup> + +<Note> +Use `--no-sandbox` when running Chromium inside a container. The container itself provides isolation. +</Note> + +### Firefox + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "firefox-esr"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","firefox-esr"]}' +``` +</CodeGroup> + +### Playwright browsers + +Playwright bundles its own browser binaries. 
Install the Playwright CLI and let it download browsers for you. + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "npx", + args: ["playwright", "install", "--with-deps", "chromium"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"npx","args":["playwright","install","--with-deps","chromium"]}' +``` +</CodeGroup> + +--- + +## Languages and runtimes + +### Node.js + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "nodejs", "npm"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","nodejs","npm"]}' +``` +</CodeGroup> + +For a specific version, use [nvm](https://github.com/nvm-sh/nvm): + +```ts TypeScript +await sdk.runProcess({ + command: "bash", + args: ["-c", "curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash && . ~/.nvm/nvm.sh && nvm install 22"], +}); +``` + +### Python + +Python 3 is typically pre-installed. 
To add pip and common packages: + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "python3", "python3-pip", "python3-venv"], +}); + +await sdk.runProcess({ + command: "pip3", + args: ["install", "numpy", "pandas", "matplotlib"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","python3","python3-pip","python3-venv"]}' + +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"pip3","args":["install","numpy","pandas","matplotlib"]}' +``` +</CodeGroup> + +### Go + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "bash", + args: ["-c", "curl -fsSL https://go.dev/dl/go1.23.6.linux-amd64.tar.gz | tar -C /usr/local -xz"], +}); + +// Add to PATH for subsequent commands +await sdk.runProcess({ + command: "bash", + args: ["-c", "export PATH=$PATH:/usr/local/go/bin && go version"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"bash","args":["-c","curl -fsSL https://go.dev/dl/go1.23.6.linux-amd64.tar.gz | tar -C /usr/local -xz"]}' +``` +</CodeGroup> + +### Rust + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "bash", + args: ["-c", "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"bash","args":["-c","curl --proto =https --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y"]}' +``` +</CodeGroup> + +### Java (OpenJDK) + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "default-jdk"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" 
\ + -d '{"command":"apt-get","args":["install","-y","default-jdk"]}' +``` +</CodeGroup> + +### Ruby + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "ruby-full"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","ruby-full"]}' +``` +</CodeGroup> + +--- + +## Databases + +### PostgreSQL + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "postgresql", "postgresql-client"], +}); + +// Start the service +const proc = await sdk.createProcess({ + command: "bash", + args: ["-c", "su - postgres -c 'pg_ctlcluster 15 main start'"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","postgresql","postgresql-client"]}' +``` +</CodeGroup> + +### SQLite + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "sqlite3"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","sqlite3"]}' +``` +</CodeGroup> + +### Redis + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "redis-server"], +}); + +const proc = await sdk.createProcess({ + command: "redis-server", + args: ["--daemonize", "no"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","redis-server"]}' + +curl -X POST "http://127.0.0.1:2468/v1/processes" \ + -H "Content-Type: application/json" \ + -d '{"command":"redis-server","args":["--daemonize","no"]}' +``` +</CodeGroup> + +### MySQL / MariaDB + +<CodeGroup> +```ts TypeScript +await 
sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "mariadb-server", "mariadb-client"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","mariadb-server","mariadb-client"]}' +``` +</CodeGroup> + +--- + +## Build tools + +### Essential build toolchain + +Most compiled software needs the standard build toolchain: + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "build-essential", "cmake", "pkg-config"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","build-essential","cmake","pkg-config"]}' +``` +</CodeGroup> + +This installs `gcc`, `g++`, `make`, `cmake`, and related tools. + +--- + +## Desktop applications + +These require the [Computer Use](/computer-use) desktop to be started first. 
+ +### LibreOffice + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "libreoffice"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","libreoffice"]}' +``` +</CodeGroup> + +### GIMP + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "gimp"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","gimp"]}' +``` +</CodeGroup> + +### VLC + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "vlc"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","vlc"]}' +``` +</CodeGroup> + +### VS Code (code-server) + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "bash", + args: ["-c", "curl -fsSL https://code-server.dev/install.sh | sh"], +}); + +const proc = await sdk.createProcess({ + command: "code-server", + args: ["--bind-addr", "0.0.0.0:8080", "--auth", "none"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"bash","args":["-c","curl -fsSL https://code-server.dev/install.sh | sh"]}' + +curl -X POST "http://127.0.0.1:2468/v1/processes" \ + -H "Content-Type: application/json" \ + -d '{"command":"code-server","args":["--bind-addr","0.0.0.0:8080","--auth","none"]}' +``` +</CodeGroup> + +--- + +## CLI tools + +### Git + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "git"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + 
-d '{"command":"apt-get","args":["install","-y","git"]}' +``` +</CodeGroup> + +### Docker + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "bash", + args: ["-c", "curl -fsSL https://get.docker.com | sh"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"bash","args":["-c","curl -fsSL https://get.docker.com | sh"]}' +``` +</CodeGroup> + +### jq + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "jq"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","jq"]}' +``` +</CodeGroup> + +### tmux + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "tmux"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","tmux"]}' +``` +</CodeGroup> + +--- + +## Media and graphics + +### FFmpeg + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "ffmpeg"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","ffmpeg"]}' +``` +</CodeGroup> + +### ImageMagick + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "imagemagick"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","imagemagick"]}' +``` +</CodeGroup> + +### Poppler (PDF utilities) + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "poppler-utils"], +}); + +// Convert PDF to images 
+await sdk.runProcess({ + command: "pdftoppm", + args: ["-png", "document.pdf", "output"], +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","poppler-utils"]}' +``` +</CodeGroup> + +--- + +## Pre-installing in a Docker image + +For production use, install software in your Dockerfile instead of at runtime. This avoids repeated downloads and makes startup faster. + +```dockerfile +FROM ubuntu:22.04 + +RUN apt-get update && apt-get install -y \ + chromium \ + firefox-esr \ + nodejs npm \ + python3 python3-pip \ + git curl wget \ + build-essential \ + sqlite3 \ + ffmpeg \ + imagemagick \ + jq \ + && rm -rf /var/lib/apt/lists/* + +RUN pip3 install numpy pandas matplotlib +``` + +See [Docker deployment](/deploy/docker) for how to use custom images with Sandbox Agent. diff --git a/docs/computer-use.mdx b/docs/computer-use.mdx new file mode 100644 index 0000000..fc6b7d0 --- /dev/null +++ b/docs/computer-use.mdx @@ -0,0 +1,859 @@ +--- +title: "Computer Use" +description: "Control a virtual desktop inside the sandbox with mouse, keyboard, screenshots, recordings, and live streaming." +sidebarTitle: "Computer Use" +icon: "desktop" +--- + +Sandbox Agent provides a managed virtual desktop (Xvfb + openbox) that you can control programmatically. This is useful for browser automation, GUI testing, and AI computer-use workflows. 
+ +## Start and stop + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +const status = await sdk.startDesktop({ + width: 1920, + height: 1080, + dpi: 96, +}); + +console.log(status.state); // "active" +console.log(status.display); // ":99" + +// When done +await sdk.stopDesktop(); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/start" \ + -H "Content-Type: application/json" \ + -d '{"width":1920,"height":1080,"dpi":96}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/stop" +``` +</CodeGroup> + +All fields in the start request are optional. Defaults are 1440x900 at 96 DPI. + +### Start request options + +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `width` | number | 1440 | Desktop width in pixels | +| `height` | number | 900 | Desktop height in pixels | +| `dpi` | number | 96 | Display DPI | +| `displayNum` | number | 99 | Starting X display number. The runtime probes from this number upward to find an available display. | +| `stateDir` | string | (auto) | Desktop state directory for home, logs, recordings | +| `streamVideoCodec` | string | `"vp8"` | WebRTC video codec (`vp8`, `vp9`, `h264`) | +| `streamAudioCodec` | string | `"opus"` | WebRTC audio codec (`opus`, `g722`) | +| `streamFrameRate` | number | 30 | Streaming frame rate (1-60) | +| `webrtcPortRange` | string | `"59050-59070"` | UDP port range for WebRTC media | +| `recordingFps` | number | 30 | Default recording FPS when not specified in `startDesktopRecording` (1-60) | + +The streaming and recording options configure defaults for the desktop session. They take effect when streaming or recording is started later. 
+ +<CodeGroup> +```ts TypeScript +const status = await sdk.startDesktop({ + width: 1920, + height: 1080, + streamVideoCodec: "h264", + streamFrameRate: 60, + webrtcPortRange: "59100-59120", + recordingFps: 15, +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/start" \ + -H "Content-Type: application/json" \ + -d '{ + "width": 1920, + "height": 1080, + "streamVideoCodec": "h264", + "streamFrameRate": 60, + "webrtcPortRange": "59100-59120", + "recordingFps": 15 + }' +``` +</CodeGroup> + +## Status + +<CodeGroup> +```ts TypeScript +const status = await sdk.getDesktopStatus(); +console.log(status.state); // "inactive" | "active" | "failed" | ... +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/status" +``` +</CodeGroup> + +## Screenshots + +Capture the full desktop or a specific region. Optionally include the cursor position. + +<CodeGroup> +```ts TypeScript +// Full screenshot (PNG by default) +const png = await sdk.takeDesktopScreenshot(); + +// JPEG at 70% quality, half scale +const jpeg = await sdk.takeDesktopScreenshot({ + format: "jpeg", + quality: 70, + scale: 0.5, +}); + +// Include cursor overlay +const withCursor = await sdk.takeDesktopScreenshot({ + showCursor: true, +}); + +// Region screenshot +const region = await sdk.takeDesktopRegionScreenshot({ + x: 100, + y: 100, + width: 400, + height: 300, +}); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/screenshot" --output screenshot.png + +curl "http://127.0.0.1:2468/v1/desktop/screenshot?format=jpeg&quality=70&scale=0.5" \ + --output screenshot.jpg + +# Include cursor overlay +curl "http://127.0.0.1:2468/v1/desktop/screenshot?show_cursor=true" \ + --output with_cursor.png + +curl "http://127.0.0.1:2468/v1/desktop/screenshot/region?x=100&y=100&width=400&height=300" \ + --output region.png +``` +</CodeGroup> + +### Screenshot options + +| Param | Type | Default | Description | +|-------|------|---------|-------------| +| `format` | string | `"png"` | Output 
format: `png`, `jpeg`, or `webp` | +| `quality` | number | 85 | Compression quality (1-100, JPEG/WebP only) | +| `scale` | number | 1.0 | Scale factor (0.1-1.0) | +| `showCursor` | boolean | `false` | Composite a crosshair at the cursor position | + +When `showCursor` is enabled, the cursor position is captured at the moment of the screenshot and a red crosshair is drawn at that location. This is useful for AI agents that need to see where the cursor is in the screenshot. + +## Mouse + +<CodeGroup> +```ts TypeScript +// Get current position +const pos = await sdk.getDesktopMousePosition(); +console.log(pos.x, pos.y); + +// Move +await sdk.moveDesktopMouse({ x: 500, y: 300 }); + +// Click (left by default) +await sdk.clickDesktop({ x: 500, y: 300 }); + +// Right click +await sdk.clickDesktop({ x: 500, y: 300, button: "right" }); + +// Double click +await sdk.clickDesktop({ x: 500, y: 300, clickCount: 2 }); + +// Drag +await sdk.dragDesktopMouse({ + startX: 100, startY: 100, + endX: 400, endY: 400, +}); + +// Scroll +await sdk.scrollDesktop({ x: 500, y: 300, deltaY: -3 }); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/mouse/position" + +curl -X POST "http://127.0.0.1:2468/v1/desktop/mouse/click" \ + -H "Content-Type: application/json" \ + -d '{"x":500,"y":300}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/mouse/drag" \ + -H "Content-Type: application/json" \ + -d '{"startX":100,"startY":100,"endX":400,"endY":400}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/mouse/scroll" \ + -H "Content-Type: application/json" \ + -d '{"x":500,"y":300,"deltaY":-3}' +``` +</CodeGroup> + +## Keyboard + +<CodeGroup> +```ts TypeScript +// Type text +await sdk.typeDesktopText({ text: "Hello, world!" 
}); + +// Press a key with modifiers +await sdk.pressDesktopKey({ + key: "c", + modifiers: { ctrl: true }, +}); + +// Low-level key down/up +await sdk.keyDownDesktop({ key: "Shift_L" }); +await sdk.keyUpDesktop({ key: "Shift_L" }); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/keyboard/type" \ + -H "Content-Type: application/json" \ + -d '{"text":"Hello, world!"}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/keyboard/press" \ + -H "Content-Type: application/json" \ + -d '{"key":"c","modifiers":{"ctrl":true}}' +``` +</CodeGroup> + +## Clipboard + +Read and write the X11 clipboard programmatically. + +<CodeGroup> +```ts TypeScript +// Read clipboard +const clipboard = await sdk.getDesktopClipboard(); +console.log(clipboard.text); + +// Read primary selection (mouse-selected text) +const primary = await sdk.getDesktopClipboard({ selection: "primary" }); + +// Write to clipboard +await sdk.setDesktopClipboard({ text: "Pasted via API" }); + +// Write to both clipboard and primary selection +await sdk.setDesktopClipboard({ + text: "Synced text", + selection: "both", +}); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/clipboard" + +curl "http://127.0.0.1:2468/v1/desktop/clipboard?selection=primary" + +curl -X POST "http://127.0.0.1:2468/v1/desktop/clipboard" \ + -H "Content-Type: application/json" \ + -d '{"text":"Pasted via API"}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/clipboard" \ + -H "Content-Type: application/json" \ + -d '{"text":"Synced text","selection":"both"}' +``` +</CodeGroup> + +The `selection` parameter controls which X11 selection to read or write: + +| Value | Description | +|-------|-------------| +| `clipboard` (default) | The standard clipboard (Ctrl+C / Ctrl+V) | +| `primary` | The primary selection (text selected with the mouse) | +| `both` | Write to both clipboard and primary selection (write only) | + +## Display and windows + +<CodeGroup> +```ts TypeScript +const display = await 
sdk.getDesktopDisplayInfo(); +console.log(display.resolution); // { width: 1920, height: 1080, dpi: 96 } + +const { windows } = await sdk.listDesktopWindows(); +for (const win of windows) { + console.log(win.title, win.x, win.y, win.width, win.height); +} +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/display/info" + +curl "http://127.0.0.1:2468/v1/desktop/windows" +``` +</CodeGroup> + +The windows endpoint filters out noise automatically: window manager internals (Openbox), windows with empty titles, and tiny helper windows (under 120x80) are excluded. The currently active/focused window is always included regardless of filters. + +### Focused window + +Get the currently focused window without listing all windows. + +<CodeGroup> +```ts TypeScript +const focused = await sdk.getDesktopFocusedWindow(); +console.log(focused.title, focused.id); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/windows/focused" +``` +</CodeGroup> + +Returns 404 if no window currently has focus. + +### Window management + +Focus, move, and resize windows by their X11 window ID. + +<CodeGroup> +```ts TypeScript +const { windows } = await sdk.listDesktopWindows(); +const win = windows[0]; + +// Bring window to foreground +await sdk.focusDesktopWindow(win.id); + +// Move window +await sdk.moveDesktopWindow(win.id, { x: 100, y: 50 }); + +// Resize window +await sdk.resizeDesktopWindow(win.id, { width: 1280, height: 720 }); +``` + +```bash cURL +# Focus a window +curl -X POST "http://127.0.0.1:2468/v1/desktop/windows/12345/focus" + +# Move a window +curl -X POST "http://127.0.0.1:2468/v1/desktop/windows/12345/move" \ + -H "Content-Type: application/json" \ + -d '{"x":100,"y":50}' + +# Resize a window +curl -X POST "http://127.0.0.1:2468/v1/desktop/windows/12345/resize" \ + -H "Content-Type: application/json" \ + -d '{"width":1280,"height":720}' +``` +</CodeGroup> + +All three endpoints return the updated window info so you can verify the operation took effect. 
The window manager may adjust the requested position or size. + +## App launching + +Launch applications or open files/URLs on the desktop without needing to shell out. + +<CodeGroup> +```ts TypeScript +// Launch an app by name +const result = await sdk.launchDesktopApp({ + app: "firefox", + args: ["--private"], +}); +console.log(result.processId); // "proc_7" + +// Launch and wait for the window to appear +const withWindow = await sdk.launchDesktopApp({ + app: "xterm", + wait: true, +}); +console.log(withWindow.windowId); // "12345" or null if timed out + +// Open a URL with the default handler +const opened = await sdk.openDesktopTarget({ + target: "https://example.com", +}); +console.log(opened.processId); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/launch" \ + -H "Content-Type: application/json" \ + -d '{"app":"firefox","args":["--private"]}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/launch" \ + -H "Content-Type: application/json" \ + -d '{"app":"xterm","wait":true}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/open" \ + -H "Content-Type: application/json" \ + -d '{"target":"https://example.com"}' +``` +</CodeGroup> + +The returned `processId` can be used with the [Process API](/processes) to read logs (`GET /v1/processes/{id}/logs`) or stop the application (`POST /v1/processes/{id}/stop`). + +When `wait` is `true`, the API polls for up to 5 seconds for a window to appear. If the window appears, its ID is returned in `windowId`. If it times out, `windowId` is `null` but the process is still running. + +<Tip> +**Launch/Open vs the Process API:** Both `launch` and `open` are convenience wrappers around the [Process API](/processes). They create managed processes (with `owner: "desktop"`) that you can inspect, log, and stop through the same Process endpoints. 
The difference is that `launch` validates the binary exists in PATH first and can optionally wait for a window to appear, while `open` delegates to the system default handler (`xdg-open`). Use the Process API directly when you need full control over command, environment, working directory, or restart policies. +</Tip> + +## Recording + +Record the desktop to MP4. + +<CodeGroup> +```ts TypeScript +const recording = await sdk.startDesktopRecording({ fps: 30 }); +console.log(recording.id); + +// ... do things ... + +const stopped = await sdk.stopDesktopRecording(); + +// List all recordings +const { recordings } = await sdk.listDesktopRecordings(); + +// Download +const mp4 = await sdk.downloadDesktopRecording(recording.id); + +// Clean up +await sdk.deleteDesktopRecording(recording.id); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/recording/start" \ + -H "Content-Type: application/json" \ + -d '{"fps":30}' + +curl -X POST "http://127.0.0.1:2468/v1/desktop/recording/stop" + +curl "http://127.0.0.1:2468/v1/desktop/recordings" + +curl "http://127.0.0.1:2468/v1/desktop/recordings/rec_1/download" --output recording.mp4 + +curl -X DELETE "http://127.0.0.1:2468/v1/desktop/recordings/rec_1" +``` +</CodeGroup> + +## Desktop processes + +The desktop runtime manages several background processes (Xvfb, openbox, neko, ffmpeg). These are all registered with the general [Process API](/processes) under the `desktop` owner, so you can inspect logs, check status, and troubleshoot using the same tools you use for any other managed process. 
+ +<CodeGroup> +```ts TypeScript +// List all processes, including desktop-owned ones +const { processes } = await sdk.listProcesses(); + +const desktopProcs = processes.filter((p) => p.owner === "desktop"); +for (const p of desktopProcs) { + console.log(p.id, p.command, p.status); +} + +// Read logs from a specific desktop process +const logs = await sdk.getProcessLogs(desktopProcs[0].id, { tail: 50 }); +for (const entry of logs.entries) { + console.log(entry.stream, atob(entry.data)); +} +``` + +```bash cURL +# List all processes (desktop processes have owner: "desktop") +curl "http://127.0.0.1:2468/v1/processes" + +# Get logs from a specific desktop process +curl "http://127.0.0.1:2468/v1/processes/proc_1/logs?tail=50" +``` +</CodeGroup> + +The desktop status endpoint also includes a summary of running processes: + +<CodeGroup> +```ts TypeScript +const status = await sdk.getDesktopStatus(); +for (const proc of status.processes) { + console.log(proc.name, proc.pid, proc.running); +} +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/desktop/status" +# Response includes: processes: [{ name: "Xvfb", pid: 123, running: true }, ...] +``` +</CodeGroup> + +| Process | Role | Restart policy | +|---------|------|---------------| +| Xvfb | Virtual X11 framebuffer | Auto-restart while desktop is active | +| openbox | Window manager | Auto-restart while desktop is active | +| neko | WebRTC streaming server (started by `startDesktopStream`) | No auto-restart | +| ffmpeg | Screen recorder (started by `startDesktopRecording`) | No auto-restart | + +## Live streaming + +Start a WebRTC stream for real-time desktop viewing in a browser. 
+ +<CodeGroup> +```ts TypeScript +await sdk.startDesktopStream(); + +// Check stream status +const status = await sdk.getDesktopStreamStatus(); +console.log(status.active); // true +console.log(status.processId); // "proc_5" + +// Connect via the React DesktopViewer component or +// use the WebSocket signaling endpoint directly +// at ws://127.0.0.1:2468/v1/desktop/stream/signaling + +await sdk.stopDesktopStream(); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/desktop/stream/start" + +# Check stream status +curl "http://127.0.0.1:2468/v1/desktop/stream/status" + +# Connect to ws://127.0.0.1:2468/v1/desktop/stream/signaling for WebRTC signaling + +curl -X POST "http://127.0.0.1:2468/v1/desktop/stream/stop" +``` +</CodeGroup> + +For a drop-in React component, see [React Components](/react-components). + +## API reference + +### Endpoints + +| Method | Path | Description | +|--------|------|-------------| +| `POST` | `/v1/desktop/start` | Start the desktop runtime | +| `POST` | `/v1/desktop/stop` | Stop the desktop runtime | +| `GET` | `/v1/desktop/status` | Get desktop runtime status | +| `GET` | `/v1/desktop/screenshot` | Capture full desktop screenshot | +| `GET` | `/v1/desktop/screenshot/region` | Capture a region screenshot | +| `GET` | `/v1/desktop/mouse/position` | Get current mouse position | +| `POST` | `/v1/desktop/mouse/move` | Move the mouse | +| `POST` | `/v1/desktop/mouse/click` | Click the mouse | +| `POST` | `/v1/desktop/mouse/down` | Press mouse button down | +| `POST` | `/v1/desktop/mouse/up` | Release mouse button | +| `POST` | `/v1/desktop/mouse/drag` | Drag from one point to another | +| `POST` | `/v1/desktop/mouse/scroll` | Scroll at a position | +| `POST` | `/v1/desktop/keyboard/type` | Type text | +| `POST` | `/v1/desktop/keyboard/press` | Press a key with optional modifiers | +| `POST` | `/v1/desktop/keyboard/down` | Press a key down (hold) | +| `POST` | `/v1/desktop/keyboard/up` | Release a key | +| `GET` | 
`/v1/desktop/display/info` | Get display info | +| `GET` | `/v1/desktop/windows` | List visible windows | +| `GET` | `/v1/desktop/windows/focused` | Get focused window info | +| `POST` | `/v1/desktop/windows/{id}/focus` | Focus a window | +| `POST` | `/v1/desktop/windows/{id}/move` | Move a window | +| `POST` | `/v1/desktop/windows/{id}/resize` | Resize a window | +| `GET` | `/v1/desktop/clipboard` | Read clipboard contents | +| `POST` | `/v1/desktop/clipboard` | Write to clipboard | +| `POST` | `/v1/desktop/launch` | Launch an application | +| `POST` | `/v1/desktop/open` | Open a file or URL | +| `POST` | `/v1/desktop/recording/start` | Start recording | +| `POST` | `/v1/desktop/recording/stop` | Stop recording | +| `GET` | `/v1/desktop/recordings` | List recordings | +| `GET` | `/v1/desktop/recordings/{id}` | Get recording metadata | +| `GET` | `/v1/desktop/recordings/{id}/download` | Download recording | +| `DELETE` | `/v1/desktop/recordings/{id}` | Delete recording | +| `POST` | `/v1/desktop/stream/start` | Start WebRTC streaming | +| `POST` | `/v1/desktop/stream/stop` | Stop WebRTC streaming | +| `GET` | `/v1/desktop/stream/status` | Get stream status | +| `GET` | `/v1/desktop/stream/signaling` | WebSocket for WebRTC signaling | + +### TypeScript SDK methods + +| Method | Returns | Description | +|--------|---------|-------------| +| `startDesktop(request?)` | `DesktopStatusResponse` | Start the desktop | +| `stopDesktop()` | `DesktopStatusResponse` | Stop the desktop | +| `getDesktopStatus()` | `DesktopStatusResponse` | Get desktop status | +| `takeDesktopScreenshot(query?)` | `Uint8Array` | Capture screenshot | +| `takeDesktopRegionScreenshot(query)` | `Uint8Array` | Capture region screenshot | +| `getDesktopMousePosition()` | `DesktopMousePositionResponse` | Get mouse position | +| `moveDesktopMouse(request)` | `DesktopMousePositionResponse` | Move mouse | +| `clickDesktop(request)` | `DesktopMousePositionResponse` | Click mouse | +| 
`mouseDownDesktop(request)` | `DesktopMousePositionResponse` | Mouse button down | +| `mouseUpDesktop(request)` | `DesktopMousePositionResponse` | Mouse button up | +| `dragDesktopMouse(request)` | `DesktopMousePositionResponse` | Drag mouse | +| `scrollDesktop(request)` | `DesktopMousePositionResponse` | Scroll | +| `typeDesktopText(request)` | `DesktopActionResponse` | Type text | +| `pressDesktopKey(request)` | `DesktopActionResponse` | Press key | +| `keyDownDesktop(request)` | `DesktopActionResponse` | Key down | +| `keyUpDesktop(request)` | `DesktopActionResponse` | Key up | +| `getDesktopDisplayInfo()` | `DesktopDisplayInfoResponse` | Get display info | +| `listDesktopWindows()` | `DesktopWindowListResponse` | List windows | +| `getDesktopFocusedWindow()` | `DesktopWindowInfo` | Get focused window | +| `focusDesktopWindow(id)` | `DesktopWindowInfo` | Focus a window | +| `moveDesktopWindow(id, request)` | `DesktopWindowInfo` | Move a window | +| `resizeDesktopWindow(id, request)` | `DesktopWindowInfo` | Resize a window | +| `getDesktopClipboard(query?)` | `DesktopClipboardResponse` | Read clipboard | +| `setDesktopClipboard(request)` | `DesktopActionResponse` | Write clipboard | +| `launchDesktopApp(request)` | `DesktopLaunchResponse` | Launch an app | +| `openDesktopTarget(request)` | `DesktopOpenResponse` | Open file/URL | +| `startDesktopRecording(request?)` | `DesktopRecordingInfo` | Start recording | +| `stopDesktopRecording()` | `DesktopRecordingInfo` | Stop recording | +| `listDesktopRecordings()` | `DesktopRecordingListResponse` | List recordings | +| `getDesktopRecording(id)` | `DesktopRecordingInfo` | Get recording | +| `downloadDesktopRecording(id)` | `Uint8Array` | Download recording | +| `deleteDesktopRecording(id)` | `void` | Delete recording | +| `startDesktopStream()` | `DesktopStreamStatusResponse` | Start streaming | +| `stopDesktopStream()` | `DesktopStreamStatusResponse` | Stop streaming | +| `getDesktopStreamStatus()` | 
`DesktopStreamStatusResponse` | Stream status | + +## Customizing the desktop environment + +The desktop runs inside the sandbox filesystem, so you can customize it using the [File System](/file-system) API before or after starting the desktop. The desktop HOME directory is located at `~/.local/state/sandbox-agent/desktop/home` (or `$XDG_STATE_HOME/sandbox-agent/desktop/home` if `XDG_STATE_HOME` is set). + +All configuration files below are written to paths relative to this HOME directory. + +### Window manager (openbox) + +The desktop uses [openbox](http://openbox.org/) as its window manager. You can customize its behavior, theme, and keyboard shortcuts by writing an `rc.xml` config file. + +<CodeGroup> +```ts TypeScript +const openboxConfig = `<?xml version="1.0" encoding="UTF-8"?> +<openbox_config xmlns="http://openbox.org/3.4/rc"> + <theme> + <name>Clearlooks</name> + <titleLayout>NLIMC</titleLayout> + <font place="ActiveWindow"><name>DejaVu Sans</name><size>10</size></font> + </theme> + <desktops><number>1</number></desktops> + <keyboard> + <keybind key="A-F4"><action name="Close"/></keybind> + <keybind key="A-Tab"><action name="NextWindow"/></keybind> + </keyboard> +</openbox_config>`; + +await sdk.mkdirFs({ path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox" }); +await sdk.writeFsFile( + { path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox/rc.xml" }, + openboxConfig, +); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox" + +curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox/rc.xml" \ + -H "Content-Type: application/octet-stream" \ + --data-binary @rc.xml +``` +</CodeGroup> + +### Autostart programs + +Openbox runs scripts in `~/.config/openbox/autostart` on startup. Use this to launch applications, set the background, or configure the environment. 
+ +<CodeGroup> +```ts TypeScript +const autostart = `#!/bin/sh +# Set a solid background color +xsetroot -solid "#1e1e2e" & + +# Launch a terminal +xterm -geometry 120x40+50+50 & + +# Launch a browser +firefox --no-remote & +`; + +await sdk.mkdirFs({ path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox" }); +await sdk.writeFsFile( + { path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox/autostart" }, + autostart, +); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox" + +curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox/autostart" \ + -H "Content-Type: application/octet-stream" \ + --data-binary @autostart.sh +``` +</CodeGroup> + +<Note> +The autostart script runs when openbox starts, which happens during `startDesktop()`. Write the autostart file before calling `startDesktop()` for it to take effect. +</Note> + +### Background + +There is no wallpaper set by default (the background is the X root window default). 
You can set it using `xsetroot` in the autostart script (as shown above), or use `feh` if you need an image:
+
+<CodeGroup>
+```ts TypeScript
+// Upload a wallpaper image
+import fs from "node:fs";
+
+const wallpaper = await fs.promises.readFile("./wallpaper.png");
+await sdk.writeFsFile(
+  { path: "~/.local/state/sandbox-agent/desktop/home/wallpaper.png" },
+  wallpaper,
+);
+
+// Set the autostart to apply it
+const autostart = `#!/bin/sh
+feh --bg-fill ~/wallpaper.png &
+`;
+
+await sdk.mkdirFs({ path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox" });
+await sdk.writeFsFile(
+  { path: "~/.local/state/sandbox-agent/desktop/home/.config/openbox/autostart" },
+  autostart,
+);
+```
+
+```bash cURL
+curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/wallpaper.png" \
+  -H "Content-Type: application/octet-stream" \
+  --data-binary @wallpaper.png
+
+curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox"
+
+curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.config/openbox/autostart" \
+  -H "Content-Type: application/octet-stream" \
+  --data-binary @autostart.sh
+```
+</CodeGroup>
+
+<Note>
+`feh` is not installed by default. Install it via the [Process API](/processes) before starting the desktop: `await sdk.runProcess({ command: "apt-get", args: ["install", "-y", "feh"] })`.
+</Note>
+
+### Fonts
+
+Only `fonts-dejavu-core` is installed by default.
To add more fonts, install them with your system package manager or copy font files into the sandbox: + +<CodeGroup> +```ts TypeScript +// Install a font package +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "fonts-noto", "fonts-liberation"], +}); + +// Or copy a custom font file +import fs from "node:fs"; + +const font = await fs.promises.readFile("./CustomFont.ttf"); +await sdk.mkdirFs({ path: "~/.local/state/sandbox-agent/desktop/home/.local/share/fonts" }); +await sdk.writeFsFile( + { path: "~/.local/state/sandbox-agent/desktop/home/.local/share/fonts/CustomFont.ttf" }, + font, +); + +// Rebuild the font cache +await sdk.runProcess({ command: "fc-cache", args: ["-fv"] }); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","fonts-noto","fonts-liberation"]}' + +curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=~/.local/state/sandbox-agent/desktop/home/.local/share/fonts" + +curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.local/share/fonts/CustomFont.ttf" \ + -H "Content-Type: application/octet-stream" \ + --data-binary @CustomFont.ttf + +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"fc-cache","args":["-fv"]}' +``` +</CodeGroup> + +### Cursor theme + +<CodeGroup> +```ts TypeScript +await sdk.runProcess({ + command: "apt-get", + args: ["install", "-y", "dmz-cursor-theme"], +}); + +const xresources = `Xcursor.theme: DMZ-White\nXcursor.size: 24\n`; +await sdk.writeFsFile( + { path: "~/.local/state/sandbox-agent/desktop/home/.Xresources" }, + xresources, +); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"apt-get","args":["install","-y","dmz-cursor-theme"]}' + +curl -X PUT 
"http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.Xresources" \
+  -H "Content-Type: application/octet-stream" \
+  --data-binary $'Xcursor.theme: DMZ-White\nXcursor.size: 24\n'
+```
+</CodeGroup>
+
+<Note>
+Run `xrdb -merge ~/.Xresources` (via the autostart or process API) after writing the file for changes to take effect.
+</Note>
+
+### Shell and terminal
+
+No terminal emulator or shell is launched by default. Add one to the openbox autostart:
+
+```sh
+# In ~/.config/openbox/autostart
+xterm -geometry 120x40+50+50 &
+```
+
+To use a different shell, set the `SHELL` environment variable in your Dockerfile or install your preferred shell and configure the terminal to use it.
+
+### GTK theme
+
+Applications using GTK will pick up settings from `~/.config/gtk-3.0/settings.ini`:
+
+<CodeGroup>
+```ts TypeScript
+const gtkSettings = `[Settings]
+gtk-theme-name=Adwaita
+gtk-icon-theme-name=Adwaita
+gtk-font-name=DejaVu Sans 10
+gtk-cursor-theme-name=DMZ-White
+gtk-cursor-theme-size=24
+`;
+
+await sdk.mkdirFs({ path: "~/.local/state/sandbox-agent/desktop/home/.config/gtk-3.0" });
+await sdk.writeFsFile(
+  { path: "~/.local/state/sandbox-agent/desktop/home/.config/gtk-3.0/settings.ini" },
+  gtkSettings,
+);
+```
+
+```bash cURL
+curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=~/.local/state/sandbox-agent/desktop/home/.config/gtk-3.0"
+
+curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=~/.local/state/sandbox-agent/desktop/home/.config/gtk-3.0/settings.ini" \
+  -H "Content-Type: application/octet-stream" \
+  --data-binary @settings.ini
+```
+</CodeGroup>
+
+### Summary of configuration paths
+
+All paths are relative to the desktop HOME directory (`~/.local/state/sandbox-agent/desktop/home`).
+ +| What | Path | Notes | +|------|------|-------| +| Openbox config | `.config/openbox/rc.xml` | Window manager theme, keybindings, behavior | +| Autostart | `.config/openbox/autostart` | Shell script run on desktop start | +| Custom fonts | `.local/share/fonts/` | TTF/OTF files, run `fc-cache -fv` after | +| Cursor theme | `.Xresources` | Requires `xrdb -merge` to apply | +| GTK 3 settings | `.config/gtk-3.0/settings.ini` | Theme, icons, fonts for GTK apps | +| Wallpaper | Any path, referenced from autostart | Requires `feh` or similar tool | diff --git a/docs/conversion.mdx b/docs/conversion.mdx deleted file mode 100644 index d155ab4..0000000 --- a/docs/conversion.mdx +++ /dev/null @@ -1,82 +0,0 @@ -# Universal ↔ Agent Term Mapping - -Source of truth: generated agent schemas in `resources/agent-schemas/artifacts/json-schema/`. - -Identifiers - -+----------------------+------------------------+------------------------------------------+-----------------------------+------------------------+ -| Universal term | Claude | Codex (app-server) | OpenCode | Amp | -+----------------------+------------------------+------------------------------------------+-----------------------------+------------------------+ -| session_id | n/a (daemon-only) | n/a (daemon-only) | n/a (daemon-only) | n/a (daemon-only) | -| native_session_id | none | threadId | sessionID | none | -| item_id | synthetic | ThreadItem.id | Message.id | StreamJSONMessage.id | -| native_item_id | none | ThreadItem.id | Message.id | StreamJSONMessage.id | -+----------------------+------------------------+------------------------------------------+-----------------------------+------------------------+ - -Notes: -- When a provider does not supply IDs (Claude), we synthesize item_id values and keep native_item_id null. -- native_session_id is the only provider session identifier. It is intentionally used for thread/session/run ids. -- native_item_id preserves the agent-native item/message id when present. 
-- source indicates who emitted the event: agent (native) or daemon (synthetic). -- raw is always present on events. When clients do not opt-in to raw payloads, raw is null. -- opt-in via `include_raw=true` on events endpoints (HTTP + SSE). -- If parsing fails, emit agent.unparsed (source=daemon, synthetic=true). Tests must assert zero unparsed events. - -Events / Message Flow - -+------------------------+------------------------------+--------------------------------------------+-----------------------------------------+----------------------------------+ -| Universal term | Claude | Codex (app-server) | OpenCode | Amp | -+------------------------+------------------------------+--------------------------------------------+-----------------------------------------+----------------------------------+ -| session.started | none | method=thread/started | type=session.created | none | -| session.ended | SDKMessage.type=result | no explicit session end (turn/completed) | no explicit session end (session.deleted)| type=done | -| message (user) | SDKMessage.type=user | item/completed (ThreadItem.type=userMessage)| message.updated (Message.role=user) | type=message | -| message (assistant) | SDKMessage.type=assistant | item/completed (ThreadItem.type=agentMessage)| message.updated (Message.role=assistant)| type=message | -| message.delta | stream_event (partial) or synthetic | method=item/agentMessage/delta | type=message.part.updated (delta) | synthetic | -| tool call | type=tool_use | method=item/mcpToolCall/progress | message.part.updated (part.type=tool) | type=tool_call | -| tool result | user.message.content.tool_result | item/completed (tool result ThreadItem variants) | message.part.updated (part.type=tool, state=completed) | type=tool_result | -| permission.requested | control_request.can_use_tool | none | type=permission.asked | none | -| permission.resolved | daemon reply to can_use_tool | none | type=permission.replied | none | -| question.requested | tool_use 
(AskUserQuestion) | experimental request_user_input (payload) | type=question.asked | none | -| question.resolved | tool_result (AskUserQuestion) | experimental request_user_input (payload) | type=question.replied / question.rejected | none | -| error | SDKResultMessage.error | method=error | type=session.error (or message error) | type=error | -+------------------------+------------------------------+--------------------------------------------+-----------------------------------------+----------------------------------+ - -Synthetics - -+------------------------------+------------------------+--------------------------+--------------------------------------------------------------+ -| Synthetic element | When it appears | Stored as | Notes | -+------------------------------+------------------------+--------------------------+--------------------------------------------------------------+ -| session.started | When agent emits no explicit start | session.started event | Mark source=daemon | -| session.ended | When agent emits no explicit end | session.ended event | Mark source=daemon; reason may be inferred | -| item_id (Claude) | Claude provides no item IDs | item_id | Maintain provider_item_id map when possible | -| user message (Claude) | Claude emits only assistant output | item.completed | Mark source=daemon; preserve raw input in event metadata | -| question events (Claude) | AskUserQuestion tool usage | question.requested/resolved | Derived from tool_use blocks (source=agent) | -| native_session_id (Codex) | Codex uses threadId | native_session_id | Intentionally merged threadId into native_session_id | -+------------------------------+------------------------+--------------------------+--------------------------------------------------------------+ -| message.delta (Claude) | No native deltas emitted | item.delta | Synthetic delta with full message content; source=daemon | -| message.delta (Amp) | No native deltas | item.delta | Synthetic delta with full 
message content; source=daemon | -+------------------------------+------------------------+--------------------------+--------------------------------------------------------------+ -| message.delta (OpenCode) | part delta before message | item.delta | If part arrives first, create item.started stub then delta | -+------------------------------+------------------------+--------------------------+--------------------------------------------------------------+ - -Delta handling - -- Codex emits agent message and other deltas (e.g., item/agentMessage/delta). -- OpenCode emits part deltas via message.part.updated with a delta string. -- Claude can emit stream_event deltas when partial streaming is enabled; Amp does not emit deltas. - -Policy: -- Always emit item.delta across all providers. -- For providers without native deltas, emit a single synthetic delta containing the full content prior to item.completed. -- For Claude when partial streaming is enabled, forward native deltas and skip the synthetic full-content delta. -- For providers with native deltas, forward as-is; also emit item.completed when final content is known. - -Message normalization notes - -- user vs assistant: normalized via role in the universal item; provider role fields or item types determine role. -- file artifacts: always represented as content parts (type=file_ref) inside message/tool_result items, not a separate item kind. -- reasoning: represented as content parts (type=reasoning) inside message items, with visibility when available. -- subagents: OpenCode subtask parts and Claude Task tool usage are currently normalized into standard message/tool flow (no dedicated subagent fields). -- OpenCode unrolling: message.updated creates/updates the parent message item; tool-related parts emit separate tool item events (item.started/ item.completed) with parent_id pointing to the message item. 
-- If a message.part.updated arrives before message.updated, we create a stub item.started (source=daemon) so deltas have a parent. -- Tool calls/results are always emitted as separate tool items to keep behavior consistent across agents. diff --git a/docs/cors.mdx b/docs/cors.mdx index 5e50888..d6bfddf 100644 --- a/docs/cors.mdx +++ b/docs/cors.mdx @@ -2,7 +2,6 @@ title: "CORS Configuration" description: "Configure CORS for browser-based applications." sidebarTitle: "CORS" -icon: "globe" --- When calling the Sandbox Agent server from a browser, CORS (Cross-Origin Resource Sharing) controls which origins can make requests. @@ -13,7 +12,6 @@ By default, no CORS origins are allowed. You must explicitly specify origins for ```bash sandbox-agent server \ - --token "$SANDBOX_TOKEN" \ --cors-allow-origin "http://localhost:5173" ``` @@ -36,7 +34,6 @@ Specify the flag multiple times to allow multiple origins: ```bash sandbox-agent server \ - --token "$SANDBOX_TOKEN" \ --cors-allow-origin "http://localhost:5173" \ --cors-allow-origin "http://localhost:3000" ``` @@ -47,7 +44,6 @@ By default, all methods and headers are allowed. To restrict them: ```bash sandbox-agent server \ - --token "$SANDBOX_TOKEN" \ --cors-allow-origin "https://your-app.com" \ --cors-allow-method "GET" \ --cors-allow-method "POST" \ diff --git a/docs/custom-tools.mdx b/docs/custom-tools.mdx new file mode 100644 index 0000000..2fb3e15 --- /dev/null +++ b/docs/custom-tools.mdx @@ -0,0 +1,159 @@ +--- +title: "Custom Tools" +description: "Give agents custom tools inside the sandbox using MCP servers or skills." 
+sidebarTitle: "Custom Tools" +icon: "wrench" +--- + +There are two common patterns for sandbox-local custom tooling: + +| | MCP Server | Skill | +|---|---|---| +| **How it works** | Agent connects to an MCP server (`mcpServers`) | Agent follows `SKILL.md` instructions and runs scripts | +| **Best for** | Typed tool calls and structured protocols | Lightweight task-specific guidance | +| **Requires** | MCP server process (stdio/http/sse) | Script + `SKILL.md` | + +## Option A: MCP server (stdio) + +<Steps> + <Step title="Write and bundle your MCP server"> + +```ts src/mcp-server.ts +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import { z } from "zod"; + +const server = new McpServer({ name: "rand", version: "1.0.0" }); + +server.tool( + "random_number", + "Generate a random integer between min and max", + { + min: z.number(), + max: z.number(), + }, + async ({ min, max }) => ({ + content: [{ type: "text", text: String(Math.floor(Math.random() * (max - min + 1)) + min) }], + }), +); + +await server.connect(new StdioServerTransport()); +``` + +```bash +npx esbuild src/mcp-server.ts --bundle --format=cjs --platform=node --target=node18 --outfile=dist/mcp-server.cjs +``` + </Step> + + <Step title="Upload it into the sandbox"> + +```ts +import { SandboxAgent } from "sandbox-agent"; +import fs from "node:fs"; + +const sdk = await SandboxAgent.connect({ baseUrl: "http://127.0.0.1:2468" }); +const content = await fs.promises.readFile("./dist/mcp-server.cjs"); + +await sdk.writeFsFile({ path: "/opt/mcp/custom-tools/mcp-server.cjs" }, content); +``` + +```bash +curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=/opt/mcp/custom-tools/mcp-server.cjs" \ + --data-binary @./dist/mcp-server.cjs +``` + </Step> + + <Step title="Register MCP config and create a session"> + +```ts +await sdk.setMcpConfig( + { + directory: "/workspace", + mcpName: "customTools", + }, + { + type: 
"local", + command: "node", + args: ["/opt/mcp/custom-tools/mcp-server.cjs"], + }, +); + +const session = await sdk.createSession({ + agent: "claude", + cwd: "/workspace", +}); + +await session.prompt([ + { type: "text", text: "Use the random_number tool with min=1 and max=10." }, +]); +``` + </Step> +</Steps> + +## Option B: Skills + +<Steps> + <Step title="Write script + skill file"> + +```ts src/random-number.ts +const min = Number(process.argv[2]); +const max = Number(process.argv[3]); + +if (Number.isNaN(min) || Number.isNaN(max)) { + console.error("Usage: random-number <min> <max>"); + process.exit(1); +} + +console.log(Math.floor(Math.random() * (max - min + 1)) + min); +``` + +````md SKILL.md +--- +name: random-number +description: Generate a random integer between min and max. +--- + +Run: + +```bash +node /opt/skills/random-number/random-number.cjs <min> <max> +``` +```` + +```bash +npx esbuild src/random-number.ts --bundle --format=cjs --platform=node --target=node18 --outfile=dist/random-number.cjs +``` + </Step> + + <Step title="Upload files"> + +```ts +import fs from "node:fs"; + +const script = await fs.promises.readFile("./dist/random-number.cjs"); +await sdk.writeFsFile({ path: "/opt/skills/random-number/random-number.cjs" }, script); + +const skill = await fs.promises.readFile("./SKILL.md"); +await sdk.writeFsFile({ path: "/opt/skills/random-number/SKILL.md" }, skill); +``` + </Step> + + <Step title="Use in a session"> + +```ts +const session = await sdk.createSession({ + agent: "claude", + cwd: "/workspace", +}); + +await session.prompt([ + { type: "text", text: "Use the random-number skill to pick a number from 1 to 100." }, +]); +``` + </Step> +</Steps> + +## Notes + +- The sandbox runtime must include Node.js (or your chosen runtime). +- For persistent skill-source wiring by directory, see [Skills](/skills-config). 
diff --git a/docs/daemon.mdx b/docs/daemon.mdx new file mode 100644 index 0000000..f2cc100 --- /dev/null +++ b/docs/daemon.mdx @@ -0,0 +1,69 @@ +--- +title: "Daemon" +description: "Background daemon lifecycle and management." +--- + +The sandbox-agent daemon is a background server process. Commands like `sandbox-agent opencode` and `gigacode` can ensure it is running. + +## How it works + +1. A daemon-aware command checks for a healthy daemon at host/port. +2. If missing, it starts one in the background and records PID/version files. +3. Subsequent checks can compare build/version and restart when required. + +## Auto-upgrade behavior + +- `sandbox-agent opencode` and `gigacode` use ensure-running behavior with upgrade checks. +- `sandbox-agent daemon start` uses direct start by default. +- `sandbox-agent daemon start --upgrade` uses ensure-running behavior (including version check/restart). + +## Managing the daemon + +### Start + +```bash +sandbox-agent daemon start [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | +| `--upgrade` | false | Use ensure-running + upgrade behavior | + +```bash +sandbox-agent daemon start +sandbox-agent daemon start --upgrade +``` + +### Stop + +```bash +sandbox-agent daemon stop [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | + +### Status + +```bash +sandbox-agent daemon status [OPTIONS] +``` + +| Option | Default | Description | +|--------|---------|-------------| +| `-H, --host <HOST>` | `127.0.0.1` | Host | +| `-p, --port <PORT>` | `2468` | Port | + +## Files + +Daemon state is stored under the sandbox-agent data directory (for example `~/.local/share/sandbox-agent/daemon/`): + +| File | Purpose | +|------|---------| +| `daemon-{host}-{port}.pid` | PID of running daemon | +| 
`daemon-{host}-{port}.version` | Build/version marker | +| `daemon-{host}-{port}.log` | Daemon stdout/stderr log | diff --git a/docs/deploy/boxlite.mdx b/docs/deploy/boxlite.mdx new file mode 100644 index 0000000..8c02bb4 --- /dev/null +++ b/docs/deploy/boxlite.mdx @@ -0,0 +1,67 @@ +--- +title: "BoxLite" +description: "Run Sandbox Agent inside a BoxLite micro-VM." +--- + +BoxLite is a local-first micro-VM sandbox — no cloud account needed. +See [BoxLite docs](https://docs.boxlite.ai) for platform requirements (KVM on Linux, Apple Silicon on macOS). + +## Prerequisites + +- `@boxlite-ai/boxlite` installed (requires KVM or Apple Hypervisor) +- Docker (to build the base image) +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` + +## Base image + +Build a Docker image with Sandbox Agent pre-installed, then export it as an OCI layout +that BoxLite can load directly (BoxLite has its own image store separate from Docker): + +```dockerfile +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/* +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh +RUN sandbox-agent install-agent claude +RUN sandbox-agent install-agent codex +``` + +```bash +docker build -t sandbox-agent-boxlite . 
+mkdir -p oci-image +docker save sandbox-agent-boxlite | tar -xf - -C oci-image +``` + +## TypeScript example + +```typescript +import { SimpleBox } from "@boxlite-ai/boxlite"; +import { SandboxAgent } from "sandbox-agent"; + +const env: Record<string, string> = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const box = new SimpleBox({ + rootfsPath: "./oci-image", + env, + ports: [{ hostPort: 3000, guestPort: 3000 }], + diskSizeGb: 4, +}); + +await box.exec("sh", "-c", + "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &" +); + +const baseUrl = "http://localhost:3000"; +const sdk = await SandboxAgent.connect({ baseUrl }); + +const session = await sdk.createSession({ agent: "claude" }); +const off = session.onEvent((event) => { + console.log(event.sender, event.payload); +}); + +await session.prompt([{ type: "text", text: "Summarize this repository" }]); +off(); + +await box.stop(); +``` diff --git a/docs/deploy/cloudflare.mdx b/docs/deploy/cloudflare.mdx new file mode 100644 index 0000000..c0370e4 --- /dev/null +++ b/docs/deploy/cloudflare.mdx @@ -0,0 +1,188 @@ +--- +title: "Cloudflare" +description: "Deploy Sandbox Agent inside a Cloudflare Sandbox." +--- + +## Prerequisites + +- Cloudflare account with Workers paid plan +- Docker for local `wrangler dev` +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` + +<Note> +Cloudflare Sandbox SDK is beta. See [Sandbox SDK docs](https://developers.cloudflare.com/sandbox/). 
+</Note> + +## Quick start + +```bash +npm create cloudflare@latest -- my-sandbox --template=cloudflare/sandbox-sdk/examples/minimal +cd my-sandbox +``` + +## Dockerfile + +```dockerfile +FROM cloudflare/sandbox:0.7.0 + +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh +RUN sandbox-agent install-agent claude && sandbox-agent install-agent codex + +EXPOSE 8000 +``` + +## TypeScript example (with provider) + +For standalone scripts, use the `cloudflare` provider: + +```bash +npm install sandbox-agent@0.4.x @cloudflare/sandbox +``` + +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { cloudflare } from "sandbox-agent/cloudflare"; + +const sdk = await SandboxAgent.start({ + sandbox: cloudflare(), +}); + +try { + const session = await sdk.createSession({ agent: "codex" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `cloudflare` provider uses `containerFetch` under the hood, automatically stripping `AbortSignal` to avoid dropped streaming updates. 
+ +## TypeScript example (Durable Objects) + +For Workers with Durable Objects, use `SandboxAgent.connect(...)` with a custom `fetch` backed by `sandbox.containerFetch(...)`: + +```typescript +import { getSandbox, type Sandbox } from "@cloudflare/sandbox"; +import { Hono } from "hono"; +import { SandboxAgent } from "sandbox-agent"; + +export { Sandbox } from "@cloudflare/sandbox"; + +type Bindings = { + Sandbox: DurableObjectNamespace<Sandbox>; + ASSETS: Fetcher; + ANTHROPIC_API_KEY?: string; + OPENAI_API_KEY?: string; +}; + +const app = new Hono<{ Bindings: Bindings }>(); +const PORT = 8000; + +async function isServerRunning(sandbox: Sandbox): Promise<boolean> { + try { + const result = await sandbox.exec(`curl -sf http://localhost:${PORT}/v1/health`); + return result.success; + } catch { + return false; + } +} + +async function getReadySandbox(name: string, env: Bindings): Promise<Sandbox> { + const sandbox = getSandbox(env.Sandbox, name); + if (!(await isServerRunning(sandbox))) { + const envVars: Record<string, string> = {}; + if (env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = env.ANTHROPIC_API_KEY; + if (env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = env.OPENAI_API_KEY; + await sandbox.setEnvVars(envVars); + await sandbox.startProcess(`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`); + } + return sandbox; +} + +app.post("/sandbox/:name/prompt", async (c) => { + const sandbox = await getReadySandbox(c.req.param("name"), c.env); + + const sdk = await SandboxAgent.connect({ + fetch: (input, init) => + sandbox.containerFetch( + input as Request | string | URL, + { + ...(init ?? {}), + // Avoid passing AbortSignal through containerFetch; it can drop streamed session updates. 
+ signal: undefined, + }, + PORT, + ), + }); + + const session = await sdk.createSession({ agent: "codex" }); + const response = await session.prompt([{ type: "text", text: "Summarize this repository" }]); + await sdk.destroySession(session.id); + await sdk.dispose(); + + return c.json(response); +}); + +app.all("/sandbox/:name/proxy/*", async (c) => { + const sandbox = await getReadySandbox(c.req.param("name"), c.env); + const wildcard = c.req.param("*"); + const path = wildcard ? `/${wildcard}` : "/"; + const query = new URL(c.req.raw.url).search; + + return sandbox.containerFetch(new Request(`http://localhost${path}${query}`, c.req.raw), PORT); +}); + +app.all("*", (c) => c.env.ASSETS.fetch(c.req.raw)); + +export default app; +``` + +This keeps all Sandbox Agent calls inside the Cloudflare sandbox routing path and does not require a `baseUrl`. + +## Troubleshooting streaming updates + +If you only receive: +- the outbound prompt request +- the final `{ stopReason: "end_turn" }` response + +then the streamed update channel dropped. In Cloudflare sandbox paths, this is typically caused by forwarding `AbortSignal` from SDK fetch init into `containerFetch(...)`. + +Fix: + +```ts +const sdk = await SandboxAgent.connect({ + fetch: (input, init) => + sandbox.containerFetch( + input as Request | string | URL, + { + ...(init ?? {}), + // Avoid passing AbortSignal through containerFetch; it can drop streamed session updates. + signal: undefined, + }, + PORT, + ), +}); +``` + +This keeps prompt completion behavior the same, but restores streamed text/tool updates. 
+ +## Local development + +```bash +npm run dev +``` + +Test health: + +```bash +curl http://localhost:8787/sandbox/demo/proxy/v1/health +``` + +## Production deployment + +```bash +wrangler deploy +``` diff --git a/docs/deploy/computesdk.mdx b/docs/deploy/computesdk.mdx new file mode 100644 index 0000000..601d9c7 --- /dev/null +++ b/docs/deploy/computesdk.mdx @@ -0,0 +1,81 @@ +--- +title: "ComputeSDK" +description: "Deploy Sandbox Agent using ComputeSDK's provider-agnostic sandbox API." +--- + +[ComputeSDK](https://computesdk.com) provides a unified interface for managing sandboxes across multiple providers. Write once, deploy anywhere by changing environment variables. + +## Prerequisites + +- `COMPUTESDK_API_KEY` from [console.computesdk.com](https://console.computesdk.com) +- Provider API key (one of: `E2B_API_KEY`, `DAYTONA_API_KEY`, `VERCEL_TOKEN`, `MODAL_TOKEN_ID` + `MODAL_TOKEN_SECRET`, `BLAXEL_API_KEY`, `CSB_API_KEY`) +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` + +## TypeScript example + +```bash +npm install sandbox-agent@0.4.x computesdk +``` + +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; + +const envs: Record<string, string> = {}; +if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const sdk = await SandboxAgent.start({ + sandbox: computesdk({ + create: { + envs, + image: process.env.COMPUTESDK_IMAGE, + templateId: process.env.COMPUTESDK_TEMPLATE_ID, + }, + }), +}); + +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `computesdk` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. 
ComputeSDK routes to your configured provider behind the scenes. +The `create` option now forwards the full ComputeSDK sandbox-create payload, including provider-specific fields such as `image` and `templateId` when the selected provider supports them. + +Before calling `SandboxAgent.start()`, configure ComputeSDK with your provider: + +```typescript +import { compute } from "computesdk"; + +compute.setConfig({ + provider: "e2b", // or auto-detect via detectProvider() + computesdkApiKey: process.env.COMPUTESDK_API_KEY, +}); +``` + +## Supported providers + +ComputeSDK auto-detects your provider from environment variables: + +| Provider | Environment Variables | +|----------|----------------------| +| E2B | `E2B_API_KEY` | +| Daytona | `DAYTONA_API_KEY` | +| Vercel | `VERCEL_TOKEN` or `VERCEL_OIDC_TOKEN` | +| Modal | `MODAL_TOKEN_ID` + `MODAL_TOKEN_SECRET` | +| Blaxel | `BLAXEL_API_KEY` | +| CodeSandbox | `CSB_API_KEY` | + +## Notes + +- **Provider resolution**: Set `COMPUTESDK_PROVIDER` to force a specific provider, or let ComputeSDK auto-detect from API keys. +- `sandbox.runCommand(..., { background: true })` keeps the server running while your app continues. +- `sandbox.getUrl({ port })` returns a public URL for the sandbox port. +- Always destroy the sandbox when done to avoid leaking resources. diff --git a/docs/deploy/daytona.mdx b/docs/deploy/daytona.mdx index fad980d..e546bef 100644 --- a/docs/deploy/daytona.mdx +++ b/docs/deploy/daytona.mdx @@ -1,63 +1,52 @@ --- title: "Daytona" -description: "Run the daemon in a Daytona workspace." +description: "Run Sandbox Agent in a Daytona workspace." --- <Warning> -Daytona Tier 3+ is required to access api.anthropic.com and api.openai.com. Tier 1/2 sandboxes have restricted network access that will cause agent failures. See [Daytona network limits](https://www.daytona.io/docs/en/network-limits/) for details. +Daytona Tier 3+ is required for access to common model provider endpoints. 
+See [Daytona network limits](https://www.daytona.io/docs/en/network-limits/). </Warning> ## Prerequisites -- `DAYTONA_API_KEY` environment variable -- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` for the coding agents +- `DAYTONA_API_KEY` +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` -## TypeScript Example +## TypeScript example + +```bash +npm install sandbox-agent@0.4.x @daytonaio/sdk +``` ```typescript -import { Daytona } from "@daytonaio/sdk"; import { SandboxAgent } from "sandbox-agent"; +import { daytona } from "sandbox-agent/daytona"; -const daytona = new Daytona(); - -// Pass API keys to the sandbox const envVars: Record<string, string> = {}; if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -const sandbox = await daytona.create({ envVars }); - -// Install sandbox-agent -await sandbox.process.executeCommand( - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh" -); - -// Start the server in the background -await sandbox.process.executeCommand( - "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &" -); - -// Wait for server to be ready -await new Promise((r) => setTimeout(r, 2000)); - -// Get the public URL -const baseUrl = (await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60)).url; - -// Connect and use the SDK -const client = await SandboxAgent.connect({ baseUrl }); - -await client.createSession("my-session", { - agent: "claude", - permissionMode: "default", +const sdk = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars }, + }), }); -// Cleanup when done -await sandbox.delete(); +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} ``` -## Using 
Snapshots for Faster Startup +The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.2-full` image by default and starts the server automatically. -For production, use snapshots with pre-installed binaries: +## Using snapshots for faster startup ```typescript import { Daytona, Image } from "@daytonaio/sdk"; @@ -65,7 +54,6 @@ import { Daytona, Image } from "@daytonaio/sdk"; const daytona = new Daytona(); const SNAPSHOT = "sandbox-agent-ready"; -// Create snapshot once (takes 2-3 minutes) const hasSnapshot = await daytona.snapshot.get(SNAPSHOT).then(() => true, () => false); if (!hasSnapshot) { @@ -73,18 +61,10 @@ if (!hasSnapshot) { name: SNAPSHOT, image: Image.base("ubuntu:22.04").runCommands( "apt-get update && apt-get install -y curl ca-certificates", - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh", + "curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh", "sandbox-agent install-agent claude", "sandbox-agent install-agent codex", ), }); } - -// Now sandboxes start instantly -const sandbox = await daytona.create({ - snapshot: SNAPSHOT, - envVars, -}); ``` - -See [Daytona Snapshots](https://daytona.io/docs/snapshots) for details. diff --git a/docs/deploy/docker.mdx b/docs/deploy/docker.mdx index 4961b0c..c5a3432 100644 --- a/docs/deploy/docker.mdx +++ b/docs/deploy/docker.mdx @@ -1,33 +1,46 @@ --- title: "Docker" -description: "Build and run the daemon in a Docker container." +description: "Build and run Sandbox Agent in a Docker container." --- <Warning> -Docker is not recommended for production. Standard Docker containers don't provide sufficient isolation for running untrusted code. Use a dedicated sandbox provider like E2B or Daytona for production workloads. +Docker is not recommended for production isolation of untrusted workloads. Use dedicated sandbox providers (E2B, Daytona, etc.) for stronger isolation. 
</Warning> -## Quick Start +## Quick start -Run sandbox-agent in a container with agents pre-installed: +Run the published full image with all supported agents pre-installed: ```bash docker run --rm -p 3000:3000 \ -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ -e OPENAI_API_KEY="$OPENAI_API_KEY" \ - alpine:latest sh -c "\ - apk add --no-cache curl ca-certificates libstdc++ libgcc bash && \ - curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh && \ - sandbox-agent install-agent claude && \ - sandbox-agent install-agent codex && \ + rivetdev/sandbox-agent:0.4.2-full \ + server --no-token --host 0.0.0.0 --port 3000 +``` + +The `0.4.2-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. + +If you also want the desktop API inside the container, install desktop dependencies before starting the server: + +```bash +docker run --rm -p 3000:3000 \ + -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ + -e OPENAI_API_KEY="$OPENAI_API_KEY" \ + node:22-bookworm-slim sh -c "\ + apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y curl ca-certificates bash libstdc++6 && \ + rm -rf /var/lib/apt/lists/* && \ + curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh && \ + sandbox-agent install desktop --yes && \ sandbox-agent server --no-token --host 0.0.0.0 --port 3000" ``` -<Note> -Alpine is required because Claude Code is built for musl libc. Debian/Ubuntu images use glibc and won't work. -</Note> +In a Dockerfile: -Access the API at `http://localhost:3000`. 
+```dockerfile +RUN sandbox-agent install desktop --yes +``` ## TypeScript with dockerode @@ -39,17 +52,12 @@ const docker = new Docker(); const PORT = 3000; const container = await docker.createContainer({ - Image: "alpine:latest", - Cmd: ["sh", "-c", [ - "apk add --no-cache curl ca-certificates libstdc++ libgcc bash", - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh", - "sandbox-agent install-agent claude", - "sandbox-agent install-agent codex", - `sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`, - ].join(" && ")], + Image: "rivetdev/sandbox-agent:0.4.2-full", + Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", `${PORT}`], Env: [ `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}`, `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}`, + `CODEX_API_KEY=${process.env.CODEX_API_KEY}`, ].filter(Boolean), ExposedPorts: { [`${PORT}/tcp`]: {} }, HostConfig: { @@ -60,24 +68,41 @@ const container = await docker.createContainer({ await container.start(); -// Wait for server and connect const baseUrl = `http://127.0.0.1:${PORT}`; -const client = await SandboxAgent.connect({ baseUrl }); +const sdk = await SandboxAgent.connect({ baseUrl }); -// Use the client... -await client.createSession("my-session", { - agent: "claude", - permissionMode: "default", -}); +const session = await sdk.createSession({ agent: "codex" }); +await session.prompt([{ type: "text", text: "Summarize this repository." 
}]); ``` -## Building from Source +## Building a custom image with everything preinstalled -To build a static binary for use in minimal containers: +If you need to extend your own base image, install Sandbox Agent and preinstall every supported agent in one step: + +```dockerfile +FROM node:22-bookworm-slim + +RUN apt-get update && apt-get install -y --no-install-recommends \ + bash ca-certificates curl git && \ + rm -rf /var/lib/apt/lists/* + +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh && \ + sandbox-agent install-agent --all + +RUN useradd -m -s /bin/bash sandbox +USER sandbox +WORKDIR /home/sandbox + +EXPOSE 2468 +ENTRYPOINT ["sandbox-agent"] +CMD ["server", "--host", "0.0.0.0", "--port", "2468"] +``` + +## Building from source ```bash docker build -f docker/release/linux-x86_64.Dockerfile -t sandbox-agent-build . docker run --rm -v "$PWD/artifacts:/artifacts" sandbox-agent-build ``` -The binary will be at `./artifacts/sandbox-agent-x86_64-unknown-linux-musl`. +Binary output: `./artifacts/sandbox-agent-x86_64-unknown-linux-musl`. diff --git a/docs/deploy/e2b.mdx b/docs/deploy/e2b.mdx index cfe1d66..225cfdc 100644 --- a/docs/deploy/e2b.mdx +++ b/docs/deploy/e2b.mdx @@ -1,79 +1,52 @@ --- title: "E2B" -description: "Deploy the daemon inside an E2B sandbox." +description: "Deploy Sandbox Agent inside an E2B sandbox." 
--- ## Prerequisites -- `E2B_API_KEY` environment variable -- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` for the coding agents +- `E2B_API_KEY` +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` -## TypeScript Example +## TypeScript example + +```bash +npm install sandbox-agent@0.4.x @e2b/code-interpreter +``` ```typescript -import { Sandbox } from "@e2b/code-interpreter"; import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; -// Pass API keys to the sandbox const envs: Record<string, string> = {}; if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +const template = process.env.E2B_TEMPLATE; -const sandbox = await Sandbox.create({ allowInternetAccess: true, envs }); - -// Install sandbox-agent -await sandbox.commands.run( - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh" -); - -// Install agents before starting the server -await sandbox.commands.run("sandbox-agent install-agent claude"); -await sandbox.commands.run("sandbox-agent install-agent codex"); - -// Start the server in the background -await sandbox.commands.run( - "sandbox-agent server --no-token --host 0.0.0.0 --port 3000", - { background: true } -); - -// Connect to the server -const baseUrl = `https://${sandbox.getHost(3000)}`; -const client = await SandboxAgent.connect({ baseUrl }); - -// Wait for server to be ready -for (let i = 0; i < 30; i++) { - try { - await client.getHealth(); - break; - } catch { - await new Promise((r) => setTimeout(r, 1000)); - } -} - -// Create a session and start coding -await client.createSession("my-session", { - agent: "claude", - permissionMode: "default", +const sdk = await SandboxAgent.start({ + sandbox: e2b({ + template, + create: { envs }, + }), }); -await client.postMessage("my-session", { - message: "Summarize this repository", -}); - -for await (const event of 
client.streamEvents("my-session")) { - console.log(event.type, event.data); +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); } - -// Cleanup -await sandbox.kill(); ``` -## Faster Cold Starts +The `e2b` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. Sandboxes pause by default instead of being deleted, and reconnecting with the same `sandboxId` resumes them automatically. -For faster startup, create a custom E2B template with sandbox-agent and agents pre-installed: +Pass `template` when you want to start from a custom E2B template alias or template ID. E2B base-image selection happens when you build the template, then `sandbox-agent/e2b` uses that template at sandbox creation time. -1. Create a template with the install script baked in -2. Pre-install agents: `sandbox-agent install-agent claude codex` -3. Use the template ID when creating sandboxes +## Faster cold starts -See [E2B Custom Templates](https://e2b.dev/docs/sandbox-template) for details. +For faster startup, create a custom E2B template with Sandbox Agent and target agents pre-installed. +Build System 2.0 also lets you choose the template's base image in code. +See [E2B Custom Templates](https://e2b.dev/docs/sandbox-template) and [E2B Base Images](https://e2b.dev/docs/template/base-image). diff --git a/docs/deploy/index.mdx b/docs/deploy/index.mdx deleted file mode 100644 index 90c9226..0000000 --- a/docs/deploy/index.mdx +++ /dev/null @@ -1,24 +0,0 @@ ---- -title: "Deploy" -sidebarTitle: "Overview" -description: "Choose where to run the sandbox-agent server." -icon: "server" ---- - -<CardGroup cols={2}> - <Card title="Local" icon="laptop" href="/deploy/local"> - Run locally for development. The SDK can auto-spawn the server. 
- </Card> - <Card title="E2B" icon="cube" href="/deploy/e2b"> - Deploy inside an E2B sandbox with network access. - </Card> - <Card title="Vercel" icon="triangle" href="/deploy/vercel"> - Deploy inside a Vercel Sandbox with port forwarding. - </Card> - <Card title="Daytona" icon="cloud" href="/deploy/daytona"> - Run in a Daytona workspace with port forwarding. - </Card> - <Card title="Docker" icon="docker" href="/deploy/docker"> - Build and run in a container (development only). - </Card> -</CardGroup> diff --git a/docs/deploy/local.mdx b/docs/deploy/local.mdx index e70a14f..6ecdb09 100644 --- a/docs/deploy/local.mdx +++ b/docs/deploy/local.mdx @@ -1,43 +1,70 @@ --- title: "Local" -description: "Run the daemon locally for development." +description: "Run Sandbox Agent locally for development." --- -For local development, you can run the daemon directly on your machine. +For local development, run Sandbox Agent directly on your machine. ## With the CLI ```bash # Install -curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh +curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh # Run sandbox-agent server --no-token --host 127.0.0.1 --port 2468 ``` -Or with npm: +Or with npm/Bun: -```bash -npx sandbox-agent server --no-token --host 127.0.0.1 --port 2468 -``` +<Tabs> + <Tab title="npx"> + ```bash + npx @sandbox-agent/cli@0.4.x server --no-token --host 127.0.0.1 --port 2468 + ``` + </Tab> + <Tab title="bunx"> + ```bash + bunx @sandbox-agent/cli@0.4.x server --no-token --host 127.0.0.1 --port 2468 + ``` + </Tab> +</Tabs> ## With the TypeScript SDK -The SDK can automatically spawn and manage the server as a subprocess: +The SDK can spawn and manage the server as a subprocess using the `local` provider: ```typescript import { SandboxAgent } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; -// Spawns sandbox-agent server as a subprocess -const client = await SandboxAgent.start(); - -await 
client.createSession("my-session", { - agent: "claude", - permissionMode: "default", +const sdk = await SandboxAgent.start({ + sandbox: local(), }); -// When done -await client.dispose(); +const session = await sdk.createSession({ + agent: "claude", +}); + +await session.prompt([ + { type: "text", text: "Summarize this repository." }, +]); + +await sdk.destroySandbox(); ``` -This installs the binary (if needed) and starts the server on a random available port. No manual setup required. +This starts the server on an available local port and connects automatically. + +Pass options to customize the local provider: + +```typescript +const sdk = await SandboxAgent.start({ + sandbox: local({ + port: 3000, + log: "inherit", + env: { + ANTHROPIC_API_KEY: process.env.MY_ANTHROPIC_KEY, + }, + }), +}); +``` diff --git a/docs/deploy/modal.mdx b/docs/deploy/modal.mdx new file mode 100644 index 0000000..5850fd8 --- /dev/null +++ b/docs/deploy/modal.mdx @@ -0,0 +1,55 @@ +--- +title: "Modal" +description: "Deploy Sandbox Agent inside a Modal sandbox." +--- + +## Prerequisites + +- `MODAL_TOKEN_ID` and `MODAL_TOKEN_SECRET` from [modal.com/settings](https://modal.com/settings) +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` + +## TypeScript example + +```bash +npm install sandbox-agent@0.4.x modal +``` + +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; + +const secrets: Record<string, string> = {}; +if (process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +const baseImage = process.env.MODAL_BASE_IMAGE ?? 
"node:22-slim"; + +const sdk = await SandboxAgent.start({ + sandbox: modal({ + image: baseImage, + create: { secrets }, + }), +}); + +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `modal` provider handles app creation, image building, sandbox provisioning, agent installation, server startup, and tunnel networking automatically. +Set `image` to change the base Docker image before Sandbox Agent and its agent binaries are layered on top. You can also pass a prebuilt Modal `Image` object. + +## Faster cold starts + +Modal caches image layers, so the Dockerfile commands that install `curl` and `sandbox-agent` only run on the first build. Subsequent sandbox creates reuse the cached image. + +## Notes + +- Modal sandboxes use [gVisor](https://gvisor.dev/) for strong isolation. +- Ports are exposed via encrypted tunnels (`encryptedPorts`). The provider uses `sb.tunnels()` to get the public HTTPS URL. +- Environment variables (API keys) are passed as Modal [Secrets](https://modal.com/docs/guide/secrets) for security. diff --git a/docs/deploy/vercel.mdx b/docs/deploy/vercel.mdx index be2bec0..ec931d8 100644 --- a/docs/deploy/vercel.mdx +++ b/docs/deploy/vercel.mdx @@ -1,91 +1,50 @@ --- title: "Vercel" -description: "Deploy the daemon inside a Vercel Sandbox." +description: "Deploy Sandbox Agent inside a Vercel Sandbox." 
--- ## Prerequisites -- `VERCEL_OIDC_TOKEN` or `VERCEL_ACCESS_TOKEN` environment variable -- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` for the coding agents +- `VERCEL_OIDC_TOKEN` or `VERCEL_ACCESS_TOKEN` +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` -## TypeScript Example +## TypeScript example + +```bash +npm install sandbox-agent@0.4.x @vercel/sandbox +``` ```typescript -import { Sandbox } from "@vercel/sandbox"; import { SandboxAgent } from "sandbox-agent"; +import { vercel } from "sandbox-agent/vercel"; -// Pass API keys to the sandbox -const envs: Record<string, string> = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +const env: Record<string, string> = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -// Create sandbox with port 3000 exposed -const sandbox = await Sandbox.create({ - runtime: "node24", - ports: [3000], +const sdk = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: "node24", + env, + }, + }), }); -// Helper to run commands -const run = async (cmd: string, args: string[] = []) => { - const result = await sandbox.runCommand({ cmd, args, env: envs }); - if (result.exitCode !== 0) { - throw new Error(`Command failed: ${cmd} ${args.join(" ")}`); - } - return result; -}; - -// Install sandbox-agent -await run("sh", ["-c", "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh"]); - -// Install agents before starting the server -await run("sandbox-agent", ["install-agent", "claude"]); -await run("sandbox-agent", ["install-agent", "codex"]); - -// Start the server in the background -await sandbox.runCommand({ - cmd: "sandbox-agent", - args: ["server", "--no-token", "--host", "0.0.0.0", "--port", "3000"], - env: envs, - detached: true, -}); - -// Connect to the 
server -const baseUrl = sandbox.domain(3000); -const client = await SandboxAgent.connect({ baseUrl }); - -// Wait for server to be ready -for (let i = 0; i < 30; i++) { - try { - await client.getHealth(); - break; - } catch { - await new Promise((r) => setTimeout(r, 1000)); - } +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); } - -// Create a session and start coding -await client.createSession("my-session", { - agent: "claude", - permissionMode: "default", -}); - -await client.postMessage("my-session", { - message: "Summarize this repository", -}); - -for await (const event of client.streamEvents("my-session")) { - console.log(event.type, event.data); -} - -// Cleanup -await sandbox.stop(); ``` +The `vercel` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. + ## Authentication -Vercel Sandboxes support two authentication methods: - -- **OIDC Token**: Set `VERCEL_OIDC_TOKEN` (recommended for CI/CD) -- **Access Token**: Set `VERCEL_ACCESS_TOKEN` (for local development, run `vercel env pull`) - -See [Vercel Sandbox docs](https://vercel.com/docs/functions/sandbox) for details. +Vercel Sandboxes support OIDC token auth (recommended) and access-token auth. +See [Vercel Sandbox docs](https://vercel.com/docs/functions/sandbox). 
diff --git a/docs/docs.json b/docs/docs.json index b54e31e..dbcc407 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -1,84 +1,130 @@ { - "$schema": "https://mintlify.com/docs.json", - "theme": "willow", - "name": "Sandbox Agent SDK", - "appearance": { - "default": "dark", - "strict": true - }, - "colors": { - "primary": "#ff4f00", - "light": "#ff4f00", - "dark": "#ff4f00" - }, - "favicon": "/favicon.svg", - "logo": { - "light": "/logo/light.svg", - "dark": "/logo/dark.svg" - }, - "integrations": { - "posthog": { - "apiKey": "phc_6kfTNEAVw7rn1LA51cO3D69FefbKupSWFaM7OUgEpEo", - "apiHost": "https://ph.rivet.gg", - "sessionRecording": true - } - }, - "navbar": { - "links": [ - { - "label": "Discord", - "icon": "discord", - "href": "https://discord.gg/auCecybynK" - }, - { - "label": "GitHub", - "icon": "github", - "href": "https://github.com/rivet-dev/sandbox-agent" - } - ] - }, - "navigation": { - "pages": [ - { - "group": "Getting started", - "pages": ["quickstart", "building-chat-ui", "manage-sessions"] - }, - { - "group": "Deploy", - "pages": [ - "deploy/index", - "deploy/local", - "deploy/e2b", - "deploy/vercel", - "deploy/daytona", - "deploy/docker" - ] - }, - { - "group": "SDKs", - "pages": ["sdks/typescript", "sdks/python"] - }, - { - "group": "Reference", - "pages": [ - "cli", - "inspector", - "session-transcript-schema", - "cors", - { - "group": "AI", - "pages": ["ai/skill", "ai/llms-txt"] - }, - { - "group": "Advanced", - "pages": ["telemetry"] - } - ] - }, - { - "group": "HTTP API Reference", - "openapi": "openapi.json" - } - ] - } + "$schema": "https://mintlify.com/docs.json", + "theme": "mint", + "name": "Sandbox Agent SDK", + "appearance": { + "default": "dark", + "strict": true + }, + "colors": { + "primary": "#ff4f00", + "light": "#ff6a2a", + "dark": "#cc3f00" + }, + "favicon": "/favicon.svg", + "logo": { + "light": "/logo/light.svg", + "dark": "/logo/dark.svg" + }, + "integrations": { + "posthog": { + "apiKey": 
"phc_6kfTNEAVw7rn1LA51cO3D69FefbKupSWFaM7OUgEpEo", + "apiHost": "https://ph.rivet.gg", + "sessionRecording": true + } + }, + "navbar": { + "links": [ + { + "label": "Discord", + "icon": "discord", + "href": "https://discord.gg/auCecybynK" + }, + { + "label": "GitHub", + "type": "github", + "href": "https://github.com/rivet-dev/sandbox-agent" + } + ] + }, + "navigation": { + "tabs": [ + { + "tab": "Documentation", + "pages": [ + { + "group": "Getting started", + "pages": [ + "quickstart", + "sdk-overview", + "llm-credentials", + "react-components", + { + "group": "Deploy", + "icon": "server", + "pages": [ + "deploy/local", + "deploy/e2b", + "deploy/daytona", + "deploy/vercel", + "deploy/cloudflare", + "deploy/docker", + "deploy/modal", + "deploy/boxlite", + "deploy/computesdk" + ] + } + ] + }, + { + "group": "Agent", + "pages": [ + "agent-sessions", + { + "group": "Agents", + "icon": "robot", + "pages": ["agents/claude", "agents/codex", "agents/opencode", "agents/cursor", "agents/amp", "agents/pi"] + }, + "attachments", + "skills-config", + "mcp-config", + "custom-tools" + ] + }, + { + "group": "System", + "pages": ["file-system", "processes", "computer-use", "common-software"] + }, + { + "group": "Reference", + "pages": [ + "troubleshooting", + "architecture", + "cli", + "inspector", + "opencode-compatibility", + { + "group": "More", + "pages": [ + "daemon", + "cors", + "session-restoration", + "telemetry", + { + "group": "AI", + "pages": ["ai/skill", "ai/llms-txt"] + } + ] + } + ] + } + ] + }, + { + "tab": "HTTP API", + "pages": [ + { + "group": "HTTP Reference", + "openapi": "openapi.json" + } + ] + } + ] + }, + "__removed": [ + { + "group": "Orchestration", + "pages": ["orchestration-architecture", "session-persistence", "observability", "multiplayer", "security"] + } + ] } diff --git a/docs/favicon.svg b/docs/favicon.svg index d36cc7e..287c425 100644 --- a/docs/favicon.svg +++ b/docs/favicon.svg @@ -1,10 +1 @@ -<svg width="32" height="32" viewBox="0 0 32 32" 
fill="none" xmlns="http://www.w3.org/2000/svg"> - <rect width="32" height="32" rx="6" fill="url(#bg_gradient)"/> - <text x="16" y="22" text-anchor="middle" font-family="system-ui, -apple-system, sans-serif" font-size="16" font-weight="700" fill="white">SA</text> - <defs> - <linearGradient id="bg_gradient" x1="0" y1="0" x2="32" y2="32" gradientUnits="userSpaceOnUse"> - <stop stop-color="#16A34A"/> - <stop offset="1" stop-color="#15803D"/> - </linearGradient> - </defs> -</svg> +<svg width="128" height="128" fill="none" xmlns="http://www.w3.org/2000/svg"><rect x="1" y="1" width="126" height="126" rx="44" fill="#0F0F0F"/><rect x="18.25" y="18.25" width="91.5" height="91.5" rx="25.75" stroke="#F0F0F0" stroke-width="8.5"/><path fill-rule="evenodd" clip-rule="evenodd" d="M57.694 43.098c0-.622-.505-1.126-1.127-1.126h-8.444a5.114 5.114 0 0 0-5.112 5.111v33.824a5.114 5.114 0 0 0 5.112 5.112h8.444c.622 0 1.127-.505 1.127-1.127V43.098Zm24.424 27.869c-1.238-2.222-4.047-4.026-6.27-4.026H62.923c-.684 0-.93.555-.549 1.239l7.703 13.822c1.239 2.223 4.048 4.026 6.27 4.026h12.927c.683 0 .93-.555.548-1.239l-7.703-13.822Zm.538-18.718c0-5.672-4.605-10.277-10.277-10.277H63.31a1.21 1.21 0 0 0-1.209 1.209v18.137c0 .667.542 1.209 1.21 1.209h9.068c5.672 0 10.277-4.605 10.277-10.278Z" fill="#F0F0F0"/></svg> \ No newline at end of file diff --git a/docs/file-system.mdx b/docs/file-system.mdx new file mode 100644 index 0000000..a91fd6b --- /dev/null +++ b/docs/file-system.mdx @@ -0,0 +1,154 @@ +--- +title: "File System" +description: "Read, write, and manage files inside the sandbox." +sidebarTitle: "File System" +icon: "folder" +--- + +The filesystem API lets you list, read, write, move, and delete files inside the sandbox, plus upload tar archives in batch. + +## Path resolution + +- Absolute paths are used as-is. +- Relative paths resolve from the server process working directory. +- Requests that attempt to escape allowed roots are rejected by the server. 
+ +## List entries + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +const entries = await sdk.listFsEntries({ + path: "./workspace", +}); + +console.log(entries); +``` + +```bash cURL +curl -X GET "http://127.0.0.1:2468/v1/fs/entries?path=./workspace" +``` +</CodeGroup> + +## Read and write files + +`PUT /v1/fs/file` writes raw bytes. `GET /v1/fs/file` returns raw bytes. + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +await sdk.writeFsFile({ path: "./notes.txt" }, "hello"); + +const bytes = await sdk.readFsFile({ path: "./notes.txt" }); +const text = new TextDecoder().decode(bytes); + +console.log(text); +``` + +```bash cURL +curl -X PUT "http://127.0.0.1:2468/v1/fs/file?path=./notes.txt" \ + --data-binary "hello" + +curl -X GET "http://127.0.0.1:2468/v1/fs/file?path=./notes.txt" \ + --output ./notes.txt +``` +</CodeGroup> + +## Create directories + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +await sdk.mkdirFs({ path: "./data" }); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/fs/mkdir?path=./data" +``` +</CodeGroup> + +## Move, delete, and stat + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +await sdk.moveFs({ + from: "./notes.txt", + to: "./notes-old.txt", + overwrite: true, +}); + +const stat = await sdk.statFs({ path: "./notes-old.txt" }); +await sdk.deleteFsEntry({ path: "./notes-old.txt" }); + +console.log(stat); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/fs/move" \ + -H "Content-Type: application/json" \ + -d 
'{"from":"./notes.txt","to":"./notes-old.txt","overwrite":true}' + +curl -X GET "http://127.0.0.1:2468/v1/fs/stat?path=./notes-old.txt" + +curl -X DELETE "http://127.0.0.1:2468/v1/fs/entry?path=./notes-old.txt" +``` +</CodeGroup> + +## Batch upload (tar) + +Batch upload accepts `application/x-tar` and extracts into the destination directory. + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; +import fs from "node:fs"; +import path from "node:path"; +import tar from "tar"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +const archivePath = path.join(process.cwd(), "skills.tar"); +await tar.c({ + cwd: "./skills", + file: archivePath, +}, ["."]); + +const tarBuffer = await fs.promises.readFile(archivePath); +const result = await sdk.uploadFsBatch(tarBuffer, { + path: "./skills", +}); + +console.log(result); +``` + +```bash cURL +tar -cf skills.tar -C ./skills . + +curl -X POST "http://127.0.0.1:2468/v1/fs/upload-batch?path=./skills" \ + -H "Content-Type: application/x-tar" \ + --data-binary @skills.tar +``` +</CodeGroup> diff --git a/docs/images/inspector.png b/docs/images/inspector.png index 1c16ed2..02e588e 100644 Binary files a/docs/images/inspector.png and b/docs/images/inspector.png differ diff --git a/docs/inspector.mdx b/docs/inspector.mdx index 8e80c22..1412c21 100644 --- a/docs/inspector.mdx +++ b/docs/inspector.mdx @@ -1,10 +1,9 @@ --- title: "Inspector" description: "Debug and inspect agent sessions with the Inspector UI." -icon: "magnifying-glass" --- -The Inspector is a web-based GUI for debugging and inspecting Sandbox Agent sessions. Use it to view events, send messages, and troubleshoot agent behavior in real-time. +The Inspector is a web UI for inspecting Sandbox Agent sessions. Use it to view events, inspect payloads, and troubleshoot behavior. 
<Frame> <img src="/images/inspector.png" alt="Sandbox Agent Inspector" /> @@ -12,34 +11,56 @@ The Inspector is a web-based GUI for debugging and inspecting Sandbox Agent sess ## Open the Inspector -The Inspector UI is served at `/ui/` on your sandbox-agent server. For example, if your server is running at `http://localhost:2468`, open `http://localhost:2468/ui/` in your browser. +The Inspector is served at `/ui/` on your Sandbox Agent server. +For example, if your server runs at `http://localhost:2468`, open `http://localhost:2468/ui/`. -You can also generate a pre-filled Inspector URL with authentication from the TypeScript SDK: +You can also generate a pre-filled Inspector URL from the SDK: ```typescript import { buildInspectorUrl } from "sandbox-agent"; const url = buildInspectorUrl({ baseUrl: "http://127.0.0.1:2468", - token: process.env.SANDBOX_TOKEN, }); + console.log(url); -// http://127.0.0.1:2468/ui/?token=... +// http://127.0.0.1:2468/ui/ ``` ## Features -- **Session list**: View all active sessions and their status -- **Event stream**: See events in real-time as they arrive (SSE or polling) -- **Event details**: Expand any event to see its full JSON payload -- **Send messages**: Post messages to a session directly from the UI -- **Agent selection**: Switch between agents and modes -- **Request log**: View raw HTTP requests and responses for debugging +- Session list +- Event stream view +- Event JSON inspector +- Prompt testing +- Request/response debugging +- Interactive permission prompts (approve, always-allow, or reject tool-use requests) +- Desktop panel for status, remediation, start/stop, and screenshot refresh +- Process management (create, stop, kill, delete, view logs) +- Interactive PTY terminal for tty processes +- One-shot command execution -## When to Use +## When to use -The Inspector is useful for: +- Development: validate session behavior quickly +- Debugging: inspect raw event payloads +- Integration work: compare UI behavior with 
SDK/API calls -- **Development**: Test your integration without writing client code -- **Debugging**: Inspect event payloads and timing issues -- **Learning**: Understand how agents respond to different prompts +## Process terminal + +The Inspector includes an embedded Ghostty-based terminal for interactive tty +processes. The UI uses the SDK's high-level `connectProcessTerminal(...)` +wrapper via the shared `@sandbox-agent/react` `ProcessTerminal` component. + +## Desktop panel + +The `Desktop` panel shows the current desktop runtime state, missing dependencies, +the suggested install command, last error details, process/log paths, and the +latest captured screenshot. + +Use it to: + +- Check whether desktop dependencies are installed +- Start or stop the managed desktop runtime +- Refresh desktop status +- Capture a fresh screenshot on demand diff --git a/docs/llm-credentials.mdx b/docs/llm-credentials.mdx new file mode 100644 index 0000000..e771740 --- /dev/null +++ b/docs/llm-credentials.mdx @@ -0,0 +1,250 @@ +--- +title: "LLM Credentials" +description: "Strategies for providing LLM provider credentials to agents." +icon: "key" +--- + +Sandbox Agent needs LLM provider credentials (Anthropic, OpenAI, etc.) to run agent sessions. + +## Configuration + +Pass credentials via `spawn.env` when starting a sandbox. Each call to `SandboxAgent.start()` can use different credentials: + +```typescript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: "sk-ant-...", + OPENAI_API_KEY: "sk-...", + }, + }, +}); +``` + +Each agent requires credentials from a specific provider. 
Sandbox Agent checks environment variables (including those passed via `spawn.env`) and host config files: + +| Agent | Provider | Environment variables | Config files | +|-------|----------|----------------------|--------------| +| Claude Code | Anthropic | `ANTHROPIC_API_KEY`, `CLAUDE_API_KEY` | `~/.claude.json`, `~/.claude/.credentials.json` | +| Amp | Anthropic | `ANTHROPIC_API_KEY`, `CLAUDE_API_KEY` | `~/.amp/config.json` | +| Codex | OpenAI | `OPENAI_API_KEY`, `CODEX_API_KEY` | `~/.codex/auth.json` | +| OpenCode | Anthropic or OpenAI | `ANTHROPIC_API_KEY`, `OPENAI_API_KEY` | `~/.local/share/opencode/auth.json` | +| Mock | None | - | - | + +## Credential strategies + +LLM credentials are passed into the sandbox as environment variables. The agent and everything inside the sandbox has access to the token, so it's important to choose the right strategy for how you provision and scope these credentials. + +| Strategy | Who pays | Cost attribution | Best for | +|----------|----------|-----------------|----------| +| **Per-tenant gateway** (recommended) | Your organization, billed back per tenant | Per-tenant keys with budgets | Multi-tenant SaaS, usage-based billing | +| **Bring your own key** | Each user (usage-based) | Per-user by default | Dev environments, internal tools | +| **Shared API key** | Your organization | None (single bill) | Single-tenant apps, internal platforms | +| **Personal subscription** | Each user (existing subscription) | Per-user by default | Local dev, internal tools where users have Claude or Codex subscriptions | + +### Per-tenant gateway (recommended) + +Route LLM traffic through a gateway that mints per-tenant API keys, each with its own spend tracking and budget limits. + +```mermaid +graph LR + B[Your Backend] -->|tenant key| S[Sandbox] + S -->|LLM requests| G[Gateway] + G -->|scoped key| P[LLM Provider] +``` + +Your backend issues a scoped key per tenant, then passes it to the sandbox. 
This is the typical pattern when using sandbox providers (E2B, Daytona, Docker). + +```typescript expandable +import { SandboxAgent } from "sandbox-agent"; + +async function createTenantSandbox(tenantId: string) { + // Issue a scoped key for this tenant via OpenRouter + const res = await fetch("https://openrouter.ai/api/v1/keys", { + method: "POST", + headers: { + Authorization: `Bearer ${process.env.OPENROUTER_PROVISIONING_KEY}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + name: `tenant-${tenantId}`, + limit: 50, + limitResetType: "monthly", + }), + }); + const { key } = await res.json(); + + // Start a sandbox with the tenant's scoped key + const sdk = await SandboxAgent.start({ + spawn: { + env: { + OPENAI_API_KEY: key, // OpenRouter uses OpenAI-compatible endpoints + }, + }, + }); + + const session = await sdk.createSession({ + agent: "claude", + sessionInit: { cwd: "/workspace" }, + }); + + return { sdk, session }; +} +``` + +#### Security + +Recommended for multi-tenant applications. Each tenant gets a scoped key with its own budget, so exfiltration only exposes that tenant's allowance. + +#### Use cases + +- **Multi-tenant SaaS**: per-tenant spend tracking and budget limits +- **Production apps**: exposed to end users who need isolated credentials +- **Usage-based billing**: each tenant pays for their own consumption + +#### Choosing a gateway + +<AccordionGroup> + +<Accordion title="OpenRouter provisioned keys" icon="cloud"> + +Managed service, zero infrastructure. [OpenRouter](https://openrouter.ai/docs/features/provisioning-api-keys) provides per-tenant API keys with spend tracking and budget limits via their Provisioning API. Pass the tenant key to Sandbox Agent as `OPENAI_API_KEY` (OpenRouter uses OpenAI-compatible endpoints). 
+ +```bash +# Create a key for a tenant with a $50/month budget +curl https://openrouter.ai/api/v1/keys \ + -H "Authorization: Bearer $PROVISIONING_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "tenant-acme", + "limit": 50, + "limitResetType": "monthly" + }' +``` + +Easiest to set up but not open-source. See [OpenRouter pricing](https://openrouter.ai/docs/framework/pricing) for details. + +</Accordion> + +<Accordion title="LiteLLM proxy" icon="server"> + +Self-hosted, open-source (MIT). [LiteLLM](https://github.com/BerriAI/litellm) is an OpenAI-compatible proxy with hierarchical budgets (org, team, user, key), virtual keys, and spend tracking. Requires Python + PostgreSQL. + +```bash +# Create a team (tenant) with a $500 budget +curl http://litellm:4000/team/new \ + -H "Authorization: Bearer $LITELLM_MASTER_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "team_alias": "tenant-acme", + "max_budget": 500 + }' + +# Generate a key for that team +curl http://litellm:4000/key/generate \ + -H "Authorization: Bearer $LITELLM_MASTER_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "team_id": "team-abc123", + "max_budget": 100 + }' +``` + +Full control with no vendor lock-in. Organization-level features require an enterprise license. + +</Accordion> + +<Accordion title="Portkey gateway" icon="code-branch"> + +Self-hosted, open-source (Apache 2.0). [Portkey](https://github.com/Portkey-AI/gateway) is a lightweight OpenAI-compatible gateway supporting 200+ providers. Single binary, no database required. Create virtual keys with per-tenant budget limits and pass them to Sandbox Agent. + +Lightest operational footprint of the self-hosted options. Observability and analytics require the managed platform or your own tooling. 
+ +</Accordion> + +</AccordionGroup> + +To bill tenants for LLM usage, use [Stripe token billing](https://docs.stripe.com/billing/token-billing) (integrates natively with OpenRouter) or query your gateway's spend API and feed usage into your billing system. + +### Bring your own key + +Each user provides their own API key. Users are billed directly by the LLM provider with no additional infrastructure needed. + +Pass the user's key via `spawn.env`: + +```typescript +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: userProvidedKey, + }, + }, +}); +``` + +#### Security + +API keys are typically long-lived. The key is visible to the agent and anything running inside the sandbox, so exfiltration is possible. This is usually acceptable for developer-facing tools where the user owns the key. + +#### Use cases + +- **Developer tools**: each user manages their own API key +- **Internal platforms**: users already have LLM provider accounts +- **Per-user billing**: no extra infrastructure needed + +### Shared credentials + +A single organization-wide API key is used for all sessions. All token usage appears on one bill with no per-user or per-tenant cost attribution. + +```typescript +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: process.env.ORG_ANTHROPIC_KEY!, + OPENAI_API_KEY: process.env.ORG_OPENAI_KEY!, + }, + }, +}); +``` + +If you need to track or limit spend per tenant, use a per-tenant gateway instead. + +#### Security + +Not recommended for anything other than internal tooling. A single exfiltrated key exposes your organization's entire LLM budget. If you need org-paid credentials for external users, use a per-tenant gateway with scoped keys instead. 
+ +#### Use cases + +- **Single-tenant apps**: small number of users, one bill +- **Prototyping**: cost attribution not needed yet +- **Simplicity over security**: acceptable when exfiltration risk is low + +### Personal subscription + +If the user is signed into Claude Code or Codex on the host machine, Sandbox Agent automatically picks up their OAuth tokens. No configuration is needed. + +#### Remote sandboxes + +Extract credentials locally and pass them to a remote sandbox via `spawn.env`: + +```bash +$ sandbox-agent credentials extract-env +ANTHROPIC_API_KEY=sk-ant-... +CLAUDE_API_KEY=sk-ant-... +OPENAI_API_KEY=sk-... +CODEX_API_KEY=sk-... +``` + +Use `-e` to prefix with `export` for shell sourcing. + +#### Security + +Personal subscriptions use OAuth tokens with a limited lifespan. These are the same credentials used when running an agent normally on the host. If a token is exfiltrated from the sandbox, the exposure window is short. + +#### Use cases + +- **Local development**: users are already signed into Claude Code or Codex +- **Internal tools**: every user has their own subscription +- **Prototyping**: no key management needed \ No newline at end of file diff --git a/docs/manage-sessions.mdx b/docs/manage-sessions.mdx index a43c371..c39dd04 100644 --- a/docs/manage-sessions.mdx +++ b/docs/manage-sessions.mdx @@ -6,8 +6,6 @@ icon: "database" Sandbox Agent stores sessions in memory only. When the server restarts or the sandbox is destroyed, all session data is lost. It's your responsibility to persist events to your own database. -See the [Building a Chat UI](/building-chat-ui) guide for understanding session lifecycle events like `session.started` and `session.ended`. - ## Recommended approach 1. Store events to your database as they arrive @@ -18,17 +16,18 @@ This prevents duplicate writes and lets you recover from disconnects. ## Receiving Events -Two ways to receive events: SSE streaming (recommended) or polling. 
+Two ways to receive events: streaming (recommended) or polling. ### Streaming -Use SSE for real-time events with automatic reconnection support. +Use streaming for real-time events with automatic reconnection support. ```typescript -import { SandboxAgent } from "sandbox-agent"; +import { SandboxAgentClient } from "sandbox-agent"; -const client = await SandboxAgent.connect({ +const client = new SandboxAgentClient({ baseUrl: "http://127.0.0.1:2468", + agent: "mock", }); // Get offset from last stored event (0 returns all events) @@ -43,7 +42,7 @@ for await (const event of client.streamEvents("my-session", { offset })) { ### Polling -If you can't use SSE streaming, poll the events endpoint: +If you can't use streaming, poll the events endpoint: ```typescript const lastEvent = await db.getLastEvent("my-session"); @@ -130,7 +129,10 @@ const codingSession = actor({ }, createVars: async (c): Promise<CodingSessionVars> => { - const client = await SandboxAgent.connect({ baseUrl: c.state.baseUrl }); + const client = new SandboxAgentClient({ + baseUrl: c.state.baseUrl, + agent: "mock", +}); await client.createSession(c.state.sessionId, { agent: "claude" }); return { client }; }, @@ -240,7 +242,7 @@ const events = await redis.lrange(`session:${sessionId}`, offset, -1); ## Handling disconnects -The SSE stream may disconnect due to network issues. Handle reconnection gracefully: +The event stream may disconnect due to network issues. Handle reconnection gracefully: ```typescript async function streamWithRetry(sessionId: string) { diff --git a/docs/mcp-config.mdx b/docs/mcp-config.mdx new file mode 100644 index 0000000..cc1c976 --- /dev/null +++ b/docs/mcp-config.mdx @@ -0,0 +1,82 @@ +--- +title: "MCP" +description: "Configure MCP servers for agent sessions." +sidebarTitle: "MCP" +icon: "plug" +--- + +MCP (Model Context Protocol) servers extend agents with tools and external context. 
+ +## Configuring MCP servers + +The HTTP config endpoints let you store/retrieve MCP server configs by directory + name. + +```ts +// Create MCP config +await sdk.setMcpConfig( + { + directory: "/workspace", + mcpName: "github", + }, + { + type: "remote", + url: "https://example.com/mcp", + }, +); + +// Create a session using the configured MCP servers +const session = await sdk.createSession({ + agent: "claude", + cwd: "/workspace", +}); + +await session.prompt([ + { type: "text", text: "Use available MCP servers to help with this task." }, +]); + +// List MCP configs +const config = await sdk.getMcpConfig({ + directory: "/workspace", + mcpName: "github", +}); + +console.log(config.type); + +// Delete MCP config +await sdk.deleteMcpConfig({ + directory: "/workspace", + mcpName: "github", +}); +``` + +## Config fields + +### Local server + +| Field | Description | +|---|---| +| `type` | `local` | +| `command` | executable path | +| `args` | array of CLI args | +| `env` | environment variable map | +| `cwd` | working directory | +| `enabled` | enable/disable server | +| `timeoutMs` | timeout override | + +### Remote server + +| Field | Description | +|---|---| +| `type` | `remote` | +| `url` | MCP server URL | +| `transport` | `http` or `sse` | +| `headers` | static headers map | +| `bearerTokenEnvVar` | env var name to inject in auth header | +| `envHeaders` | header name to env var map | +| `oauth` | optional OAuth config object | +| `enabled` | enable/disable server | +| `timeoutMs` | timeout override | + +## Custom MCP servers + +To bundle and upload your own MCP server into the sandbox, see [Custom Tools](/custom-tools). diff --git a/docs/multiplayer.mdx b/docs/multiplayer.mdx new file mode 100644 index 0000000..215bb1c --- /dev/null +++ b/docs/multiplayer.mdx @@ -0,0 +1,147 @@ +--- +title: "Multiplayer" +description: "Use Rivet Actors to coordinate shared sessions." 
+icon: "users" +--- + +For multiplayer orchestration, use [Rivet Actors](https://rivet.dev/docs/actors). + +Recommended model: + +- One actor per collaborative workspace/thread. +- The actor owns Sandbox Agent session lifecycle and persistence. +- Clients connect to the actor and receive realtime broadcasts. + +Use [actor keys](https://rivet.dev/docs/actors/keys) to map each workspace to one actor, [events](https://rivet.dev/docs/actors/events) for realtime updates, and [lifecycle hooks](https://rivet.dev/docs/actors/lifecycle) for cleanup. + +## Example + +<CodeGroup> + +```ts Actor (server) +import { actor, setup } from "rivetkit"; +import { SandboxAgent, type SessionPersistDriver, type SessionRecord, type SessionEvent, type ListPageRequest, type ListPage, type ListEventsRequest } from "sandbox-agent"; + +interface RivetPersistData { sessions: Record<string, SessionRecord>; events: Record<string, SessionEvent[]>; } +type RivetPersistState = { _sandboxAgentPersist: RivetPersistData }; + +class RivetSessionPersistDriver implements SessionPersistDriver { + private readonly stateKey: string; + private readonly ctx: { state: Record<string, unknown> }; + constructor(ctx: { state: Record<string, unknown> }, options: { stateKey?: string } = {}) { + this.ctx = ctx; + this.stateKey = options.stateKey ?? "_sandboxAgentPersist"; + if (!this.ctx.state[this.stateKey]) { + this.ctx.state[this.stateKey] = { sessions: {}, events: {} }; + } + } + private get data(): RivetPersistData { return this.ctx.state[this.stateKey] as RivetPersistData; } + async getSession(id: string) { const s = this.data.sessions[id]; return s ? { ...s } : undefined; } + async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> { + const sorted = Object.values(this.data.sessions).sort((a, b) => a.createdAt - b.createdAt || a.id.localeCompare(b.id)); + const offset = Number(request.cursor ?? 0); + const limit = request.limit ?? 
100; + const slice = sorted.slice(offset, offset + limit); + return { items: slice, nextCursor: offset + slice.length < sorted.length ? String(offset + slice.length) : undefined }; + } + async updateSession(session: SessionRecord) { this.data.sessions[session.id] = { ...session }; if (!this.data.events[session.id]) this.data.events[session.id] = []; } + async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> { + const all = [...(this.data.events[request.sessionId] ?? [])].sort((a, b) => a.eventIndex - b.eventIndex || a.id.localeCompare(b.id)); + const offset = Number(request.cursor ?? 0); + const limit = request.limit ?? 100; + const slice = all.slice(offset, offset + limit); + return { items: slice, nextCursor: offset + slice.length < all.length ? String(offset + slice.length) : undefined }; + } + async insertEvent(sessionId: string, event: SessionEvent) { const events = this.data.events[sessionId] ?? []; events.push({ ...event, payload: JSON.parse(JSON.stringify(event.payload)) }); this.data.events[sessionId] = events; } +} + +type WorkspaceState = RivetPersistState & { + sandboxId: string; + baseUrl: string; +}; + +export const workspace = actor({ + createState: async () => { + return { + sandboxId: "sbx_123", + baseUrl: "http://127.0.0.1:2468", + } satisfies Partial<WorkspaceState>; + }, + + createVars: async (c) => { + const persist = new RivetSessionPersistDriver(c); + const sdk = await SandboxAgent.connect({ + baseUrl: c.state.baseUrl, + persist, + }); + + const session = await sdk.resumeOrCreateSession({ id: "default", agent: "codex" }); + + const unsubscribe = session.onEvent((event) => { + c.broadcast("session.event", event); + }); + + return { sdk, session, unsubscribe }; + }, + + actions: { + getSessionInfo: (c) => ({ + workspaceId: c.key[0], + sandboxId: c.state.sandboxId, + }), + + prompt: async (c, input: { userId: string; text: string }) => { + c.broadcast("chat.user", { + userId: input.userId, + text: input.text, + createdAt: 
Date.now(), + }); + + await c.vars.session.prompt([{ type: "text", text: input.text }]); + }, + }, + + onSleep: async (c) => { + c.vars.unsubscribe?.(); + await c.vars.sdk.dispose(); + }, +}); + +export const registry = setup({ + use: { workspace }, +}); +``` + +```ts Client (browser) +import { createClient } from "rivetkit/client"; +import type { registry } from "./actors"; + +const client = createClient<typeof registry>({ + endpoint: process.env.NEXT_PUBLIC_RIVET_ENDPOINT!, +}); + +const workspaceId = "workspace-42"; +const room = client.workspace.getOrCreate([workspaceId]); +const conn = room.connect(); + +conn.on("chat.user", (event) => { + console.log("user message", event); +}); + +conn.on("session.event", (event) => { + console.log("sandbox event", event); +}); + +await conn.prompt({ + userId: "user-123", + text: "Propose a refactor plan for auth middleware.", +}); +``` + +</CodeGroup> + +## Notes + +- Keep sandbox calls actor-only. Browser clients should not call Sandbox Agent directly. +- Copy the Rivet persist driver from the example above into your project so session history persists in actor state. +- For client connection patterns, see [Rivet JavaScript client](https://rivet.dev/docs/clients/javascript). diff --git a/docs/observability.mdx b/docs/observability.mdx new file mode 100644 index 0000000..5b5751b --- /dev/null +++ b/docs/observability.mdx @@ -0,0 +1,64 @@ +--- +title: "Observability" +description: "Track session activity with OpenTelemetry." +icon: "chart-line" +--- + +Use OpenTelemetry to instrument session traffic, then ship telemetry to your collector/backend. 
+ +## Common collectors and backends + +- [OpenTelemetry Collector](https://opentelemetry.io/docs/collector/) +- [Jaeger](https://www.jaegertracing.io/) +- [Grafana Tempo](https://grafana.com/oss/tempo/) +- [Honeycomb](https://www.honeycomb.io/) +- [Datadog APM](https://docs.datadoghq.com/tracing/) + +## Example: trace a prompt round-trip + +Wrap `session.prompt()` in a span to measure the full round-trip, then log individual events as span events. + +Assumes your OTEL provider/exporter is already configured. + +```ts +import { trace } from "@opentelemetry/api"; +import { SandboxAgent } from "sandbox-agent"; + +const tracer = trace.getTracer("my-app/sandbox-agent"); + +const sdk = await SandboxAgent.connect({ + baseUrl: process.env.SANDBOX_URL!, +}); + +const session = await sdk.createSession({ agent: "mock" }); + +// Log each event as an OTEL span event on the active span +const unsubscribe = session.onEvent((event) => { + const activeSpan = trace.getActiveSpan(); + if (!activeSpan) return; + + activeSpan.addEvent("session.event", { + "sandbox.sender": event.sender, + "sandbox.event_index": event.eventIndex, + }); +}); + +// The span covers the full prompt round-trip +await tracer.startActiveSpan("sandbox_agent.prompt", async (span) => { + span.setAttribute("sandbox.session_id", session.id); + + try { + const result = await session.prompt([ + { type: "text", text: "Summarize this repository." }, + ]); + span.setAttribute("sandbox.stop_reason", result.stopReason); + } catch (error) { + span.recordException(error as Error); + throw error; + } finally { + span.end(); + } +}); + +unsubscribe(); +``` diff --git a/docs/openapi.json b/docs/openapi.json index a003943..3624707 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -2,7 +2,7 @@ "openapi": "3.0.3", "info": { "title": "sandbox-agent", - "description": "Universal API for automatic coding agents in sandboxes. 
Supprots Claude Code, Codex, OpenCode, and Amp.", + "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "contact": { "name": "Rivet Gaming, LLC", "email": "developer@rivet.gg" @@ -10,7 +10,7 @@ "license": { "name": "Apache-2.0" }, - "version": "0.1.4-rc.7" + "version": "0.4.2" }, "servers": [ { @@ -18,15 +18,235 @@ } ], "paths": { - "/v1/agents": { + "/v1/acp": { "get": { - "tags": [ - "agents" - ], - "operationId": "list_agents", + "tags": ["v1"], + "operationId": "get_v1_acp_servers", "responses": { "200": { - "description": "", + "description": "Active ACP server instances", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AcpServerListResponse" + } + } + } + } + } + } + }, + "/v1/acp/{server_id}": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_acp", + "parameters": [ + { + "name": "server_id", + "in": "path", + "description": "Client-defined ACP server id", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "SSE stream of ACP envelopes" + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "404": { + "description": "Unknown ACP server", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "406": { + "description": "Client does not accept SSE responses", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "post": { + "tags": ["v1"], + "operationId": "post_v1_acp", + "parameters": [ + { + "name": "server_id", + "in": "path", + "description": "Client-defined ACP server id", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "agent", + "in": "query", + "description": "Agent id required for first POST", + 
"required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AcpEnvelope" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "JSON-RPC response envelope", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AcpEnvelope" + } + } + } + }, + "202": { + "description": "JSON-RPC notification accepted" + }, + "400": { + "description": "Invalid ACP envelope", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "404": { + "description": "Unknown ACP server", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "406": { + "description": "Client does not accept JSON responses", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "ACP server bound to different agent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "415": { + "description": "Unsupported media type", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "504": { + "description": "ACP agent process response timeout", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "delete": { + "tags": ["v1"], + "operationId": "delete_v1_acp", + "parameters": [ + { + "name": "server_id", + "in": "path", + "description": "Client-defined ACP server id", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "ACP server closed" + } + } + } + }, + "/v1/agents": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_agents", + "parameters": [ + { + 
"name": "config", + "in": "query", + "description": "When true, include version/path/configOptions (slower)", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + }, + { + "name": "no_cache", + "in": "query", + "description": "When true, bypass version cache", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "List of v1 agents", "content": { "application/json": { "schema": { @@ -34,16 +254,93 @@ } } } + }, + "401": { + "description": "Authentication required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/agents/{agent}": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_agent", + "parameters": [ + { + "name": "agent", + "in": "path", + "description": "Agent id", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "config", + "in": "query", + "description": "When true, include version/path/configOptions (slower)", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + }, + { + "name": "no_cache", + "in": "query", + "description": "When true, bypass version cache", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Agent info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AgentInfo" + } + } + } + }, + "400": { + "description": "Unknown agent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "401": { + "description": "Authentication required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } } } } }, "/v1/agents/{agent}/install": { "post": { - "tags": [ - "agents" - ], - "operationId": "install_agent", + "tags": ["v1"], + "operationId": 
"post_v1_agent_install", "parameters": [ { "name": "agent", @@ -66,21 +363,18 @@ "required": true }, "responses": { - "204": { - "description": "Agent installed" - }, - "400": { - "description": "", + "200": { + "description": "Agent install result", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ProblemDetails" + "$ref": "#/components/schemas/AgentInstallResponse" } } } }, - "404": { - "description": "", + "400": { + "description": "Invalid request", "content": { "application/json": { "schema": { @@ -90,7 +384,7 @@ } }, "500": { - "description": "", + "description": "Install failed", "content": { "application/json": { "schema": { @@ -102,17 +396,24 @@ } } }, - "/v1/agents/{agent}/modes": { + "/v1/config/mcp": { "get": { - "tags": [ - "agents" - ], - "operationId": "get_agent_modes", + "tags": ["v1"], + "operationId": "get_v1_config_mcp", "parameters": [ { - "name": "agent", - "in": "path", - "description": "Agent id", + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "mcpName", + "in": "query", + "description": "MCP entry name", "required": true, "schema": { "type": "string" @@ -121,17 +422,380 @@ ], "responses": { "200": { - "description": "", + "description": "MCP entry", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AgentModesResponse" + "$ref": "#/components/schemas/McpServerConfig" + } + } + } + }, + "404": { + "description": "Entry not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "put": { + "tags": ["v1"], + "operationId": "put_v1_config_mcp", + "parameters": [ + { + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "mcpName", + "in": "query", + "description": "MCP entry name", + "required": true, + 
"schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/McpServerConfig" + } + } + }, + "required": true + }, + "responses": { + "204": { + "description": "Stored" + } + } + }, + "delete": { + "tags": ["v1"], + "operationId": "delete_v1_config_mcp", + "parameters": [ + { + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "mcpName", + "in": "query", + "description": "MCP entry name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Deleted" + } + } + } + }, + "/v1/config/skills": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_config_skills", + "parameters": [ + { + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "skillName", + "in": "query", + "description": "Skill entry name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Skills entry", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SkillsConfig" + } + } + } + }, + "404": { + "description": "Entry not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "put": { + "tags": ["v1"], + "operationId": "put_v1_config_skills", + "parameters": [ + { + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "skillName", + "in": "query", + "description": "Skill entry name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SkillsConfig" + } + } + }, + "required": true 
+ }, + "responses": { + "204": { + "description": "Stored" + } + } + }, + "delete": { + "tags": ["v1"], + "operationId": "delete_v1_config_skills", + "parameters": [ + { + "name": "directory", + "in": "query", + "description": "Target directory", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "skillName", + "in": "query", + "description": "Skill entry name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Deleted" + } + } + } + }, + "/v1/desktop/clipboard": { + "get": { + "tags": ["v1"], + "summary": "Read the desktop clipboard.", + "description": "Returns the current text content of the X11 clipboard.", + "operationId": "get_v1_desktop_clipboard", + "parameters": [ + { + "name": "selection", + "in": "query", + "required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Clipboard contents", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopClipboardResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "500": { + "description": "Clipboard read failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "post": { + "tags": ["v1"], + "summary": "Write to the desktop clipboard.", + "description": "Sets the text content of the X11 clipboard.", + "operationId": "post_v1_desktop_clipboard", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopClipboardWriteRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Clipboard updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopActionResponse" + } + } + } + 
}, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "500": { + "description": "Clipboard write failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/display/info": { + "get": { + "tags": ["v1"], + "summary": "Get desktop display information.", + "description": "Performs a health-gated display query against the managed desktop and\nreturns the current display identifier and resolution.", + "operationId": "get_v1_desktop_display_info", + "responses": { + "200": { + "description": "Desktop display information", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopDisplayInfoResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "503": { + "description": "Desktop runtime health or display query failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/keyboard/down": { + "post": { + "tags": ["v1"], + "summary": "Press and hold a desktop keyboard key.", + "description": "Performs a health-gated `xdotool keydown` operation against the managed\ndesktop.", + "operationId": "post_v1_desktop_keyboard_down", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopKeyboardDownRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop keyboard action result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopActionResponse" } } } }, "400": { - "description": "", + "description": "Invalid keyboard down request", "content": { 
"application/json": { "schema": { @@ -139,19 +803,1905 @@ } } } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/keyboard/press": { + "post": { + "tags": ["v1"], + "summary": "Press a desktop keyboard shortcut.", + "description": "Performs a health-gated `xdotool key` operation against the managed\ndesktop.", + "operationId": "post_v1_desktop_keyboard_press", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopKeyboardPressRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop keyboard action result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopActionResponse" + } + } + } + }, + "400": { + "description": "Invalid keyboard press request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/keyboard/type": { + "post": { + "tags": ["v1"], + "summary": "Type desktop keyboard text.", + "description": "Performs a health-gated `xdotool type` operation against the managed\ndesktop.", + "operationId": "post_v1_desktop_keyboard_type", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/DesktopKeyboardTypeRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop keyboard action result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopActionResponse" + } + } + } + }, + "400": { + "description": "Invalid keyboard type request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/keyboard/up": { + "post": { + "tags": ["v1"], + "summary": "Release a desktop keyboard key.", + "description": "Performs a health-gated `xdotool keyup` operation against the managed\ndesktop.", + "operationId": "post_v1_desktop_keyboard_up", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopKeyboardUpRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop keyboard action result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopActionResponse" + } + } + } + }, + "400": { + "description": "Invalid keyboard up request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/launch": { + "post": { + "tags": ["v1"], + "summary": "Launch a desktop application.", + "description": "Launches an application by name on the managed desktop, optionally waiting\nfor its window to appear.", + "operationId": "post_v1_desktop_launch", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopLaunchRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Application launched", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopLaunchResponse" + } + } + } + }, + "404": { + "description": "Application not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/click": { + "post": { + "tags": ["v1"], + "summary": "Click on the desktop.", + "description": "Performs a health-gated pointer move and click against the managed desktop\nand returns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_click", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseClickRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop mouse position after click", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse click request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/down": { + "post": { + "tags": ["v1"], + "summary": "Press and hold a desktop mouse button.", + "description": "Performs a health-gated optional pointer move followed by `xdotool mousedown`\nand returns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_down", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseDownRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop mouse position after button press", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse down request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/drag": { + "post": { + "tags": ["v1"], + "summary": "Drag the desktop mouse.", + "description": "Performs a health-gated drag gesture against the managed desktop and\nreturns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_drag", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseDragRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + 
"description": "Desktop mouse position after drag", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse drag request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/move": { + "post": { + "tags": ["v1"], + "summary": "Move the desktop mouse.", + "description": "Performs a health-gated absolute pointer move on the managed desktop and\nreturns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_move", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseMoveRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop mouse position after move", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse move request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/position": { + "get": { + 
"tags": ["v1"], + "summary": "Get the current desktop mouse position.", + "description": "Performs a health-gated mouse position query against the managed desktop.", + "operationId": "get_v1_desktop_mouse_position", + "responses": { + "200": { + "description": "Desktop mouse position", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input check failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/scroll": { + "post": { + "tags": ["v1"], + "summary": "Scroll the desktop mouse wheel.", + "description": "Performs a health-gated scroll gesture at the requested coordinates and\nreturns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_scroll", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseScrollRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop mouse position after scroll", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse scroll request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/mouse/up": { + "post": { + "tags": ["v1"], + "summary": "Release a desktop mouse button.", + "description": "Performs a health-gated optional pointer move followed by `xdotool mouseup`\nand returns the resulting mouse position.", + "operationId": "post_v1_desktop_mouse_up", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMouseUpRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop mouse position after button release", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopMousePositionResponse" + } + } + } + }, + "400": { + "description": "Invalid mouse up request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or input failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/open": { + "post": { + "tags": ["v1"], + "summary": "Open a file or URL with the default handler.", + "description": "Opens a file path or URL using xdg-open on the managed desktop.", + "operationId": "post_v1_desktop_open", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopOpenRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Target opened", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopOpenResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/recording/start": { + "post": { + "tags": ["v1"], + "summary": "Start desktop recording.", + "description": "Starts an ffmpeg x11grab recording against the managed desktop and returns\nthe created recording metadata.", + "operationId": "post_v1_desktop_recording_start", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopRecordingStartRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop recording started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopRecordingInfo" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready or a recording is already active", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop recording failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/recording/stop": { + "post": { + "tags": ["v1"], + "summary": "Stop desktop recording.", + "description": "Stops the active desktop recording and returns the finalized recording\nmetadata.", + "operationId": "post_v1_desktop_recording_stop", + "responses": { + "200": { + "description": "Desktop recording stopped", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopRecordingInfo" + } + } + } + }, + "409": { + "description": "No active desktop recording", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop recording stop failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/recordings": { + "get": { + 
"tags": ["v1"], + "summary": "List desktop recordings.", + "description": "Returns the current desktop recording catalog.", + "operationId": "get_v1_desktop_recordings", + "responses": { + "200": { + "description": "Desktop recordings", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopRecordingListResponse" + } + } + } + }, + "502": { + "description": "Desktop recordings query failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/recordings/{id}": { + "get": { + "tags": ["v1"], + "summary": "Get desktop recording metadata.", + "description": "Returns metadata for a single desktop recording.", + "operationId": "get_v1_desktop_recording", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Desktop recording ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Desktop recording metadata", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopRecordingInfo" + } + } + } + }, + "404": { + "description": "Unknown desktop recording", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "delete": { + "tags": ["v1"], + "summary": "Delete a desktop recording.", + "description": "Removes a completed desktop recording and its file from disk.", + "operationId": "delete_v1_desktop_recording", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Desktop recording ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Desktop recording deleted" + }, + "404": { + "description": "Unknown desktop recording", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop recording 
is still active", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/recordings/{id}/download": { + "get": { + "tags": ["v1"], + "summary": "Download a desktop recording.", + "description": "Serves the recorded MP4 bytes for a completed desktop recording.", + "operationId": "get_v1_desktop_recording_download", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Desktop recording ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Desktop recording as MP4 bytes" + }, + "404": { + "description": "Unknown desktop recording", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/screenshot": { + "get": { + "tags": ["v1"], + "summary": "Capture a full desktop screenshot.", + "description": "Performs a health-gated full-frame screenshot of the managed desktop and\nreturns the requested image bytes.", + "operationId": "get_v1_desktop_screenshot", + "parameters": [ + { + "name": "format", + "in": "query", + "required": false, + "schema": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopScreenshotFormat" + } + ], + "nullable": true + } + }, + { + "name": "quality", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + } + }, + { + "name": "scale", + "in": "query", + "required": false, + "schema": { + "type": "number", + "format": "float", + "nullable": true + } + }, + { + "name": "showCursor", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Desktop screenshot as image bytes" + }, + "400": { + "description": "Invalid screenshot query", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or screenshot capture failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/screenshot/region": { + "get": { + "tags": ["v1"], + "summary": "Capture a desktop screenshot region.", + "description": "Performs a health-gated screenshot crop against the managed desktop and\nreturns the requested region image bytes.", + "operationId": "get_v1_desktop_screenshot_region", + "parameters": [ + { + "name": "x", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "y", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32" + } + }, + { + "name": "width", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "height", + "in": "query", + "required": true, + "schema": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + }, + { + "name": "format", + "in": "query", + "required": false, + "schema": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopScreenshotFormat" + } + ], + "nullable": true + } + }, + { + "name": "quality", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + } + }, + { + "name": "scale", + "in": "query", + "required": false, + "schema": { + "type": "number", + "format": "float", + "nullable": true + } + }, + { + "name": "showCursor", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Desktop screenshot region as image 
bytes" + }, + "400": { + "description": "Invalid screenshot region", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop runtime health or screenshot capture failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/start": { + "post": { + "tags": ["v1"], + "summary": "Start the private desktop runtime.", + "description": "Lazily launches the managed Xvfb/openbox stack, validates display health,\nand returns the resulting desktop status snapshot.", + "operationId": "post_v1_desktop_start", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStartRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Desktop runtime status after start", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStatusResponse" + } + } + } + }, + "400": { + "description": "Invalid desktop start request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is already transitioning", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Desktop API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "503": { + "description": "Desktop runtime could not be started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, 
+ "/v1/desktop/status": { + "get": { + "tags": ["v1"], + "summary": "Get desktop runtime status.", + "description": "Returns the current desktop runtime state, dependency status, active\ndisplay metadata, and supervised process information.", + "operationId": "get_v1_desktop_status", + "responses": { + "200": { + "description": "Desktop runtime status", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStatusResponse" + } + } + } + }, + "401": { + "description": "Authentication required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/stop": { + "post": { + "tags": ["v1"], + "summary": "Stop the private desktop runtime.", + "description": "Terminates the managed openbox/Xvfb/dbus processes owned by the desktop\nruntime and returns the resulting status snapshot.", + "operationId": "post_v1_desktop_stop", + "responses": { + "200": { + "description": "Desktop runtime status after stop", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStatusResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is already transitioning", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/stream/signaling": { + "get": { + "tags": ["v1"], + "summary": "Open a desktop WebRTC signaling session.", + "description": "Upgrades the connection to a WebSocket used for WebRTC signaling between\nthe browser client and the desktop streaming process. 
Also accepts mouse\nand keyboard input frames as a fallback transport.", + "operationId": "get_v1_desktop_stream_ws", + "parameters": [ + { + "name": "access_token", + "in": "query", + "description": "Bearer token alternative for WS auth", + "required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "responses": { + "101": { + "description": "WebSocket upgraded" + }, + "409": { + "description": "Desktop runtime or streaming session is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "502": { + "description": "Desktop stream failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/stream/start": { + "post": { + "tags": ["v1"], + "summary": "Start desktop streaming.", + "description": "Enables desktop websocket streaming for the managed desktop.", + "operationId": "post_v1_desktop_stream_start", + "responses": { + "200": { + "description": "Desktop streaming started", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStreamStatusResponse" + } + } + } + } + } + } + }, + "/v1/desktop/stream/status": { + "get": { + "tags": ["v1"], + "summary": "Get desktop stream status.", + "description": "Returns the current state of the desktop WebRTC streaming session.", + "operationId": "get_v1_desktop_stream_status", + "responses": { + "200": { + "description": "Desktop stream status", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStreamStatusResponse" + } + } + } + } + } + } + }, + "/v1/desktop/stream/stop": { + "post": { + "tags": ["v1"], + "summary": "Stop desktop streaming.", + "description": "Disables desktop websocket streaming for the managed desktop.", + "operationId": "post_v1_desktop_stream_stop", + "responses": { + "200": { + "description": "Desktop streaming stopped", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopStreamStatusResponse" + } + } + } + } + } + } + }, + "/v1/desktop/windows": { + "get": { + "tags": ["v1"], + "summary": "List visible desktop windows.", + "description": "Performs a health-gated visible-window enumeration against the managed\ndesktop and returns the current window metadata.", + "operationId": "get_v1_desktop_windows", + "responses": { + "200": { + "description": "Visible desktop windows", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowListResponse" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "503": { + "description": "Desktop runtime health or window query failed", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/windows/focused": { + "get": { + "tags": ["v1"], + "summary": "Get the currently focused desktop window.", + "description": "Returns information about the window that currently has input focus.", + "operationId": "get_v1_desktop_windows_focused", + "responses": { + "200": { + "description": "Focused window info", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowInfo" + } + } + } + }, + "404": { + "description": "No window is focused", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/windows/{id}/focus": { + "post": { + "tags": ["v1"], + "summary": "Focus a desktop window.", + "description": "Brings the specified window to the foreground and 
gives it input focus.", + "operationId": "post_v1_desktop_window_focus", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "X11 window ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Window info after focus", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowInfo" + } + } + } + }, + "404": { + "description": "Window not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/windows/{id}/move": { + "post": { + "tags": ["v1"], + "summary": "Move a desktop window.", + "description": "Moves the specified window to the given position.", + "operationId": "post_v1_desktop_window_move", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "X11 window ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowMoveRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Window info after move", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowInfo" + } + } + } + }, + "404": { + "description": "Window not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/desktop/windows/{id}/resize": { + "post": { + "tags": ["v1"], + "summary": "Resize a desktop window.", + "description": 
"Resizes the specified window to the given dimensions.", + "operationId": "post_v1_desktop_window_resize", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "X11 window ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowResizeRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Window info after resize", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DesktopWindowInfo" + } + } + } + }, + "404": { + "description": "Window not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Desktop runtime is not ready", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/fs/entries": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_fs_entries", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "Directory path", + "required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Directory entries", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/FsEntry" + } + } + } + } + } + } + } + }, + "/v1/fs/entry": { + "delete": { + "tags": ["v1"], + "operationId": "delete_v1_fs_entry", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "File or directory path", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "recursive", + "in": "query", + "description": "Delete directory recursively", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Delete result", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/FsActionResponse" + } + } + } + } + } + } + }, + "/v1/fs/file": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_fs_file", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "File path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "File content" + } + } + }, + "put": { + "tags": ["v1"], + "operationId": "put_v1_fs_file", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "File path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "description": "Raw file bytes", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Write result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsWriteResponse" + } + } + } + } + } + } + }, + "/v1/fs/mkdir": { + "post": { + "tags": ["v1"], + "operationId": "post_v1_fs_mkdir", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "Directory path", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Directory created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsActionResponse" + } + } + } + } + } + } + }, + "/v1/fs/move": { + "post": { + "tags": ["v1"], + "operationId": "post_v1_fs_move", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsMoveRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Move result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsMoveResponse" + } + } + } + } + } + } + }, + "/v1/fs/stat": { + "get": { + "tags": ["v1"], + "operationId": "get_v1_fs_stat", + "parameters": [ + { + "name": "path", + "in": 
"query", + "description": "Path to stat", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Path metadata", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsStat" + } + } + } + } + } + } + }, + "/v1/fs/upload-batch": { + "post": { + "tags": ["v1"], + "operationId": "post_v1_fs_upload_batch", + "parameters": [ + { + "name": "path", + "in": "query", + "description": "Destination path", + "required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "requestBody": { + "description": "tar archive body", + "content": { + "text/plain": { + "schema": { + "type": "string" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Upload/extract result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FsUploadBatchResponse" + } + } + } } } } }, "/v1/health": { "get": { - "tags": [ - "meta" - ], - "operationId": "get_health", + "tags": ["v1"], + "operationId": "get_v1_health", "responses": { "200": { - "description": "", + "description": "Service health response", "content": { "application/json": { "schema": { @@ -163,48 +2713,60 @@ } } }, - "/v1/sessions": { + "/v1/processes": { "get": { - "tags": [ - "sessions" + "tags": ["v1"], + "summary": "List all managed processes.", + "description": "Returns a list of all processes (running and exited) currently tracked\nby the runtime, sorted by process ID.", + "operationId": "get_v1_processes", + "parameters": [ + { + "name": "owner", + "in": "query", + "required": false, + "schema": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessOwner" + } + ], + "nullable": true + } + } ], - "operationId": "list_sessions", "responses": { "200": { - "description": "", + "description": "List processes", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SessionListResponse" + "$ref": "#/components/schemas/ProcessListResponse" + 
} + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" } } } } } - } - }, - "/v1/sessions/{session_id}": { + }, "post": { - "tags": [ - "sessions" - ], - "operationId": "create_session", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Client session id", - "required": true, - "schema": { - "type": "string" - } - } - ], + "tags": ["v1"], + "summary": "Create a long-lived managed process.", + "description": "Spawns a new process with the given command and arguments. Supports both\npipe-based and PTY (tty) modes. Returns the process descriptor on success.", + "operationId": "post_v1_processes", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateSessionRequest" + "$ref": "#/components/schemas/ProcessCreateRequest" } } }, @@ -212,17 +2774,17 @@ }, "responses": { "200": { - "description": "", + "description": "Started process", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateSessionResponse" + "$ref": "#/components/schemas/ProcessInfo" } } } }, "400": { - "description": "", + "description": "Invalid request", "content": { "application/json": { "schema": { @@ -232,7 +2794,17 @@ } }, "409": { - "description": "", + "description": "Process limit or state conflict", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", "content": { "application/json": { "schema": { @@ -244,70 +2816,73 @@ } } }, - "/v1/sessions/{session_id}/events": { + "/v1/processes/config": { "get": { - "tags": [ - "sessions" - ], - "operationId": "get_events", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Session id", - "required": true, - "schema": { - "type": "string" - } - }, - { - 
"name": "offset", - "in": "query", - "description": "Last seen event sequence (exclusive)", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - } - }, - { - "name": "limit", - "in": "query", - "description": "Max events to return", - "required": false, - "schema": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - } - }, - { - "name": "include_raw", - "in": "query", - "description": "Include raw provider payloads", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } - } - ], + "tags": ["v1"], + "summary": "Get process runtime configuration.", + "description": "Returns the current runtime configuration for the process management API,\nincluding limits for concurrency, timeouts, and buffer sizes.", + "operationId": "get_v1_processes_config", "responses": { "200": { - "description": "", + "description": "Current runtime process config", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EventsResponse" + "$ref": "#/components/schemas/ProcessConfig" } } } }, - "404": { - "description": "", + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "post": { + "tags": ["v1"], + "summary": "Update process runtime configuration.", + "description": "Replaces the runtime configuration for the process management API.\nValidates that all values are non-zero and clamps default timeout to max.", + "operationId": "post_v1_processes_config", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessConfig" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Updated runtime process config", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessConfig" + } + } + } + }, + "400": { 
+ "description": "Invalid config", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", "content": { "application/json": { "schema": { @@ -319,26 +2894,260 @@ } } }, - "/v1/sessions/{session_id}/events/sse": { + "/v1/processes/run": { + "post": { + "tags": ["v1"], + "summary": "Run a one-shot command.", + "description": "Executes a command to completion and returns its stdout, stderr, exit code,\nand duration. Supports configurable timeout and output size limits.", + "operationId": "post_v1_processes_run", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessRunRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "One-off command result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessRunResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}": { "get": { - "tags": [ - "sessions" - ], - "operationId": "get_events_sse", + "tags": ["v1"], + "summary": "Get a single process by ID.", + "description": "Returns the current state of a managed process including its status,\nPID, exit code, and creation/exit timestamps.", + "operationId": "get_v1_process", "parameters": [ { - "name": "session_id", + "name": "id", "in": "path", - "description": "Session id", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Process details", + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "delete": { + "tags": ["v1"], + "summary": "Delete a process record.", + "description": "Removes a stopped process from the runtime. Returns 409 if the process\nis still running; stop or kill it first.", + "operationId": "delete_v1_process", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Process deleted" + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Process is still running", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/input": { + "post": { + "tags": ["v1"], + "summary": "Write input to a process.", + "description": "Sends data to a process's stdin (pipe mode) or PTY writer (tty mode).\nData can be encoded as base64, utf8, or text. 
Returns 413 if the decoded\npayload exceeds the configured `maxInputBytesPerRequest` limit.", + "operationId": "post_v1_process_input", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInputRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Input accepted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInputResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Process not writable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "413": { + "description": "Input exceeds configured limit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/kill": { + "post": { + "tags": ["v1"], + "summary": "Send SIGKILL to a process.", + "description": "Sends SIGKILL to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", + "operationId": "post_v1_process_kill", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", "required": true, "schema": { "type": "string" } }, { - "name": "offset", + "name": "waitMs", "in": "query", - "description": "Last seen event sequence (exclusive)", + "description": "Wait up to N ms for process to exit", "required": false, "schema": { "type": "integer", @@ 
-346,58 +3155,31 @@ "nullable": true, "minimum": 0 } - }, - { - "name": "include_raw", - "in": "query", - "description": "Include raw provider payloads", - "required": false, - "schema": { - "type": "boolean", - "nullable": true - } } ], "responses": { "200": { - "description": "SSE event stream" - } - } - } - }, - "/v1/sessions/{session_id}/messages": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "post_message", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Session id", - "required": true, - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MessageRequest" + "description": "Kill signal sent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } } } }, - "required": true - }, - "responses": { - "204": { - "description": "Message accepted" - }, "404": { - "description": "", + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", "content": { "application/json": { "schema": { @@ -409,38 +3191,189 @@ } } }, - "/v1/sessions/{session_id}/messages/stream": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "post_message_stream", + "/v1/processes/{id}/logs": { + "get": { + "tags": ["v1"], + "summary": "Fetch process logs.", + "description": "Returns buffered log entries for a process. Supports filtering by stream\ntype, tail count, and sequence-based resumption. 
When `follow=true`,\nreturns an SSE stream that replays buffered entries then streams live output.", + "operationId": "get_v1_process_logs", "parameters": [ { - "name": "session_id", + "name": "id", "in": "path", - "description": "Session id", + "description": "Process ID", "required": true, "schema": { "type": "string" } }, { - "name": "include_raw", + "name": "stream", "in": "query", - "description": "Include raw provider payloads", + "description": "stdout|stderr|combined|pty", + "required": false, + "schema": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessLogsStream" + } + ], + "nullable": true + } + }, + { + "name": "tail", + "in": "query", + "description": "Tail N entries", + "required": false, + "schema": { + "type": "integer", + "nullable": true, + "minimum": 0 + } + }, + { + "name": "follow", + "in": "query", + "description": "Follow via SSE", "required": false, "schema": { "type": "boolean", "nullable": true } + }, + { + "name": "since", + "in": "query", + "description": "Only entries with sequence greater than this", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Process logs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessLogsResponse" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/stop": { + "post": { + "tags": ["v1"], + "summary": "Send SIGTERM to a process.", + "description": "Sends SIGTERM to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", + "operationId": 
"post_v1_process_stop", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "waitMs", + "in": "query", + "description": "Wait up to N ms for process to exit", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Stop signal sent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/terminal/resize": { + "post": { + "tags": ["v1"], + "summary": "Resize a process terminal.", + "description": "Sets the PTY window size (columns and rows) for a tty-mode process and\nsends SIGWINCH so the child process can adapt.", + "operationId": "post_v1_process_terminal_resize", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MessageRequest" + "$ref": "#/components/schemas/ProcessTerminalResizeRequest" } } }, @@ -448,10 +3381,47 @@ }, "responses": { "200": { - "description": "SSE event stream" + "description": "Resize accepted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessTerminalResizeResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } }, "404": 
{ - "description": "", + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Not a terminal process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", "content": { "application/json": { "schema": { @@ -463,48 +3433,39 @@ } } }, - "/v1/sessions/{session_id}/permissions/{permission_id}/reply": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "reply_permission", + "/v1/processes/{id}/terminal/ws": { + "get": { + "tags": ["v1"], + "summary": "Open an interactive WebSocket terminal session.", + "description": "Upgrades the connection to a WebSocket for bidirectional PTY I/O. Accepts\n`access_token` query param for browser-based auth (WebSocket API cannot\nsend custom headers). Streams raw PTY output as binary frames and accepts\nJSON control frames for input, resize, and close.", + "operationId": "get_v1_process_terminal_ws", "parameters": [ { - "name": "session_id", + "name": "id", "in": "path", - "description": "Session id", + "description": "Process ID", "required": true, "schema": { "type": "string" } }, { - "name": "permission_id", - "in": "path", - "description": "Permission id", - "required": true, + "name": "access_token", + "in": "query", + "description": "Bearer token alternative for WS auth", + "required": false, "schema": { - "type": "string" + "type": "string", + "nullable": true } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PermissionReplyRequest" - } - } - }, - "required": true - }, "responses": { - "204": { - "description": "Permission reply accepted" + "101": { + "description": "WebSocket upgraded" }, - "404": { - "description": "", + "400": { + "description": "Invalid websocket frame or upgrade request", 
"content": { "application/json": { "schema": { @@ -512,42 +3473,9 @@ } } } - } - } - } - }, - "/v1/sessions/{session_id}/questions/{question_id}/reject": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "reject_question", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Session id", - "required": true, - "schema": { - "type": "string" - } - }, - { - "name": "question_id", - "in": "path", - "description": "Question id", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "204": { - "description": "Question rejected" }, "404": { - "description": "", + "description": "Unknown process", "content": { "application/json": { "schema": { @@ -555,52 +3483,9 @@ } } } - } - } - } - }, - "/v1/sessions/{session_id}/questions/{question_id}/reply": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "reply_question", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Session id", - "required": true, - "schema": { - "type": "string" - } }, - { - "name": "question_id", - "in": "path", - "description": "Question id", - "required": true, - "schema": { - "type": "string" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/QuestionReplyRequest" - } - } - }, - "required": true - }, - "responses": { - "204": { - "description": "Question answered" - }, - "404": { - "description": "", + "409": { + "description": "Not a terminal process", "content": { "application/json": { "schema": { @@ -608,33 +3493,9 @@ } } } - } - } - } - }, - "/v1/sessions/{session_id}/terminate": { - "post": { - "tags": [ - "sessions" - ], - "operationId": "terminate_session", - "parameters": [ - { - "name": "session_id", - "in": "path", - "description": "Session id", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "204": { - "description": "Session terminated" }, - "404": { - "description": "", + "501": { 
+ "description": "Process API unsupported on this platform", "content": { "application/json": { "schema": { @@ -649,6 +3510,68 @@ }, "components": { "schemas": { + "AcpEnvelope": { + "type": "object", + "required": ["jsonrpc"], + "properties": { + "error": { + "nullable": true + }, + "id": { + "nullable": true + }, + "jsonrpc": { + "type": "string" + }, + "method": { + "type": "string", + "nullable": true + }, + "params": { + "nullable": true + }, + "result": { + "nullable": true + } + } + }, + "AcpPostQuery": { + "type": "object", + "properties": { + "agent": { + "type": "string", + "nullable": true + } + } + }, + "AcpServerInfo": { + "type": "object", + "required": ["serverId", "agent", "createdAtMs"], + "properties": { + "agent": { + "type": "string" + }, + "createdAtMs": { + "type": "integer", + "format": "int64" + }, + "serverId": { + "type": "string" + } + } + }, + "AcpServerListResponse": { + "type": "object", + "required": ["servers"], + "properties": { + "servers": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AcpServerInfo" + } + } + } + }, "AgentCapabilities": { "type": "object", "required": [ @@ -709,8 +3632,7 @@ "type": "boolean" }, "sharedProcess": { - "type": "boolean", - "description": "Whether this agent uses a shared long-running server process (vs per-turn subprocess)" + "type": "boolean" }, "status": { "type": "boolean" @@ -729,43 +3651,25 @@ } } }, - "AgentError": { - "type": "object", - "required": [ - "type", - "message" - ], - "properties": { - "agent": { - "type": "string", - "nullable": true - }, - "details": { - "nullable": true - }, - "message": { - "type": "string" - }, - "session_id": { - "type": "string", - "nullable": true - }, - "type": { - "$ref": "#/components/schemas/ErrorType" - } - } - }, "AgentInfo": { "type": "object", - "required": [ - "id", - "installed", - "capabilities" - ], + "required": ["id", "installed", "credentialsAvailable", "capabilities"], "properties": { "capabilities": { "$ref": 
"#/components/schemas/AgentCapabilities" }, + "configError": { + "type": "string", + "nullable": true + }, + "configOptions": { + "type": "array", + "items": {}, + "nullable": true + }, + "credentialsAvailable": { + "type": "boolean" + }, "id": { "type": "string" }, @@ -790,20 +3694,60 @@ } } }, + "AgentInstallArtifact": { + "type": "object", + "required": ["kind", "path", "source"], + "properties": { + "kind": { + "type": "string" + }, + "path": { + "type": "string" + }, + "source": { + "type": "string" + }, + "version": { + "type": "string", + "nullable": true + } + } + }, "AgentInstallRequest": { "type": "object", "properties": { + "agentProcessVersion": { + "type": "string", + "nullable": true + }, + "agentVersion": { + "type": "string", + "nullable": true + }, "reinstall": { "type": "boolean", "nullable": true } } }, + "AgentInstallResponse": { + "type": "object", + "required": ["already_installed", "artifacts"], + "properties": { + "already_installed": { + "type": "boolean" + }, + "artifacts": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AgentInstallArtifact" + } + } + } + }, "AgentListResponse": { "type": "object", - "required": [ - "agents" - ], + "required": ["agents"], "properties": { "agents": { "type": "array", @@ -813,231 +3757,1062 @@ } } }, - "AgentModeInfo": { + "DesktopActionResponse": { "type": "object", - "required": [ - "id", - "name", - "description" - ], + "required": ["ok"], "properties": { - "description": { - "type": "string" - }, - "id": { - "type": "string" - }, - "name": { - "type": "string" + "ok": { + "type": "boolean" } } }, - "AgentModesResponse": { + "DesktopClipboardQuery": { "type": "object", - "required": [ - "modes" - ], "properties": { - "modes": { - "type": "array", - "items": { - "$ref": "#/components/schemas/AgentModeInfo" - } - } - } - }, - "AgentUnparsedData": { - "type": "object", - "required": [ - "error", - "location" - ], - "properties": { - "error": { - "type": "string" - }, - "location": { - 
"type": "string" - }, - "raw_hash": { + "selection": { "type": "string", "nullable": true } } }, - "ContentPart": { + "DesktopClipboardResponse": { + "type": "object", + "required": ["text", "selection"], + "properties": { + "selection": { + "type": "string" + }, + "text": { + "type": "string" + } + } + }, + "DesktopClipboardWriteRequest": { + "type": "object", + "required": ["text"], + "properties": { + "selection": { + "type": "string", + "nullable": true + }, + "text": { + "type": "string" + } + } + }, + "DesktopDisplayInfoResponse": { + "type": "object", + "required": ["display", "resolution"], + "properties": { + "display": { + "type": "string" + }, + "resolution": { + "$ref": "#/components/schemas/DesktopResolution" + } + } + }, + "DesktopErrorInfo": { + "type": "object", + "required": ["code", "message"], + "properties": { + "code": { + "type": "string" + }, + "message": { + "type": "string" + } + } + }, + "DesktopKeyModifiers": { + "type": "object", + "properties": { + "alt": { + "type": "boolean", + "nullable": true + }, + "cmd": { + "type": "boolean", + "nullable": true + }, + "ctrl": { + "type": "boolean", + "nullable": true + }, + "shift": { + "type": "boolean", + "nullable": true + } + } + }, + "DesktopKeyboardDownRequest": { + "type": "object", + "required": ["key"], + "properties": { + "key": { + "type": "string" + } + } + }, + "DesktopKeyboardPressRequest": { + "type": "object", + "required": ["key"], + "properties": { + "key": { + "type": "string" + }, + "modifiers": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopKeyModifiers" + } + ], + "nullable": true + } + } + }, + "DesktopKeyboardTypeRequest": { + "type": "object", + "required": ["text"], + "properties": { + "delayMs": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "text": { + "type": "string" + } + } + }, + "DesktopKeyboardUpRequest": { + "type": "object", + "required": ["key"], + "properties": { + "key": { + "type": "string" + } + } + }, + 
"DesktopLaunchRequest": { + "type": "object", + "required": ["app"], + "properties": { + "app": { + "type": "string" + }, + "args": { + "type": "array", + "items": { + "type": "string" + }, + "nullable": true + }, + "wait": { + "type": "boolean", + "nullable": true + } + } + }, + "DesktopLaunchResponse": { + "type": "object", + "required": ["processId"], + "properties": { + "pid": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "processId": { + "type": "string" + }, + "windowId": { + "type": "string", + "nullable": true + } + } + }, + "DesktopMouseButton": { + "type": "string", + "enum": ["left", "middle", "right"] + }, + "DesktopMouseClickRequest": { + "type": "object", + "required": ["x", "y"], + "properties": { + "button": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopMouseButton" + } + ], + "nullable": true + }, + "clickCount": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopMouseDownRequest": { + "type": "object", + "properties": { + "button": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopMouseButton" + } + ], + "nullable": true + }, + "x": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "y": { + "type": "integer", + "format": "int32", + "nullable": true + } + } + }, + "DesktopMouseDragRequest": { + "type": "object", + "required": ["startX", "startY", "endX", "endY"], + "properties": { + "button": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopMouseButton" + } + ], + "nullable": true + }, + "endX": { + "type": "integer", + "format": "int32" + }, + "endY": { + "type": "integer", + "format": "int32" + }, + "startX": { + "type": "integer", + "format": "int32" + }, + "startY": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopMouseMoveRequest": { + "type": "object", + "required": ["x", "y"], + 
"properties": { + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopMousePositionResponse": { + "type": "object", + "required": ["x", "y"], + "properties": { + "screen": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "window": { + "type": "string", + "nullable": true + }, + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopMouseScrollRequest": { + "type": "object", + "required": ["x", "y"], + "properties": { + "deltaX": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "deltaY": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopMouseUpRequest": { + "type": "object", + "properties": { + "button": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopMouseButton" + } + ], + "nullable": true + }, + "x": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "y": { + "type": "integer", + "format": "int32", + "nullable": true + } + } + }, + "DesktopOpenRequest": { + "type": "object", + "required": ["target"], + "properties": { + "target": { + "type": "string" + } + } + }, + "DesktopOpenResponse": { + "type": "object", + "required": ["processId"], + "properties": { + "pid": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "processId": { + "type": "string" + } + } + }, + "DesktopProcessInfo": { + "type": "object", + "required": ["name", "running"], + "properties": { + "logPath": { + "type": "string", + "nullable": true + }, + "name": { + "type": "string" + }, + "pid": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "running": { + "type": "boolean" + } + } + }, + "DesktopRecordingInfo": { + "type": "object", + "required": ["id", "status", 
"fileName", "bytes", "startedAt"], + "properties": { + "bytes": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "endedAt": { + "type": "string", + "nullable": true + }, + "fileName": { + "type": "string" + }, + "id": { + "type": "string" + }, + "processId": { + "type": "string", + "nullable": true + }, + "startedAt": { + "type": "string" + }, + "status": { + "$ref": "#/components/schemas/DesktopRecordingStatus" + } + } + }, + "DesktopRecordingListResponse": { + "type": "object", + "required": ["recordings"], + "properties": { + "recordings": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DesktopRecordingInfo" + } + } + } + }, + "DesktopRecordingStartRequest": { + "type": "object", + "properties": { + "fps": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + } + } + }, + "DesktopRecordingStatus": { + "type": "string", + "enum": ["recording", "completed", "failed"] + }, + "DesktopRegionScreenshotQuery": { + "type": "object", + "required": ["x", "y", "width", "height"], + "properties": { + "format": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopScreenshotFormat" + } + ], + "nullable": true + }, + "height": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "quality": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "scale": { + "type": "number", + "format": "float", + "nullable": true + }, + "showCursor": { + "type": "boolean", + "nullable": true + }, + "width": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopResolution": { + "type": "object", + "required": ["width", "height"], + "properties": { + "dpi": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "height": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "width": { + "type": "integer", + 
"format": "int32", + "minimum": 0 + } + } + }, + "DesktopScreenshotFormat": { + "type": "string", + "enum": ["png", "jpeg", "webp"] + }, + "DesktopScreenshotQuery": { + "type": "object", + "properties": { + "format": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopScreenshotFormat" + } + ], + "nullable": true + }, + "quality": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "scale": { + "type": "number", + "format": "float", + "nullable": true + }, + "showCursor": { + "type": "boolean", + "nullable": true + } + } + }, + "DesktopStartRequest": { + "type": "object", + "properties": { + "displayNum": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "dpi": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "height": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "recordingFps": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "stateDir": { + "type": "string", + "nullable": true + }, + "streamAudioCodec": { + "type": "string", + "nullable": true + }, + "streamFrameRate": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "streamVideoCodec": { + "type": "string", + "nullable": true + }, + "webrtcPortRange": { + "type": "string", + "nullable": true + }, + "width": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + } + } + }, + "DesktopState": { + "type": "string", + "enum": ["inactive", "install_required", "starting", "active", "stopping", "failed"] + }, + "DesktopStatusResponse": { + "type": "object", + "required": ["state"], + "properties": { + "display": { + "type": "string", + "nullable": true + }, + "installCommand": { + "type": "string", + "nullable": true + }, + "lastError": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopErrorInfo" + } + ], + "nullable": true + }, + "missingDependencies": { + "type": "array", + 
"items": { + "type": "string" + } + }, + "processes": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DesktopProcessInfo" + } + }, + "resolution": { + "allOf": [ + { + "$ref": "#/components/schemas/DesktopResolution" + } + ], + "nullable": true + }, + "runtimeLogPath": { + "type": "string", + "nullable": true + }, + "startedAt": { + "type": "string", + "nullable": true + }, + "state": { + "$ref": "#/components/schemas/DesktopState" + }, + "windows": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DesktopWindowInfo" + }, + "description": "Current visible windows (included when the desktop is active)." + } + } + }, + "DesktopStreamStatusResponse": { + "type": "object", + "required": ["active"], + "properties": { + "active": { + "type": "boolean" + }, + "processId": { + "type": "string", + "nullable": true + }, + "windowId": { + "type": "string", + "nullable": true + } + } + }, + "DesktopWindowInfo": { + "type": "object", + "required": ["id", "title", "x", "y", "width", "height", "isActive"], + "properties": { + "height": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "id": { + "type": "string" + }, + "isActive": { + "type": "boolean" + }, + "title": { + "type": "string" + }, + "width": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopWindowListResponse": { + "type": "object", + "required": ["windows"], + "properties": { + "windows": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DesktopWindowInfo" + } + } + } + }, + "DesktopWindowMoveRequest": { + "type": "object", + "required": ["x", "y"], + "properties": { + "x": { + "type": "integer", + "format": "int32" + }, + "y": { + "type": "integer", + "format": "int32" + } + } + }, + "DesktopWindowResizeRequest": { + "type": "object", + "required": ["width", "height"], + "properties": { + "height": { + "type": 
"integer", + "format": "int32", + "minimum": 0 + }, + "width": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "ErrorType": { + "type": "string", + "enum": [ + "invalid_request", + "conflict", + "unsupported_agent", + "agent_not_installed", + "install_failed", + "agent_process_exited", + "token_invalid", + "permission_denied", + "not_acceptable", + "unsupported_media_type", + "not_found", + "session_not_found", + "session_already_exists", + "mode_not_supported", + "stream_error", + "timeout" + ] + }, + "FsActionResponse": { + "type": "object", + "required": ["path"], + "properties": { + "path": { + "type": "string" + } + } + }, + "FsDeleteQuery": { + "type": "object", + "required": ["path"], + "properties": { + "path": { + "type": "string" + }, + "recursive": { + "type": "boolean", + "nullable": true + } + } + }, + "FsEntriesQuery": { + "type": "object", + "properties": { + "path": { + "type": "string", + "nullable": true + } + } + }, + "FsEntry": { + "type": "object", + "required": ["name", "path", "entryType", "size"], + "properties": { + "entryType": { + "$ref": "#/components/schemas/FsEntryType" + }, + "modified": { + "type": "string", + "nullable": true + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "size": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "FsEntryType": { + "type": "string", + "enum": ["file", "directory"] + }, + "FsMoveRequest": { + "type": "object", + "required": ["from", "to"], + "properties": { + "from": { + "type": "string" + }, + "overwrite": { + "type": "boolean", + "nullable": true + }, + "to": { + "type": "string" + } + } + }, + "FsMoveResponse": { + "type": "object", + "required": ["from", "to"], + "properties": { + "from": { + "type": "string" + }, + "to": { + "type": "string" + } + } + }, + "FsPathQuery": { + "type": "object", + "required": ["path"], + "properties": { + "path": { + "type": "string" + } + } + }, + "FsStat": { + "type": "object", + 
"required": ["path", "entryType", "size"], + "properties": { + "entryType": { + "$ref": "#/components/schemas/FsEntryType" + }, + "modified": { + "type": "string", + "nullable": true + }, + "path": { + "type": "string" + }, + "size": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "FsUploadBatchQuery": { + "type": "object", + "properties": { + "path": { + "type": "string", + "nullable": true + } + } + }, + "FsUploadBatchResponse": { + "type": "object", + "required": ["paths", "truncated"], + "properties": { + "paths": { + "type": "array", + "items": { + "type": "string" + } + }, + "truncated": { + "type": "boolean" + } + } + }, + "FsWriteResponse": { + "type": "object", + "required": ["path", "bytesWritten"], + "properties": { + "bytesWritten": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "path": { + "type": "string" + } + } + }, + "HealthResponse": { + "type": "object", + "required": ["status"], + "properties": { + "status": { + "type": "string" + } + } + }, + "McpConfigQuery": { + "type": "object", + "required": ["directory", "mcpName"], + "properties": { + "directory": { + "type": "string" + }, + "mcpName": { + "type": "string" + } + } + }, + "McpServerConfig": { "oneOf": [ { "type": "object", - "required": [ - "text", - "type" - ], + "required": ["command", "type"], "properties": { - "text": { - "type": "string" + "args": { + "type": "array", + "items": { + "type": "string" + } }, - "type": { - "type": "string", - "enum": [ - "text" - ] - } - } - }, - { - "type": "object", - "required": [ - "json", - "type" - ], - "properties": { - "json": {}, - "type": { - "type": "string", - "enum": [ - "json" - ] - } - } - }, - { - "type": "object", - "required": [ - "name", - "arguments", - "call_id", - "type" - ], - "properties": { - "arguments": { - "type": "string" + "command": { + "$ref": "#/components/schemas/McpCommand" }, - "call_id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "type": { - "type": 
"string", - "enum": [ - "tool_call" - ] - } - } - }, - { - "type": "object", - "required": [ - "call_id", - "output", - "type" - ], - "properties": { - "call_id": { - "type": "string" - }, - "output": { - "type": "string" - }, - "type": { - "type": "string", - "enum": [ - "tool_result" - ] - } - } - }, - { - "type": "object", - "required": [ - "path", - "action", - "type" - ], - "properties": { - "action": { - "$ref": "#/components/schemas/FileAction" - }, - "diff": { + "cwd": { "type": "string", "nullable": true }, - "path": { - "type": "string" + "enabled": { + "type": "boolean", + "nullable": true + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "nullable": true + }, + "timeoutMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 }, "type": { "type": "string", - "enum": [ - "file_ref" - ] + "enum": ["local"] } } }, { "type": "object", - "required": [ - "text", - "visibility", - "type" - ], + "required": ["url", "type"], "properties": { - "text": { - "type": "string" - }, - "type": { - "type": "string", - "enum": [ - "reasoning" - ] - }, - "visibility": { - "$ref": "#/components/schemas/ReasoningVisibility" - } - } - }, - { - "type": "object", - "required": [ - "path", - "type" - ], - "properties": { - "mime": { + "bearerTokenEnvVar": { "type": "string", "nullable": true }, - "path": { - "type": "string" - }, - "type": { - "type": "string", - "enum": [ - "image" - ] - } - } - }, - { - "type": "object", - "required": [ - "label", - "type" - ], - "properties": { - "detail": { - "type": "string", + "enabled": { + "type": "boolean", "nullable": true }, - "label": { - "type": "string" + "envHeaders": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "nullable": true + }, + "headers": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "nullable": true + }, + "oauth": { + "allOf": [ + { + "$ref": "#/components/schemas/McpOAuthConfigOrDisabled" + } + 
], + "nullable": true + }, + "timeoutMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + }, + "transport": { + "allOf": [ + { + "$ref": "#/components/schemas/McpRemoteTransport" + } + ], + "nullable": true }, "type": { "type": "string", - "enum": [ - "status" - ] + "enum": ["remote"] + }, + "url": { + "type": "string" } } } @@ -1046,285 +4821,9 @@ "propertyName": "type" } }, - "CreateSessionRequest": { - "type": "object", - "required": [ - "agent" - ], - "properties": { - "agent": { - "type": "string" - }, - "agentMode": { - "type": "string", - "nullable": true - }, - "agentVersion": { - "type": "string", - "nullable": true - }, - "model": { - "type": "string", - "nullable": true - }, - "permissionMode": { - "type": "string", - "nullable": true - }, - "variant": { - "type": "string", - "nullable": true - } - } - }, - "CreateSessionResponse": { - "type": "object", - "required": [ - "healthy" - ], - "properties": { - "error": { - "allOf": [ - { - "$ref": "#/components/schemas/AgentError" - } - ], - "nullable": true - }, - "healthy": { - "type": "boolean" - }, - "nativeSessionId": { - "type": "string", - "nullable": true - } - } - }, - "ErrorData": { - "type": "object", - "required": [ - "message" - ], - "properties": { - "code": { - "type": "string", - "nullable": true - }, - "details": { - "nullable": true - }, - "message": { - "type": "string" - } - } - }, - "ErrorType": { - "type": "string", - "enum": [ - "invalid_request", - "unsupported_agent", - "agent_not_installed", - "install_failed", - "agent_process_exited", - "token_invalid", - "permission_denied", - "session_not_found", - "session_already_exists", - "mode_not_supported", - "stream_error", - "timeout" - ] - }, - "EventSource": { - "type": "string", - "enum": [ - "agent", - "daemon" - ] - }, - "EventsQuery": { - "type": "object", - "properties": { - "includeRaw": { - "type": "boolean", - "nullable": true - }, - "limit": { - "type": "integer", - "format": "int64", - 
"nullable": true, - "minimum": 0 - }, - "offset": { - "type": "integer", - "format": "int64", - "nullable": true, - "minimum": 0 - } - } - }, - "EventsResponse": { - "type": "object", - "required": [ - "events", - "hasMore" - ], - "properties": { - "events": { - "type": "array", - "items": { - "$ref": "#/components/schemas/UniversalEvent" - } - }, - "hasMore": { - "type": "boolean" - } - } - }, - "FileAction": { - "type": "string", - "enum": [ - "read", - "write", - "patch" - ] - }, - "HealthResponse": { - "type": "object", - "required": [ - "status" - ], - "properties": { - "status": { - "type": "string" - } - } - }, - "ItemDeltaData": { - "type": "object", - "required": [ - "item_id", - "delta" - ], - "properties": { - "delta": { - "type": "string" - }, - "item_id": { - "type": "string" - }, - "native_item_id": { - "type": "string", - "nullable": true - } - } - }, - "ItemEventData": { - "type": "object", - "required": [ - "item" - ], - "properties": { - "item": { - "$ref": "#/components/schemas/UniversalItem" - } - } - }, - "ItemKind": { - "type": "string", - "enum": [ - "message", - "tool_call", - "tool_result", - "system", - "status", - "unknown" - ] - }, - "ItemRole": { - "type": "string", - "enum": [ - "user", - "assistant", - "system", - "tool" - ] - }, - "ItemStatus": { - "type": "string", - "enum": [ - "in_progress", - "completed", - "failed" - ] - }, - "MessageRequest": { - "type": "object", - "required": [ - "message" - ], - "properties": { - "message": { - "type": "string" - } - } - }, - "PermissionEventData": { - "type": "object", - "required": [ - "permission_id", - "action", - "status" - ], - "properties": { - "action": { - "type": "string" - }, - "metadata": { - "nullable": true - }, - "permission_id": { - "type": "string" - }, - "status": { - "$ref": "#/components/schemas/PermissionStatus" - } - } - }, - "PermissionReply": { - "type": "string", - "enum": [ - "once", - "always", - "reject" - ] - }, - "PermissionReplyRequest": { - "type": "object", - 
"required": [ - "reply" - ], - "properties": { - "reply": { - "$ref": "#/components/schemas/PermissionReply" - } - } - }, - "PermissionStatus": { - "type": "string", - "enum": [ - "requested", - "approved", - "denied" - ] - }, "ProblemDetails": { "type": "object", - "required": [ - "type", - "title", - "status" - ], + "required": ["type", "title", "status"], "properties": { "detail": { "type": "string", @@ -1348,97 +4847,371 @@ }, "additionalProperties": {} }, - "QuestionEventData": { + "ProcessConfig": { "type": "object", - "required": [ - "question_id", - "prompt", - "options", - "status" - ], + "required": ["maxConcurrentProcesses", "defaultRunTimeoutMs", "maxRunTimeoutMs", "maxOutputBytes", "maxLogBytesPerProcess", "maxInputBytesPerRequest"], "properties": { - "options": { + "defaultRunTimeoutMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "maxConcurrentProcesses": { + "type": "integer", + "minimum": 0 + }, + "maxInputBytesPerRequest": { + "type": "integer", + "minimum": 0 + }, + "maxLogBytesPerProcess": { + "type": "integer", + "minimum": 0 + }, + "maxOutputBytes": { + "type": "integer", + "minimum": 0 + }, + "maxRunTimeoutMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "ProcessCreateRequest": { + "type": "object", + "required": ["command"], + "properties": { + "args": { "type": "array", "items": { "type": "string" } }, - "prompt": { + "command": { "type": "string" }, - "question_id": { - "type": "string" - }, - "response": { + "cwd": { "type": "string", "nullable": true }, - "status": { - "$ref": "#/components/schemas/QuestionStatus" + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "interactive": { + "type": "boolean" + }, + "tty": { + "type": "boolean" } } }, - "QuestionReplyRequest": { + "ProcessInfo": { "type": "object", - "required": [ - "answers" - ], + "required": ["id", "command", "args", "tty", "interactive", "owner", "status", "createdAtMs"], "properties": { 
- "answers": { + "args": { "type": "array", "items": { - "type": "array", - "items": { - "type": "string" + "type": "string" + } + }, + "command": { + "type": "string" + }, + "createdAtMs": { + "type": "integer", + "format": "int64" + }, + "cwd": { + "type": "string", + "nullable": true + }, + "exitCode": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "exitedAtMs": { + "type": "integer", + "format": "int64", + "nullable": true + }, + "id": { + "type": "string" + }, + "interactive": { + "type": "boolean" + }, + "owner": { + "$ref": "#/components/schemas/ProcessOwner" + }, + "pid": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "status": { + "$ref": "#/components/schemas/ProcessState" + }, + "tty": { + "type": "boolean" + } + } + }, + "ProcessInputRequest": { + "type": "object", + "required": ["data"], + "properties": { + "data": { + "type": "string" + }, + "encoding": { + "type": "string", + "nullable": true + } + } + }, + "ProcessInputResponse": { + "type": "object", + "required": ["bytesWritten"], + "properties": { + "bytesWritten": { + "type": "integer", + "minimum": 0 + } + } + }, + "ProcessListQuery": { + "type": "object", + "properties": { + "owner": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessOwner" } + ], + "nullable": true + } + } + }, + "ProcessListResponse": { + "type": "object", + "required": ["processes"], + "properties": { + "processes": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProcessInfo" } } } }, - "QuestionStatus": { - "type": "string", - "enum": [ - "requested", - "answered", - "rejected" - ] - }, - "ReasoningVisibility": { - "type": "string", - "enum": [ - "public", - "private" - ] - }, - "ServerStatus": { - "type": "string", - "description": "Status of a shared server process for an agent", - "enum": [ - "running", - "stopped", - "error" - ] - }, - "ServerStatusInfo": { + "ProcessLogEntry": { "type": "object", - "required": [ - "status", - 
"restartCount" - ], + "required": ["sequence", "stream", "timestampMs", "data", "encoding"], "properties": { - "baseUrl": { - "type": "string", - "nullable": true + "data": { + "type": "string" }, - "lastError": { - "type": "string", - "nullable": true + "encoding": { + "type": "string" }, - "restartCount": { + "sequence": { "type": "integer", "format": "int64", "minimum": 0 }, + "stream": { + "$ref": "#/components/schemas/ProcessLogsStream" + }, + "timestampMs": { + "type": "integer", + "format": "int64" + } + } + }, + "ProcessLogsQuery": { + "type": "object", + "properties": { + "follow": { + "type": "boolean", + "nullable": true + }, + "since": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + }, + "stream": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessLogsStream" + } + ], + "nullable": true + }, + "tail": { + "type": "integer", + "nullable": true, + "minimum": 0 + } + } + }, + "ProcessLogsResponse": { + "type": "object", + "required": ["processId", "stream", "entries"], + "properties": { + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProcessLogEntry" + } + }, + "processId": { + "type": "string" + }, + "stream": { + "$ref": "#/components/schemas/ProcessLogsStream" + } + } + }, + "ProcessLogsStream": { + "type": "string", + "enum": ["stdout", "stderr", "combined", "pty"] + }, + "ProcessOwner": { + "type": "string", + "enum": ["user", "desktop", "system"] + }, + "ProcessRunRequest": { + "type": "object", + "required": ["command"], + "properties": { + "args": { + "type": "array", + "items": { + "type": "string" + } + }, + "command": { + "type": "string" + }, + "cwd": { + "type": "string", + "nullable": true + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "maxOutputBytes": { + "type": "integer", + "nullable": true, + "minimum": 0 + }, + "timeoutMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + }, + 
"ProcessRunResponse": { + "type": "object", + "required": ["timedOut", "stdout", "stderr", "stdoutTruncated", "stderrTruncated", "durationMs"], + "properties": { + "durationMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "exitCode": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "stderr": { + "type": "string" + }, + "stderrTruncated": { + "type": "boolean" + }, + "stdout": { + "type": "string" + }, + "stdoutTruncated": { + "type": "boolean" + }, + "timedOut": { + "type": "boolean" + } + } + }, + "ProcessSignalQuery": { + "type": "object", + "properties": { + "waitMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + }, + "ProcessState": { + "type": "string", + "enum": ["running", "exited"] + }, + "ProcessTerminalResizeRequest": { + "type": "object", + "required": ["cols", "rows"], + "properties": { + "cols": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "rows": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "ProcessTerminalResizeResponse": { + "type": "object", + "required": ["cols", "rows"], + "properties": { + "cols": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "rows": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "ServerStatus": { + "type": "string", + "enum": ["running", "stopped"] + }, + "ServerStatusInfo": { + "type": "object", + "required": ["status"], + "properties": { "status": { "$ref": "#/components/schemas/ServerStatus" }, @@ -1450,290 +5223,54 @@ } } }, - "SessionEndReason": { - "type": "string", - "enum": [ - "completed", - "error", - "terminated" - ] - }, - "SessionEndedData": { + "SkillSource": { "type": "object", - "required": [ - "reason", - "terminated_by" - ], + "required": ["type", "source"], "properties": { - "exit_code": { - "type": "integer", - "format": "int32", - "description": "Process exit code when reason is Error", - "nullable": true - }, - "message": { - 
"type": "string", - "description": "Error message when reason is Error", - "nullable": true - }, - "reason": { - "$ref": "#/components/schemas/SessionEndReason" - }, - "stderr": { - "allOf": [ - { - "$ref": "#/components/schemas/StderrOutput" - } - ], - "nullable": true - }, - "terminated_by": { - "$ref": "#/components/schemas/TerminatedBy" - } - } - }, - "SessionInfo": { - "type": "object", - "required": [ - "sessionId", - "agent", - "agentMode", - "permissionMode", - "ended", - "eventCount" - ], - "properties": { - "agent": { - "type": "string" - }, - "agentMode": { - "type": "string" - }, - "ended": { - "type": "boolean" - }, - "eventCount": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "model": { + "ref": { "type": "string", "nullable": true }, - "nativeSessionId": { - "type": "string", - "nullable": true - }, - "permissionMode": { - "type": "string" - }, - "sessionId": { - "type": "string" - }, - "variant": { - "type": "string", - "nullable": true - } - } - }, - "SessionListResponse": { - "type": "object", - "required": [ - "sessions" - ], - "properties": { - "sessions": { + "skills": { "type": "array", "items": { - "$ref": "#/components/schemas/SessionInfo" - } - } - } - }, - "SessionStartedData": { - "type": "object", - "properties": { - "metadata": { + "type": "string" + }, "nullable": true - } - } - }, - "StderrOutput": { - "type": "object", - "required": [ - "truncated" - ], - "properties": { - "head": { - "type": "string", - "description": "First N lines of stderr (if truncated) or full stderr (if not truncated)", - "nullable": true - }, - "tail": { - "type": "string", - "description": "Last N lines of stderr (only present if truncated)", - "nullable": true - }, - "total_lines": { - "type": "integer", - "description": "Total number of lines in stderr", - "nullable": true, - "minimum": 0 - }, - "truncated": { - "type": "boolean", - "description": "Whether the output was truncated" - } - } - }, - "TerminatedBy": { - "type": "string", - 
"enum": [ - "agent", - "daemon" - ] - }, - "TurnStreamQuery": { - "type": "object", - "properties": { - "includeRaw": { - "type": "boolean", - "nullable": true - } - } - }, - "UniversalEvent": { - "type": "object", - "required": [ - "event_id", - "sequence", - "time", - "session_id", - "synthetic", - "source", - "type", - "data" - ], - "properties": { - "data": { - "$ref": "#/components/schemas/UniversalEventData" - }, - "event_id": { - "type": "string" - }, - "native_session_id": { - "type": "string", - "nullable": true - }, - "raw": { - "nullable": true - }, - "sequence": { - "type": "integer", - "format": "int64", - "minimum": 0 - }, - "session_id": { - "type": "string" }, "source": { - "$ref": "#/components/schemas/EventSource" - }, - "synthetic": { - "type": "boolean" - }, - "time": { "type": "string" }, + "subpath": { + "type": "string", + "nullable": true + }, "type": { - "$ref": "#/components/schemas/UniversalEventType" + "type": "string" } } }, - "UniversalEventData": { - "oneOf": [ - { - "$ref": "#/components/schemas/SessionStartedData" - }, - { - "$ref": "#/components/schemas/SessionEndedData" - }, - { - "$ref": "#/components/schemas/ItemEventData" - }, - { - "$ref": "#/components/schemas/ItemDeltaData" - }, - { - "$ref": "#/components/schemas/ErrorData" - }, - { - "$ref": "#/components/schemas/PermissionEventData" - }, - { - "$ref": "#/components/schemas/QuestionEventData" - }, - { - "$ref": "#/components/schemas/AgentUnparsedData" - } - ] - }, - "UniversalEventType": { - "type": "string", - "enum": [ - "session.started", - "session.ended", - "item.started", - "item.delta", - "item.completed", - "error", - "permission.requested", - "permission.resolved", - "question.requested", - "question.resolved", - "agent.unparsed" - ] - }, - "UniversalItem": { + "SkillsConfig": { "type": "object", - "required": [ - "item_id", - "kind", - "content", - "status" - ], + "required": ["sources"], "properties": { - "content": { + "sources": { "type": "array", "items": { - 
"$ref": "#/components/schemas/ContentPart" + "$ref": "#/components/schemas/SkillSource" } - }, - "item_id": { + } + } + }, + "SkillsConfigQuery": { + "type": "object", + "required": ["directory", "skillName"], + "properties": { + "directory": { "type": "string" }, - "kind": { - "$ref": "#/components/schemas/ItemKind" - }, - "native_item_id": { - "type": "string", - "nullable": true - }, - "parent_id": { - "type": "string", - "nullable": true - }, - "role": { - "allOf": [ - { - "$ref": "#/components/schemas/ItemRole" - } - ], - "nullable": true - }, - "status": { - "$ref": "#/components/schemas/ItemStatus" + "skillName": { + "type": "string" } } } @@ -1741,16 +5278,8 @@ }, "tags": [ { - "name": "meta", - "description": "Service metadata" - }, - { - "name": "agents", - "description": "Agent management" - }, - { - "name": "sessions", - "description": "Session management" + "name": "v1", + "description": "ACP proxy v1 API" } ] -} \ No newline at end of file +} diff --git a/docs/opencode-compatibility.mdx b/docs/opencode-compatibility.mdx new file mode 100644 index 0000000..ac766b4 --- /dev/null +++ b/docs/opencode-compatibility.mdx @@ -0,0 +1,125 @@ +--- +title: "OpenCode Compatibility" +description: "Connect OpenCode clients, SDKs, and web UI to Sandbox Agent." +--- + +<Warning> + **Experimental**: OpenCode SDK/UI compatibility may change. +</Warning> + +Sandbox Agent exposes an OpenCode-compatible API at `/opencode`. + +## Why use OpenCode clients with Sandbox Agent? 
+ +- OpenCode CLI (`opencode attach`) +- OpenCode web UI +- OpenCode TypeScript SDK (`@opencode-ai/sdk`) + +## Quick start + +### OpenCode CLI / TUI + +```bash +sandbox-agent opencode --port 2468 --no-token +``` + +Or start server + attach manually: + +```bash +sandbox-agent server --no-token --host 127.0.0.1 --port 2468 +opencode attach http://localhost:2468/opencode +``` + +With authentication enabled: + +```bash +sandbox-agent server --token "$SANDBOX_TOKEN" --host 127.0.0.1 --port 2468 +opencode attach http://localhost:2468/opencode --password "$SANDBOX_TOKEN" +``` + +### OpenCode web UI + +<Steps> + <Step title="Start Sandbox Agent with CORS"> + ```bash + sandbox-agent server --no-token --host 127.0.0.1 --port 2468 --cors-allow-origin http://127.0.0.1:5173 + ``` + </Step> + <Step title="Run OpenCode web app"> + ```bash + git clone https://github.com/anomalyco/opencode + cd opencode/packages/app + export VITE_OPENCODE_SERVER_HOST=127.0.0.1 + export VITE_OPENCODE_SERVER_PORT=2468 + bun install + bun run dev -- --host 127.0.0.1 --port 5173 + ``` + </Step> + <Step title="Open UI"> + Visit `http://127.0.0.1:5173/`. 
+ </Step> +</Steps> + +### OpenCode SDK + +```typescript +import { createOpencodeClient } from "@opencode-ai/sdk"; + +const client = createOpencodeClient({ + baseUrl: "http://localhost:2468/opencode", +}); + +const session = await client.session.create(); + +await client.session.promptAsync({ + path: { id: session.data.id }, + body: { + parts: [{ type: "text", text: "Hello, write a hello world script" }], + }, +}); + +const events = await client.event.subscribe({}); +for await (const event of events.stream) { + console.log(event); +} +``` + +## Notes + +- API base path: `/opencode` +- If server auth is enabled, pass bearer auth (or `--password` in OpenCode CLI) +- For browser UIs, configure CORS with `--cors-allow-origin` +- Provider selector currently exposes compatible providers (`mock`, `amp`, `claude`, `codex`) +- Provider/model metadata for compatibility endpoints is normalized and may differ from native OpenCode grouping +- Optional proxy: set `OPENCODE_COMPAT_PROXY_URL` to forward selected endpoints to native OpenCode + +## Endpoint coverage + +<Accordion title="Endpoint Status Table"> + +| Endpoint | Status | Notes | +|---|---|---| +| `GET /event` | ✓ | Session/message updates (SSE) | +| `GET /global/event` | ✓ | GlobalEvent-wrapped stream | +| `GET /session` | ✓ | Session list | +| `POST /session` | ✓ | Create session | +| `GET /session/{id}` | ✓ | Session details | +| `POST /session/{id}/message` | ✓ | Send message | +| `GET /session/{id}/message` | ✓ | Session messages | +| `GET /permission` | ✓ | Pending permissions | +| `POST /permission/{id}/reply` | ✓ | Permission reply | +| `GET /question` | ✓ | Pending questions | +| `POST /question/{id}/reply` | ✓ | Question reply | +| `GET /provider` | ✓ | Provider metadata | +| `GET /command` | ↔ | Proxied when `OPENCODE_COMPAT_PROXY_URL` is set; otherwise stub | +| `GET /config` | ↔ | Proxied when set; otherwise stub | +| `PATCH /config` | ↔ | Proxied when set; otherwise local compatibility behavior | +| `GET 
/global/config` | ↔ | Proxied when set; otherwise stub | +| `PATCH /global/config` | ↔ | Proxied when set; otherwise local compatibility behavior | +| `/tui/*` | ↔ | Proxied when set; otherwise local compatibility behavior | +| `GET /agent` | − | Agent list | +| *other endpoints* | − | Empty/stub responses | + +✓ Functional ↔ Proxied optional − Stubbed + +</Accordion> diff --git a/docs/orchestration-architecture.mdx b/docs/orchestration-architecture.mdx new file mode 100644 index 0000000..08c776c --- /dev/null +++ b/docs/orchestration-architecture.mdx @@ -0,0 +1,43 @@ +--- +title: "Orchestration Architecture" +description: "Production topology, backend requirements, and session persistence." +icon: "sitemap" +--- + +This page covers production topology and backend requirements. Read [Architecture](/architecture) first for an overview of how the server, SDK, and agent processes fit together. + +## Suggested Topology + +Run the SDK on your backend, then call it from your frontend. + +This extra hop is recommended because it keeps auth/token logic on the backend and makes persistence simpler. + +```mermaid placement="top-right" + flowchart LR + BROWSER["Browser"] + subgraph BACKEND["Your backend"] + direction TB + SDK["Sandbox Agent SDK"] + end + subgraph SANDBOX_SIMPLE["Sandbox"] + SERVER_SIMPLE["Sandbox Agent server"] + end + + BROWSER --> BACKEND + BACKEND --> SDK --> SERVER_SIMPLE +``` + +### Backend requirements + +Your backend layer needs to handle: + +- **Long-running connections**: prompts can take minutes. +- **Session affinity**: follow-up messages must reach the same session. +- **State between requests**: session metadata and event history must persist across requests. +- **Graceful recovery**: sessions should resume after backend restarts. + +We recommend [Rivet](https://rivet.dev) over serverless because actors natively support the long-lived connections, session routing, and state persistence that agent workloads require. 
+ +## Session persistence + +For storage driver options and replay behavior, see [Persisting Sessions](/session-persistence). diff --git a/docs/processes.mdx b/docs/processes.mdx new file mode 100644 index 0000000..282c0f1 --- /dev/null +++ b/docs/processes.mdx @@ -0,0 +1,258 @@ +--- +title: "Processes" +description: "Run commands and manage long-lived processes inside the sandbox." +sidebarTitle: "Processes" +icon: "terminal" +--- + +The process API supports: + +- **One-shot execution** — run a command to completion and capture stdout, stderr, and exit code +- **Managed processes** — spawn, list, stop, kill, and delete long-lived processes +- **Log streaming** — fetch buffered logs or follow live output +- **Terminals** — full PTY support with bidirectional WebSocket I/O +- **Configurable limits** — control concurrency, timeouts, and buffer sizes per runtime + +## Run a command + +Execute a command to completion and get its output. + +<CodeGroup> +```ts TypeScript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +const result = await sdk.runProcess({ + command: "ls", + args: ["-la", "/workspace"], +}); + +console.log(result.exitCode); // 0 +console.log(result.stdout); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d '{"command":"ls","args":["-la","/workspace"]}' +``` +</CodeGroup> + +You can set a timeout and cap output size: + +<CodeGroup> +```ts TypeScript +const result = await sdk.runProcess({ + command: "make", + args: ["build"], + timeoutMs: 60000, + maxOutputBytes: 1048576, +}); + +if (result.timedOut) { + console.log("Build timed out"); +} +if (result.stdoutTruncated) { + console.log("Output was truncated"); +} +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/run" \ + -H "Content-Type: application/json" \ + -d 
'{"command":"make","args":["build"],"timeoutMs":60000,"maxOutputBytes":1048576}' +``` +</CodeGroup> + +## Managed processes + +Create a long-lived process that you can interact with, monitor, and stop later. + +### Create + +<CodeGroup> +```ts TypeScript +const proc = await sdk.createProcess({ + command: "node", + args: ["server.js"], + cwd: "/workspace", +}); + +console.log(proc.id, proc.pid); // proc_1, 12345 +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes" \ + -H "Content-Type: application/json" \ + -d '{"command":"node","args":["server.js"],"cwd":"/workspace"}' +``` +</CodeGroup> + +### List and get + +<CodeGroup> +```ts TypeScript +const { processes } = await sdk.listProcesses(); + +for (const p of processes) { + console.log(p.id, p.command, p.status); +} + +const proc = await sdk.getProcess("proc_1"); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/processes" + +curl "http://127.0.0.1:2468/v1/processes/proc_1" +``` +</CodeGroup> + +### Stop, kill, and delete + +<CodeGroup> +```ts TypeScript +// SIGTERM with optional wait +await sdk.stopProcess("proc_1", { waitMs: 5000 }); + +// SIGKILL +await sdk.killProcess("proc_1", { waitMs: 1000 }); + +// Remove exited process record +await sdk.deleteProcess("proc_1"); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/proc_1/stop?waitMs=5000" + +curl -X POST "http://127.0.0.1:2468/v1/processes/proc_1/kill?waitMs=1000" + +curl -X DELETE "http://127.0.0.1:2468/v1/processes/proc_1" +``` +</CodeGroup> + +## Logs + +### Fetch buffered logs + +<CodeGroup> +```ts TypeScript +const logs = await sdk.getProcessLogs("proc_1", { + tail: 50, + stream: "combined", +}); + +for (const entry of logs.entries) { + console.log(entry.stream, atob(entry.data)); +} +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/processes/proc_1/logs?tail=50&stream=combined" +``` +</CodeGroup> + +### Follow logs + +Stream log entries in real time. 
The subscription replays buffered entries first, then streams new output as it arrives. + +```ts TypeScript +const sub = await sdk.followProcessLogs("proc_1", (entry) => { +  console.log(entry.stream, atob(entry.data)); +}); + +// Later, stop following +sub.close(); +await sub.closed; +``` + +## Terminals + +Create a process with `tty: true` to allocate a pseudo-terminal, then connect via WebSocket for full bidirectional I/O. + +```ts TypeScript +const proc = await sdk.createProcess({ +  command: "bash", +  tty: true, +}); +``` + +### Write input + +<CodeGroup> +```ts TypeScript +await sdk.sendProcessInput("proc_1", { +  data: "echo hello\n", +  encoding: "utf8", +}); +``` + +```bash cURL +curl -X POST "http://127.0.0.1:2468/v1/processes/proc_1/input" \ +  -H "Content-Type: application/json" \ +  -d '{"data":"echo hello\n","encoding":"utf8"}' +``` +</CodeGroup> + +### Connect to a terminal + +Use `ProcessTerminalSession` unless you need direct frame access. + +```ts TypeScript +const terminal = sdk.connectProcessTerminal("proc_1"); + +terminal.onReady(() => { +  terminal.resize({ cols: 120, rows: 40 }); +  terminal.sendInput("ls\n"); +}); + +terminal.onData((bytes) => { +  process.stdout.write(new TextDecoder().decode(bytes)); +}); + +terminal.onExit((status) => { +  console.log("exit:", status.exitCode); +}); + +terminal.onError((error) => { +  console.error(error instanceof Error ? error.message : String(error)); +}); + +terminal.onClose(() => { +  console.log("terminal closed"); +}); +``` + +Since the browser WebSocket API cannot send custom headers, the endpoint accepts an `access_token` query parameter for authentication. The SDK handles this automatically. + +### Browser terminal emulators + +The terminal session works with any browser terminal emulator like ghostty-web or xterm.js. For a drop-in React terminal, see [React Components](/react-components). + +## Configuration + +Adjust runtime limits like max concurrent processes, timeouts, and buffer sizes. 
+ +<CodeGroup> +```ts TypeScript +const config = await sdk.getProcessConfig(); +console.log(config); + +await sdk.setProcessConfig({ + ...config, + maxConcurrentProcesses: 32, + defaultRunTimeoutMs: 60000, +}); +``` + +```bash cURL +curl "http://127.0.0.1:2468/v1/processes/config" + +curl -X POST "http://127.0.0.1:2468/v1/processes/config" \ + -H "Content-Type: application/json" \ + -d '{"maxConcurrentProcesses":32,"defaultRunTimeoutMs":60000,"maxRunTimeoutMs":300000,"maxOutputBytes":1048576,"maxLogBytesPerProcess":10485760,"maxInputBytesPerRequest":65536}' +``` +</CodeGroup> diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 4902344..223a54d 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -6,9 +6,18 @@ icon: "rocket" <Steps> <Step title="Install skill (optional)"> - ```bash - npx skills add rivet-dev/skills -s sandbox-agent - ``` + <Tabs> + <Tab title="npx"> + ```bash + npx skills add rivet-dev/skills -s sandbox-agent + ``` + </Tab> + <Tab title="bunx"> + ```bash + bunx skills add rivet-dev/skills -s sandbox-agent + ``` + </Tab> + </Tabs> </Step> <Step title="Set environment variables"> @@ -52,21 +61,26 @@ icon: "rocket" <Tab title="Docker"> ```bash - docker run -e ANTHROPIC_API_KEY="sk-ant-..." \ + docker run -p 2468:2468 \ + -e ANTHROPIC_API_KEY="sk-ant-..." \ -e OPENAI_API_KEY="sk-..." \ - your-image + rivetdev/sandbox-agent:0.4.2-full \ + server --no-token --host 0.0.0.0 --port 2468 ``` </Tab> </Tabs> - <AccordionGroup> - <Accordion title="Extracting API keys from current machine"> - Use `sandbox-agent credentials extract-env --export` to extract your existing API keys (Anthropic, OpenAI, etc.) from your existing Claude Code or Codex config files on your machine. - </Accordion> - <Accordion title="Testing without API keys"> - If you want to test Sandbox Agent without API keys, use the `mock` agent to test the SDK without any credentials. It simulates agent responses for development and testing. 
- </Accordion> - </AccordionGroup> + <AccordionGroup> + <Accordion title="Extracting API keys from current machine"> + Use `sandbox-agent credentials extract-env --export` to extract your existing API keys (Anthropic, OpenAI, etc.) from local Claude Code or Codex config files. + </Accordion> + <Accordion title="Testing without API keys"> + Use the `mock` agent for SDK and integration testing without provider credentials. + </Accordion> + <Accordion title="Multi-tenant and per-user billing"> + For per-tenant token tracking, budget enforcement, or usage-based billing, see [LLM Credentials](/llm-credentials) for gateway options like OpenRouter, LiteLLM, and Portkey. + </Accordion> + </AccordionGroup> </Step> <Step title="Run the server"> @@ -75,7 +89,7 @@ icon: "rocket" Install and run the binary directly. ```bash - curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh + curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh sandbox-agent server --no-token --host 0.0.0.0 --port 2468 ``` </Tab> @@ -84,7 +98,15 @@ icon: "rocket" Run without installing globally. ```bash - npx @sandbox-agent/cli server --no-token --host 0.0.0.0 --port 2468 + npx @sandbox-agent/cli@0.4.x server --no-token --host 0.0.0.0 --port 2468 + ``` + </Tab> + + <Tab title="bunx"> + Run without installing globally. + + ```bash + bunx @sandbox-agent/cli@0.4.x server --no-token --host 0.0.0.0 --port 2468 ``` </Tab> @@ -92,11 +114,52 @@ icon: "rocket" Install globally, then run. ```bash - npm install -g @sandbox-agent/cli + npm install -g @sandbox-agent/cli@0.4.x sandbox-agent server --no-token --host 0.0.0.0 --port 2468 ``` </Tab> + <Tab title="bun add -g"> + Install globally, then run. + + ```bash + bun add -g @sandbox-agent/cli@0.4.x + # Allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). 
+ bun pm -g trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 + sandbox-agent server --no-token --host 0.0.0.0 --port 2468 + ``` + </Tab> + + <Tab title="Node.js (local)"> + For local development, use `SandboxAgent.start()` to spawn and manage the server as a subprocess. + + ```bash + npm install sandbox-agent@0.4.x + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + + const sdk = await SandboxAgent.start(); + ``` + </Tab> + + <Tab title="Bun (local)"> + For local development, use `SandboxAgent.start()` to spawn and manage the server as a subprocess. + + ```bash + bun add sandbox-agent@0.4.x + # Allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). + bun pm trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + + const sdk = await SandboxAgent.start(); + ``` + </Tab> + <Tab title="Build from source"> If you're running from source instead of the installed CLI. @@ -104,193 +167,123 @@ icon: "rocket" cargo run -p sandbox-agent -- server --no-token --host 0.0.0.0 --port 2468 ``` </Tab> - - <Tab title="TypeScript (local)"> - For local development, use `SandboxAgent.start()` to automatically spawn and manage the server as a subprocess. - - ```typescript - import { SandboxAgent } from "sandbox-agent"; - - const client = await SandboxAgent.start(); - ``` - - This installs the binary and starts the server for you. No manual setup required. - </Tab> </Tabs> - Binding to `0.0.0.0` allows the server to accept connections from any network interface, which is required when running inside a sandbox where clients connect remotely. 
+ Binding to `0.0.0.0` allows the server to accept connections from any network interface, which is required when running inside a sandbox where clients connect remotely. - <AccordionGroup> - <Accordion title="Configuring token"> - Tokens are usually not required. Most sandbox providers (E2B, Daytona, etc.) already secure their networking at the infrastructure level, so the server endpoint is never publicly accessible. For local development, binding to `127.0.0.1` ensures only local connections are accepted. + <AccordionGroup> + <Accordion title="Configuring token"> + Tokens are usually not required. Most sandbox providers (E2B, Daytona, etc.) already secure networking at the infrastructure layer. - If you need to expose the server on a public endpoint, use `--token "$SANDBOX_TOKEN"` to require authentication on all requests: + If you expose the server publicly, use `--token "$SANDBOX_TOKEN"` to require authentication: - ```bash - sandbox-agent server --token "$SANDBOX_TOKEN" --host 0.0.0.0 --port 2468 - ``` + ```bash + sandbox-agent server --token "$SANDBOX_TOKEN" --host 0.0.0.0 --port 2468 + ``` - Then pass the token when connecting: + Then pass the token when connecting: - <Tabs> - <Tab title="TypeScript"> - ```typescript - const client = await SandboxAgent.connect({ - baseUrl: "http://your-server:2468", - token: process.env.SANDBOX_TOKEN, - }); - ``` - </Tab> + <Tabs> + <Tab title="TypeScript"> + ```typescript + import { SandboxAgent } from "sandbox-agent"; - <Tab title="curl"> - ```bash - curl "http://your-server:2468/v1/sessions" \ - -H "Authorization: Bearer $SANDBOX_TOKEN" - ``` - </Tab> + const sdk = await SandboxAgent.connect({ + baseUrl: "http://your-server:2468", + token: process.env.SANDBOX_TOKEN, + }); + ``` + </Tab> - <Tab title="CLI"> - ```bash - sandbox-agent api sessions list \ - --endpoint http://your-server:2468 \ - --token "$SANDBOX_TOKEN" - ``` - </Tab> - </Tabs> - </Accordion> - <Accordion title="CORS"> - If you're calling the server from a 
browser, see the [CORS configuration guide](/docs/cors). - </Accordion> - </AccordionGroup> + <Tab title="curl"> + ```bash + curl "http://your-server:2468/v1/health" \ + -H "Authorization: Bearer $SANDBOX_TOKEN" + ``` + </Tab> + + <Tab title="CLI"> + ```bash + sandbox-agent --token "$SANDBOX_TOKEN" api agents list \ + --endpoint http://your-server:2468 + ``` + </Tab> + </Tabs> + </Accordion> + <Accordion title="CORS"> + If you're calling the server from a browser, see the [CORS configuration guide](/cors). + </Accordion> + </AccordionGroup> </Step> <Step title="Install agents (optional)"> To preinstall agents: ```bash - sandbox-agent install-agent claude - sandbox-agent install-agent codex - sandbox-agent install-agent opencode - sandbox-agent install-agent amp + sandbox-agent install-agent --all ``` - If agents are not installed up front, they will be lazily installed when creating a session. It's recommended to pre-install agents then take a snapshot of the sandbox for faster coldstarts. + If agents are not installed up front, they are lazily installed when creating a session. + </Step> + + <Step title="Install desktop dependencies (optional, Linux only)"> + If you want to use `/v1/desktop/*`, install the desktop runtime packages first: + + ```bash + sandbox-agent install desktop --yes + ``` + + Then use `GET /v1/desktop/status` or `sdk.getDesktopStatus()` to verify the runtime is ready before calling desktop screenshot or input APIs. 
</Step> <Step title="Create a session"> - <Tabs> - <Tab title="TypeScript"> - ```typescript - import { SandboxAgent } from "sandbox-agent"; + ```typescript + import { SandboxAgent } from "sandbox-agent"; - const client = await SandboxAgent.connect({ - baseUrl: "http://127.0.0.1:2468", - }); + const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + }); - await client.createSession("my-session", { - agent: "claude", - agentMode: "build", - permissionMode: "default", - }); - ``` - </Tab> + const session = await sdk.createSession({ + agent: "claude", + sessionInit: { + cwd: "/", + mcpServers: [], + }, + }); - <Tab title="curl"> - ```bash - curl -X POST "http://127.0.0.1:2468/v1/sessions/my-session" \ - -H "Content-Type: application/json" \ - -d '{"agent":"claude","agentMode":"build","permissionMode":"default"}' - ``` - </Tab> - - <Tab title="CLI"> - ```bash - sandbox-agent api sessions create my-session \ - --agent claude \ - --endpoint http://127.0.0.1:2468 - ``` - </Tab> - </Tabs> + console.log(session.id); + ``` </Step> <Step title="Send a message"> - <Tabs> - <Tab title="TypeScript"> - ```typescript - await client.postMessage("my-session", { - message: "Summarize the repository and suggest next steps.", - }); - ``` - </Tab> + ```typescript + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); - <Tab title="curl"> - ```bash - curl -X POST "http://127.0.0.1:2468/v1/sessions/my-session/messages" \ - -H "Content-Type: application/json" \ - -d '{"message":"Summarize the repository and suggest next steps."}' - ``` - </Tab> - - <Tab title="CLI"> - ```bash - sandbox-agent api sessions send-message my-session \ - --message "Summarize the repository and suggest next steps." 
\ - --endpoint http://127.0.0.1:2468 - ``` - </Tab> - </Tabs> + console.log(result.stopReason); + ``` </Step> <Step title="Read events"> - <Tabs> - <Tab title="TypeScript"> - ```typescript - // Poll for events - const events = await client.getEvents("my-session", { offset: 0, limit: 50 }); + ```typescript + const off = session.onEvent((event) => { + console.log(event.sender, event.payload); + }); - // Or stream events - for await (const event of client.streamEvents("my-session", { offset: 0 })) { - console.log(event.type, event.data); - } - ``` - </Tab> + const page = await sdk.getEvents({ + sessionId: session.id, + limit: 50, + }); - <Tab title="curl"> - ```bash - # Poll for events - curl "http://127.0.0.1:2468/v1/sessions/my-session/events?offset=0&limit=50" - - # Stream events via SSE - curl "http://127.0.0.1:2468/v1/sessions/my-session/events/sse?offset=0" - - # Single-turn stream (post message and get streamed response) - curl -N -X POST "http://127.0.0.1:2468/v1/sessions/my-session/messages/stream" \ - -H "Content-Type: application/json" \ - -d '{"message":"Hello"}' - ``` - </Tab> - - <Tab title="CLI"> - ```bash - # Poll for events - sandbox-agent api sessions events my-session \ - --endpoint http://127.0.0.1:2468 - - # Stream events via SSE - sandbox-agent api sessions events-sse my-session \ - --endpoint http://127.0.0.1:2468 - - # Single-turn stream - sandbox-agent api sessions send-message-stream my-session \ - --message "Hello" \ - --endpoint http://127.0.0.1:2468 - ``` - </Tab> - </Tabs> + console.log(page.items.length); + off(); + ``` </Step> <Step title="Test with Inspector"> - Open the Inspector UI at `/ui/` on your server (e.g., `http://localhost:2468/ui/`) to inspect session state using a GUI. + Open the Inspector UI at `/ui/` on your server (for example, `http://localhost:2468/ui/`) to inspect sessions and events in a GUI. 
<Frame> <img src="/images/inspector.png" alt="Sandbox Agent Inspector" /> @@ -301,13 +294,13 @@ icon: "rocket" ## Next steps <CardGroup cols={3}> - <Card title="Build a Chat UI" icon="comments" href="/building-chat-ui"> - Learn how to build a chat interface for your agent. + <Card title="Session Persistence" icon="database" href="/session-persistence"> + Configure in-memory, Rivet Actor state, IndexedDB, SQLite, and Postgres persistence. </Card> - <Card title="Manage Sessions" icon="database" href="/manage-sessions"> - Persist and replay agent transcripts. + <Card title="Deploy to a Sandbox" icon="box" href="/deploy/local"> + Deploy your agent to E2B, Daytona, Docker, Vercel, or Cloudflare. </Card> - <Card title="Deploy to a Sandbox" icon="box" href="/deploy"> - Deploy your agent to E2B, Daytona, or Vercel Sandboxes. + <Card title="SDK Overview" icon="compass" href="/sdk-overview"> + Use the latest TypeScript SDK API. </Card> </CardGroup> diff --git a/docs/react-components.mdx b/docs/react-components.mdx new file mode 100644 index 0000000..71a76d2 --- /dev/null +++ b/docs/react-components.mdx @@ -0,0 +1,245 @@ +--- +title: "React Components" +description: "Drop-in React components for Sandbox Agent frontends." +icon: "react" +--- + +`@sandbox-agent/react` exposes small React components built on top of the `sandbox-agent` SDK. + +Current exports: + +- `AgentConversation` for a combined transcript + composer surface +- `ProcessTerminal` for attaching to a running tty process +- `AgentTranscript` for rendering session/message timelines without bundling any styles +- `ChatComposer` for a reusable prompt input/send surface +- `useTranscriptVirtualizer` for wiring large transcript lists to a scroll container + +## Install + +```bash +npm install @sandbox-agent/react@0.4.x +``` + +## Full example + +This example connects to a running Sandbox Agent server, starts a tty shell, renders `ProcessTerminal`, and cleans up the process when the component unmounts. 
+ +```tsx TerminalPane.tsx expandable highlight={5,32-36,71} +"use client"; + +import { useEffect, useState } from "react"; +import { SandboxAgent } from "sandbox-agent"; +import { ProcessTerminal } from "@sandbox-agent/react"; + +export default function TerminalPane() { + const [client, setClient] = useState<SandboxAgent | null>(null); + const [processId, setProcessId] = useState<string | null>(null); + const [error, setError] = useState<string | null>(null); + + useEffect(() => { + let cancelled = false; + let sdk: SandboxAgent | null = null; + let createdProcessId: string | null = null; + + const cleanup = async () => { + if (!sdk || !createdProcessId) { + return; + } + + await sdk.killProcess(createdProcessId, { waitMs: 1_000 }).catch(() => {}); + await sdk.deleteProcess(createdProcessId).catch(() => {}); + }; + + const start = async () => { + try { + sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + }); + + const process = await sdk.createProcess({ + command: "sh", + interactive: true, + tty: true, + }); + + if (cancelled) { + createdProcessId = process.id; + await cleanup(); + await sdk.dispose(); + return; + } + + createdProcessId = process.id; + setClient(sdk); + setProcessId(process.id); + } catch (err) { + const message = err instanceof Error ? err.message : "Failed to start terminal."; + setError(message); + } + }; + + void start(); + + return () => { + cancelled = true; + void cleanup(); + void sdk?.dispose(); + }; + }, []); + + if (error) { + return <div>{error}</div>; + } + + if (!client || !processId) { + return <div>Starting terminal...</div>; + } + + return <ProcessTerminal client={client} processId={processId} height={480} />; +} +``` + +## Component + +`ProcessTerminal` attaches to a running tty process. 
+ +- `client`: a `SandboxAgent` client +- `processId`: the process to attach to +- `height`, `style`, `terminalStyle`: optional layout overrides +- `onExit`, `onError`: optional lifecycle callbacks + +See [Processes](/processes) for the lower-level terminal APIs. + +## Headless transcript + +`AgentTranscript` is intentionally unstyled. It follows the common headless React pattern used by libraries like Radix, Headless UI, and React Aria: behavior lives in the component, while styling stays in your app through `className`, slot-level `classNames`, and `data-*` state attributes on the rendered DOM. + +```tsx TranscriptPane.tsx +import { + AgentTranscript, + type AgentTranscriptClassNames, + type TranscriptEntry, +} from "@sandbox-agent/react"; + +const transcriptClasses: Partial<AgentTranscriptClassNames> = { + root: "transcript", + message: "transcript-message", + messageContent: "transcript-message-content", + toolGroupContainer: "transcript-tools", + toolGroupHeader: "transcript-tools-header", + toolItem: "transcript-tool-item", + toolItemHeader: "transcript-tool-item-header", + toolItemBody: "transcript-tool-item-body", + divider: "transcript-divider", + dividerText: "transcript-divider-text", + error: "transcript-error", +}; + +export function TranscriptPane({ entries }: { entries: TranscriptEntry[] }) { + return ( + <AgentTranscript + entries={entries} + classNames={transcriptClasses} + renderMessageText={(entry) => <div>{entry.text}</div>} + renderInlinePendingIndicator={() => <span>...</span>} + renderToolGroupIcon={() => <span>Events</span>} + renderChevron={(expanded) => <span>{expanded ? 
"Hide" : "Show"}</span>} + /> + ); +} +``` + +```css +.transcript { + display: grid; + gap: 12px; +} + +.transcript [data-slot="message"][data-variant="user"] .transcript-message-content { + background: #161616; + color: white; +} + +.transcript [data-slot="message"][data-variant="assistant"] .transcript-message-content { + background: #f4f4f0; + color: #161616; +} + +.transcript [data-slot="tool-item"][data-failed="true"] { + border-color: #d33; +} + +.transcript [data-slot="tool-item-header"][data-expanded="true"] { + background: rgba(0, 0, 0, 0.06); +} +``` + +`AgentTranscript` accepts `TranscriptEntry[]`, which matches the Inspector timeline shape: + +- `message` entries render user/assistant text +- `tool` entries render expandable tool input/output sections +- `reasoning` entries render expandable reasoning blocks +- `meta` entries render status rows or expandable metadata details + +Useful props: + +- `className`: root class hook +- `classNames`: slot-level class hooks for styling from outside the package +- `scrollRef` + `virtualize`: opt into TanStack Virtual against an external scroll container +- `renderMessageText`: custom text or markdown renderer +- `renderToolItemIcon`, `renderToolGroupIcon`, `renderChevron`, `renderEventLinkContent`: presentation overrides +- `renderInlinePendingIndicator`, `renderThinkingState`: loading/thinking UI overrides +- `isDividerEntry`, `canOpenEvent`, `getToolGroupSummary`: behavior overrides for grouping and labels + +## Transcript virtualization hook + +`useTranscriptVirtualizer` exposes the same TanStack Virtual behavior used by `AgentTranscript` when `virtualize` is enabled. + +- Pass the grouped transcript rows you want to virtualize +- Pass a `scrollRef` that points at the actual scrollable element +- Use it when you need transcript-aware virtualization outside the stock `AgentTranscript` renderer + +## Composer and conversation + +`ChatComposer` is the headless message input. 
`AgentConversation` composes `AgentTranscript` and `ChatComposer` so apps can reuse the transcript/composer pairing without pulling in Inspector session chrome. + +```tsx ConversationPane.tsx +import { AgentConversation, type TranscriptEntry } from "@sandbox-agent/react"; + +export function ConversationPane({ + entries, + message, + onMessageChange, + onSubmit, +}: { + entries: TranscriptEntry[]; + message: string; + onMessageChange: (value: string) => void; + onSubmit: () => void; +}) { + return ( + <AgentConversation + entries={entries} + emptyState={<div>Start the conversation.</div>} + transcriptProps={{ + renderMessageText: (entry) => <div>{entry.text}</div>, + }} + composerProps={{ + message, + onMessageChange, + onSubmit, + placeholder: "Send a message...", + }} + /> + ); +} +``` + +Useful `ChatComposer` props: + +- `className` and `classNames` for external styling +- `inputRef` to manage focus or autoresize from the consumer +- `textareaProps` for lower-level textarea behavior +- `allowEmptySubmit` when the submit action is valid without draft text, such as a stop button + +Use `transcriptProps` and `composerProps` when you want the shared composition but still need custom rendering or behavior. Use `transcriptClassNames` and `composerClassNames` when you want styling hooks for each subcomponent. diff --git a/docs/sdk-overview.mdx b/docs/sdk-overview.mdx new file mode 100644 index 0000000..73e0d35 --- /dev/null +++ b/docs/sdk-overview.mdx @@ -0,0 +1,276 @@ +--- +title: "SDK Overview" +description: "Use the TypeScript SDK to manage Sandbox Agent sessions and APIs." +icon: "compass" +--- + +The TypeScript SDK is centered on `sandbox-agent` and its `SandboxAgent` class. + +## Install + +<Tabs> + <Tab title="npm"> + ```bash + npm install sandbox-agent@0.4.x + ``` + </Tab> + <Tab title="bun"> + ```bash + bun add sandbox-agent@0.4.x + # Allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). 
+ bun pm trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 + ``` + </Tab> +</Tabs> + +## Optional React components + +```bash +npm install @sandbox-agent/react@0.4.x +``` + +## Create a client + +```ts +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); +``` + +`SandboxAgent.connect(...)` now waits for `/v1/health` by default before other SDK requests proceed. To disable that gate, pass `waitForHealth: false`. To keep the default gate but fail after a bounded wait, pass `waitForHealth: { timeoutMs: 120_000 }`. To cancel the startup wait early, pass `signal: abortController.signal`. + +With a custom fetch handler (for example, proxying requests inside Workers): + +```ts +const sdk = await SandboxAgent.connect({ + fetch: (input, init) => customFetch(input, init), +}); +``` + +With an abort signal for the startup health gate: + +```ts +const controller = new AbortController(); + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + signal: controller.signal, +}); + +controller.abort(); +``` + +With persistence (see [Persisting Sessions](/session-persistence) for driver options): + +```ts +import { SandboxAgent, InMemorySessionPersistDriver } from "sandbox-agent"; + +const persist = new InMemorySessionPersistDriver(); + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + persist, +}); +``` + +Local spawn with a sandbox provider: + +```ts +import { SandboxAgent } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; + +const sdk = await SandboxAgent.start({ + sandbox: local(), +}); + +// sdk.sandboxId — prefixed provider ID (e.g. "local/127.0.0.1:2468") + +await sdk.destroySandbox(); // provider-defined cleanup + disposes client +``` + +`SandboxAgent.start(...)` requires a `sandbox` provider. 
Built-in providers: + +| Import | Provider | +|--------|----------| +| `sandbox-agent/local` | Local subprocess | +| `sandbox-agent/docker` | Docker container | +| `sandbox-agent/e2b` | E2B sandbox | +| `sandbox-agent/daytona` | Daytona workspace | +| `sandbox-agent/vercel` | Vercel Sandbox | +| `sandbox-agent/cloudflare` | Cloudflare Sandbox | + +Use `sdk.dispose()` to disconnect without changing sandbox state, `sdk.pauseSandbox()` for graceful suspension when supported, or `sdk.killSandbox()` for permanent deletion. + +## Session flow + +```ts +const session = await sdk.createSession({ + agent: "mock", + cwd: "/", +}); + +const prompt = await session.prompt([ + { type: "text", text: "Summarize this repository." }, +]); + +console.log(prompt.stopReason); +``` + +Load and destroy: + +```ts +const restored = await sdk.resumeSession(session.id); +await restored.prompt([{ type: "text", text: "Continue from previous context." }]); + +await sdk.destroySession(restored.id); +``` + +## Session configuration + +Set model, mode, or thought level at creation or on an existing session: + +```ts +const session = await sdk.createSession({ + agent: "codex", + model: "gpt-5.3-codex", +}); + +await session.setModel("gpt-5.2-codex"); +await session.setMode("auto"); + +const options = await session.getConfigOptions(); +const modes = await session.getModes(); +``` + +Handle permission requests from agents that ask before executing tools: + +```ts +const claude = await sdk.createSession({ + agent: "claude", + mode: "default", +}); + +claude.onPermissionRequest((request) => { + void claude.respondPermission(request.id, "once"); +}); +``` + +See [Agent Sessions](/agent-sessions) for full details on config options and error handling. + +## Events + +Subscribe to live events: + +```ts +const unsubscribe = session.onEvent((event) => { + console.log(event.eventIndex, event.sender, event.payload); +}); + +await session.prompt([{ type: "text", text: "Give me a short summary." 
}]); +unsubscribe(); +``` + +Fetch persisted events: + +```ts +const page = await sdk.getEvents({ + sessionId: session.id, + limit: 100, +}); + +console.log(page.items.length); +``` + +## Control-plane and HTTP helpers + +```ts +const health = await sdk.getHealth(); +const agents = await sdk.listAgents(); +await sdk.installAgent("codex", { reinstall: true }); + +const entries = await sdk.listFsEntries({ path: "." }); +const writeResult = await sdk.writeFsFile({ path: "./hello.txt" }, "hello"); + +console.log(health.status, agents.agents.length, entries.length, writeResult.path); +``` + +## Desktop API + +The SDK also wraps the desktop host/runtime HTTP API. + +Install desktop dependencies first on Linux hosts: + +```bash +sandbox-agent install desktop --yes +``` + +Then query status, surface remediation if needed, and start the runtime: + +```ts +const status = await sdk.getDesktopStatus(); + +if (status.state === "install_required") { + console.log(status.installCommand); +} + +const started = await sdk.startDesktop({ + width: 1440, + height: 900, + dpi: 96, +}); + +const screenshot = await sdk.takeDesktopScreenshot(); +const displayInfo = await sdk.getDesktopDisplayInfo(); + +await sdk.moveDesktopMouse({ x: 400, y: 300 }); +await sdk.clickDesktop({ x: 400, y: 300, button: "left", clickCount: 1 }); +await sdk.typeDesktopText({ text: "hello world", delayMs: 10 }); +await sdk.pressDesktopKey({ key: "ctrl+l" }); + +await sdk.stopDesktop(); +``` + +Screenshot helpers return `Uint8Array` PNG bytes. The SDK does not attempt to install OS packages remotely; callers should surface `missingDependencies` and `installCommand` from `getDesktopStatus()`. 
+ +## Error handling + +```ts +import { SandboxAgentError } from "sandbox-agent"; + +try { + await sdk.listAgents(); +} catch (error) { + if (error instanceof SandboxAgentError) { + console.error(error.status, error.problem); + } +} +``` + +## Inspector URL + +```ts +import { buildInspectorUrl } from "sandbox-agent"; + +const url = buildInspectorUrl({ + baseUrl: "https://your-sandbox-agent.example.com", + headers: { "X-Custom-Header": "value" }, +}); + +console.log(url); +``` + +Parameters: + +- `baseUrl` (required unless `fetch` is provided): Sandbox Agent server URL +- `token` (optional): Bearer token for authenticated servers +- `headers` (optional): Additional request headers +- `fetch` (optional): Custom fetch implementation used by SDK HTTP and session calls +- `skipHealthCheck` (optional): set `true` to skip the startup `/v1/health` wait +- `waitForHealth` (optional, defaults to enabled): waits for `/v1/health` before HTTP helpers and session setup proceed; pass `false` to disable or `{ timeoutMs }` to bound the wait +- `signal` (optional): aborts the startup `/v1/health` wait used by `connect()` + +## LLM credentials + +Sandbox Agent supports personal API keys, shared organization keys, and per-tenant gateway keys with budget enforcement. See [LLM Credentials](/llm-credentials) for setup details. diff --git a/docs/sdks/python.mdx b/docs/sdks/python.mdx deleted file mode 100644 index 80f667a..0000000 --- a/docs/sdks/python.mdx +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: "Python" -description: "Python client for managing sessions and streaming events." -icon: "python" -tag: "Coming Soon" ---- - -The Python SDK is on our roadmap. It will provide a typed client for managing sessions and streaming events, similar to the TypeScript SDK. - -In the meantime, you can use the [HTTP API](/http-api) directly with any HTTP client like `requests` or `httpx`. 
- -```python -import httpx - -base_url = "http://127.0.0.1:2468" -headers = {"Authorization": f"Bearer {token}"} - -# Create a session -httpx.post( - f"{base_url}/v1/sessions/my-session", - headers=headers, - json={"agent": "claude", "permissionMode": "default"} -) - -# Send a message -httpx.post( - f"{base_url}/v1/sessions/my-session/messages", - headers=headers, - json={"message": "Hello from Python"} -) - -# Get events -response = httpx.get( - f"{base_url}/v1/sessions/my-session/events", - headers=headers, - params={"offset": 0, "limit": 50} -) -events = response.json()["events"] -``` - -Want the Python SDK sooner? [Open an issue](https://github.com/rivet-dev/sandbox-agent/issues) to let us know. diff --git a/docs/sdks/typescript.mdx b/docs/sdks/typescript.mdx deleted file mode 100644 index 7ebc165..0000000 --- a/docs/sdks/typescript.mdx +++ /dev/null @@ -1,151 +0,0 @@ ---- -title: "TypeScript" -description: "Use the generated client to manage sessions and stream events." -icon: "js" ---- - -The TypeScript SDK is generated from the OpenAPI spec that ships with the server. It provides a typed -client for sessions, events, and agent operations. - -## Install - -```bash -npm install sandbox-agent -``` - -## Create a client - -```ts -import { SandboxAgent } from "sandbox-agent"; - -const client = await SandboxAgent.connect({ - baseUrl: "http://127.0.0.1:2468", - token: process.env.SANDBOX_TOKEN, -}); -``` - -## Autospawn (Node only) - -If you run locally, the SDK can launch the server for you. - -```ts -import { SandboxAgent } from "sandbox-agent"; - -const client = await SandboxAgent.start(); - -await client.dispose(); -``` - -Autospawn uses the local `sandbox-agent` binary. Install `@sandbox-agent/cli` (recommended) or set -`SANDBOX_AGENT_BIN` to a custom path. 
- -## Sessions and messages - -```ts -await client.createSession("demo-session", { - agent: "codex", - agentMode: "default", - permissionMode: "plan", -}); - -await client.postMessage("demo-session", { message: "Hello" }); -``` - -List agents and pick a compatible one: - -```ts -const agents = await client.listAgents(); -const codex = agents.agents.find((agent) => agent.id === "codex"); -console.log(codex?.capabilities); -``` - -## Poll events - -```ts -const events = await client.getEvents("demo-session", { - offset: 0, - limit: 200, - includeRaw: false, -}); - -for (const event of events.events) { - console.log(event.type, event.data); -} -``` - -## Stream events (SSE) - -```ts -for await (const event of client.streamEvents("demo-session", { - offset: 0, - includeRaw: false, -})) { - console.log(event.type, event.data); -} -``` - -The SDK parses `text/event-stream` into `UniversalEvent` objects. If you want full control, use -`getEventsSse()` and parse the stream yourself. - -## Stream a single turn - -```ts -for await (const event of client.streamTurn("demo-session", { message: "Hello" })) { - console.log(event.type, event.data); -} -``` - -This method posts the message and streams only the next turn. For manual control, call -`postMessageStream()` and parse the SSE response yourself. - -## Optional raw payloads - -Set `includeRaw: true` on `getEvents`, `streamEvents`, or `streamTurn` to include the raw provider -payload in `event.raw`. This is useful for debugging and conversion analysis. 
- -## Error handling - -All HTTP errors throw `SandboxAgentError`: - -```ts -import { SandboxAgentError } from "sandbox-agent"; - -try { - await client.postMessage("missing-session", { message: "Hi" }); -} catch (error) { - if (error instanceof SandboxAgentError) { - console.error(error.status, error.problem); - } -} -``` - -## Inspector URL - -Build a URL to open the sandbox-agent Inspector UI with pre-filled connection settings: - -```ts -import { buildInspectorUrl } from "sandbox-agent"; - -const url = buildInspectorUrl({ - baseUrl: "https://your-sandbox-agent.example.com", - token: "optional-bearer-token", - headers: { "X-Custom-Header": "value" }, -}); -console.log(url); -// https://your-sandbox-agent.example.com/ui/?token=...&headers=... -``` - -Parameters: -- `baseUrl` (required): The sandbox-agent server URL -- `token` (optional): Bearer token for authentication -- `headers` (optional): Extra headers to pass to the server (JSON-encoded in the URL) - -## Types - -The SDK exports OpenAPI-derived types for events, items, and capabilities: - -```ts -import type { UniversalEvent, UniversalItem, AgentCapabilities } from "sandbox-agent"; -``` - -See the [API Reference](/api) for schema details. diff --git a/docs/security.mdx b/docs/security.mdx new file mode 100644 index 0000000..c8b02ad --- /dev/null +++ b/docs/security.mdx @@ -0,0 +1,191 @@ +--- +title: "Security" +description: "Backend-first auth and access control patterns." +icon: "shield" +--- + +As covered in [Orchestration Architecture](/orchestration-architecture), run the Sandbox Agent client on your backend, not in the browser. + +This keeps sandbox credentials private and gives you one place for authz, rate limiting, and audit logging. + +## Auth model + +Implement auth however it fits your stack (sessions, JWT, API keys, etc.), but enforce it before any sandbox-bound request. + +Minimum checks: + +- Authenticate the caller. +- Authorize access to the target workspace/sandbox/session. 
+- Apply request rate limits and request logging. + +## Examples + +### Rivet + +<CodeGroup> + +```ts Actor (server) +import { UserError, actor } from "rivetkit"; +import { SandboxAgent } from "sandbox-agent"; + +type ConnParams = { + accessToken: string; +}; + +type WorkspaceClaims = { + sub: string; + workspaceId: string; + role: "owner" | "member" | "viewer"; +}; + +async function verifyWorkspaceToken( + token: string, + workspaceId: string, +): Promise<WorkspaceClaims | null> { + // Validate JWT/session token here, then enforce workspace scope. + // Return null when invalid/expired/not a member. + if (!token) return null; + return { sub: "user_123", workspaceId, role: "member" }; +} + +export const workspace = actor({ + state: { + events: [] as Array<{ userId: string; prompt: string; createdAt: number }>, + }, + + onBeforeConnect: async (c, params: ConnParams) => { + const claims = await verifyWorkspaceToken(params.accessToken, c.key[0]); + if (!claims) { + throw new UserError("Forbidden", { code: "forbidden" }); + } + }, + + createConnState: async (c, params: ConnParams) => { + const claims = await verifyWorkspaceToken(params.accessToken, c.key[0]); + if (!claims) { + throw new UserError("Forbidden", { code: "forbidden" }); + } + + return { + userId: claims.sub, + role: claims.role, + workspaceId: claims.workspaceId, + }; + }, + + actions: { + submitPrompt: async (c, prompt: string) => { + if (!c.conn) { + throw new UserError("Connection required", { code: "connection_required" }); + } + + if (c.conn.state.role === "viewer") { + throw new UserError("Insufficient permissions", { code: "forbidden" }); + } + + // Connect to Sandbox Agent from the actor (server-side only). + // Sandbox credentials never reach the client. 
+ const sdk = await SandboxAgent.connect({ + baseUrl: process.env.SANDBOX_URL!, + token: process.env.SANDBOX_TOKEN, + }); + + const session = await sdk.createSession({ + agent: "claude", + cwd: "/workspace", + }); + + session.onEvent((event) => { + c.broadcast("session.event", { + userId: c.conn!.state.userId, + eventIndex: event.eventIndex, + sender: event.sender, + payload: event.payload, + }); + }); + + const result = await session.prompt([ + { type: "text", text: prompt }, + ]); + + c.state.events.push({ + userId: c.conn.state.userId, + prompt, + createdAt: Date.now(), + }); + + return { stopReason: result.stopReason }; + }, + }, +}); +``` + +```ts Client (browser) +import { createClient } from "rivetkit/client"; +import type { registry } from "./actors"; + +const client = createClient<typeof registry>({ + endpoint: process.env.NEXT_PUBLIC_RIVET_ENDPOINT!, +}); + +const handle = client.workspace.getOrCreate(["ws_123"], { + params: { accessToken: userJwt }, +}); + +const conn = handle.connect(); + +conn.on("session.event", (event) => { + console.log(event.sender, event.payload); +}); + +const result = await conn.submitPrompt("Plan a refactor for auth middleware."); +console.log(result.stopReason); +``` + +</CodeGroup> + +Use [onBeforeConnect](https://rivet.dev/docs/actors/authentication), [connection params](https://rivet.dev/docs/actors/connections), and [actor keys](https://rivet.dev/docs/actors/keys) together so each actor enforces auth per workspace. + +### Hono + +```ts +import { Hono } from "hono"; +import { bearerAuth } from "hono/bearer-auth"; + +const app = new Hono(); + +app.use("/sandbox/*", bearerAuth({ token: process.env.APP_API_TOKEN! 
})); + +app.all("/sandbox/*", async (c) => { + const incoming = new URL(c.req.url); + const upstreamUrl = new URL(process.env.SANDBOX_URL!); + upstreamUrl.pathname = incoming.pathname.replace(/^\/sandbox/, "/v1"); + upstreamUrl.search = incoming.search; + + const headers = new Headers(); + headers.set("authorization", `Bearer ${process.env.SANDBOX_TOKEN ?? ""}`); + + const accept = c.req.header("accept"); + if (accept) headers.set("accept", accept); + + const contentType = c.req.header("content-type"); + if (contentType) headers.set("content-type", contentType); + + const body = + c.req.method === "POST" || c.req.method === "PUT" || c.req.method === "PATCH" + ? await c.req.text() + : undefined; + + const upstream = await fetch(upstreamUrl, { + method: c.req.method, + headers, + body, + }); + + return new Response(upstream.body, { + status: upstream.status, + headers: upstream.headers, + }); +}); +``` + diff --git a/docs/session-persistence.mdx b/docs/session-persistence.mdx new file mode 100644 index 0000000..5505864 --- /dev/null +++ b/docs/session-persistence.mdx @@ -0,0 +1,121 @@ +--- +title: "Persisting Sessions" +description: "Choose and configure session persistence for the TypeScript SDK." +icon: "database" +--- + +The TypeScript SDK uses a `SessionPersistDriver` to store session records and event history. +If you do not provide one, the SDK uses in-memory storage. +With persistence enabled, sessions can be restored after runtime/session loss. See [Session Restoration](/session-restoration). + +Each driver stores: + +- `SessionRecord` (`id`, `agent`, `agentSessionId`, `lastConnectionId`, `createdAt`, optional `destroyedAt`, optional `sandboxId`, optional `sessionInit`, optional `configOptions`, optional `modes`) +- `SessionEvent` (`id`, `eventIndex`, `sessionId`, `connectionId`, `sender`, `payload`, `createdAt`) + +## Persistence drivers + +### Rivet + +Recommended for sandbox orchestration with actor state. 
See [Multiplayer](/multiplayer) for a full Rivet actor example with persistence in actor state. + +### IndexedDB (browser) + +Best for browser apps that should survive reloads. See the [Inspector source](https://github.com/rivet-dev/sandbox-agent/tree/main/frontend/packages/inspector/src/persist-indexeddb.ts) for a complete IndexedDB driver you can copy into your project. + +### In-memory (built-in) + +Best for local dev and ephemeral workloads. No extra dependencies required. + +```ts +import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent"; + +const persist = new InMemorySessionPersistDriver({ + maxSessions: 1024, + maxEventsPerSession: 500, +}); + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + persist, +}); +``` + +### SQLite + +Best for local/server Node apps that need durable storage without a DB server. + +```bash +npm install better-sqlite3 +``` + +```ts +import { SandboxAgent } from "sandbox-agent"; +import { SQLiteSessionPersistDriver } from "./persist.ts"; + +const persist = new SQLiteSessionPersistDriver({ + filename: "./sandbox-agent.db", +}); + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + persist, +}); +``` + +See the [full SQLite example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-sqlite) for the complete driver implementation you can copy into your project. + +### Postgres + +Use when you already run Postgres and want shared relational storage. 
+ +```bash +npm install pg +``` + +```ts +import { SandboxAgent } from "sandbox-agent"; +import { PostgresSessionPersistDriver } from "./persist.ts"; + +const persist = new PostgresSessionPersistDriver({ + connectionString: process.env.DATABASE_URL, + schema: "public", +}); + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + persist, +}); +``` + +See the [full Postgres example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-postgres) for the complete driver implementation you can copy into your project. + +### Custom driver + +Implement `SessionPersistDriver` for custom backends. + +```ts +import type { SessionPersistDriver } from "sandbox-agent"; + +class MyDriver implements SessionPersistDriver { + async getSession(id) { return undefined; } + async listSessions(request) { return { items: [] }; } + async updateSession(session) {} + async listEvents(request) { return { items: [] }; } + async insertEvent(sessionId, event) {} +} +``` + +## Replay controls + +`SandboxAgent.connect(...)` supports: + +- `replayMaxEvents` (default `50`) +- `replayMaxChars` (default `12000`) + +These cap replay size when restoring sessions. + +## Related docs + +- [SDK Overview](/sdk-overview) +- [Session Restoration](/session-restoration) diff --git a/docs/session-restoration.mdx b/docs/session-restoration.mdx new file mode 100644 index 0000000..9766633 --- /dev/null +++ b/docs/session-restoration.mdx @@ -0,0 +1,33 @@ +--- +title: "Session Restoration" +description: "How the TypeScript SDK restores sessions after connection/runtime loss." +--- + +Sandbox Agent automatically restores stale sessions when live session state is no longer available. + +This is driven by the configured `SessionPersistDriver` (`inMemory`, IndexedDB, SQLite, Postgres, or custom). + +## How Auto-Restore Works + +When you call `session.prompt(...)` (or `resumeSession(...)`) and the saved session points to a stale connection, the SDK: + +1. 
Recreates a fresh session for the same local session id. +2. Rebinds the local session to the new runtime session id. +3. Replays recent persisted events into the next prompt as context. + +This happens automatically; you do not need to manually rebuild the session. + +## Replay Limits + +Replay payload size is capped by: + +- `replayMaxEvents` (default `50`) +- `replayMaxChars` (default `12000`) + +These controls limit prompt growth during restore while preserving recent context. + +## Related Docs + +- [SDK Overview](/sdk-overview) +- [Persisting Sessions](/session-persistence) +- [Agent Sessions](/agent-sessions) diff --git a/docs/session-transcript-schema.mdx b/docs/session-transcript-schema.mdx deleted file mode 100644 index 7a527ef..0000000 --- a/docs/session-transcript-schema.mdx +++ /dev/null @@ -1,375 +0,0 @@ ---- -title: "Session Transcript Schema" -description: "Universal event schema for session transcripts across all agents." -icon: "brackets-curly" ---- - -Each coding agent outputs events in its own native format. The sandbox-agent converts these into a universal event schema, giving you a consistent session transcript regardless of which agent you use. - -The schema is defined in [OpenAPI format](https://github.com/rivet-dev/sandbox-agent/blob/main/docs/openapi.json). See the [HTTP API Reference](/api-reference) for endpoint documentation. - -## Coverage Matrix - -This table shows which agent capabilities appear in the universal event stream. All agents retain their full native capabilities—this only reflects what's normalized into the schema. 
- -| Feature | Claude | Codex | OpenCode | Amp | -|--------------------|:------:|:-----:|:------------:|:------------:| -| Stability | Stable | Stable| Experimental | Experimental | -| Text Messages | ✓ | ✓ | ✓ | ✓ | -| Tool Calls | ✓ | ✓ | ✓ | ✓ | -| Tool Results | ✓ | ✓ | ✓ | ✓ | -| Questions (HITL) | ✓ | | ✓ | | -| Permissions (HITL) | ✓ | ✓ | ✓ | - | -| Images | - | ✓ | ✓ | - | -| File Attachments | - | ✓ | ✓ | - | -| Session Lifecycle | - | ✓ | ✓ | - | -| Error Events | - | ✓ | ✓ | ✓ | -| Reasoning/Thinking | - | ✓ | - | - | -| Command Execution | - | ✓ | - | - | -| File Changes | - | ✓ | - | - | -| MCP Tools | - | ✓ | - | - | -| Streaming Deltas | ✓ | ✓ | ✓ | - | - -Agents: [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview) · [Codex](https://github.com/openai/codex) · [OpenCode](https://github.com/opencode-ai/opencode) · [Amp](https://ampcode.com) - -- ✓ = Appears in session events -- \- = Agent supports natively, schema conversion coming soon -- (blank) = Not supported by agent - -<AccordionGroup> - <Accordion title="Text Messages"> - Basic message exchange between user and assistant. - </Accordion> - <Accordion title="Tool Calls & Results"> - Visibility into tool invocations (file reads, command execution, etc.) and their results. When not natively supported, tool activity is embedded in message content. - </Accordion> - <Accordion title="Questions (HITL)"> - Interactive questions the agent asks the user. Emits `question.requested` and `question.resolved` events. - </Accordion> - <Accordion title="Permissions (HITL)"> - Permission requests for sensitive operations. Emits `permission.requested` and `permission.resolved` events. - </Accordion> - <Accordion title="Images"> - Support for image attachments in messages. - </Accordion> - <Accordion title="File Attachments"> - Support for file attachments in messages. - </Accordion> - <Accordion title="Session Lifecycle"> - Native `session.started` and `session.ended` events. 
When not supported, the daemon emits synthetic lifecycle events. - </Accordion> - <Accordion title="Error Events"> - Structured error events for runtime failures. - </Accordion> - <Accordion title="Reasoning/Thinking"> - Extended thinking or reasoning content with visibility controls. - </Accordion> - <Accordion title="Command Execution"> - Detailed command execution events with stdout/stderr. - </Accordion> - <Accordion title="File Changes"> - Structured file modification events with diffs. - </Accordion> - <Accordion title="MCP Tools"> - Model Context Protocol tool support. - </Accordion> - <Accordion title="Streaming Deltas"> - Native streaming of content deltas. When not supported, the daemon emits a single synthetic delta before `item.completed`. - </Accordion> -</AccordionGroup> - -Want support for another agent? [Open an issue](https://github.com/rivet-dev/sandbox-agent/issues/new) to request it. - -## UniversalEvent - -Every event from the API is wrapped in a `UniversalEvent` envelope. - -| Field | Type | Description | -|-------|------|-------------| -| `event_id` | string | Unique identifier for this event | -| `sequence` | integer | Monotonic sequence number within the session (starts at 1) | -| `time` | string | RFC3339 timestamp | -| `session_id` | string | Daemon-generated session identifier | -| `native_session_id` | string? | Provider-native session/thread identifier (e.g., Codex `threadId`, OpenCode `sessionID`) | -| `source` | string | Event origin: `agent` (native) or `daemon` (synthetic) | -| `synthetic` | boolean | Whether this event was generated by the daemon to fill gaps | -| `type` | string | Event type (see [Event Types](#event-types)) | -| `data` | object | Event-specific payload | -| `raw` | any? 
| Original provider payload (only when `include_raw=true`) | - -```json -{ - "event_id": "evt_abc123", - "sequence": 1, - "time": "2025-01-28T12:00:00Z", - "session_id": "my-session", - "native_session_id": "thread_xyz", - "source": "agent", - "synthetic": false, - "type": "item.completed", - "data": { ... } -} -``` - -## Event Types - -### Session Lifecycle - -| Type | Description | Data | -|------|-------------|------| -| `session.started` | Session has started | `{ metadata?: any }` | -| `session.ended` | Session has ended | `{ reason, terminated_by, message?, exit_code? }` | - -**SessionEndedData** - -| Field | Type | Values | -|-------|------|--------| -| `reason` | string | `completed`, `error`, `terminated` | -| `terminated_by` | string | `agent`, `daemon` | -| `message` | string? | Error message (only present when reason is `error`) | -| `exit_code` | int? | Process exit code (only present when reason is `error`) | -| `stderr` | StderrOutput? | Structured stderr output (only present when reason is `error`) | - -**StderrOutput** - -| Field | Type | Description | -|-------|------|-------------| -| `head` | string? | First 20 lines of stderr (if truncated) or full stderr (if not truncated) | -| `tail` | string? | Last 50 lines of stderr (only present if truncated) | -| `truncated` | boolean | Whether the output was truncated | -| `total_lines` | int? | Total number of lines in stderr | - -### Item Lifecycle - -| Type | Description | Data | -|------|-------------|------| -| `item.started` | Item creation | `{ item }` | -| `item.delta` | Streaming content delta | `{ item_id, native_item_id?, delta }` | -| `item.completed` | Item finalized | `{ item }` | - -Items follow a consistent lifecycle: `item.started` → `item.delta` (0 or more) → `item.completed`. - -### HITL (Human-in-the-Loop) - -| Type | Description | Data | -|------|-------------|------| -| `permission.requested` | Permission request pending | `{ permission_id, action, status, metadata? 
}` | -| `permission.resolved` | Permission granted or denied | `{ permission_id, action, status, metadata? }` | -| `question.requested` | Question pending user input | `{ question_id, prompt, options, status }` | -| `question.resolved` | Question answered or rejected | `{ question_id, prompt, options, status, response? }` | - -**PermissionEventData** - -| Field | Type | Description | -|-------|------|-------------| -| `permission_id` | string | Identifier for the permission request | -| `action` | string | What the agent wants to do | -| `status` | string | `requested`, `approved`, `denied` | -| `metadata` | any? | Additional context | - -**QuestionEventData** - -| Field | Type | Description | -|-------|------|-------------| -| `question_id` | string | Identifier for the question | -| `prompt` | string | Question text | -| `options` | string[] | Available answer options | -| `status` | string | `requested`, `answered`, `rejected` | -| `response` | string? | Selected answer (when resolved) | - -### Errors - -| Type | Description | Data | -|------|-------------|------| -| `error` | Runtime error | `{ message, code?, details? }` | -| `agent.unparsed` | Parse failure | `{ error, location, raw_hash? }` | - -The `agent.unparsed` event indicates the daemon failed to parse an agent payload. This should be treated as a bug. - -## UniversalItem - -Items represent discrete units of content within a session. - -| Field | Type | Description | -|-------|------|-------------| -| `item_id` | string | Daemon-generated identifier | -| `native_item_id` | string? | Provider-native item/message identifier | -| `parent_id` | string? | Parent item ID (e.g., tool call/result parented to a message) | -| `kind` | string | Item category (see below) | -| `role` | string? 
| Actor role for message items | -| `status` | string | Lifecycle status | -| `content` | ContentPart[] | Ordered list of content parts | - -### ItemKind - -| Value | Description | -|-------|-------------| -| `message` | User or assistant message | -| `tool_call` | Tool invocation | -| `tool_result` | Tool execution result | -| `system` | System message | -| `status` | Status update | -| `unknown` | Unrecognized item type | - -### ItemRole - -| Value | Description | -|-------|-------------| -| `user` | User message | -| `assistant` | Assistant response | -| `system` | System prompt | -| `tool` | Tool-related message | - -### ItemStatus - -| Value | Description | -|-------|-------------| -| `in_progress` | Item is streaming or pending | -| `completed` | Item is finalized | -| `failed` | Item execution failed | - -## Content Parts - -The `content` array contains typed parts that make up an item's payload. - -### text - -Plain text content. - -```json -{ "type": "text", "text": "Hello, world!" } -``` - -### json - -Structured JSON content. - -```json -{ "type": "json", "json": { "key": "value" } } -``` - -### tool_call - -Tool invocation. - -| Field | Type | Description | -|-------|------|-------------| -| `name` | string | Tool name | -| `arguments` | string | JSON-encoded arguments | -| `call_id` | string | Unique call identifier | - -```json -{ - "type": "tool_call", - "name": "read_file", - "arguments": "{\"path\": \"/src/main.ts\"}", - "call_id": "call_abc123" -} -``` - -### tool_result - -Tool execution result. - -| Field | Type | Description | -|-------|------|-------------| -| `call_id` | string | Matching call identifier | -| `output` | string | Tool output | - -```json -{ - "type": "tool_result", - "call_id": "call_abc123", - "output": "File contents here..." -} -``` - -### file_ref - -File reference with optional diff. 
- -| Field | Type | Description | -|-------|------|-------------| -| `path` | string | File path | -| `action` | string | `read`, `write`, `patch` | -| `diff` | string? | Unified diff (for patches) | - -```json -{ - "type": "file_ref", - "path": "/src/main.ts", - "action": "write", - "diff": "@@ -1,3 +1,4 @@\n+import { foo } from 'bar';" -} -``` - -### image - -Image reference. - -| Field | Type | Description | -|-------|------|-------------| -| `path` | string | Image file path | -| `mime` | string? | MIME type | - -```json -{ "type": "image", "path": "/tmp/screenshot.png", "mime": "image/png" } -``` - -### reasoning - -Model reasoning/thinking content. - -| Field | Type | Description | -|-------|------|-------------| -| `text` | string | Reasoning text | -| `visibility` | string | `public` or `private` | - -```json -{ "type": "reasoning", "text": "Let me think about this...", "visibility": "public" } -``` - -### status - -Status indicator. - -| Field | Type | Description | -|-------|------|-------------| -| `label` | string | Status label | -| `detail` | string? | Additional detail | - -```json -{ "type": "status", "label": "Running tests", "detail": "3 of 10 passed" } -``` - -## Source & Synthetics - -### EventSource - -The `source` field indicates who emitted the event: - -| Value | Description | -|-------|-------------| -| `agent` | Native event from the agent | -| `daemon` | Synthetic event generated by the daemon | - -### Synthetic Events - -The daemon emits synthetic events (`synthetic: true`, `source: "daemon"`) to provide a consistent event stream across all agents. 
Common synthetics: - -| Synthetic | When | -|-----------|------| -| `session.started` | Agent doesn't emit explicit session start | -| `session.ended` | Agent doesn't emit explicit session end | -| `item.started` | Agent doesn't emit item start events | -| `item.delta` | Agent doesn't stream deltas natively | -| `question.*` | Claude Code plan mode (from ExitPlanMode tool) | - -### Raw Payloads - -Pass `include_raw=true` to event endpoints to receive the original agent payload in the `raw` field. Useful for debugging or accessing agent-specific data not in the universal schema. - -```typescript -const events = await client.getEvents("my-session", { includeRaw: true }); -// events[0].raw contains the original agent payload -``` diff --git a/docs/skills-config.mdx b/docs/skills-config.mdx new file mode 100644 index 0000000..c3145c2 --- /dev/null +++ b/docs/skills-config.mdx @@ -0,0 +1,79 @@ +--- +title: "Skills" +description: "Configure skill sources for agent sessions." +sidebarTitle: "Skills" +icon: "sparkles" +--- + +Skills are local instruction bundles stored in `SKILL.md` files. + +## Configuring skills + +Use `setSkillsConfig` / `getSkillsConfig` / `deleteSkillsConfig` to manage skill source config by directory + skill name. + +```ts +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", +}); + +// Add a skill +await sdk.setSkillsConfig( + { + directory: "/workspace", + skillName: "default", + }, + { + sources: [ + { type: "github", source: "rivet-dev/skills", skills: ["sandbox-agent"] }, + { type: "local", source: "/workspace/my-custom-skill" }, + ], + }, +); + +// Create a session using the configured skills +const session = await sdk.createSession({ + agent: "claude", + cwd: "/workspace", +}); + +await session.prompt([ + { type: "text", text: "Use available skills to help with this task." 
}, +]); + +// List skills +const config = await sdk.getSkillsConfig({ + directory: "/workspace", + skillName: "default", +}); + +console.log(config.sources.length); + +// Delete skill +await sdk.deleteSkillsConfig({ + directory: "/workspace", + skillName: "default", +}); + +``` + +## Skill sources + +Each `skills.sources` entry describes where to find skills. + +| Type | `source` value | Example | +|------|---------------|---------| +| `github` | `owner/repo` | `"rivet-dev/skills"` | +| `local` | filesystem path | `"/workspace/my-skill"` | +| `git` | git clone URL | `"https://git.example.com/skills.git"` | + +Optional fields: + +- `skills`: subset of skill directory names to include +- `ref`: branch/tag/commit (for `github` and `git`) +- `subpath`: subdirectory within repo to scan + +## Custom skills + +To write, upload, and configure your own skills inside the sandbox, see [Custom Tools](/custom-tools). diff --git a/docs/theme.css b/docs/theme.css index daeb719..4286d2c 100644 --- a/docs/theme.css +++ b/docs/theme.css @@ -20,7 +20,6 @@ body { color: var(--sa-text); } -/* a { color: var(--sa-primary); } @@ -41,6 +40,13 @@ select { color: var(--sa-text); } +code, +pre { + background-color: var(--sa-card); + border: 1px solid var(--sa-border); + color: var(--sa-text); +} + .card, .mintlify-card, .docs-card { @@ -64,4 +70,3 @@ select { .alert-danger { border-color: var(--sa-danger); } -*/ diff --git a/docs/troubleshooting.mdx b/docs/troubleshooting.mdx index 838cc28..18186d6 100644 --- a/docs/troubleshooting.mdx +++ b/docs/troubleshooting.mdx @@ -29,25 +29,6 @@ Verify the agent is installed: ls -la ~/.local/share/sandbox-agent/bin/ ``` -### 4. Binary libc mismatch (musl vs glibc) - -Claude Code binaries are available in both musl and glibc variants. If you see errors like: - -``` -cannot execute: required file not found -Error loading shared library libstdc++.so.6: No such file or directory -``` - -This means the wrong binary variant was downloaded. 
- -**For sandbox-agent 0.2.0+**: Platform detection is automatic. The correct binary (musl or glibc) is downloaded based on the runtime environment. - -**For sandbox-agent 0.1.x**: Use Alpine Linux which has native musl support: - -```dockerfile -FROM alpine:latest -RUN apk add --no-cache curl ca-certificates libstdc++ libgcc bash -``` ## Daytona Network Restrictions diff --git a/examples/CLAUDE.md b/examples/CLAUDE.md new file mode 100644 index 0000000..048312f --- /dev/null +++ b/examples/CLAUDE.md @@ -0,0 +1,22 @@ +# Examples Instructions + +## Docker Isolation + +- Docker examples must behave like standalone sandboxes. +- Do not bind mount host files or host directories into Docker example containers. +- If an example needs tools, skills, or MCP servers, install them inside the container during setup. + +## Testing Examples (ACP v2) + +Examples should be validated against v2 endpoints: + +1. Start the example: `SANDBOX_AGENT_DEV=1 pnpm start` +2. Create an ACP client by POSTing `initialize` to `/v2/rpc` with `x-acp-agent: mock` (or another installed agent). +3. Capture `x-acp-connection-id` from the response headers. +4. Open SSE stream: `GET /v2/rpc` with `x-acp-connection-id`. +5. Send `session/new` then `session/prompt` via `POST /v2/rpc` with the same connection id. +6. Close connection via `DELETE /v2/rpc` with `x-acp-connection-id`. + +v1 reminder: + +- `/v1/*` is removed and returns `410 Gone`. 
diff --git a/examples/boxlite-python/.gitignore b/examples/boxlite-python/.gitignore new file mode 100644 index 0000000..f878106 --- /dev/null +++ b/examples/boxlite-python/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +*.pyc +.venv/ +oci-image/ diff --git a/examples/boxlite-python/Dockerfile b/examples/boxlite-python/Dockerfile new file mode 100644 index 0000000..8aba774 --- /dev/null +++ b/examples/boxlite-python/Dockerfile @@ -0,0 +1,5 @@ +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/* +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh +RUN sandbox-agent install-agent claude +RUN sandbox-agent install-agent codex diff --git a/examples/boxlite-python/client.py b/examples/boxlite-python/client.py new file mode 100644 index 0000000..29e4609 --- /dev/null +++ b/examples/boxlite-python/client.py @@ -0,0 +1,145 @@ +"""Minimal JSON-RPC client for sandbox-agent's streamable HTTP transport.""" + +import json +import threading +import time +import uuid + +import httpx + + +class SandboxConnection: + """Connects to a sandbox-agent server via JSON-RPC over streamable HTTP. + + Endpoints used: + POST /v1/acp/{server_id}?agent=... 
(bootstrap + requests) + GET /v1/acp/{server_id} (SSE event stream) + DELETE /v1/acp/{server_id} (close) + """ + + def __init__(self, base_url: str, agent: str): + self.base_url = base_url.rstrip("/") + self.agent = agent + self.server_id = f"py-{uuid.uuid4().hex[:8]}" + self.url = f"{self.base_url}/v1/acp/{self.server_id}" + self._next_id = 0 + self._events: list[dict] = [] + self._stop = threading.Event() + self._sse_thread: threading.Thread | None = None + + def _alloc_id(self) -> int: + self._next_id += 1 + return self._next_id + + def _post(self, method: str, params: dict | None = None, *, bootstrap: bool = False) -> dict: + payload: dict = { + "jsonrpc": "2.0", + "id": self._alloc_id(), + "method": method, + } + if params is not None: + payload["params"] = params + + url = f"{self.url}?agent={self.agent}" if bootstrap else self.url + r = httpx.post(url, json=payload, timeout=120) + r.raise_for_status() + body = r.text.strip() + return json.loads(body) if body else {} + + # -- Lifecycle ----------------------------------------------------------- + + def initialize(self) -> dict: + result = self._post( + "initialize", + { + "protocolVersion": 1, + "clientInfo": {"name": "python-example", "version": "0.1.0"}, + }, + bootstrap=True, + ) + self._start_sse() + + # Auto-authenticate if the agent advertises env-var-based auth methods. 
+ auth_methods = result.get("result", {}).get("authMethods", []) + env_ids = ("anthropic-api-key", "codex-api-key", "openai-api-key") + for method in auth_methods: + if method.get("id") not in env_ids: + continue + try: + resp = self._post("authenticate", {"methodId": method["id"]}) + if "error" not in resp: + break + except Exception: + continue + + return result + + def new_session(self, cwd: str = "/root") -> str: + result = self._post("session/new", {"cwd": cwd, "mcpServers": []}) + if "error" in result: + raise RuntimeError(f"session/new failed: {result['error'].get('message', result['error'])}") + return result["result"]["sessionId"] + + def prompt(self, session_id: str, text: str) -> dict: + result = self._post( + "session/prompt", + { + "sessionId": session_id, + "prompt": [{"type": "text", "text": text}], + }, + ) + return result + + def close(self) -> None: + self._stop.set() + try: + httpx.delete(self.url, timeout=2) + except Exception: + pass + + # -- SSE event stream (background thread) -------------------------------- + + @property + def events(self) -> list[dict]: + return list(self._events) + + def _start_sse(self) -> None: + self._sse_thread = threading.Thread(target=self._sse_loop, daemon=True) + self._sse_thread.start() + + def _sse_loop(self) -> None: + while not self._stop.is_set(): + try: + with httpx.stream( + "GET", + self.url, + headers={"Accept": "text/event-stream"}, + timeout=httpx.Timeout(connect=5, read=None, write=5, pool=5), + ) as resp: + buffer = "" + for chunk in resp.iter_text(): + if self._stop.is_set(): + break + buffer += chunk.replace("\r\n", "\n") + while "\n\n" in buffer: + event_chunk, buffer = buffer.split("\n\n", 1) + self._process_sse_event(event_chunk) + except Exception: + if self._stop.is_set(): + return + time.sleep(0.15) + + def _process_sse_event(self, chunk: str) -> None: + data_lines: list[str] = [] + for line in chunk.split("\n"): + if line.startswith("data:"): + data_lines.append(line[5:].lstrip()) + if not 
data_lines: + return + payload = "\n".join(data_lines).strip() + if not payload: + return + try: + self._events.append(json.loads(payload)) + except json.JSONDecodeError: + pass diff --git a/examples/boxlite-python/credentials.py b/examples/boxlite-python/credentials.py new file mode 100644 index 0000000..46114dc --- /dev/null +++ b/examples/boxlite-python/credentials.py @@ -0,0 +1,32 @@ +"""Agent detection and credential helpers for sandbox-agent examples.""" + +import os +import sys + + +def detect_agent() -> str: + """Pick an agent based on env vars. Exits if no credentials are found.""" + if os.environ.get("SANDBOX_AGENT"): + return os.environ["SANDBOX_AGENT"] + has_claude = bool( + os.environ.get("ANTHROPIC_API_KEY") + or os.environ.get("CLAUDE_API_KEY") + or os.environ.get("CLAUDE_CODE_OAUTH_TOKEN") + ) + has_codex = (os.environ.get("OPENAI_API_KEY") or "").startswith("sk-") + if has_codex: + return "codex" + if has_claude: + return "claude" + print("No API keys found. Set ANTHROPIC_API_KEY or OPENAI_API_KEY.") + sys.exit(1) + + +def build_box_env() -> list[tuple[str, str]]: + """Collect credential env vars to forward into the BoxLite sandbox.""" + env: list[tuple[str, str]] = [] + for key in ("ANTHROPIC_API_KEY", "CLAUDE_API_KEY", "OPENAI_API_KEY", "CODEX_API_KEY"): + val = os.environ.get(key) + if val: + env.append((key, val)) + return env diff --git a/examples/boxlite-python/main.py b/examples/boxlite-python/main.py new file mode 100644 index 0000000..244985f --- /dev/null +++ b/examples/boxlite-python/main.py @@ -0,0 +1,110 @@ +""" +Sandbox Agent – Python + BoxLite example. + +Builds a Docker image, exports it to OCI layout, runs it inside a BoxLite +sandbox, connects to the sandbox-agent server, creates a session, and sends a prompt. 
+ +Usage: + pip install -r requirements.txt + python main.py +""" + +import asyncio +import json +import signal +import time + +import boxlite +import httpx + +from client import SandboxConnection +from credentials import build_box_env, detect_agent +from setup_image import OCI_DIR, setup_image + +PORT = 3000 + + +def wait_for_health(base_url: str, timeout_s: float = 120) -> None: + deadline = time.monotonic() + timeout_s + last_err: str | None = None + while time.monotonic() < deadline: + try: + r = httpx.get(f"{base_url}/v1/health", timeout=5) + if r.status_code == 200 and r.json().get("status") == "ok": + return + last_err = f"health returned {r.status_code}" + except Exception as exc: + last_err = str(exc) + time.sleep(0.5) + raise RuntimeError(f"Timed out waiting for /v1/health: {last_err}") + + +async def main() -> None: + agent = detect_agent() + print(f"Agent: {agent}") + + setup_image() + + env = build_box_env() + + print("Creating BoxLite sandbox...") + box = boxlite.SimpleBox( + rootfs_path=OCI_DIR, + env=env, + ports=[(PORT, PORT, "tcp")], + ) + + async with box: + print("Starting server...") + result = await box.exec( + "sh", "-c", + f"nohup sandbox-agent server --no-token --host 0.0.0.0 --port {PORT} " + ">/tmp/sandbox-agent.log 2>&1 &", + ) + if result.exit_code != 0: + raise RuntimeError(f"Failed to start server: {result.stderr}") + + base_url = f"http://localhost:{PORT}" + print("Waiting for server...") + wait_for_health(base_url) + print("Server ready.") + print(f"Inspector: {base_url}/ui/") + + # -- Session flow ---------------------------------------------------- + conn = SandboxConnection(base_url, agent) + + print("Connecting...") + init_result = conn.initialize() + agent_info = init_result.get("result", {}).get("agentInfo", {}) + print(f"Connected to: {agent_info.get('title', agent)} {agent_info.get('version', '')}") + + session_id = conn.new_session() + print(f"Session: {session_id}") + + prompt_text = "Say hello and tell me what you are. 
Be brief (one sentence)." + print(f"\n> {prompt_text}") + response = conn.prompt(session_id, prompt_text) + + if "error" in response: + err = response["error"] + print(f"Error: {err.get('message', err)}") + else: + print(f"Stop reason: {response.get('result', {}).get('stopReason', 'unknown')}") + + # Give SSE events a moment to arrive. + time.sleep(1) + + if conn.events: + for ev in conn.events: + if ev.get("method") == "session/update": + content = ev.get("params", {}).get("update", {}).get("content", {}) + if content.get("text"): + print(content["text"], end="") + print() + + conn.close() + print("\nDone.") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/boxlite-python/requirements.txt b/examples/boxlite-python/requirements.txt new file mode 100644 index 0000000..2977142 --- /dev/null +++ b/examples/boxlite-python/requirements.txt @@ -0,0 +1,2 @@ +boxlite>=0.5.0 +httpx>=0.27.0 diff --git a/examples/boxlite-python/setup_image.py b/examples/boxlite-python/setup_image.py new file mode 100644 index 0000000..f56b76f --- /dev/null +++ b/examples/boxlite-python/setup_image.py @@ -0,0 +1,29 @@ +"""Build the sandbox-agent Docker image and export it to OCI layout.""" + +import os +import subprocess + +DOCKER_IMAGE = "sandbox-agent-boxlite" +OCI_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "oci-image") + + +def setup_image() -> None: + dockerfile_dir = os.path.dirname(os.path.abspath(__file__)) + + print(f'Building image "{DOCKER_IMAGE}" (cached after first run)...') + subprocess.run( + ["docker", "build", "-t", DOCKER_IMAGE, dockerfile_dir], + check=True, + ) + + if not os.path.exists(os.path.join(OCI_DIR, "oci-layout")): + print("Exporting to OCI layout...") + os.makedirs(OCI_DIR, exist_ok=True) + subprocess.run( + [ + "skopeo", "copy", + f"docker-daemon:{DOCKER_IMAGE}:latest", + f"oci:{OCI_DIR}:latest", + ], + check=True, + ) diff --git a/examples/boxlite/.gitignore b/examples/boxlite/.gitignore new file mode 100644 index 
0000000..329f592 --- /dev/null +++ b/examples/boxlite/.gitignore @@ -0,0 +1 @@ +oci-image/ diff --git a/examples/boxlite/Dockerfile b/examples/boxlite/Dockerfile new file mode 100644 index 0000000..8aba774 --- /dev/null +++ b/examples/boxlite/Dockerfile @@ -0,0 +1,5 @@ +FROM node:22-bookworm-slim +RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/* +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh +RUN sandbox-agent install-agent claude +RUN sandbox-agent install-agent codex diff --git a/examples/boxlite/package.json b/examples/boxlite/package.json new file mode 100644 index 0000000..8e7a5d8 --- /dev/null +++ b/examples/boxlite/package.json @@ -0,0 +1,19 @@ +{ + "name": "@sandbox-agent/example-boxlite", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@boxlite-ai/boxlite": "latest", + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/boxlite/src/index.ts b/examples/boxlite/src/index.ts new file mode 100644 index 0000000..171166b --- /dev/null +++ b/examples/boxlite/src/index.ts @@ -0,0 +1,41 @@ +import { SimpleBox } from "@boxlite-ai/boxlite"; +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { setupImage, OCI_DIR } from "./setup-image.ts"; + +const env: Record<string, string> = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +setupImage(); + +console.log("Creating BoxLite sandbox..."); +const box = new SimpleBox({ + rootfsPath: OCI_DIR, + env, + ports: [{ hostPort: 3000, guestPort: 3000 }], + diskSizeGb: 4, +}); + 
+console.log("Starting server..."); +const result = await box.exec("sh", "-c", "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &"); +if (result.exitCode !== 0) throw new Error(`Failed to start server: ${result.stderr}`); + +const baseUrl = "http://localhost:3000"; + +console.log("Connecting to server..."); +const client = await SandboxAgent.connect({ baseUrl }); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); +const sessionId = session.id; + +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +const cleanup = async () => { + clearInterval(keepAlive); + await box.stop(); + process.exit(0); +}; +process.once("SIGINT", cleanup); +process.once("SIGTERM", cleanup); diff --git a/examples/boxlite/src/setup-image.ts b/examples/boxlite/src/setup-image.ts new file mode 100644 index 0000000..9c15c99 --- /dev/null +++ b/examples/boxlite/src/setup-image.ts @@ -0,0 +1,16 @@ +import { execSync } from "node:child_process"; +import { existsSync, mkdirSync } from "node:fs"; + +export const DOCKER_IMAGE = "sandbox-agent-boxlite"; +export const OCI_DIR = new URL("../oci-image", import.meta.url).pathname; + +export function setupImage() { + console.log(`Building image "${DOCKER_IMAGE}" (cached after first run)...`); + execSync(`docker build -t ${DOCKER_IMAGE} ${new URL("..", import.meta.url).pathname}`, { stdio: "inherit" }); + + if (!existsSync(`${OCI_DIR}/oci-layout`)) { + console.log("Exporting to OCI layout..."); + mkdirSync(OCI_DIR, { recursive: true }); + execSync(`docker save ${DOCKER_IMAGE} | tar -xf - -C ${OCI_DIR}`, { stdio: "inherit" }); + } +} diff --git a/examples/boxlite/tsconfig.json b/examples/boxlite/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/boxlite/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": 
["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/cloudflare/.gitignore b/examples/cloudflare/.gitignore new file mode 100644 index 0000000..a933f10 --- /dev/null +++ b/examples/cloudflare/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +.wrangler/ +.dev.vars diff --git a/examples/cloudflare/Dockerfile b/examples/cloudflare/Dockerfile new file mode 100644 index 0000000..738f8a2 --- /dev/null +++ b/examples/cloudflare/Dockerfile @@ -0,0 +1,11 @@ +FROM cloudflare/sandbox:0.7.0 + +# Install sandbox-agent +RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.4.x/install.sh | sh + +# Pre-install agents +RUN sandbox-agent install-agent claude && \ + sandbox-agent install-agent codex + +# Expose port for local dev (wrangler dev requires EXPOSE directives) +EXPOSE 8000 diff --git a/examples/cloudflare/README.md b/examples/cloudflare/README.md new file mode 100644 index 0000000..b8461f2 --- /dev/null +++ b/examples/cloudflare/README.md @@ -0,0 +1,88 @@ +# Cloudflare Sandbox Agent Example + +Deploy sandbox-agent inside a Cloudflare Sandbox. + +## Prerequisites + +- Cloudflare account with Workers Paid plan +- Docker running locally for `wrangler dev` +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` for the coding agents + +## Setup + +1. Install dependencies: + +```bash +pnpm install +``` + +2. 
Create `.dev.vars` with your API keys: + +```bash +echo "ANTHROPIC_API_KEY=your-api-key" > .dev.vars +``` + +## Development + +Start the development server: + +```bash +pnpm run dev +``` + +Test the endpoint: + +```bash +curl http://localhost:8787 +``` + +Test prompt routing through the SDK with a custom sandbox fetch handler: + +```bash +curl -N -X POST "http://localhost:8787/sandbox/demo/prompt" \ + -H "Content-Type: application/json" \ + -H "Accept: text/event-stream" \ + -d '{"agent":"codex","prompt":"Reply with one short sentence."}' +``` + +The response is an SSE stream with events: +- `session.created` +- `session.event` +- `prompt.completed` +- `done` + +### Troubleshooting: only two events + +If you only see: +- outbound `session/prompt` +- inbound prompt result with `stopReason: "end_turn"` + +then ACP `session/update` notifications are not flowing. In Cloudflare sandbox paths this can happen if you forward `AbortSignal` from SDK fetch init into `containerFetch(...)` for long-lived ACP SSE requests. + +Use: + +```ts +const sdk = await SandboxAgent.connect({ + fetch: (input, init) => + sandbox.containerFetch( + input as Request | string | URL, + { + ...(init ?? {}), + // Avoid passing AbortSignal through containerFetch; it can drop ACP SSE updates. + signal: undefined, + }, + PORT, + ), +}); +``` + +Without `session/update` events, assistant text/tool deltas will not appear in UI streams. + +## Deploy + +```bash +pnpm run deploy +``` + +Note: Production preview URLs require a custom domain with wildcard DNS routing. +See [Cloudflare Production Deployment](https://developers.cloudflare.com/sandbox/guides/production-deployment/) for details. 
diff --git a/examples/cloudflare/frontend/App.tsx b/examples/cloudflare/frontend/App.tsx new file mode 100644 index 0000000..499fc63 --- /dev/null +++ b/examples/cloudflare/frontend/App.tsx @@ -0,0 +1,267 @@ +import { useState, useRef, useEffect, useCallback } from "react"; +import { SandboxAgent } from "sandbox-agent"; +import type { PermissionEventData, QuestionEventData } from "sandbox-agent"; + +export function App() { + const [sandboxName, setSandboxName] = useState("demo"); + const [prompt, setPrompt] = useState(""); + const [output, setOutput] = useState(""); + const [status, setStatus] = useState<"idle" | "connecting" | "ready" | "thinking">("idle"); + const [error, setError] = useState<string | null>(null); + + const clientRef = useRef<SandboxAgent | null>(null); + const sessionIdRef = useRef<string>(`session-${Date.now()}`); + const abortRef = useRef<AbortController | null>(null); + const isThinkingRef = useRef(false); + + const log = useCallback((msg: string) => { + setOutput((prev) => prev + msg + "\n"); + }, []); + + const connect = useCallback(async () => { + setStatus("connecting"); + setError(null); + setOutput(""); + + try { + // Connect via proxy endpoint (need full URL for SDK) + const baseUrl = `${window.location.origin}/sandbox/${encodeURIComponent(sandboxName)}/proxy`; + log(`Connecting to sandbox: ${sandboxName}`); + + const client = await SandboxAgent.connect({ baseUrl }); + clientRef.current = client; + + // Wait for health (this also ensures the container is started) + log("Waiting for sandbox-agent to be ready..."); + for (let i = 0; i < 30; i++) { + try { + await client.getHealth(); + break; + } catch { + if (i === 29) throw new Error("Timeout waiting for sandbox-agent"); + await new Promise((r) => setTimeout(r, 1000)); + } + } + + // Create session + await client.createSession(sessionIdRef.current, { agent: "claude" }); + log("Session created. 
Ready to chat.\n"); + + setStatus("ready"); + + // Start listening for events + startEventStream(client); + } catch (err) { + setError(err instanceof Error ? err.message : String(err)); + setStatus("idle"); + } + }, [sandboxName, log]); + + const startEventStream = useCallback( + async (client: SandboxAgent) => { + abortRef.current?.abort(); + const controller = new AbortController(); + abortRef.current = controller; + + try { + for await (const event of client.streamEvents(sessionIdRef.current, undefined, controller.signal)) { + console.log("Event:", event.type, event.data); + + // Auto-approve permissions + if (event.type === "permission.requested") { + const data = event.data as PermissionEventData; + log(`[Auto-approved] ${data.action}`); + await client.respondPermission(sessionIdRef.current, data.permission_id, { reply: "once" }); + } + + // Reject questions (don't support interactive input) + if (event.type === "question.requested") { + const data = event.data as QuestionEventData; + log(`[Question rejected] ${data.prompt}`); + await client.rejectQuestion(sessionIdRef.current, data.question_id); + } + + // Track when assistant starts thinking + if (event.type === "item.started") { + const item = (event.data as any)?.item; + if (item?.role === "assistant") { + isThinkingRef.current = true; + } + } + + // Show deltas while assistant is thinking + if (event.type === "item.delta" && isThinkingRef.current) { + const delta = (event.data as any)?.delta; + if (delta) { + const text = typeof delta === "string" ? delta : delta.type === "text" ? 
delta.text || "" : ""; + if (text) { + setOutput((prev) => prev + text); + } + } + } + + // Track assistant turn completion + if (event.type === "item.completed") { + const item = (event.data as any)?.item; + if (item?.role === "assistant") { + isThinkingRef.current = false; + setOutput((prev) => prev + "\n\n"); + setStatus("ready"); + } + } + + // Handle errors + if (event.type === "error") { + const data = event.data as any; + log(`Error: ${data?.message || JSON.stringify(data)}`); + } + + // Handle session end + if (event.type === "session.ended") { + const data = event.data as any; + log(`Session ended: ${data?.reason || "unknown"}`); + setStatus("idle"); + } + } + } catch (err) { + if (controller.signal.aborted) return; + console.error("Event stream error:", err); + } + }, + [log], + ); + + const send = useCallback(async () => { + if (!clientRef.current || !prompt.trim() || status !== "ready") return; + + const message = prompt.trim(); + setPrompt(""); + setOutput((prev) => prev + `user: ${message}\n\nassistant: `); + setStatus("thinking"); + + try { + await clientRef.current.postMessage(sessionIdRef.current, { message }); + } catch (err) { + setError(err instanceof Error ? 
err.message : String(err)); + setStatus("ready"); + } + }, [prompt, status]); + + // Cleanup on unmount + useEffect(() => { + return () => { + abortRef.current?.abort(); + }; + }, []); + + return ( + <div style={styles.container}> + <h1 style={styles.title}>Sandbox Agent</h1> + + {status === "idle" && ( + <div style={styles.connectForm}> + <label style={styles.label}> + Sandbox name: + <input style={styles.input} value={sandboxName} onChange={(e) => setSandboxName(e.target.value)} placeholder="demo" /> + </label> + <button style={styles.button} onClick={connect}> + Connect + </button> + </div> + )} + + {status === "connecting" && <div style={styles.status}>Connecting to sandbox...</div>} + + {error && <div style={styles.error}>{error}</div>} + + {(status === "ready" || status === "thinking") && ( + <> + <div style={styles.output}>{output}</div> + <div style={styles.inputRow}> + <input + style={styles.promptInput} + value={prompt} + onChange={(e) => setPrompt(e.target.value)} + onKeyDown={(e) => e.key === "Enter" && send()} + placeholder="Enter prompt..." + disabled={status === "thinking"} + /> + <button style={styles.button} onClick={send} disabled={status === "thinking"}> + {status === "thinking" ? "..." 
: "Send"} + </button> + </div> + </> + )} + </div> + ); +} + +const styles: Record<string, React.CSSProperties> = { + container: { + fontFamily: "system-ui, sans-serif", + maxWidth: 800, + margin: "2rem auto", + padding: "1rem", + }, + title: { + marginBottom: "1rem", + }, + connectForm: { + display: "flex", + gap: "1rem", + alignItems: "flex-end", + }, + label: { + display: "flex", + flexDirection: "column", + gap: "0.25rem", + fontSize: "0.875rem", + color: "#666", + }, + input: { + padding: "0.5rem", + fontSize: "1rem", + width: 200, + }, + button: { + padding: "0.5rem 1rem", + fontSize: "1rem", + cursor: "pointer", + backgroundColor: "#0066cc", + color: "white", + border: "none", + borderRadius: 4, + }, + status: { + color: "#666", + fontStyle: "italic", + }, + error: { + color: "#cc0000", + padding: "0.5rem", + backgroundColor: "#fff0f0", + borderRadius: 4, + marginBottom: "1rem", + }, + output: { + whiteSpace: "pre-wrap", + background: "#1e1e1e", + color: "#d4d4d4", + padding: "1rem", + minHeight: 300, + fontFamily: "monospace", + fontSize: 14, + overflow: "auto", + borderRadius: 4, + }, + inputRow: { + display: "flex", + gap: "0.5rem", + marginTop: "1rem", + }, + promptInput: { + flex: 1, + padding: "0.5rem", + fontSize: "1rem", + }, +}; diff --git a/examples/cloudflare/frontend/index.html b/examples/cloudflare/frontend/index.html new file mode 100644 index 0000000..2378265 --- /dev/null +++ b/examples/cloudflare/frontend/index.html @@ -0,0 +1,12 @@ +<!DOCTYPE html> +<html lang="en"> + <head> + <meta charset="UTF-8" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0" /> + <title>Sandbox Agent + + +
+ + + diff --git a/examples/cloudflare/frontend/main.tsx b/examples/cloudflare/frontend/main.tsx new file mode 100644 index 0000000..1c711a7 --- /dev/null +++ b/examples/cloudflare/frontend/main.tsx @@ -0,0 +1,9 @@ +import { StrictMode } from "react"; +import { createRoot } from "react-dom/client"; +import { App } from "./App"; + +createRoot(document.getElementById("root")!).render( + + + , +); diff --git a/examples/cloudflare/package.json b/examples/cloudflare/package.json new file mode 100644 index 0000000..0385703 --- /dev/null +++ b/examples/cloudflare/package.json @@ -0,0 +1,29 @@ +{ + "name": "@sandbox-agent/example-cloudflare", + "private": true, + "type": "module", + "scripts": { + "dev": "vite build --watch & wrangler dev", + "build": "vite build", + "deploy": "vite build && wrangler deploy", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@cloudflare/sandbox": "latest", + "hono": "^4.12.2", + "react": "^19.1.0", + "react-dom": "^19.1.0", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@cloudflare/workers-types": "latest", + "@types/node": "latest", + "@types/react": "^19.1.0", + "@types/react-dom": "^19.1.0", + "@vitejs/plugin-react": "^4.5.0", + "typescript": "latest", + "vite": "^6.2.0", + "vitest": "^3.0.0", + "wrangler": "latest" + } +} diff --git a/examples/cloudflare/src/index.ts b/examples/cloudflare/src/index.ts new file mode 100644 index 0000000..1efa742 --- /dev/null +++ b/examples/cloudflare/src/index.ts @@ -0,0 +1,114 @@ +import { getSandbox, type Sandbox } from "@cloudflare/sandbox"; +import { Hono } from "hono"; +import { HTTPException } from "hono/http-exception"; +import { streamSSE } from "hono/streaming"; +import { runPromptEndpointStream, type PromptRequest } from "./prompt-endpoint"; + +export { Sandbox } from "@cloudflare/sandbox"; + +type Bindings = { + Sandbox: DurableObjectNamespace; + ASSETS: Fetcher; + ANTHROPIC_API_KEY?: string; + OPENAI_API_KEY?: string; + CODEX_API_KEY?: string; +}; + +type AppEnv = { 
Bindings: Bindings }; + +const PORT = 8000; + +/** Check if sandbox-agent is already running by probing its health endpoint */ +async function isServerRunning(sandbox: Sandbox): Promise { + try { + const result = await sandbox.exec(`curl -sf http://localhost:${PORT}/v1/health`); + return result.success; + } catch { + return false; + } +} + +async function getReadySandbox(name: string, env: Bindings): Promise { + const sandbox = getSandbox(env.Sandbox, name); + const envVars: Record = {}; + if (env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = env.ANTHROPIC_API_KEY; + if (env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = env.OPENAI_API_KEY; + if (env.CODEX_API_KEY) envVars.CODEX_API_KEY = env.CODEX_API_KEY; + if (!envVars.CODEX_API_KEY && envVars.OPENAI_API_KEY) envVars.CODEX_API_KEY = envVars.OPENAI_API_KEY; + await sandbox.setEnvVars(envVars); + + if (!(await isServerRunning(sandbox))) { + await sandbox.startProcess(`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`); + + for (let i = 0; i < 30; i++) { + if (await isServerRunning(sandbox)) break; + await new Promise((r) => setTimeout(r, 200)); + } + } + return sandbox; +} + +async function proxyToSandbox(sandbox: Sandbox, request: Request, path: string): Promise { + const query = new URL(request.url).search; + return sandbox.containerFetch( + `http://localhost${path}${query}`, + { + method: request.method, + headers: request.headers, + body: request.body, + }, + PORT, + ); +} + +const app = new Hono(); + +app.onError((error) => { + return new Response(String(error), { status: 500 }); +}); + +app.post("/sandbox/:name/prompt", async (c) => { + if (!(c.req.header("content-type") ?? 
"").includes("application/json")) { + throw new HTTPException(400, { message: "Content-Type must be application/json" }); + } + + let payload: PromptRequest; + try { + payload = await c.req.json(); + } catch { + throw new HTTPException(400, { message: "Invalid JSON body" }); + } + + const sandbox = await getReadySandbox(c.req.param("name"), c.env); + return streamSSE(c, async (stream) => { + try { + await runPromptEndpointStream(sandbox, payload, PORT, async (event) => { + await stream.writeSSE({ + event: event.type, + data: JSON.stringify(event), + }); + }); + await stream.writeSSE({ + event: "done", + data: JSON.stringify({ ok: true }), + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + await stream.writeSSE({ + event: "error", + data: JSON.stringify({ message }), + }); + } + }); +}); + +app.all("/sandbox/:name/proxy/*", async (c) => { + const sandbox = await getReadySandbox(c.req.param("name"), c.env); + const wildcard = c.req.param("*"); + const path = wildcard ? `/${wildcard}` : "/"; + return proxyToSandbox(sandbox, c.req.raw, path); +}); + +app.all("*", (c) => c.env.ASSETS.fetch(c.req.raw)); + +export default app; diff --git a/examples/cloudflare/src/prompt-endpoint.ts b/examples/cloudflare/src/prompt-endpoint.ts new file mode 100644 index 0000000..f3c3366 --- /dev/null +++ b/examples/cloudflare/src/prompt-endpoint.ts @@ -0,0 +1,62 @@ +import type { Sandbox } from "@cloudflare/sandbox"; +import { SandboxAgent } from "sandbox-agent"; + +export type PromptRequest = { + agent?: string; + prompt?: string; +}; + +export async function runPromptEndpointStream( + sandbox: Sandbox, + request: PromptRequest, + port: number, + emit: (event: { type: string; [key: string]: unknown }) => Promise | void, +): Promise { + const client = await SandboxAgent.connect({ + fetch: (req, init) => + sandbox.containerFetch( + req, + { + ...(init ?? 
{}), + // Cloudflare containerFetch may drop long-lived update streams when + // a forwarded AbortSignal is cancelled; clear it for this path. + signal: undefined, + }, + port, + ), + }); + + let unsubscribe: (() => void) | undefined; + try { + const session = await client.createSession({ + agent: request.agent ?? "codex", + }); + + const promptText = request.prompt?.trim() || "Reply with a short confirmation."; + await emit({ + type: "session.created", + sessionId: session.id, + agent: session.agent, + prompt: promptText, + }); + + let pendingWrites: Promise = Promise.resolve(); + unsubscribe = session.onEvent((event) => { + pendingWrites = pendingWrites + .then(async () => { + await emit({ type: "session.event", event }); + }) + .catch(() => {}); + }); + + const response = await session.prompt([{ type: "text", text: promptText }]); + await pendingWrites; + await emit({ type: "prompt.response", response }); + await emit({ type: "prompt.completed" }); + } finally { + if (unsubscribe) { + unsubscribe(); + } + await Promise.race([client.dispose(), new Promise((resolve) => setTimeout(resolve, 250))]); + } +} diff --git a/examples/cloudflare/tests/cloudflare.test.ts b/examples/cloudflare/tests/cloudflare.test.ts new file mode 100644 index 0000000..d00c2ce --- /dev/null +++ b/examples/cloudflare/tests/cloudflare.test.ts @@ -0,0 +1,154 @@ +import { describe, it, expect } from "vitest"; +import { spawn, type ChildProcess } from "node:child_process"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import { execSync } from "node:child_process"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const PROJECT_DIR = resolve(__dirname, ".."); + +/** + * Cloudflare Workers integration test. + * + * Set RUN_CLOUDFLARE_EXAMPLES=1 to enable. Requires wrangler and Docker. + * + * This starts `wrangler dev` which: + * 1. Builds the Dockerfile (cloudflare/sandbox base + sandbox-agent) + * 2. 
Starts a local Workers runtime with Durable Objects and containers + * 3. Exposes the app on a local port + * + * We then test through the proxy endpoint which forwards to sandbox-agent + * running inside the container. + */ +const shouldRun = process.env.RUN_CLOUDFLARE_EXAMPLES === "1"; +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 600_000; + +const testFn = shouldRun ? it : it.skip; + +interface WranglerDev { + baseUrl: string; + cleanup: () => void; +} + +async function startWranglerDev(): Promise { + // Build frontend assets first (wrangler expects dist/ to exist) + execSync("npx vite build", { cwd: PROJECT_DIR, stdio: "pipe" }); + + return new Promise((resolve, reject) => { + const child: ChildProcess = spawn("npx", ["wrangler", "dev", "--port", "0"], { + cwd: PROJECT_DIR, + stdio: ["ignore", "pipe", "pipe"], + detached: true, + env: { + ...process.env, + // Ensure wrangler picks up API keys to pass to the container + NODE_ENV: "development", + }, + }); + + let stdout = ""; + let stderr = ""; + let resolved = false; + + const cleanup = () => { + if (child.pid) { + // Kill process group to ensure wrangler and its children are cleaned up + try { + process.kill(-child.pid, "SIGTERM"); + } catch { + try { + child.kill("SIGTERM"); + } catch {} + } + } + }; + + const timer = setTimeout(() => { + if (!resolved) { + resolved = true; + cleanup(); + reject(new Error(`wrangler dev did not start within 120s.\nstdout: ${stdout}\nstderr: ${stderr}`)); + } + }, 120_000); + + const onData = (chunk: Buffer) => { + const text = chunk.toString(); + stdout += text; + + // wrangler dev prints "Ready on http://localhost:XXXX" when ready + const match = stdout.match(/Ready on (https?:\/\/[^\s]+)/i) ?? 
stdout.match(/(https?:\/\/(?:localhost|127\.0\.0\.1):\d+)/); + if (match && !resolved) { + resolved = true; + clearTimeout(timer); + resolve({ baseUrl: match[1], cleanup }); + } + }; + + child.stdout?.on("data", onData); + child.stderr?.on("data", (chunk: Buffer) => { + const text = chunk.toString(); + stderr += text; + // Some wrangler versions print ready message to stderr + const match = text.match(/Ready on (https?:\/\/[^\s]+)/i) ?? text.match(/(https?:\/\/(?:localhost|127\.0\.0\.1):\d+)/); + if (match && !resolved) { + resolved = true; + clearTimeout(timer); + resolve({ baseUrl: match[1], cleanup }); + } + }); + + child.on("error", (err) => { + if (!resolved) { + resolved = true; + clearTimeout(timer); + reject(new Error(`wrangler dev failed to start: ${err.message}`)); + } + }); + + child.on("exit", (code) => { + if (!resolved) { + resolved = true; + clearTimeout(timer); + reject(new Error(`wrangler dev exited with code ${code}.\nstdout: ${stdout}\nstderr: ${stderr}`)); + } + }); + }); +} + +describe("cloudflare example", () => { + testFn( + "starts wrangler dev and sandbox-agent responds via proxy", + async () => { + const { baseUrl, cleanup } = await startWranglerDev(); + try { + // The Cloudflare example proxies requests through /sandbox/:name/proxy/* + // Wait for the container inside the Durable Object to start sandbox-agent + const healthUrl = `${baseUrl}/sandbox/test/proxy/v1/health`; + + let healthy = false; + for (let i = 0; i < 120; i++) { + try { + const res = await fetch(healthUrl); + if (res.ok) { + const data = await res.json(); + // The proxied health endpoint returns {name: "Sandbox Agent", ...} + if (data.status === "ok" || data.name === "Sandbox Agent") { + healthy = true; + break; + } + } + } catch {} + await new Promise((r) => setTimeout(r, 2000)); + } + expect(healthy).toBe(true); + + // Confirm a second request also works + const response = await fetch(healthUrl); + expect(response.ok).toBe(true); + } finally { + cleanup(); + } + }, + 
timeoutMs, + ); +}); diff --git a/examples/cloudflare/tsconfig.json b/examples/cloudflare/tsconfig.json new file mode 100644 index 0000000..92b9cad --- /dev/null +++ b/examples/cloudflare/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "esnext", + "lib": ["esnext"], + "module": "esnext", + "moduleResolution": "bundler", + "types": ["@cloudflare/workers-types"], + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "skipLibCheck": true, + "noEmit": true + }, + "include": ["src/**/*.ts"] +} diff --git a/examples/cloudflare/vite.config.ts b/examples/cloudflare/vite.config.ts new file mode 100644 index 0000000..657e846 --- /dev/null +++ b/examples/cloudflare/vite.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; + +export default defineConfig({ + plugins: [react()], + root: "frontend", + build: { + outDir: "../dist", + emptyOutDir: true, + }, +}); diff --git a/examples/cloudflare/vitest.config.ts b/examples/cloudflare/vitest.config.ts new file mode 100644 index 0000000..52a3740 --- /dev/null +++ b/examples/cloudflare/vitest.config.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + root: ".", + include: ["tests/**/*.test.ts"], + }, +}); diff --git a/examples/cloudflare/wrangler.jsonc b/examples/cloudflare/wrangler.jsonc new file mode 100644 index 0000000..5959215 --- /dev/null +++ b/examples/cloudflare/wrangler.jsonc @@ -0,0 +1,33 @@ +{ + "$schema": "node_modules/wrangler/config-schema.json", + "name": "sandbox-agent-cloudflare", + "main": "src/index.ts", + "compatibility_date": "2025-01-01", + "compatibility_flags": ["nodejs_compat"], + "assets": { + "directory": "./dist", + "binding": "ASSETS" + }, + "containers": [ + { + "class_name": "Sandbox", + "image": "./Dockerfile", + "instance_type": "lite", + "max_instances": 1 + } + ], + "durable_objects": { + "bindings": [ + { + "class_name": 
"Sandbox", + "name": "Sandbox" + } + ] + }, + "migrations": [ + { + "new_sqlite_classes": ["Sandbox"], + "tag": "v1" + } + ] +} diff --git a/examples/computesdk/package.json b/examples/computesdk/package.json new file mode 100644 index 0000000..243b3b1 --- /dev/null +++ b/examples/computesdk/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-computesdk", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "computesdk": "latest", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest", + "vitest": "^3.0.0" + } +} diff --git a/examples/computesdk/src/index.ts b/examples/computesdk/src/index.ts new file mode 100644 index 0000000..63d4aee --- /dev/null +++ b/examples/computesdk/src/index.ts @@ -0,0 +1,30 @@ +import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const envs: Record = {}; +if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: computesdk({ + create: { envs }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from ComputeSDK in one sentence." 
}]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/computesdk/tests/computesdk.test.ts b/examples/computesdk/tests/computesdk.test.ts new file mode 100644 index 0000000..61ebb2c --- /dev/null +++ b/examples/computesdk/tests/computesdk.test.ts @@ -0,0 +1,37 @@ +import { describe, it, expect } from "vitest"; +import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; + +const hasModal = Boolean(process.env.MODAL_TOKEN_ID && process.env.MODAL_TOKEN_SECRET); +const hasVercel = Boolean(process.env.VERCEL_TOKEN || process.env.VERCEL_OIDC_TOKEN); +const hasProviderKey = Boolean( + process.env.BLAXEL_API_KEY || process.env.CSB_API_KEY || process.env.DAYTONA_API_KEY || process.env.E2B_API_KEY || hasModal || hasVercel, +); + +const shouldRun = Boolean(process.env.COMPUTESDK_API_KEY) && hasProviderKey; +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; + +const testFn = shouldRun ? 
it : it.skip; + +describe("computesdk provider", () => { + testFn( + "starts sandbox-agent and responds to /v1/health", + async () => { + const envs: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + const sdk = await SandboxAgent.start({ + sandbox: computesdk({ create: { envs } }), + }); + + try { + const health = await sdk.getHealth(); + expect(health.status).toBe("ok"); + } finally { + await sdk.destroySandbox(); + } + }, + timeoutMs, + ); +}); diff --git a/examples/computesdk/tsconfig.json b/examples/computesdk/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/computesdk/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/daytona/package.json b/examples/daytona/package.json index 281ba81..ba5b0ac 100644 --- a/examples/daytona/package.json +++ b/examples/daytona/package.json @@ -3,13 +3,13 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/daytona.ts", - "start:snapshot": "tsx src/daytona-with-snapshot.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { "@daytonaio/sdk": "latest", - "@sandbox-agent/example-shared": "workspace:*" + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" }, "devDependencies": { "@types/node": "latest", diff --git a/examples/daytona/src/daytona-with-snapshot.ts b/examples/daytona/src/daytona-with-snapshot.ts deleted file mode 100644 index d0d1ce8..0000000 --- 
a/examples/daytona/src/daytona-with-snapshot.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Daytona, Image } from "@daytonaio/sdk"; -import { runPrompt } from "@sandbox-agent/example-shared"; - -const daytona = new Daytona(); - -const envVars: Record = {}; -if (process.env.ANTHROPIC_API_KEY) - envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) - envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - -// Build a custom image with sandbox-agent pre-installed (slower first run, faster subsequent runs) -const image = Image.base("ubuntu:22.04").runCommands( - "apt-get update && apt-get install -y curl ca-certificates", - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh", -); - -console.log("Creating Daytona sandbox (first run builds the base image and may take a few minutes, subsequent runs are fast)..."); -const sandbox = await daytona.create({ envVars, image, autoStopInterval: 0 }, { timeout: 180 }); - -await sandbox.process.executeCommand( - "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &", -); - -const baseUrl = (await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60)).url; - -const cleanup = async () => { - await sandbox.delete(60); - process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); - -await runPrompt(baseUrl); -await cleanup(); diff --git a/examples/daytona/src/daytona.ts b/examples/daytona/src/daytona.ts index 4fe6a3b..ccffc94 100644 --- a/examples/daytona/src/daytona.ts +++ b/examples/daytona/src/daytona.ts @@ -1,36 +1,33 @@ -import { Daytona } from "@daytonaio/sdk"; -import { runPrompt } from "@sandbox-agent/example-shared"; +import { SandboxAgent } from "sandbox-agent"; +import { daytona } from "sandbox-agent/daytona"; -const daytona = new Daytona(); +function collectEnvVars(): Record { + const envVars: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; 
+ if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return envVars; +} -const envVars: Record = {}; -if (process.env.ANTHROPIC_API_KEY) - envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) - envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} -// Use default image and install sandbox-agent at runtime (faster startup, no snapshot build) -console.log("Creating Daytona sandbox..."); -const sandbox = await daytona.create({ envVars, autoStopInterval: 0 }); +export async function setupDaytonaSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + extraHeaders?: Record; + cleanup: () => Promise; +}> { + const client = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars: collectEnvVars() }, + }), + }); -// Install sandbox-agent and start server -console.log("Installing sandbox-agent..."); -await sandbox.process.executeCommand( - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh", -); - -await sandbox.process.executeCommand( - "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &", -); - -const baseUrl = (await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60)).url; - -const cleanup = async () => { - await sandbox.delete(60); - process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); - -await runPrompt(baseUrl); -await cleanup(); + return { + baseUrl: inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/examples/daytona/src/index.ts b/examples/daytona/src/index.ts new file mode 100644 index 0000000..9c4cf85 --- /dev/null +++ b/examples/daytona/src/index.ts @@ -0,0 +1,30 @@ +import { SandboxAgent } from "sandbox-agent"; +import { daytona } from "sandbox-agent/daytona"; +import { detectAgent } 
from "@sandbox-agent/example-shared"; + +const envVars: Record = {}; +if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from Daytona in one sentence." }]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/daytona/tests/daytona.test.ts b/examples/daytona/tests/daytona.test.ts index 9e88d7b..ddc81d4 100644 --- a/examples/daytona/tests/daytona.test.ts +++ b/examples/daytona/tests/daytona.test.ts @@ -23,6 +23,6 @@ describe("daytona example", () => { await cleanup(); } }, - timeoutMs + timeoutMs, ); }); diff --git a/examples/daytona/tsconfig.json b/examples/daytona/tsconfig.json index 96ba2fd..ad591c3 100644 --- a/examples/daytona/tsconfig.json +++ b/examples/daytona/tsconfig.json @@ -9,7 +9,8 @@ "esModuleInterop": true, "strict": true, "skipLibCheck": true, - "resolveJsonModule": true + "resolveJsonModule": true, + "types": ["node"] }, "include": ["src/**/*"], "exclude": ["node_modules", "**/*.test.ts"] diff --git a/examples/docker-python/.gitignore b/examples/docker-python/.gitignore new file mode 100644 index 0000000..00f2d38 --- /dev/null +++ b/examples/docker-python/.gitignore @@ -0,0 +1,3 @@ +__pycache__/ +*.pyc +.venv/ diff --git a/examples/docker-python/client.py b/examples/docker-python/client.py new file mode 100644 index 0000000..29e4609 --- /dev/null +++ b/examples/docker-python/client.py @@ -0,0 +1,145 @@ +"""Minimal JSON-RPC client for sandbox-agent's streamable HTTP transport.""" + 
+import json +import threading +import time +import uuid + +import httpx + + +class SandboxConnection: + """Connects to a sandbox-agent server via JSON-RPC over streamable HTTP. + + Endpoints used: + POST /v1/acp/{server_id}?agent=... (bootstrap + requests) + GET /v1/acp/{server_id} (SSE event stream) + DELETE /v1/acp/{server_id} (close) + """ + + def __init__(self, base_url: str, agent: str): + self.base_url = base_url.rstrip("/") + self.agent = agent + self.server_id = f"py-{uuid.uuid4().hex[:8]}" + self.url = f"{self.base_url}/v1/acp/{self.server_id}" + self._next_id = 0 + self._events: list[dict] = [] + self._stop = threading.Event() + self._sse_thread: threading.Thread | None = None + + def _alloc_id(self) -> int: + self._next_id += 1 + return self._next_id + + def _post(self, method: str, params: dict | None = None, *, bootstrap: bool = False) -> dict: + payload: dict = { + "jsonrpc": "2.0", + "id": self._alloc_id(), + "method": method, + } + if params is not None: + payload["params"] = params + + url = f"{self.url}?agent={self.agent}" if bootstrap else self.url + r = httpx.post(url, json=payload, timeout=120) + r.raise_for_status() + body = r.text.strip() + return json.loads(body) if body else {} + + # -- Lifecycle ----------------------------------------------------------- + + def initialize(self) -> dict: + result = self._post( + "initialize", + { + "protocolVersion": 1, + "clientInfo": {"name": "python-example", "version": "0.1.0"}, + }, + bootstrap=True, + ) + self._start_sse() + + # Auto-authenticate if the agent advertises env-var-based auth methods. 
+ auth_methods = result.get("result", {}).get("authMethods", []) + env_ids = ("anthropic-api-key", "codex-api-key", "openai-api-key") + for method in auth_methods: + if method.get("id") not in env_ids: + continue + try: + resp = self._post("authenticate", {"methodId": method["id"]}) + if "error" not in resp: + break + except Exception: + continue + + return result + + def new_session(self, cwd: str = "/root") -> str: + result = self._post("session/new", {"cwd": cwd, "mcpServers": []}) + if "error" in result: + raise RuntimeError(f"session/new failed: {result['error'].get('message', result['error'])}") + return result["result"]["sessionId"] + + def prompt(self, session_id: str, text: str) -> dict: + result = self._post( + "session/prompt", + { + "sessionId": session_id, + "prompt": [{"type": "text", "text": text}], + }, + ) + return result + + def close(self) -> None: + self._stop.set() + try: + httpx.delete(self.url, timeout=2) + except Exception: + pass + + # -- SSE event stream (background thread) -------------------------------- + + @property + def events(self) -> list[dict]: + return list(self._events) + + def _start_sse(self) -> None: + self._sse_thread = threading.Thread(target=self._sse_loop, daemon=True) + self._sse_thread.start() + + def _sse_loop(self) -> None: + while not self._stop.is_set(): + try: + with httpx.stream( + "GET", + self.url, + headers={"Accept": "text/event-stream"}, + timeout=httpx.Timeout(connect=5, read=None, write=5, pool=5), + ) as resp: + buffer = "" + for chunk in resp.iter_text(): + if self._stop.is_set(): + break + buffer += chunk.replace("\r\n", "\n") + while "\n\n" in buffer: + event_chunk, buffer = buffer.split("\n\n", 1) + self._process_sse_event(event_chunk) + except Exception: + if self._stop.is_set(): + return + time.sleep(0.15) + + def _process_sse_event(self, chunk: str) -> None: + data_lines: list[str] = [] + for line in chunk.split("\n"): + if line.startswith("data:"): + data_lines.append(line[5:].lstrip()) + if not 
data_lines: + return + payload = "\n".join(data_lines).strip() + if not payload: + return + try: + self._events.append(json.loads(payload)) + except json.JSONDecodeError: + pass diff --git a/examples/docker-python/credentials.py b/examples/docker-python/credentials.py new file mode 100644 index 0000000..5d5e489 --- /dev/null +++ b/examples/docker-python/credentials.py @@ -0,0 +1,32 @@ +"""Agent detection and credential helpers for sandbox-agent examples.""" + +import os +import sys + + +def detect_agent() -> str: + """Pick an agent based on env vars. Exits if no credentials are found.""" + if os.environ.get("SANDBOX_AGENT"): + return os.environ["SANDBOX_AGENT"] + has_claude = bool( + os.environ.get("ANTHROPIC_API_KEY") + or os.environ.get("CLAUDE_API_KEY") + or os.environ.get("CLAUDE_CODE_OAUTH_TOKEN") + ) + has_codex = (os.environ.get("OPENAI_API_KEY") or "").startswith("sk-") + if has_codex: + return "codex" + if has_claude: + return "claude" + print("No API keys found. Set ANTHROPIC_API_KEY or OPENAI_API_KEY.") + sys.exit(1) + + +def build_container_env() -> dict[str, str]: + """Collect credential env vars to forward into the Docker container.""" + env: dict[str, str] = {} + for key in ("ANTHROPIC_API_KEY", "CLAUDE_API_KEY", "OPENAI_API_KEY", "CODEX_API_KEY"): + val = os.environ.get(key) + if val: + env[key] = val + return env diff --git a/examples/docker-python/main.py b/examples/docker-python/main.py new file mode 100644 index 0000000..b8f0f86 --- /dev/null +++ b/examples/docker-python/main.py @@ -0,0 +1,143 @@ +""" +Sandbox Agent – Python + Docker example. + +Starts a Docker container running sandbox-agent, connects to the sandbox-agent server, creates a session, sends a prompt, and +prints the streamed response. 
+ +Usage: + pip install -r requirements.txt + python main.py +""" + +import json +import os +import signal +import subprocess +import sys +import time + +import docker +import httpx + +from client import SandboxConnection +from credentials import build_container_env, detect_agent + +PORT = 3000 +DOCKERFILE_DIR = os.path.join(os.path.dirname(__file__), "..", "shared") +IMAGE_NAME = "sandbox-agent-examples:latest" + + +def build_image(client: docker.DockerClient) -> str: + """Build the shared example Docker image if it doesn't exist.""" + try: + client.images.get(IMAGE_NAME) + return IMAGE_NAME + except docker.errors.ImageNotFound: + pass + + print(f"Building {IMAGE_NAME} (first run only)...") + subprocess.run( + ["docker", "build", "-t", IMAGE_NAME, DOCKERFILE_DIR], + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.PIPE, + ) + return IMAGE_NAME + + +def wait_for_health(base_url: str, timeout_s: float = 120) -> None: + deadline = time.monotonic() + timeout_s + last_err: str | None = None + while time.monotonic() < deadline: + try: + r = httpx.get(f"{base_url}/v1/health", timeout=5) + if r.status_code == 200 and r.json().get("status") == "ok": + return + last_err = f"health returned {r.status_code}" + except Exception as exc: + last_err = str(exc) + time.sleep(0.5) + raise RuntimeError(f"Timed out waiting for /v1/health: {last_err}") + + +def main() -> None: + agent = detect_agent() + print(f"Agent: {agent}") + + client = docker.from_env() + image = build_image(client) + + env = build_container_env() + + print("Starting container...") + container = client.containers.run( + image, + command=[ + "sh", "-c", + f"sandbox-agent install-agent {agent} && " + f"sandbox-agent server --no-token --host 0.0.0.0 --port {PORT}", + ], + environment=env, + ports={f"{PORT}/tcp": PORT}, + detach=True, + auto_remove=True, + ) + + def cleanup(*_args: object) -> None: + print("\nCleaning up...") + try: + container.stop(timeout=5) + except Exception: + pass + + 
signal.signal(signal.SIGINT, cleanup) + signal.signal(signal.SIGTERM, cleanup) + + try: + base_url = f"http://127.0.0.1:{PORT}" + print(f"Waiting for server at {base_url}...") + wait_for_health(base_url) + print("Server ready.") + print(f"Inspector: {base_url}/ui/") + + # -- Session flow ---------------------------------------------------- + conn = SandboxConnection(base_url, agent) + + print("Connecting...") + init_result = conn.initialize() + agent_info = init_result.get("result", {}).get("agentInfo", {}) + print(f"Connected to: {agent_info.get('title', agent)} {agent_info.get('version', '')}") + + session_id = conn.new_session() + print(f"Session: {session_id}") + + prompt_text = "Say hello and tell me what you are. Be brief (one sentence)." + print(f"\n> {prompt_text}") + response = conn.prompt(session_id, prompt_text) + + if "error" in response: + err = response["error"] + print(f"Error: {err.get('message', err)}") + else: + print(f"Stop reason: {response.get('result', {}).get('stopReason', 'unknown')}") + + # Give SSE events a moment to arrive. 
+ time.sleep(1) + + if conn.events: + for ev in conn.events: + if ev.get("method") == "session/update": + content = ev.get("params", {}).get("update", {}).get("content", {}) + if content.get("text"): + print(content["text"], end="") + print() + + conn.close() + print("\nDone.") + + finally: + cleanup() + + +if __name__ == "__main__": + main() diff --git a/examples/docker-python/requirements.txt b/examples/docker-python/requirements.txt new file mode 100644 index 0000000..f7fd028 --- /dev/null +++ b/examples/docker-python/requirements.txt @@ -0,0 +1,2 @@ +docker>=7.0.0 +httpx>=0.27.0 diff --git a/examples/docker/package.json b/examples/docker/package.json index 289b0c3..7b796c9 100644 --- a/examples/docker/package.json +++ b/examples/docker/package.json @@ -3,15 +3,16 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/docker.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { "@sandbox-agent/example-shared": "workspace:*", - "dockerode": "latest" + "dockerode": "latest", + "get-port": "latest", + "sandbox-agent": "workspace:*" }, "devDependencies": { - "@types/dockerode": "latest", "@types/node": "latest", "tsx": "latest", "typescript": "latest", diff --git a/examples/docker/src/docker.ts b/examples/docker/src/docker.ts deleted file mode 100644 index 20fafe4..0000000 --- a/examples/docker/src/docker.ts +++ /dev/null @@ -1,56 +0,0 @@ -import Docker from "dockerode"; -import { runPrompt, waitForHealth } from "@sandbox-agent/example-shared"; - -const IMAGE = "alpine:latest"; -const PORT = 3000; - -const docker = new Docker({ socketPath: "/var/run/docker.sock" }); - -// Pull image if needed -try { - await docker.getImage(IMAGE).inspect(); -} catch { - console.log(`Pulling ${IMAGE}...`); - await new Promise((resolve, reject) => { - docker.pull(IMAGE, (err: Error | null, stream: NodeJS.ReadableStream) => { - if (err) return reject(err); - docker.modem.followProgress(stream, (err: Error | null) => err ? 
reject(err) : resolve()); - }); - }); -} - -console.log("Starting container..."); -const container = await docker.createContainer({ - Image: IMAGE, - Cmd: ["sh", "-c", [ - "apk add --no-cache curl ca-certificates libstdc++ libgcc bash", - "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh", - "sandbox-agent install-agent claude", - "sandbox-agent install-agent codex", - `sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`, - ].join(" && ")], - Env: [ - process.env.ANTHROPIC_API_KEY ? `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "", - process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "", - ].filter(Boolean), - ExposedPorts: { [`${PORT}/tcp`]: {} }, - HostConfig: { - AutoRemove: true, - PortBindings: { [`${PORT}/tcp`]: [{ HostPort: `${PORT}` }] }, - }, -}); -await container.start(); - -const baseUrl = `http://127.0.0.1:${PORT}`; -await waitForHealth({ baseUrl }); - -const cleanup = async () => { - try { await container.stop({ t: 5 }); } catch {} - try { await container.remove({ force: true }); } catch {} - process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); - -await runPrompt(baseUrl); -await cleanup(); diff --git a/examples/docker/src/index.ts b/examples/docker/src/index.ts new file mode 100644 index 0000000..9f50859 --- /dev/null +++ b/examples/docker/src/index.ts @@ -0,0 +1,40 @@ +import fs from "node:fs"; +import path from "node:path"; +import { SandboxAgent } from "sandbox-agent"; +import { docker } from "sandbox-agent/docker"; +import { detectAgent } from "@sandbox-agent/example-shared"; +import { FULL_IMAGE } from "@sandbox-agent/example-shared/docker"; + +const codexAuthPath = process.env.HOME ? path.join(process.env.HOME, ".codex", "auth.json") : null; +const bindMounts = codexAuthPath && fs.existsSync(codexAuthPath) ? [`${codexAuthPath}:/home/sandbox/.codex/auth.json:ro`] : []; +const env = [ + process.env.ANTHROPIC_API_KEY ? 
`ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "", + process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "", + process.env.CODEX_API_KEY ? `CODEX_API_KEY=${process.env.CODEX_API_KEY}` : "", +].filter(Boolean); + +const client = await SandboxAgent.start({ + sandbox: docker({ + image: FULL_IMAGE, + env, + binds: bindMounts, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), + cwd: "/home/sandbox", +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from Docker in one sentence." }]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/docker/tests/docker.test.ts b/examples/docker/tests/docker.test.ts index 0b51407..683f033 100644 --- a/examples/docker/tests/docker.test.ts +++ b/examples/docker/tests/docker.test.ts @@ -1,8 +1,15 @@ import { describe, it, expect } from "vitest"; -import { buildHeaders } from "@sandbox-agent/example-shared"; -import { setupDockerSandboxAgent } from "../src/docker.ts"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; -const shouldRun = process.env.RUN_DOCKER_EXAMPLES === "1"; +/** + * Docker integration test. + * + * Set SANDBOX_AGENT_DOCKER_IMAGE to the image tag to test (e.g. a locally-built + * full image). The test starts a container from that image, waits for + * sandbox-agent to become healthy, and validates the /v1/health endpoint. + */ +const image = process.env.SANDBOX_AGENT_DOCKER_IMAGE; +const shouldRun = Boolean(image); const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; const testFn = shouldRun ? 
it : it.skip; @@ -11,11 +18,29 @@ describe("docker example", () => { testFn( "starts sandbox-agent and responds to /v1/health", async () => { - const { baseUrl, token, cleanup } = await setupDockerSandboxAgent(); + const { baseUrl, cleanup } = await startDockerSandbox({ + port: 2468, + image: image!, + }); try { - const response = await fetch(`${baseUrl}/v1/health`, { - headers: buildHeaders({ token }), - }); + // Wait for health check + let healthy = false; + for (let i = 0; i < 60; i++) { + try { + const res = await fetch(`${baseUrl}/v1/health`); + if (res.ok) { + const data = await res.json(); + if (data.status === "ok") { + healthy = true; + break; + } + } + } catch {} + await new Promise((r) => setTimeout(r, 1000)); + } + expect(healthy).toBe(true); + + const response = await fetch(`${baseUrl}/v1/health`); expect(response.ok).toBe(true); const data = await response.json(); expect(data.status).toBe("ok"); @@ -23,6 +48,6 @@ describe("docker example", () => { await cleanup(); } }, - timeoutMs + timeoutMs, ); }); diff --git a/examples/docker/tsconfig.json b/examples/docker/tsconfig.json index 96ba2fd..ad591c3 100644 --- a/examples/docker/tsconfig.json +++ b/examples/docker/tsconfig.json @@ -9,7 +9,8 @@ "esModuleInterop": true, "strict": true, "skipLibCheck": true, - "resolveJsonModule": true + "resolveJsonModule": true, + "types": ["node"] }, "include": ["src/**/*"], "exclude": ["node_modules", "**/*.test.ts"] diff --git a/examples/e2b/package.json b/examples/e2b/package.json index f44574c..3e28ae2 100644 --- a/examples/e2b/package.json +++ b/examples/e2b/package.json @@ -3,7 +3,7 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/e2b.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { diff --git a/examples/e2b/src/e2b.ts b/examples/e2b/src/e2b.ts index 8d54c88..17762a2 100644 --- a/examples/e2b/src/e2b.ts +++ b/examples/e2b/src/e2b.ts @@ -1,40 +1,34 @@ -import { Sandbox } from "@e2b/code-interpreter"; -import { 
runPrompt, waitForHealth } from "@sandbox-agent/example-shared"; +import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; -const envs: Record = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +function collectEnvVars(): Record { + const envs: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return envs; +} -console.log("Creating E2B sandbox..."); -const sandbox = await Sandbox.create({ allowInternetAccess: true, envs }); +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} -const run = async (cmd: string) => { - const result = await sandbox.commands.run(cmd); - if (result.exitCode !== 0) throw new Error(`Command failed: ${cmd}\n${result.stderr}`); - return result; -}; +export async function setupE2BSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + cleanup: () => Promise; +}> { + const template = process.env.E2B_TEMPLATE; + const client = await SandboxAgent.start({ + sandbox: e2b({ + template, + create: { envs: collectEnvVars() }, + }), + }); -console.log("Installing sandbox-agent..."); -await run("curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh"); - -console.log("Installing agents..."); -await run("sandbox-agent install-agent claude"); -await run("sandbox-agent install-agent codex"); - -console.log("Starting server..."); -await sandbox.commands.run("sandbox-agent server --no-token --host 0.0.0.0 --port 3000", { background: true }); - -const baseUrl = `https://${sandbox.getHost(3000)}`; - -console.log("Waiting for server..."); -await waitForHealth({ baseUrl }); - -const cleanup = async () => { - await sandbox.kill(); - process.exit(0); -}; -process.once("SIGINT", 
cleanup); -process.once("SIGTERM", cleanup); - -await runPrompt(baseUrl); -await cleanup(); + return { + baseUrl: inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/examples/e2b/src/index.ts b/examples/e2b/src/index.ts new file mode 100644 index 0000000..67b74dc --- /dev/null +++ b/examples/e2b/src/index.ts @@ -0,0 +1,28 @@ +import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const envs: Record = {}; +if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +const template = process.env.E2B_TEMPLATE; + +const client = await SandboxAgent.start({ + // ✨ NEW ✨ + sandbox: e2b({ template, create: { envs } }), +}); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from E2B in one sentence." 
}]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/e2b/tests/e2b.test.ts b/examples/e2b/tests/e2b.test.ts index b3fdbfc..843e578 100644 --- a/examples/e2b/tests/e2b.test.ts +++ b/examples/e2b/tests/e2b.test.ts @@ -23,6 +23,6 @@ describe("e2b example", () => { await cleanup(); } }, - timeoutMs + timeoutMs, ); }); diff --git a/examples/e2b/tsconfig.json b/examples/e2b/tsconfig.json index 96ba2fd..ad591c3 100644 --- a/examples/e2b/tsconfig.json +++ b/examples/e2b/tsconfig.json @@ -9,7 +9,8 @@ "esModuleInterop": true, "strict": true, "skipLibCheck": true, - "resolveJsonModule": true + "resolveJsonModule": true, + "types": ["node"] }, "include": ["src/**/*"], "exclude": ["node_modules", "**/*.test.ts"] diff --git a/examples/file-system/package.json b/examples/file-system/package.json new file mode 100644 index 0000000..87921a3 --- /dev/null +++ b/examples/file-system/package.json @@ -0,0 +1,19 @@ +{ + "name": "@sandbox-agent/example-file-system", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*", + "tar": "^7" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/file-system/src/index.ts b/examples/file-system/src/index.ts new file mode 100644 index 0000000..71d65c0 --- /dev/null +++ b/examples/file-system/src/index.ts @@ -0,0 +1,57 @@ +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import * as tar from "tar"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +console.log("Starting 
sandbox..."); +const { baseUrl, cleanup } = await startDockerSandbox({ port: 3003 }); + +console.log("Creating sample files..."); +const tmpDir = path.resolve(__dirname, "../.tmp-upload"); +const projectDir = path.join(tmpDir, "my-project"); +fs.mkdirSync(path.join(projectDir, "src"), { recursive: true }); +fs.writeFileSync(path.join(projectDir, "README.md"), "# My Project\n\nUploaded via batch tar.\n"); +fs.writeFileSync(path.join(projectDir, "src", "index.ts"), 'console.log("hello from uploaded project");\n'); +fs.writeFileSync(path.join(projectDir, "package.json"), JSON.stringify({ name: "my-project", version: "1.0.0" }, null, 2) + "\n"); +console.log(" Created 3 files in my-project/"); + +console.log("Uploading files via batch tar..."); +const client = await SandboxAgent.connect({ baseUrl }); + +const tarPath = path.join(tmpDir, "upload.tar"); +await tar.create({ file: tarPath, cwd: tmpDir }, ["my-project"]); +const tarBuffer = await fs.promises.readFile(tarPath); +const uploadResult = await client.uploadFsBatch(tarBuffer, { path: "/opt" }); +console.log(` Uploaded ${uploadResult.paths.length} files: ${uploadResult.paths.join(", ")}`); + +// Cleanup temp files +fs.rmSync(tmpDir, { recursive: true, force: true }); + +console.log("Verifying uploaded files..."); +const entries = await client.listFsEntries({ path: "/opt/my-project" }); +console.log(` Found ${entries.length} entries in /opt/my-project`); +for (const entry of entries) { + console.log(` ${entry.entryType === "directory" ? 
"d" : "-"} ${entry.name}`); +} + +const readmeBytes = await client.readFsFile({ path: "/opt/my-project/README.md" }); +const readmeText = new TextDecoder().decode(readmeBytes); +console.log(` README.md content: ${readmeText.trim()}`); + +console.log("Creating session..."); +const session = await client.createSession({ agent: detectAgent(), cwd: "/opt/my-project" }); +const sessionId = session.id; +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(' Try: "read the README in /opt/my-project"'); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +process.on("SIGINT", () => { + clearInterval(keepAlive); + cleanup().then(() => process.exit(0)); +}); diff --git a/examples/file-system/tsconfig.json b/examples/file-system/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/file-system/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/mcp-custom-tool/package.json b/examples/mcp-custom-tool/package.json new file mode 100644 index 0000000..250bfb0 --- /dev/null +++ b/examples/mcp-custom-tool/package.json @@ -0,0 +1,22 @@ +{ + "name": "@sandbox-agent/example-mcp-custom-tool", + "private": true, + "type": "module", + "scripts": { + "build:mcp": "esbuild src/mcp-server.ts --bundle --format=cjs --platform=node --target=node18 --minify --outfile=dist/mcp-server.cjs", + "start": "pnpm build:mcp && tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "latest", + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*", + 
"zod": "latest" + }, + "devDependencies": { + "@types/node": "latest", + "esbuild": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/mcp-custom-tool/src/index.ts b/examples/mcp-custom-tool/src/index.ts new file mode 100644 index 0000000..a609777 --- /dev/null +++ b/examples/mcp-custom-tool/src/index.ts @@ -0,0 +1,54 @@ +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +// Verify the bundled MCP server exists (built by `pnpm build:mcp`). +const serverFile = path.resolve(__dirname, "../dist/mcp-server.cjs"); +if (!fs.existsSync(serverFile)) { + console.error("Error: dist/mcp-server.cjs not found. Run `pnpm build:mcp` first."); + process.exit(1); +} + +// Start a Docker container running sandbox-agent. +console.log("Starting sandbox..."); +const { baseUrl, cleanup } = await startDockerSandbox({ port: 3004 }); + +// Upload the bundled MCP server into the sandbox filesystem. +console.log("Uploading MCP server bundle..."); +const client = await SandboxAgent.connect({ baseUrl }); + +const bundle = await fs.promises.readFile(serverFile); +const written = await client.writeFsFile({ path: "/opt/mcp/custom-tools/mcp-server.cjs" }, bundle); +console.log(` Written: ${written.path} (${written.bytesWritten} bytes)`); + +// Create a session with the uploaded MCP server as a local command. 
+console.log("Creating session with custom MCP tool..."); +const session = await client.createSession({ + agent: detectAgent(), + sessionInit: { + cwd: "/root", + mcpServers: [ + { + name: "customTools", + command: "node", + args: ["/opt/mcp/custom-tools/mcp-server.cjs"], + env: [], + }, + ], + }, +}); +const sessionId = session.id; +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(' Try: "generate a random number between 1 and 100"'); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +process.on("SIGINT", () => { + clearInterval(keepAlive); + cleanup().then(() => process.exit(0)); +}); diff --git a/examples/mcp-custom-tool/src/mcp-server.ts b/examples/mcp-custom-tool/src/mcp-server.ts new file mode 100644 index 0000000..38c79b7 --- /dev/null +++ b/examples/mcp-custom-tool/src/mcp-server.ts @@ -0,0 +1,24 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import { z } from "zod"; + +async function main() { + const server = new McpServer({ name: "rand", version: "1.0.0" }); + + server.tool( + "random_number", + "Generate a random integer between min and max (inclusive)", + { + min: z.number().describe("Minimum value"), + max: z.number().describe("Maximum value"), + }, + async ({ min, max }) => ({ + content: [{ type: "text", text: String(Math.floor(Math.random() * (max - min + 1)) + min) }], + }), + ); + + const transport = new StdioServerTransport(); + await server.connect(transport); +} + +main(); diff --git a/examples/mcp-custom-tool/tsconfig.json b/examples/mcp-custom-tool/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/mcp-custom-tool/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + 
"esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/mcp/package.json b/examples/mcp/package.json new file mode 100644 index 0000000..950cbb7 --- /dev/null +++ b/examples/mcp/package.json @@ -0,0 +1,18 @@ +{ + "name": "@sandbox-agent/example-mcp", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/mcp/src/index.ts b/examples/mcp/src/index.ts new file mode 100644 index 0000000..dfaf43f --- /dev/null +++ b/examples/mcp/src/index.ts @@ -0,0 +1,36 @@ +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; + +console.log("Starting sandbox..."); +const { baseUrl, cleanup } = await startDockerSandbox({ + port: 3002, + setupCommands: ["npm install -g --silent @modelcontextprotocol/server-everything@2026.1.26"], +}); + +console.log("Creating session with everything MCP server..."); +const client = await SandboxAgent.connect({ baseUrl }); +const session = await client.createSession({ + agent: detectAgent(), + sessionInit: { + cwd: "/root", + mcpServers: [ + { + name: "everything", + command: "mcp-server-everything", + args: [], + env: [], + }, + ], + }, +}); +const sessionId = session.id; +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(' Try: "generate a random number between 1 and 100"'); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +process.on("SIGINT", () => { + clearInterval(keepAlive); + 
cleanup().then(() => process.exit(0)); +}); diff --git a/examples/mcp/tsconfig.json b/examples/mcp/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/mcp/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/mock-acp-agent/README.md b/examples/mock-acp-agent/README.md new file mode 100644 index 0000000..46fc583 --- /dev/null +++ b/examples/mock-acp-agent/README.md @@ -0,0 +1,9 @@ +# @sandbox-agent/mock-acp-agent + +Minimal newline-delimited ACP JSON-RPC mock agent. + +Behavior: +- Echoes every inbound message as `mock/echo` notification. +- For requests (`method` + `id`), returns `result.echoed` payload. +- For `mock/ask_client`, emits an agent-initiated `mock/request` before response. +- For responses from client (`id` without `method`), emits `mock/client_response` notification. 
diff --git a/examples/mock-acp-agent/package.json b/examples/mock-acp-agent/package.json new file mode 100644 index 0000000..124901f --- /dev/null +++ b/examples/mock-acp-agent/package.json @@ -0,0 +1,24 @@ +{ + "name": "@sandbox-agent/mock-acp-agent", + "version": "0.1.0", + "private": false, + "type": "module", + "description": "Mock ACP agent for adapter integration testing", + "license": "Apache-2.0", + "main": "./dist/index.js", + "exports": { + ".": "./dist/index.js" + }, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc -p tsconfig.build.json", + "typecheck": "tsc --noEmit", + "start": "node ./dist/index.js" + }, + "devDependencies": { + "@types/node": "latest", + "typescript": "latest" + } +} diff --git a/examples/mock-acp-agent/src/index.ts b/examples/mock-acp-agent/src/index.ts new file mode 100644 index 0000000..bde006c --- /dev/null +++ b/examples/mock-acp-agent/src/index.ts @@ -0,0 +1,100 @@ +import { createInterface } from "node:readline"; + +interface JsonRpcRequest { + jsonrpc?: unknown; + id?: unknown; + method?: unknown; + params?: unknown; + result?: unknown; + error?: unknown; +} + +let outboundRequestSeq = 0; + +function writeMessage(payload: unknown): void { + process.stdout.write(`${JSON.stringify(payload)}\n`); +} + +function echoNotification(message: unknown): void { + writeMessage({ + jsonrpc: "2.0", + method: "mock/echo", + params: { + message, + }, + }); +} + +function handleMessage(raw: string): void { + if (!raw.trim()) { + return; + } + + let msg: JsonRpcRequest; + try { + msg = JSON.parse(raw) as JsonRpcRequest; + } catch (error) { + writeMessage({ + jsonrpc: "2.0", + method: "mock/parse_error", + params: { + error: error instanceof Error ? 
error.message : String(error), + raw, + }, + }); + return; + } + + echoNotification(msg); + + const hasMethod = typeof msg.method === "string"; + const hasId = msg.id !== undefined; + + if (hasMethod && hasId) { + if (msg.method === "mock/ask_client") { + outboundRequestSeq += 1; + writeMessage({ + jsonrpc: "2.0", + id: `agent-req-${outboundRequestSeq}`, + method: "mock/request", + params: { + prompt: "please respond", + }, + }); + } + + writeMessage({ + jsonrpc: "2.0", + id: msg.id, + result: { + echoed: msg, + }, + }); + return; + } + + if (!hasMethod && hasId) { + writeMessage({ + jsonrpc: "2.0", + method: "mock/client_response", + params: { + id: msg.id, + result: msg.result ?? null, + error: msg.error ?? null, + }, + }); + } +} + +const rl = createInterface({ + input: process.stdin, + crlfDelay: Infinity, +}); + +rl.on("line", (line) => { + handleMessage(line); +}); + +rl.on("close", () => { + process.exit(0); +}); diff --git a/examples/mock-acp-agent/tsconfig.build.json b/examples/mock-acp-agent/tsconfig.build.json new file mode 100644 index 0000000..8ca8089 --- /dev/null +++ b/examples/mock-acp-agent/tsconfig.build.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "allowImportingTsExtensions": false, + "noEmit": false, + "declaration": true, + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"] +} diff --git a/examples/mock-acp-agent/tsconfig.json b/examples/mock-acp-agent/tsconfig.json new file mode 100644 index 0000000..8f7a8cc --- /dev/null +++ b/examples/mock-acp-agent/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022"], + "module": "ES2022", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules"] +} diff --git a/examples/modal/package.json 
b/examples/modal/package.json new file mode 100644 index 0000000..d3e51ec --- /dev/null +++ b/examples/modal/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-modal", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "modal": "latest", + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest", + "vitest": "^3.0.0" + } +} diff --git a/examples/modal/src/index.ts b/examples/modal/src/index.ts new file mode 100644 index 0000000..35eef8d --- /dev/null +++ b/examples/modal/src/index.ts @@ -0,0 +1,30 @@ +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const secrets: Record = {}; +if (process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: modal({ + create: { secrets }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from Modal in one sentence." 
}]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/modal/tests/modal.test.ts b/examples/modal/tests/modal.test.ts new file mode 100644 index 0000000..010256a --- /dev/null +++ b/examples/modal/tests/modal.test.ts @@ -0,0 +1,31 @@ +import { describe, it, expect } from "vitest"; +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; + +const shouldRun = Boolean(process.env.MODAL_TOKEN_ID && process.env.MODAL_TOKEN_SECRET); +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; + +const testFn = shouldRun ? it : it.skip; + +describe("modal provider", () => { + testFn( + "starts sandbox-agent and responds to /v1/health", + async () => { + const secrets: Record = {}; + if (process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + const sdk = await SandboxAgent.start({ + sandbox: modal({ create: { secrets } }), + }); + + try { + const health = await sdk.getHealth(); + expect(health.status).toBe("ok"); + } finally { + await sdk.destroySandbox(); + } + }, + timeoutMs, + ); +}); diff --git a/examples/modal/tsconfig.json b/examples/modal/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/modal/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/permissions/package.json b/examples/permissions/package.json new file mode 100644 index 0000000..5b82ef9 --- /dev/null +++ 
b/examples/permissions/package.json @@ -0,0 +1,18 @@ +{ + "name": "@sandbox-agent/example-permissions", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "commander": "^12.1.0", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/permissions/src/index.ts b/examples/permissions/src/index.ts new file mode 100644 index 0000000..e684e34 --- /dev/null +++ b/examples/permissions/src/index.ts @@ -0,0 +1,173 @@ +import { createInterface } from "node:readline/promises"; +import { stdin as input, stdout as output } from "node:process"; +import { Command } from "commander"; +import { SandboxAgent, type PermissionReply, type SessionPermissionRequest } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; + +const options = parseOptions(); +const agent = options.agent.trim().toLowerCase(); +const autoReply = parsePermissionReply(options.reply); +const promptText = options.prompt?.trim() || `Create ./permission-example.txt with the text 'hello from the ${agent} permissions example'.`; + +const sdk = await SandboxAgent.start({ + sandbox: local({ log: "inherit" }), +}); + +try { + await sdk.installAgent(agent); + + const agents = await sdk.listAgents({ config: true }); + const selectedAgent = agents.agents.find((entry) => entry.id === agent); + const configOptions = Array.isArray(selectedAgent?.configOptions) + ? (selectedAgent.configOptions as Array<{ category?: string; currentValue?: string; options?: unknown[] }>) + : []; + const modeOption = configOptions.find((option) => option.category === "mode"); + const availableModes = extractOptionValues(modeOption); + const mode = options.mode?.trim() || (typeof modeOption?.currentValue === "string" ? 
modeOption.currentValue : "") || availableModes[0] || ""; + + console.log(`Agent: ${agent}`); + console.log(`Mode: ${mode || "(default)"}`); + if (availableModes.length > 0) { + console.log(`Available modes: ${availableModes.join(", ")}`); + } + console.log(`Working directory: ${process.cwd()}`); + console.log(`Prompt: ${promptText}`); + if (autoReply) { + console.log(`Automatic permission reply: ${autoReply}`); + } else { + console.log("Interactive permission replies enabled."); + } + + const session = await sdk.createSession({ + agent, + ...(mode ? { mode } : {}), + cwd: process.cwd(), + }); + + const rl = autoReply + ? null + : createInterface({ + input, + output, + }); + + session.onPermissionRequest((request: SessionPermissionRequest) => { + void handlePermissionRequest(session, request, autoReply, rl); + }); + + const response = await session.prompt([{ type: "text", text: promptText }]); + console.log(`Prompt finished with stopReason=${response.stopReason}`); + + await rl?.close(); +} finally { + await sdk.dispose(); +} + +async function handlePermissionRequest( + session: { + respondPermission(permissionId: string, reply: PermissionReply): Promise; + }, + request: SessionPermissionRequest, + auto: PermissionReply | null, + rl: ReturnType | null, +): Promise { + const reply = auto ?? (await promptForReply(request, rl)); + console.log(`Permission ${reply}: ${request.toolCall.title ?? request.toolCall.toolCallId}`); + await session.respondPermission(request.id, reply); +} + +async function promptForReply(request: SessionPermissionRequest, rl: ReturnType | null): Promise { + if (!rl) { + return "reject"; + } + + const title = request.toolCall.title ?? 
request.toolCall.toolCallId; + const available = request.availableReplies; + console.log(""); + console.log(`Permission request: ${title}`); + console.log(`Available replies: ${available.join(", ")}`); + const answer = (await rl.question("Reply [once|always|reject]: ")).trim().toLowerCase(); + const parsed = parsePermissionReply(answer); + if (parsed && available.includes(parsed)) { + return parsed; + } + + console.log("Invalid reply, defaulting to reject."); + return "reject"; +} + +function extractOptionValues(option: { options?: unknown[] } | undefined): string[] { + if (!option?.options) { + return []; + } + + const values: string[] = []; + for (const entry of option.options) { + if (!entry || typeof entry !== "object") { + continue; + } + const value = "value" in entry && typeof entry.value === "string" ? entry.value : null; + if (value) { + values.push(value); + continue; + } + if (!("options" in entry) || !Array.isArray(entry.options)) { + continue; + } + for (const nested of entry.options) { + if (!nested || typeof nested !== "object") { + continue; + } + const nestedValue = "value" in nested && typeof nested.value === "string" ? nested.value : null; + if (nestedValue) { + values.push(nestedValue); + } + } + } + + return [...new Set(values)]; +} + +function parsePermissionReply(value: string | undefined): PermissionReply | null { + if (!value) { + return null; + } + + switch (value.trim().toLowerCase()) { + case "once": + return "once"; + case "always": + return "always"; + case "reject": + case "deny": + return "reject"; + default: + return null; + } +} + +function parseOptions(): { + agent: string; + mode?: string; + prompt?: string; + reply?: string; +} { + const argv = process.argv.slice(2); + const normalizedArgv = argv[0] === "--" ? 
argv.slice(1) : argv; + const program = new Command(); + program + .name("permissions") + .description("Run a permissions example against an agent session.") + .requiredOption("--agent ", "Agent to run, for example 'claude' or 'codex'") + .option("--mode ", "Mode to configure for the session (uses agent default if omitted)") + .option("--prompt ", "Prompt to send after the session starts") + .option("--reply ", "Automatically answer permission prompts with once, always, or reject"); + + program.parse(normalizedArgv, { from: "user" }); + return program.opts<{ + agent: string; + mode?: string; + prompt?: string; + reply?: string; + }>(); +} diff --git a/examples/permissions/tsconfig.json b/examples/permissions/tsconfig.json new file mode 100644 index 0000000..4eec283 --- /dev/null +++ b/examples/permissions/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "types": ["node"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true + }, + "include": ["src/**/*"] +} diff --git a/examples/persist-memory/package.json b/examples/persist-memory/package.json new file mode 100644 index 0000000..0514ad9 --- /dev/null +++ b/examples/persist-memory/package.json @@ -0,0 +1,18 @@ +{ + "name": "@sandbox-agent/example-persist-memory", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/persist-memory/src/index.ts b/examples/persist-memory/src/index.ts new file mode 100644 index 0000000..2065a50 --- /dev/null +++ b/examples/persist-memory/src/index.ts @@ -0,0 +1,33 @@ +import { SandboxAgent, 
InMemorySessionPersistDriver } from "sandbox-agent"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const persist = new InMemorySessionPersistDriver(); + +console.log("Starting sandbox..."); +const sandbox = await startDockerSandbox({ + port: 3000, +}); + +const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist }); + +const session = await sdk.createSession({ agent: detectAgent() }); +console.log(`Created session ${session.id}`); + +await session.prompt([{ type: "text", text: "Say hello in one sentence." }]); +console.log("Prompt complete."); + +const sessions = await sdk.listSessions(); +console.log(`\nSessions (${sessions.items.length}):`); +for (const s of sessions.items) { + console.log(` ${s.id} agent=${s.agent}`); +} + +const events = await sdk.getEvents({ sessionId: session.id }); +console.log(`\nSession history (${events.items.length} events):`); +for (const e of events.items) { + console.log(` [${e.eventIndex}] ${e.sender}: ${JSON.stringify(e.payload).slice(0, 120)}`); +} + +await sdk.dispose(); +await sandbox.cleanup(); diff --git a/examples/persist-memory/tsconfig.json b/examples/persist-memory/tsconfig.json new file mode 100644 index 0000000..ec2723c --- /dev/null +++ b/examples/persist-memory/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "types": ["node"] + }, + "include": ["src"] +} diff --git a/examples/persist-postgres/package.json b/examples/persist-postgres/package.json new file mode 100644 index 0000000..8445516 --- /dev/null +++ b/examples/persist-postgres/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-persist-postgres", + "private": true, + "type": "module", + "scripts": { + 
"start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "pg": "latest", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "@types/pg": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/persist-postgres/src/index.ts b/examples/persist-postgres/src/index.ts new file mode 100644 index 0000000..43eecbd --- /dev/null +++ b/examples/persist-postgres/src/index.ts @@ -0,0 +1,100 @@ +import { execFileSync } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { Client } from "pg"; +import { setTimeout as delay } from "node:timers/promises"; +import { SandboxAgent } from "sandbox-agent"; +import { PostgresSessionPersistDriver } from "./persist.ts"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +// --- Postgres setup (Docker or DATABASE_URL) --- + +let containerId: string | undefined; +let connectionString: string; + +if (process.env.DATABASE_URL) { + connectionString = process.env.DATABASE_URL; +} else { + const name = `persist-example-${randomUUID().slice(0, 8)}`; + containerId = execFileSync( + "docker", + [ + "run", + "-d", + "--rm", + "--name", + name, + "-e", + "POSTGRES_USER=postgres", + "-e", + "POSTGRES_PASSWORD=postgres", + "-e", + "POSTGRES_DB=sandbox", + "-p", + "127.0.0.1::5432", + "postgres:16-alpine", + ], + { encoding: "utf8" }, + ).trim(); + const port = execFileSync("docker", ["port", containerId, "5432/tcp"], { encoding: "utf8" }) + .trim() + .split("\n")[0] + ?.match(/:(\d+)$/)?.[1]; + connectionString = `postgres://postgres:postgres@127.0.0.1:${port}/sandbox`; + console.log(`Postgres on port ${port}`); + + const deadline = Date.now() + 30_000; + while (Date.now() < deadline) { + const c = new Client({ connectionString }); + try { + await c.connect(); + await c.query("SELECT 1"); + await 
c.end(); + break; + } catch { + try { + await c.end(); + } catch {} + await delay(250); + } + } +} + +try { + const persist = new PostgresSessionPersistDriver({ connectionString }); + + console.log("Starting sandbox..."); + const sandbox = await startDockerSandbox({ + port: 3000, + }); + + const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist }); + + const session = await sdk.createSession({ agent: detectAgent() }); + console.log(`Created session ${session.id}`); + + await session.prompt([{ type: "text", text: "Say hello in one sentence." }]); + console.log("Prompt complete."); + + const sessions = await sdk.listSessions(); + console.log(`\nSessions (${sessions.items.length}):`); + for (const s of sessions.items) { + console.log(` ${s.id} agent=${s.agent}`); + } + + const events = await sdk.getEvents({ sessionId: session.id }); + console.log(`\nSession history (${events.items.length} events):`); + for (const e of events.items) { + console.log(` [${e.eventIndex}] ${e.sender}: ${JSON.stringify(e.payload).slice(0, 120)}`); + } + + await persist.close(); + await sdk.dispose(); + await sandbox.cleanup(); +} finally { + if (containerId) { + try { + execFileSync("docker", ["rm", "-f", containerId], { stdio: "ignore" }); + } catch {} + } +} diff --git a/examples/persist-postgres/src/persist.ts b/examples/persist-postgres/src/persist.ts new file mode 100644 index 0000000..2a6ccff --- /dev/null +++ b/examples/persist-postgres/src/persist.ts @@ -0,0 +1,336 @@ +import { Pool, type PoolConfig } from "pg"; +import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; + +const DEFAULT_LIST_LIMIT = 100; + +export interface PostgresSessionPersistDriverOptions { + connectionString?: string; + pool?: Pool; + poolConfig?: PoolConfig; + schema?: string; +} + +export class PostgresSessionPersistDriver implements SessionPersistDriver { + private readonly pool: Pool; + private readonly ownsPool: 
boolean; + private readonly schema: string; + private readonly initialized: Promise; + + constructor(options: PostgresSessionPersistDriverOptions = {}) { + this.schema = normalizeSchema(options.schema ?? "public"); + + if (options.pool) { + this.pool = options.pool; + this.ownsPool = false; + } else { + this.pool = new Pool({ + connectionString: options.connectionString, + ...options.poolConfig, + }); + this.ownsPool = true; + } + + this.initialized = this.initialize(); + } + + async getSession(id: string): Promise { + await this.ready(); + + const result = await this.pool.query( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM ${this.table("sessions")} + WHERE id = $1`, + [id], + ); + + if (result.rows.length === 0) { + return undefined; + } + + return decodeSessionRow(result.rows[0]); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + await this.ready(); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rowsResult = await this.pool.query( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM ${this.table("sessions")} + ORDER BY created_at ASC, id ASC + LIMIT $1 OFFSET $2`, + [limit, offset], + ); + + const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("sessions")}`); + const total = parseInteger(countResult.rows[0]?.count ?? "0"); + const nextOffset = offset + rowsResult.rows.length; + + return { + items: rowsResult.rows.map(decodeSessionRow), + nextCursor: nextOffset < total ? 
String(nextOffset) : undefined, + }; + } + + async updateSession(session: SessionRecord): Promise { + await this.ready(); + + await this.pool.query( + `INSERT INTO ${this.table("sessions")} ( + id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ON CONFLICT(id) DO UPDATE SET + agent = EXCLUDED.agent, + agent_session_id = EXCLUDED.agent_session_id, + last_connection_id = EXCLUDED.last_connection_id, + created_at = EXCLUDED.created_at, + destroyed_at = EXCLUDED.destroyed_at, + sandbox_id = EXCLUDED.sandbox_id, + session_init_json = EXCLUDED.session_init_json, + config_options_json = EXCLUDED.config_options_json, + modes_json = EXCLUDED.modes_json`, + [ + session.id, + session.agent, + session.agentSessionId, + session.lastConnectionId, + session.createdAt, + session.destroyedAt ?? null, + session.sandboxId ?? null, + session.sessionInit ? JSON.stringify(session.sessionInit) : null, + session.configOptions ? JSON.stringify(session.configOptions) : null, + session.modes !== undefined ? JSON.stringify(session.modes) : null, + ], + ); + } + + async listEvents(request: ListEventsRequest): Promise> { + await this.ready(); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rowsResult = await this.pool.query( + `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json + FROM ${this.table("events")} + WHERE session_id = $1 + ORDER BY event_index ASC, id ASC + LIMIT $2 OFFSET $3`, + [request.sessionId, limit, offset], + ); + + const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`, [ + request.sessionId, + ]); + const total = parseInteger(countResult.rows[0]?.count ?? 
"0"); + const nextOffset = offset + rowsResult.rows.length; + + return { + items: rowsResult.rows.map(decodeEventRow), + nextCursor: nextOffset < total ? String(nextOffset) : undefined, + }; + } + + async insertEvent(_sessionId: string, event: SessionEvent): Promise { + await this.ready(); + + await this.pool.query( + `INSERT INTO ${this.table("events")} ( + id, event_index, session_id, created_at, connection_id, sender, payload_json + ) VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT(id) DO UPDATE SET + event_index = EXCLUDED.event_index, + session_id = EXCLUDED.session_id, + created_at = EXCLUDED.created_at, + connection_id = EXCLUDED.connection_id, + sender = EXCLUDED.sender, + payload_json = EXCLUDED.payload_json`, + [event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, event.payload], + ); + } + + async close(): Promise { + if (!this.ownsPool) { + return; + } + await this.pool.end(); + } + + private async ready(): Promise { + await this.initialized; + } + + private table(name: "sessions" | "events"): string { + return `"${this.schema}"."${name}"`; + } + + private async initialize(): Promise { + await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`); + + await this.pool.query(` + CREATE TABLE IF NOT EXISTS ${this.table("sessions")} ( + id TEXT PRIMARY KEY, + agent TEXT NOT NULL, + agent_session_id TEXT NOT NULL, + last_connection_id TEXT NOT NULL, + created_at BIGINT NOT NULL, + destroyed_at BIGINT, + sandbox_id TEXT, + session_init_json JSONB, + config_options_json JSONB, + modes_json JSONB + ) + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS sandbox_id TEXT + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS config_options_json JSONB + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS modes_json JSONB + `); + + await this.pool.query(` + CREATE TABLE IF 
NOT EXISTS ${this.table("events")} ( + id TEXT PRIMARY KEY, + event_index BIGINT NOT NULL, + session_id TEXT NOT NULL, + created_at BIGINT NOT NULL, + connection_id TEXT NOT NULL, + sender TEXT NOT NULL, + payload_json JSONB NOT NULL + ) + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ALTER COLUMN id TYPE TEXT USING id::TEXT + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ADD COLUMN IF NOT EXISTS event_index BIGINT + `); + + await this.pool.query(` + WITH ranked AS ( + SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC) AS ranked_index + FROM ${this.table("events")} + ) + UPDATE ${this.table("events")} AS current_events + SET event_index = ranked.ranked_index + FROM ranked + WHERE current_events.id = ranked.id + AND current_events.event_index IS NULL + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ALTER COLUMN event_index SET NOT NULL + `); + + await this.pool.query(` + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON ${this.table("events")}(session_id, event_index, id) + `); + } +} + +type SessionRow = { + id: string; + agent: string; + agent_session_id: string; + last_connection_id: string; + created_at: string | number; + destroyed_at: string | number | null; + sandbox_id: string | null; + session_init_json: unknown | null; + config_options_json: unknown | null; + modes_json: unknown | null; +}; + +type EventRow = { + id: string | number; + event_index: string | number; + session_id: string; + created_at: string | number; + connection_id: string; + sender: string; + payload_json: unknown; +}; + +function decodeSessionRow(row: SessionRow): SessionRecord { + return { + id: row.id, + agent: row.agent, + agentSessionId: row.agent_session_id, + lastConnectionId: row.last_connection_id, + createdAt: parseInteger(row.created_at), + destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at), + sandboxId: row.sandbox_id ?? 
undefined, + sessionInit: row.session_init_json ? (row.session_init_json as SessionRecord["sessionInit"]) : undefined, + configOptions: row.config_options_json ? (row.config_options_json as SessionRecord["configOptions"]) : undefined, + modes: row.modes_json ? (row.modes_json as SessionRecord["modes"]) : undefined, + }; +} + +function decodeEventRow(row: EventRow): SessionEvent { + return { + id: String(row.id), + eventIndex: parseInteger(row.event_index), + sessionId: row.session_id, + createdAt: parseInteger(row.created_at), + connectionId: row.connection_id, + sender: parseSender(row.sender), + payload: row.payload_json as SessionEvent["payload"], + }; +} + +function normalizeLimit(limit: number | undefined): number { + if (!Number.isFinite(limit) || (limit ?? 0) < 1) { + return DEFAULT_LIST_LIMIT; + } + return Math.floor(limit as number); +} + +function parseCursor(cursor: string | undefined): number { + if (!cursor) { + return 0; + } + const parsed = Number.parseInt(cursor, 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} + +function parseInteger(value: string | number): number { + const parsed = typeof value === "number" ? value : Number.parseInt(value, 10); + if (!Number.isFinite(parsed)) { + throw new Error(`Invalid integer value returned by postgres: ${String(value)}`); + } + return parsed; +} + +function parseSender(value: string): SessionEvent["sender"] { + if (value === "agent" || value === "client") { + return value; + } + throw new Error(`Invalid sender value returned by postgres: ${value}`); +} + +function normalizeSchema(schema: string): string { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) { + throw new Error(`Invalid schema name '${schema}'. 
Use letters, numbers, and underscores only.`); + } + return schema; +} diff --git a/examples/persist-postgres/tsconfig.json b/examples/persist-postgres/tsconfig.json new file mode 100644 index 0000000..ec2723c --- /dev/null +++ b/examples/persist-postgres/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "types": ["node"] + }, + "include": ["src"] +} diff --git a/examples/persist-sqlite/package.json b/examples/persist-sqlite/package.json new file mode 100644 index 0000000..be6bf0d --- /dev/null +++ b/examples/persist-sqlite/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-persist-sqlite", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "better-sqlite3": "^11.0.0", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/better-sqlite3": "^7.0.0", + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/persist-sqlite/src/index.ts b/examples/persist-sqlite/src/index.ts new file mode 100644 index 0000000..943e902 --- /dev/null +++ b/examples/persist-sqlite/src/index.ts @@ -0,0 +1,35 @@ +import { SandboxAgent } from "sandbox-agent"; +import { SQLiteSessionPersistDriver } from "./persist.ts"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const persist = new SQLiteSessionPersistDriver({ filename: "./sessions.db" }); + +console.log("Starting sandbox..."); +const sandbox = await startDockerSandbox({ + port: 3000, +}); + +const sdk = await SandboxAgent.connect({ baseUrl: sandbox.baseUrl, persist }); + +const session = await 
sdk.createSession({ agent: detectAgent() }); +console.log(`Created session ${session.id}`); + +await session.prompt([{ type: "text", text: "Say hello in one sentence." }]); +console.log("Prompt complete."); + +const sessions = await sdk.listSessions(); +console.log(`\nSessions (${sessions.items.length}):`); +for (const s of sessions.items) { + console.log(` ${s.id} agent=${s.agent}`); +} + +const events = await sdk.getEvents({ sessionId: session.id }); +console.log(`\nSession history (${events.items.length} events):`); +for (const e of events.items) { + console.log(` [${e.eventIndex}] ${e.sender}: ${JSON.stringify(e.payload).slice(0, 120)}`); +} + +persist.close(); +await sdk.dispose(); +await sandbox.cleanup(); diff --git a/examples/persist-sqlite/src/persist.ts b/examples/persist-sqlite/src/persist.ts new file mode 100644 index 0000000..2292903 --- /dev/null +++ b/examples/persist-sqlite/src/persist.ts @@ -0,0 +1,310 @@ +import Database from "better-sqlite3"; +import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; + +const DEFAULT_LIST_LIMIT = 100; + +export interface SQLiteSessionPersistDriverOptions { + filename?: string; +} + +export class SQLiteSessionPersistDriver implements SessionPersistDriver { + private readonly db: Database.Database; + + constructor(options: SQLiteSessionPersistDriverOptions = {}) { + this.db = new Database(options.filename ?? 
":memory:"); + this.initialize(); + } + + async getSession(id: string): Promise { + const row = this.db + .prepare( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM sessions WHERE id = ?`, + ) + .get(id) as SessionRow | undefined; + + if (!row) { + return undefined; + } + + return decodeSessionRow(row); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rows = this.db + .prepare( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM sessions + ORDER BY created_at ASC, id ASC + LIMIT ? OFFSET ?`, + ) + .all(limit, offset) as SessionRow[]; + + const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM sessions`).get() as { count: number }; + const nextOffset = offset + rows.length; + + return { + items: rows.map(decodeSessionRow), + nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, + }; + } + + async updateSession(session: SessionRecord): Promise { + this.db + .prepare( + `INSERT INTO sessions ( + id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + agent = excluded.agent, + agent_session_id = excluded.agent_session_id, + last_connection_id = excluded.last_connection_id, + created_at = excluded.created_at, + destroyed_at = excluded.destroyed_at, + sandbox_id = excluded.sandbox_id, + session_init_json = excluded.session_init_json, + config_options_json = excluded.config_options_json, + modes_json = excluded.modes_json`, + ) + .run( + session.id, + session.agent, + session.agentSessionId, + session.lastConnectionId, + session.createdAt, + session.destroyedAt ?? null, + session.sandboxId ?? null, + session.sessionInit ? JSON.stringify(session.sessionInit) : null, + session.configOptions ? JSON.stringify(session.configOptions) : null, + session.modes !== undefined ? JSON.stringify(session.modes) : null, + ); + } + + async listEvents(request: ListEventsRequest): Promise> { + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rows = this.db + .prepare( + `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json + FROM events + WHERE session_id = ? + ORDER BY event_index ASC, id ASC + LIMIT ? OFFSET ?`, + ) + .all(request.sessionId, limit, offset) as EventRow[]; + + const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM events WHERE session_id = ?`).get(request.sessionId) as { count: number }; + + const nextOffset = offset + rows.length; + + return { + items: rows.map(decodeEventRow), + nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, + }; + } + + async insertEvent(_sessionId: string, event: SessionEvent): Promise { + this.db + .prepare( + `INSERT INTO events ( + id, event_index, session_id, created_at, connection_id, sender, payload_json + ) VALUES (?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + event_index = excluded.event_index, + session_id = excluded.session_id, + created_at = excluded.created_at, + connection_id = excluded.connection_id, + sender = excluded.sender, + payload_json = excluded.payload_json`, + ) + .run(event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, JSON.stringify(event.payload)); + } + + close(): void { + this.db.close(); + } + + private initialize(): void { + this.db.exec(` + CREATE TABLE IF NOT EXISTS sessions ( + id TEXT PRIMARY KEY, + agent TEXT NOT NULL, + agent_session_id TEXT NOT NULL, + last_connection_id TEXT NOT NULL, + created_at INTEGER NOT NULL, + destroyed_at INTEGER, + sandbox_id TEXT, + session_init_json TEXT, + config_options_json TEXT, + modes_json TEXT + ) + `); + + const sessionColumns = this.db.prepare(`PRAGMA table_info(sessions)`).all() as TableInfoRow[]; + if (!sessionColumns.some((column) => column.name === "sandbox_id")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN sandbox_id TEXT`); + } + if (!sessionColumns.some((column) => column.name === "config_options_json")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN config_options_json TEXT`); + } + if (!sessionColumns.some((column) => column.name === "modes_json")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN modes_json TEXT`); + } + + this.ensureEventsTable(); + } + + private ensureEventsTable(): void { + const tableInfo = this.db.prepare(`PRAGMA table_info(events)`).all() as TableInfoRow[]; + if (tableInfo.length === 0) { + this.createEventsTable(); + return; + } + + const idColumn = tableInfo.find((column) => column.name === "id"); + const hasEventIndex = tableInfo.some((column) => column.name === "event_index"); + const idType = (idColumn?.type ?? 
"").trim().toUpperCase(); + const idIsText = idType === "TEXT"; + + if (!idIsText || !hasEventIndex) { + this.rebuildEventsTable(hasEventIndex); + } + + this.db.exec(` + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON events(session_id, event_index, id) + `); + } + + private createEventsTable(): void { + this.db.exec(` + CREATE TABLE IF NOT EXISTS events ( + id TEXT PRIMARY KEY, + event_index INTEGER NOT NULL, + session_id TEXT NOT NULL, + created_at INTEGER NOT NULL, + connection_id TEXT NOT NULL, + sender TEXT NOT NULL, + payload_json TEXT NOT NULL + ); + + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON events(session_id, event_index, id) + `); + } + + private rebuildEventsTable(hasEventIndex: boolean): void { + this.db.exec(` + ALTER TABLE events RENAME TO events_legacy; + `); + + this.createEventsTable(); + + if (hasEventIndex) { + this.db.exec(` + INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) + SELECT + CAST(id AS TEXT), + COALESCE(event_index, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC)), + session_id, + created_at, + connection_id, + sender, + payload_json + FROM events_legacy + `); + } else { + this.db.exec(` + INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) + SELECT + CAST(id AS TEXT), + ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC), + session_id, + created_at, + connection_id, + sender, + payload_json + FROM events_legacy + `); + } + + this.db.exec(`DROP TABLE events_legacy`); + } +} + +type SessionRow = { + id: string; + agent: string; + agent_session_id: string; + last_connection_id: string; + created_at: number; + destroyed_at: number | null; + sandbox_id: string | null; + session_init_json: string | null; + config_options_json: string | null; + modes_json: string | null; +}; + +type EventRow = { + id: string; + event_index: number; + session_id: string; + created_at: 
number; + connection_id: string; + sender: "client" | "agent"; + payload_json: string; +}; + +type TableInfoRow = { + name: string; + type: string; +}; + +function decodeSessionRow(row: SessionRow): SessionRecord { + return { + id: row.id, + agent: row.agent, + agentSessionId: row.agent_session_id, + lastConnectionId: row.last_connection_id, + createdAt: row.created_at, + destroyedAt: row.destroyed_at ?? undefined, + sandboxId: row.sandbox_id ?? undefined, + sessionInit: row.session_init_json ? (JSON.parse(row.session_init_json) as SessionRecord["sessionInit"]) : undefined, + configOptions: row.config_options_json ? (JSON.parse(row.config_options_json) as SessionRecord["configOptions"]) : undefined, + modes: row.modes_json ? (JSON.parse(row.modes_json) as SessionRecord["modes"]) : undefined, + }; +} + +function decodeEventRow(row: EventRow): SessionEvent { + return { + id: row.id, + eventIndex: row.event_index, + sessionId: row.session_id, + createdAt: row.created_at, + connectionId: row.connection_id, + sender: row.sender, + payload: JSON.parse(row.payload_json), + }; +} + +function normalizeLimit(limit: number | undefined): number { + if (!Number.isFinite(limit) || (limit ?? 
0) < 1) { + return DEFAULT_LIST_LIMIT; + } + return Math.floor(limit as number); +} + +function parseCursor(cursor: string | undefined): number { + if (!cursor) { + return 0; + } + const parsed = Number.parseInt(cursor, 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} diff --git a/examples/persist-sqlite/tsconfig.json b/examples/persist-sqlite/tsconfig.json new file mode 100644 index 0000000..ec2723c --- /dev/null +++ b/examples/persist-sqlite/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "types": ["node"] + }, + "include": ["src"] +} diff --git a/examples/shared/package.json b/examples/shared/package.json index 18906ef..4c868ed 100644 --- a/examples/shared/package.json +++ b/examples/shared/package.json @@ -3,15 +3,18 @@ "private": true, "type": "module", "exports": { - ".": "./src/sandbox-agent-client.ts" + ".": "./src/sandbox-agent-client.ts", + "./docker": "./src/docker.ts" }, "scripts": { "typecheck": "tsc --noEmit" }, "dependencies": { + "dockerode": "latest", "sandbox-agent": "workspace:*" }, "devDependencies": { + "@types/dockerode": "latest", "@types/node": "latest", "typescript": "latest" } diff --git a/examples/shared/src/docker.ts b/examples/shared/src/docker.ts new file mode 100644 index 0000000..f4161fb --- /dev/null +++ b/examples/shared/src/docker.ts @@ -0,0 +1,265 @@ +import Docker from "dockerode"; +import { execFileSync } from "node:child_process"; +import fs from "node:fs"; +import path from "node:path"; +import { PassThrough } from "node:stream"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +const REPO_ROOT = path.resolve(__dirname, "..", "..", ".."); + +/** Pre-built Docker image with all agents 
installed. */ +export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.2-full"; + +export interface DockerSandboxOptions { + /** Container port used by sandbox-agent inside Docker. */ + port: number; + /** Optional fixed host port mapping. If omitted, Docker assigns a free host port automatically. */ + hostPort?: number; + /** Additional shell commands to run before starting sandbox-agent. */ + setupCommands?: string[]; + /** Docker image to use. Defaults to the pre-built full image. */ + image?: string; +} + +export interface DockerSandbox { + baseUrl: string; + cleanup: () => Promise; +} + +const DIRECT_CREDENTIAL_KEYS = [ + "ANTHROPIC_API_KEY", + "CLAUDE_API_KEY", + "CLAUDE_CODE_OAUTH_TOKEN", + "ANTHROPIC_AUTH_TOKEN", + "OPENAI_API_KEY", + "CODEX_API_KEY", + "CEREBRAS_API_KEY", + "OPENCODE_API_KEY", +] as const; + +function stripShellQuotes(value: string): string { + const trimmed = value.trim(); + if (trimmed.length >= 2 && trimmed.startsWith('"') && trimmed.endsWith('"')) { + return trimmed.slice(1, -1); + } + if (trimmed.length >= 2 && trimmed.startsWith("'") && trimmed.endsWith("'")) { + return trimmed.slice(1, -1); + } + return trimmed; +} + +function parseExtractedCredentials(output: string): Record { + const parsed: Record = {}; + for (const rawLine of output.split("\n")) { + const line = rawLine.trim(); + if (!line) continue; + const cleanLine = line.startsWith("export ") ? 
line.slice(7) : line; + const match = cleanLine.match(/^([A-Z0-9_]+)=(.*)$/); + if (!match) continue; + const [, key, rawValue] = match; + const value = stripShellQuotes(rawValue); + if (!value) continue; + parsed[key] = value; + } + return parsed; +} + +interface ClaudeCredentialFile { + hostPath: string; + containerPath: string; + base64Content: string; +} + +function readClaudeCredentialFiles(): ClaudeCredentialFile[] { + const homeDir = process.env.HOME || ""; + if (!homeDir) return []; + + const candidates: Array<{ hostPath: string; containerPath: string }> = [ + { + hostPath: path.join(homeDir, ".claude", ".credentials.json"), + containerPath: ".claude/.credentials.json", + }, + { + hostPath: path.join(homeDir, ".claude-oauth-credentials.json"), + containerPath: ".claude-oauth-credentials.json", + }, + ]; + + const files: ClaudeCredentialFile[] = []; + for (const candidate of candidates) { + if (!fs.existsSync(candidate.hostPath)) continue; + try { + const raw = fs.readFileSync(candidate.hostPath, "utf8"); + files.push({ + hostPath: candidate.hostPath, + containerPath: candidate.containerPath, + base64Content: Buffer.from(raw, "utf8").toString("base64"), + }); + } catch { + // Ignore unreadable credential file candidates. + } + } + return files; +} + +function collectCredentialEnv(): Record { + const merged: Record = {}; + let extracted: Record = {}; + try { + const output = execFileSync("sandbox-agent", ["credentials", "extract-env"], { encoding: "utf8", stdio: ["ignore", "pipe", "pipe"] }); + extracted = parseExtractedCredentials(output); + } catch { + // Fall back to direct env vars if extraction is unavailable. 
+ } + + for (const [key, value] of Object.entries(extracted)) { + if (value) merged[key] = value; + } + for (const key of DIRECT_CREDENTIAL_KEYS) { + const direct = process.env[key]; + if (direct) merged[key] = direct; + } + return merged; +} + +function shellSingleQuotedLiteral(value: string): string { + return `'${value.replace(/'/g, `'\"'\"'`)}'`; +} + +function stripAnsi(value: string): string { + return value.replace(/[\u001B\u009B][[\]()#;?]*(?:(?:[a-zA-Z\d]*(?:;[a-zA-Z\d]*)*)?\u0007|(?:\d{1,4}(?:;\d{0,4})*)?[0-9A-ORZcf-nqry=><])/g, ""); +} + +async function ensureImage(docker: Docker, image: string): Promise { + if (process.env.SANDBOX_AGENT_DEV) { + console.log(" Building sandbox image from source (may take a while)..."); + try { + execFileSync("docker", ["build", "-t", image, "-f", path.join(REPO_ROOT, "docker/runtime/Dockerfile.full"), REPO_ROOT], { + stdio: ["ignore", "ignore", "pipe"], + }); + } catch (err: unknown) { + const stderr = err instanceof Error && "stderr" in err ? String((err as { stderr: unknown }).stderr) : ""; + throw new Error(`Failed to build sandbox image: ${stderr}`); + } + return; + } + + try { + await docker.getImage(image).inspect(); + } catch { + console.log(` Pulling ${image}...`); + await new Promise((resolve, reject) => { + docker.pull(image, (err: Error | null, stream: NodeJS.ReadableStream) => { + if (err) return reject(err); + docker.modem.followProgress(stream, (err: Error | null) => (err ? reject(err) : resolve())); + }); + }); + } +} + +/** + * Start a Docker container running sandbox-agent. + * Registers SIGINT/SIGTERM handlers for cleanup. + */ +export async function startDockerSandbox(opts: DockerSandboxOptions): Promise { + const { port, hostPort } = opts; + const image = opts.image ?? FULL_IMAGE; + // TODO: Replace setupCommands shell bootstrapping with native sandbox-agent exec API once available. + const setupCommands = [...(opts.setupCommands ?? 
[])]; + const credentialEnv = collectCredentialEnv(); + const claudeCredentialFiles = readClaudeCredentialFiles(); + const bootstrapEnv: Record = {}; + + if (claudeCredentialFiles.length > 0) { + delete credentialEnv.ANTHROPIC_API_KEY; + delete credentialEnv.CLAUDE_API_KEY; + delete credentialEnv.CLAUDE_CODE_OAUTH_TOKEN; + delete credentialEnv.ANTHROPIC_AUTH_TOKEN; + + const credentialBootstrapCommands = claudeCredentialFiles.flatMap((file, index) => { + const envKey = `SANDBOX_AGENT_CLAUDE_CREDENTIAL_${index}_B64`; + bootstrapEnv[envKey] = file.base64Content; + // Use $HOME-relative paths so credentials work regardless of container user + const containerDir = path.posix.dirname(file.containerPath); + return [`mkdir -p "$HOME/${containerDir}"`, `printf %s "$${envKey}" | base64 -d > "$HOME/${file.containerPath}"`]; + }); + setupCommands.unshift(...credentialBootstrapCommands); + } + + for (const [key, value] of Object.entries(credentialEnv)) { + if (!process.env[key]) process.env[key] = value; + } + + const docker = new Docker({ socketPath: "/var/run/docker.sock" }); + + await ensureImage(docker, image); + + const bootCommands = [...setupCommands, `sandbox-agent server --no-token --host 0.0.0.0 --port ${port}`]; + + const container = await docker.createContainer({ + Image: image, + Entrypoint: ["/bin/sh", "-c"], + WorkingDir: "/home/sandbox", + Cmd: [bootCommands.join(" && ")], + Env: [...Object.entries(credentialEnv).map(([key, value]) => `${key}=${value}`), ...Object.entries(bootstrapEnv).map(([key, value]) => `${key}=${value}`)], + ExposedPorts: { [`${port}/tcp`]: {} }, + HostConfig: { + AutoRemove: true, + PortBindings: { [`${port}/tcp`]: [{ HostPort: hostPort ? 
`${hostPort}` : "0" }] }, + }, + }); + await container.start(); + + const logChunks: string[] = []; + const startupLogs = (await container.logs({ + follow: true, + stdout: true, + stderr: true, + since: 0, + })) as NodeJS.ReadableStream; + const stdoutStream = new PassThrough(); + const stderrStream = new PassThrough(); + stdoutStream.on("data", (chunk) => { + logChunks.push(stripAnsi(String(chunk))); + }); + stderrStream.on("data", (chunk) => { + logChunks.push(stripAnsi(String(chunk))); + }); + docker.modem.demuxStream(startupLogs, stdoutStream, stderrStream); + const stopStartupLogs = () => { + const stream = startupLogs as NodeJS.ReadableStream & { destroy?: () => void }; + try { + stream.destroy?.(); + } catch {} + }; + + const inspect = await container.inspect(); + const mappedPorts = inspect.NetworkSettings?.Ports?.[`${port}/tcp`]; + const mappedHostPort = mappedPorts?.[0]?.HostPort; + if (!mappedHostPort) { + throw new Error(`Failed to resolve mapped host port for container port ${port}`); + } + const baseUrl = `http://127.0.0.1:${mappedHostPort}`; + + stopStartupLogs(); + console.log(` Started (${baseUrl})`); + + const cleanup = async () => { + stopStartupLogs(); + try { + await container.stop({ t: 5 }); + } catch {} + try { + await container.remove({ force: true }); + } catch {} + }; + const signalCleanup = async () => { + await cleanup(); + process.exit(0); + }; + process.once("SIGINT", signalCleanup); + process.once("SIGTERM", signalCleanup); + + return { baseUrl, cleanup }; +} diff --git a/examples/shared/src/sandbox-agent-client.ts b/examples/shared/src/sandbox-agent-client.ts index 8258ee8..8efec19 100644 --- a/examples/shared/src/sandbox-agent-client.ts +++ b/examples/shared/src/sandbox-agent-client.ts @@ -3,12 +3,6 @@ * Provides minimal helpers for connecting to and interacting with sandbox-agent servers. 
*/ -import { createInterface } from "node:readline/promises"; -import { randomUUID } from "node:crypto"; -import { setTimeout as delay } from "node:timers/promises"; -import { SandboxAgent } from "sandbox-agent"; -import type { PermissionEventData, QuestionEventData } from "sandbox-agent"; - function normalizeBaseUrl(baseUrl: string): string { return baseUrl.replace(/\/+$/, ""); } @@ -27,10 +21,12 @@ export function buildInspectorUrl({ baseUrl, token, headers, + sessionId, }: { baseUrl: string; token?: string; headers?: Record; + sessionId?: string; }): string { const normalized = normalizeBaseUrl(ensureUrl(baseUrl)); const params = new URLSearchParams(); @@ -41,18 +37,11 @@ export function buildInspectorUrl({ params.set("headers", JSON.stringify(headers)); } const queryString = params.toString(); - return `${normalized}/ui/${queryString ? `?${queryString}` : ""}`; + const sessionPath = sessionId ? `sessions/${sessionId}` : ""; + return `${normalized}/ui/${sessionPath}${queryString ? `?${queryString}` : ""}`; } -export function logInspectorUrl({ - baseUrl, - token, - headers, -}: { - baseUrl: string; - token?: string; - headers?: Record; -}): void { +export function logInspectorUrl({ baseUrl, token, headers }: { baseUrl: string; token?: string; headers?: Record }): void { console.log(`Inspector: ${buildInspectorUrl({ baseUrl, token, headers })}`); } @@ -75,160 +64,35 @@ export function buildHeaders({ return headers; } -export async function waitForHealth({ - baseUrl, - token, - extraHeaders, - timeoutMs = 120_000, -}: { - baseUrl: string; - token?: string; - extraHeaders?: Record; - timeoutMs?: number; -}): Promise { - const normalized = normalizeBaseUrl(baseUrl); - const deadline = Date.now() + timeoutMs; - let lastError: unknown; - while (Date.now() < deadline) { - try { - const headers = buildHeaders({ token, extraHeaders }); - const response = await fetch(`${normalized}/v1/health`, { headers }); - if (response.ok) { - const data = await response.json(); - if 
(data?.status === "ok") { - return; - } - lastError = new Error(`Unexpected health response: ${JSON.stringify(data)}`); - } else { - lastError = new Error(`Health check failed: ${response.status}`); - } - } catch (error) { - lastError = error; - } - await delay(500); +export function generateSessionId(): string { + const chars = "abcdefghijklmnopqrstuvwxyz0123456789"; + let id = "session-"; + for (let i = 0; i < 8; i++) { + id += chars[Math.floor(Math.random() * chars.length)]; } - throw (lastError ?? new Error("Timed out waiting for /v1/health")) as Error; + return id; } -function detectAgent(): string { +export function detectAgent(): string { if (process.env.SANDBOX_AGENT) return process.env.SANDBOX_AGENT; - if (process.env.ANTHROPIC_API_KEY) return "claude"; - if (process.env.OPENAI_API_KEY) return "codex"; + const hasClaude = Boolean( + process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY || process.env.CLAUDE_CODE_OAUTH_TOKEN || process.env.ANTHROPIC_AUTH_TOKEN, + ); + const openAiLikeKey = process.env.OPENAI_API_KEY || process.env.CODEX_API_KEY || ""; + const hasCodexApiKey = openAiLikeKey.startsWith("sk-"); + if (hasCodexApiKey && hasClaude) { + console.log("Both Claude and Codex API keys detected; defaulting to codex. 
Set SANDBOX_AGENT to override."); + return "codex"; + } + if (!hasCodexApiKey && openAiLikeKey) { + console.log("OpenAI/Codex credential is not an API key (expected sk-...), skipping codex auto-select."); + } + if (hasCodexApiKey) return "codex"; + if (hasClaude) { + if (openAiLikeKey && !hasCodexApiKey) { + console.log("Using claude by default."); + } + return "claude"; + } return "claude"; } - -export async function runPrompt(baseUrl: string): Promise { - console.log(`UI: ${buildInspectorUrl({ baseUrl })}`); - - const client = await SandboxAgent.connect({ baseUrl }); - - const agent = detectAgent(); - console.log(`Using agent: ${agent}`); - const sessionId = randomUUID(); - await client.createSession(sessionId, { agent }); - console.log(`Session ${sessionId}. Press Ctrl+C to quit.`); - - const rl = createInterface({ input: process.stdin, output: process.stdout }); - - let isThinking = false; - let hasStartedOutput = false; - let turnResolve: (() => void) | null = null; - let sessionEnded = false; - - const processEvents = async () => { - for await (const event of client.streamEvents(sessionId)) { - if (event.type === "item.started") { - const item = (event.data as any)?.item; - if (item?.role === "assistant") { - isThinking = true; - hasStartedOutput = false; - process.stdout.write("Thinking..."); - } - } - - if (event.type === "item.delta" && isThinking) { - const delta = (event.data as any)?.delta; - if (delta) { - if (!hasStartedOutput) { - process.stdout.write("\r\x1b[K"); - hasStartedOutput = true; - } - const text = typeof delta === "string" ? delta : delta.type === "text" ? 
delta.text || "" : ""; - if (text) process.stdout.write(text); - } - } - - if (event.type === "item.completed") { - const item = (event.data as any)?.item; - if (item?.role === "assistant") { - isThinking = false; - process.stdout.write("\n"); - turnResolve?.(); - turnResolve = null; - } - } - - if (event.type === "permission.requested") { - const data = event.data as PermissionEventData; - if (isThinking && !hasStartedOutput) { - process.stdout.write("\r\x1b[K"); - } - console.log(`[Auto-approved] ${data.action}`); - await client.replyPermission(sessionId, data.permission_id, { reply: "once" }); - } - - if (event.type === "question.requested") { - const data = event.data as QuestionEventData; - if (isThinking && !hasStartedOutput) { - process.stdout.write("\r\x1b[K"); - } - console.log(`[Question rejected] ${data.prompt}`); - await client.rejectQuestion(sessionId, data.question_id); - } - - if (event.type === "error") { - const data = event.data as any; - console.error(`\nError: ${data?.message || JSON.stringify(data)}`); - } - - if (event.type === "session.ended") { - const data = event.data as any; - const reason = data?.reason || "unknown"; - if (reason === "error") { - console.error(`\nAgent exited with error: ${data?.message || ""}`); - if (data?.exit_code !== undefined) { - console.error(` Exit code: ${data.exit_code}`); - } - } else { - console.log(`Agent session ${reason}`); - } - sessionEnded = true; - turnResolve?.(); - turnResolve = null; - } - } - }; - - processEvents().catch((err) => { - if (!sessionEnded) { - console.error("Event stream error:", err instanceof Error ? err.message : err); - } - }); - - while (true) { - const line = await rl.question("> "); - if (!line.trim()) continue; - - const turnComplete = new Promise((resolve) => { - turnResolve = resolve; - }); - - try { - await client.postMessage(sessionId, { message: line.trim() }); - await turnComplete; - } catch (error) { - console.error(error instanceof Error ? 
error.message : error); - } - } -} diff --git a/examples/skills-custom-tool/SKILL.md new file mode 100644 index 0000000..67afa25 --- /dev/null +++ b/examples/skills-custom-tool/SKILL.md @@ -0,0 +1,12 @@ +--- +name: random-number +description: Generate a random integer between min and max (inclusive). Use when the user asks for a random number. +--- + +To generate a random number, run: + +```bash +node /opt/skills/random-number/random-number.cjs <min> <max> +``` + +This prints a single random integer between min and max (inclusive). diff --git a/examples/skills-custom-tool/package.json new file mode 100644 index 0000000..7edf635 --- /dev/null +++ b/examples/skills-custom-tool/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-skills-custom-tool", + "private": true, + "type": "module", + "scripts": { + "build:script": "esbuild src/random-number.ts --bundle --format=cjs --platform=node --target=node18 --minify --outfile=dist/random-number.cjs", + "start": "pnpm build:script && tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "esbuild": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/skills-custom-tool/src/index.ts new file mode 100644 index 0000000..490be64 --- /dev/null +++ b/examples/skills-custom-tool/src/index.ts @@ -0,0 +1,49 @@ +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + +// Verify the bundled 
script exists (built by `pnpm build:script`). +const scriptFile = path.resolve(__dirname, "../dist/random-number.cjs"); +if (!fs.existsSync(scriptFile)) { + console.error("Error: dist/random-number.cjs not found. Run `pnpm build:script` first."); + process.exit(1); +} + +// Start a Docker container running sandbox-agent. +console.log("Starting sandbox..."); +const { baseUrl, cleanup } = await startDockerSandbox({ port: 3005 }); + +// Upload the bundled script and SKILL.md into the sandbox filesystem. +console.log("Uploading script and skill file..."); +const client = await SandboxAgent.connect({ baseUrl }); + +const script = await fs.promises.readFile(scriptFile); +const scriptResult = await client.writeFsFile({ path: "/opt/skills/random-number/random-number.cjs" }, script); +console.log(` Script: ${scriptResult.path} (${scriptResult.bytesWritten} bytes)`); + +const skillMd = await fs.promises.readFile(path.resolve(__dirname, "../SKILL.md")); +const skillResult = await client.writeFsFile({ path: "/opt/skills/random-number/SKILL.md" }, skillMd); +console.log(` Skill: ${skillResult.path} (${skillResult.bytesWritten} bytes)`); + +// Configure the uploaded skill. +console.log("Configuring custom skill..."); +await client.setSkillsConfig({ directory: "/", skillName: "random-number" }, { sources: [{ type: "local", source: "/opt/skills/random-number" }] }); + +// Create a session. 
+console.log("Creating session with custom skill..."); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); +const sessionId = session.id; +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(' Try: "generate a random number between 1 and 100"'); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +process.on("SIGINT", () => { + clearInterval(keepAlive); + cleanup().then(() => process.exit(0)); +}); diff --git a/examples/skills-custom-tool/src/random-number.ts b/examples/skills-custom-tool/src/random-number.ts new file mode 100644 index 0000000..2b3d758 --- /dev/null +++ b/examples/skills-custom-tool/src/random-number.ts @@ -0,0 +1,9 @@ +const min = Number(process.argv[2]); +const max = Number(process.argv[3]); + +if (Number.isNaN(min) || Number.isNaN(max)) { + console.error("Usage: random-number "); + process.exit(1); +} + +console.log(Math.floor(Math.random() * (max - min + 1)) + min); diff --git a/examples/skills-custom-tool/tsconfig.json b/examples/skills-custom-tool/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/skills-custom-tool/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/skills/package.json b/examples/skills/package.json new file mode 100644 index 0000000..65829dc --- /dev/null +++ b/examples/skills/package.json @@ -0,0 +1,18 @@ +{ + "name": "@sandbox-agent/example-skills", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + 
"@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest" + } +} diff --git a/examples/skills/src/index.ts b/examples/skills/src/index.ts new file mode 100644 index 0000000..3087ecc --- /dev/null +++ b/examples/skills/src/index.ts @@ -0,0 +1,28 @@ +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; + +console.log("Starting sandbox..."); +const { baseUrl, cleanup } = await startDockerSandbox({ + port: 3001, +}); + +console.log("Configuring skill source..."); +const client = await SandboxAgent.connect({ baseUrl }); +await client.setSkillsConfig( + { directory: "/", skillName: "rivet-dev-skills" }, + { sources: [{ type: "github", source: "rivet-dev/skills", skills: ["sandbox-agent"] }] }, +); + +console.log("Creating session..."); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); +const sessionId = session.id; +console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); +console.log(' Try: "How do I start sandbox-agent?"'); +console.log(" Press Ctrl+C to stop."); + +const keepAlive = setInterval(() => {}, 60_000); +process.on("SIGINT", () => { + clearInterval(keepAlive); + cleanup().then(() => process.exit(0)); +}); diff --git a/examples/skills/tsconfig.json b/examples/skills/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/skills/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", 
"**/*.test.ts"] +} diff --git a/examples/sprites/package.json b/examples/sprites/package.json new file mode 100644 index 0000000..df808e8 --- /dev/null +++ b/examples/sprites/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-sprites", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/index.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "@fly/sprites": "latest", + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest", + "vitest": "^3.0.0" + } +} diff --git a/examples/sprites/src/index.ts b/examples/sprites/src/index.ts new file mode 100644 index 0000000..bf95e5d --- /dev/null +++ b/examples/sprites/src/index.ts @@ -0,0 +1,21 @@ +import { SandboxAgent } from "sandbox-agent"; +import { sprites } from "sandbox-agent/sprites"; + +const env: Record = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: sprites({ + token: process.env.SPRITES_API_KEY ?? process.env.SPRITE_TOKEN ?? 
process.env.SPRITES_TOKEN, + env, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); +console.log(await client.getHealth()); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/sprites/tests/sprites.test.ts b/examples/sprites/tests/sprites.test.ts new file mode 100644 index 0000000..dfd1594 --- /dev/null +++ b/examples/sprites/tests/sprites.test.ts @@ -0,0 +1,34 @@ +import { describe, it, expect } from "vitest"; +import { SandboxAgent } from "sandbox-agent"; +import { sprites } from "sandbox-agent/sprites"; + +const shouldRun = Boolean(process.env.SPRITES_API_KEY || process.env.SPRITE_TOKEN || process.env.SPRITES_TOKEN); +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; + +const testFn = shouldRun ? it : it.skip; + +describe("sprites provider", () => { + testFn( + "starts sandbox-agent and responds to /v1/health", + async () => { + const env: Record = {}; + if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + const sdk = await SandboxAgent.start({ + sandbox: sprites({ + token: process.env.SPRITES_API_KEY ?? process.env.SPRITE_TOKEN ?? 
process.env.SPRITES_TOKEN, + env, + }), + }); + + try { + const health = await sdk.getHealth(); + expect(health.status).toBe("ok"); + } finally { + await sdk.destroySandbox(); + } + }, + timeoutMs, + ); +}); diff --git a/examples/sprites/tsconfig.json b/examples/sprites/tsconfig.json new file mode 100644 index 0000000..ad591c3 --- /dev/null +++ b/examples/sprites/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "types": ["node"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/examples/vercel/package.json b/examples/vercel/package.json index 9f0569d..a193a36 100644 --- a/examples/vercel/package.json +++ b/examples/vercel/package.json @@ -3,7 +3,7 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/vercel.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { diff --git a/examples/vercel/src/index.ts b/examples/vercel/src/index.ts new file mode 100644 index 0000000..5a83e0c --- /dev/null +++ b/examples/vercel/src/index.ts @@ -0,0 +1,33 @@ +import { SandboxAgent } from "sandbox-agent"; +import { vercel } from "sandbox-agent/vercel"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const env: Record = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: "node24", + env, + }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, 
JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from Vercel in one sentence." }]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/vercel/src/vercel.ts b/examples/vercel/src/vercel.ts index ed2d836..742cd5a 100644 --- a/examples/vercel/src/vercel.ts +++ b/examples/vercel/src/vercel.ts @@ -1,51 +1,35 @@ -import { Sandbox } from "@vercel/sandbox"; -import { runPrompt, waitForHealth } from "@sandbox-agent/example-shared"; +import { SandboxAgent } from "sandbox-agent"; +import { vercel } from "sandbox-agent/vercel"; -const envs: Record = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +function collectEnvVars(): Record { + const env: Record = {}; + if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return env; +} -console.log("Creating Vercel sandbox..."); -const sandbox = await Sandbox.create({ - runtime: "node24", - ports: [3000], -}); +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} -const run = async (cmd: string, args: string[] = []) => { - const result = await sandbox.runCommand({ cmd, args, env: envs }); - if (result.exitCode !== 0) { - const stderr = await result.stderr(); - throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${stderr}`); - } - return result; -}; +export async function setupVercelSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + cleanup: () => Promise; +}> { + const client = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: "node24", + env: collectEnvVars(), + }, + }), + }); -console.log("Installing sandbox-agent..."); -await run("sh", ["-c", "curl -fsSL 
https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh"]); - -console.log("Installing agents..."); -await run("sandbox-agent", ["install-agent", "claude"]); -await run("sandbox-agent", ["install-agent", "codex"]); - -console.log("Starting server..."); -await sandbox.runCommand({ - cmd: "sandbox-agent", - args: ["server", "--no-token", "--host", "0.0.0.0", "--port", "3000"], - env: envs, - detached: true, -}); - -const baseUrl = sandbox.domain(3000); - -console.log("Waiting for server..."); -await waitForHealth({ baseUrl }); - -const cleanup = async () => { - await sandbox.stop(); - process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); - -await runPrompt(baseUrl); -await cleanup(); + return { + baseUrl: inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/examples/vercel/tests/vercel.test.ts b/examples/vercel/tests/vercel.test.ts index dd6de67..40b8aa8 100644 --- a/examples/vercel/tests/vercel.test.ts +++ b/examples/vercel/tests/vercel.test.ts @@ -23,6 +23,6 @@ describe("vercel example", () => { await cleanup(); } }, - timeoutMs + timeoutMs, ); }); diff --git a/examples/vercel/tsconfig.json b/examples/vercel/tsconfig.json index ad92bfa..ad591c3 100644 --- a/examples/vercel/tsconfig.json +++ b/examples/vercel/tsconfig.json @@ -1,10 +1,7 @@ { "compilerOptions": { "target": "ES2022", - "lib": [ - "ES2022", - "DOM" - ], + "lib": ["ES2022", "DOM"], "module": "ESNext", "moduleResolution": "Bundler", "allowImportingTsExtensions": true, @@ -12,13 +9,9 @@ "esModuleInterop": true, "strict": true, "skipLibCheck": true, - "resolveJsonModule": true + "resolveJsonModule": true, + "types": ["node"] }, - "include": [ - "src/**/*" - ], - "exclude": [ - "node_modules", - "**/*.test.ts" - ] + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] } diff --git a/foundry/AGENT-HANDOFF.md b/foundry/AGENT-HANDOFF.md new file mode 100644 index 
0000000..20bade7 --- /dev/null +++ b/foundry/AGENT-HANDOFF.md @@ -0,0 +1,179 @@ +# Foundry Agent Handoff + +## Baseline + +- Repo: `rivet-dev/sandbox-agent` +- Branch: `columbus-v2` +- Last pushed commit: `3174fe73` (`feat(foundry): checkpoint actor and workspace refactor`) +- Progress/spec tracker: [FOUNDRY-CHANGES.md](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/FOUNDRY-CHANGES.md) + +## What is already landed + +These spec slices are already implemented and pushed: + +- Item `1`: backend actor rename `auth-user` -> `user` +- Item `2`: Better Auth mapping comments +- Item `5`: task raw SQL cleanup into migrations +- Item `6`: `history` -> `audit-log` +- Item `7`: default model moved to user-scoped app state +- Item `20`: admin action prefixing +- Item `23`: dead `getTaskEnriched` / `enrichTaskRecord` removal +- Item `25`: `Workbench` -> `Workspace` rename across backend/shared/client/frontend +- Item `26`: branch rename deleted +- Organization realtime was already collapsed to full-snapshot `organizationUpdated` +- Task realtime was already aligned to `taskUpdated` + +## Known blocker + +Spec item `3` is only partially done. The singleton constraint for the Better Auth `user` table is still blocked. + +- File: [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) +- Reason: Better Auth still depends on external string `user.id`, so a literal singleton `CHECK (id = 1)` on that table is not a safe mechanical change. + +## Important current state + +There are uncommitted edits on top of the pushed checkpoint. Another agent should start from the current worktree, not just `origin/columbus-v2`. 
+ +Current dirty files: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/client/src/mock/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/mock/backend-client.ts) + +These files are the current hot path for the unfinished structural work. + +## What is partially in place but not finished + +### User-owned task UI state + +The user actor already has the schema and CRUD surface for per-user task/session UI state: + +- [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) + `user_task_state` +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + `getTaskState`, `upsertTaskState`, `deleteTaskState` + +But the task actor and UI are still reading/writing the old task-global fields: + +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) + still contains `task_runtime.active_session_id` and session `unread` / `draft_*` +- 
[foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) + still derives unread/draft/active-session from task-local rows +- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx) + still treats `activeSessionId` as frontend-local and uses task-level unread/draft state + +So items `21`, `22`, `24`, and part of `19` are only half-done. + +### Coordinator ownership + +The current architecture still violates the intended coordinator pattern: + +- Organization still owns `taskLookup` and `taskSummaries` + - [foundry/packages/backend/src/actors/organization/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/db/schema.ts) +- Organization still resolves `taskId -> repoId` + - [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- Task still pushes summary updates to organization instead of repository + - [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- Repository still does not own a `tasks` projection table yet + - [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) + +So items `9`, `13`, and `15` are still open. + +### Queue-only mutations + +Task actor workspace commands already go through queue sends. 
Other actors still do not fully follow the queue-only mutation rule: + +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/organization/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/app-shell.ts) + +So items `4`, `10`, and `11` are still open. + +### Dynamic model/agent data + +The frontend/client still hardcode model groups: + +- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts) +- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts) +- [foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts) + `WorkspaceModelId` is still a hardcoded union + +The repo already has the API source of truth available through the TypeScript SDK: + +- [sdks/typescript/src/client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/sdks/typescript/src/client.ts) + `SandboxAgent.listAgents({ config: true })` +- [server/packages/sandbox-agent/src/router.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router.rs) + `/v1/agents` +- 
[server/packages/sandbox-agent/src/router/support.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router/support.rs) + `fallback_config_options` + +So item `8` is still open. + +### GitHub sync chunking/progress + +GitHub data sync is still a delete-and-replace flow: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) + `replaceRepositories`, `replaceBranches`, `replaceMembers`, `replacePullRequests`, and full-sync flow +- [foundry/packages/backend/src/actors/github-data/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/db/schema.ts) + no generation/progress columns yet +- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts) + no structured sync progress field yet + +So item `16` is still open. + +## Recommended next order + +If another agent picks this up, this is the safest order: + +1. Finish items `21`, `22`, `24`, `19` together. + Reason: user-owned task UI state is already half-wired, and task schema cleanup depends on the same files. + +2. Finish items `9`, `13`, `15` together. + Reason: coordinator ownership, repo-owned task projections, and PR/task unification are the same refactor seam. + +3. Finish item `16`. + Reason: GitHub sync chunking is mostly isolated to `github-data` plus app-shell/shared snapshot wiring. + +4. Finish item `8`. + Reason: dynamic model/agent data is largely independent once user default model is already user-scoped. + +5. Finish items `4`, `10`, `11`, `12`, `18`, final event audit. + +6. Do item `17` last. 
+ +## Concrete file hotspots for the next agent + +Backend: + +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) +- [foundry/packages/backend/src/actors/task/workflow/common.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/common.ts) +- [foundry/packages/backend/src/actors/task/workflow/commands.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/commands.ts) +- [foundry/packages/backend/src/actors/task/workflow/init.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/init.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + +Shared/client/frontend: + +- 
[foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts) +- [foundry/packages/shared/src/contracts.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/contracts.ts) +- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts) +- [foundry/packages/client/src/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/backend-client.ts) +- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts) +- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx) +- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts) +- [foundry/packages/frontend/src/features/tasks/status.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/features/tasks/status.ts) + +## Notes that matter + +- The pushed checkpoint is useful, but it is not the full current state. There are uncommitted edits in the hot-path backend files listed above. +- The current tree already contains a partially added `user_task_state` path. Do not duplicate that work; finish the migration by removing the old task-owned fields and rewiring readers/writers. +- The current task actor still reads mutable fields from `c.state` such as `repoRemote`, `branchName`, `title`, `task`, `sandboxProviderId`, and `agentType`. That is part of item `19`. +- The current frontend still synthesizes PR-only rows into fake tasks. 
That should go away as part of repo-owned task projection / PR unification. diff --git a/foundry/AGENTS.md b/foundry/AGENTS.md new file mode 120000 index 0000000..681311e --- /dev/null +++ b/foundry/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md new file mode 100644 index 0000000..2d9bcbb --- /dev/null +++ b/foundry/CLAUDE.md @@ -0,0 +1,391 @@ +# Project Instructions + +## Language Policy + +Use TypeScript for all source code. + +- Never add raw JavaScript source files (`.js`, `.mjs`, `.cjs`). +- Prefer `.ts`/`.tsx` for runtime code, scripts, tests, and tooling. +- If touching old JavaScript, migrate it to TypeScript instead of extending it. + +## Monorepo + Tooling + +Use `pnpm` workspaces and Turborepo. + +- Repository root uses `pnpm-workspace.yaml` and `turbo.json`. +- Packages live in `packages/*`. +- `core` is renamed to `shared`. +- `packages/cli` is disabled and excluded from active monorepo validation. +- Integrations and providers live under `packages/backend/src/{integrations,providers}`. + +## CLI Status + +- `packages/cli` is fully disabled for active development. +- Do not implement new behavior in `packages/cli` unless explicitly requested. +- Frontend is the primary product surface; prioritize `packages/frontend` + supporting `packages/client`/`packages/backend`. +- Monorepo `build`, `typecheck`, and `test` intentionally exclude `@sandbox-agent/foundry-cli`. +- `pnpm-workspace.yaml` excludes `packages/cli` from monorepo package resolution. + +## Common Commands + +- Foundry is the canonical name for this product tree. Do not introduce or preserve legacy pre-Foundry naming in code, docs, commands, or runtime paths. 
+- Install deps: `pnpm install` +- Full active-monorepo validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` +- Start the full dev stack (real backend + frontend): `just foundry-dev` — frontend on **port 4173**, backend on **port 7741** (Docker via `compose.dev.yaml`) +- Start the mock frontend stack (no backend): `just foundry-mock` — mock frontend on **port 4174** (Docker via `compose.mock.yaml`) +- Start the local production-build preview stack: `just foundry-preview` +- Start only the backend locally: `just foundry-backend-start` +- Start only the frontend locally: `pnpm --filter @sandbox-agent/foundry-frontend dev` +- Start the mock frontend locally (no Docker): `just foundry-dev-mock` — mock frontend on **port 4174** +- Dev and mock stacks can run simultaneously on different ports (4173 and 4174). +- Stop the compose dev stack: `just foundry-dev-down` +- Tail compose dev logs: `just foundry-dev-logs` +- Stop the mock stack: `just foundry-mock-down` +- Tail mock logs: `just foundry-mock-logs` +- Stop the preview stack: `just foundry-preview-down` +- Tail preview logs: `just foundry-preview-logs` + +## Dev Environment Setup + +- `compose.dev.yaml` loads `foundry/.env` (optional) for credentials needed by the backend (GitHub OAuth, Stripe, Daytona, API keys, etc.). +- The canonical source for these credentials is `~/misc/the-foundry.env`. If `foundry/.env` does not exist, copy it: `cp ~/misc/the-foundry.env foundry/.env` +- `foundry/.env` is gitignored and must never be committed. +- If your changes affect the dev server, mock server, frontend runtime, backend runtime, Vite wiring, compose files, or other server-startup/runtime behavior, you must start or restart the relevant stack before finishing the task. 
+- Use the matching stack for verification: + - real backend + frontend changes: `just foundry-dev` or restart with `just foundry-dev-down && just foundry-dev` + - mock frontend changes: `just foundry-mock` or restart with `just foundry-mock-down && just foundry-mock` + - local frontend-only work outside Docker: restart `pnpm --filter @sandbox-agent/foundry-frontend dev` or `just foundry-dev-mock` as appropriate +- The backend does **not** hot reload. Bun's `--hot` flag causes the server to re-bind on a different port (e.g. 6421 instead of 6420), breaking all client connections while the container still exposes the original port. After backend code changes, restart the backend container: `just foundry-dev-down && just foundry-dev`. +- The dev server has debug logging enabled by default (`RIVET_LOG_LEVEL=debug`, `FOUNDRY_LOG_LEVEL=debug`) via `compose.dev.yaml`. Error stacks and timestamps are also enabled. +- The frontend client uses JSON encoding for RivetKit in development (`import.meta.env.DEV`) for easier debugging. Production uses the default encoding. + +## Foundry Base Sandbox Image + +Local Docker sandboxes use the `rivetdev/sandbox-agent:foundry-base-latest` image by default. This image extends the sandbox-agent runtime with sudo, git, neovim, gh, node, bun, chromium, and agent-browser. + +- **Dockerfile:** `docker/foundry-base.Dockerfile` (builds sandbox-agent from source, x86_64 only) +- **Publish script:** `scripts/publish-foundry-base.sh` (builds and pushes to Docker Hub `rivetdev/sandbox-agent`) +- **Tags:** `foundry-base-TZ` (timestamped) + `foundry-base-latest` (rolling) +- **Build from repo root:** `./foundry/scripts/publish-foundry-base.sh` (or `--dry-run` to skip push) +- **Override image in dev:** set `HF_LOCAL_SANDBOX_IMAGE` in `foundry/.env` or environment. The env var is passed through `compose.dev.yaml` to the backend. 
+- **Resolution order:** `config.sandboxProviders.local.image` (config.toml) > `HF_LOCAL_SANDBOX_IMAGE` (env var) > `DEFAULT_LOCAL_SANDBOX_IMAGE` constant in `packages/backend/src/actors/sandbox/index.ts`. +- The image must be built with `--platform linux/amd64`. The Rust build is memory-intensive; Docker Desktop needs at least 8GB RAM allocated. +- When updating the base image contents (new system packages, agent versions), rebuild and push with the publish script, then update the `foundry-base-latest` tag. + +## Production GitHub App + OAuth App + +Foundry uses two separate GitHub entities in production: + +- **OAuth App** (`GITHUB_CLIENT_ID` / `GITHUB_CLIENT_SECRET`) — handles "Sign in with GitHub" via Better Auth. This is a standard OAuth App. +- **GitHub App** (`GITHUB_APP_ID` / `GITHUB_APP_CLIENT_ID` / `GITHUB_APP_CLIENT_SECRET` / `GITHUB_APP_PRIVATE_KEY`) — handles webhooks, installation tokens for repo access, and GitHub API sync (repos, PRs). Must be manually installed on each org. + +Key env vars and where they connect: + +- `GITHUB_REDIRECT_URI` — OAuth callback, must point to `https://api.sandboxagent.dev/v1/auth/callback/github` +- `GITHUB_WEBHOOK_SECRET` — must match the secret configured on the GitHub App's Webhook settings page exactly. Mismatches cause silent 500s on webhook delivery (signature verification fails inside the actor, surfaced as a generic RivetKit `internal_error`). +- `BETTER_AUTH_URL` — must be the **API** URL (`https://api.sandboxagent.dev`), not the frontend URL. Better Auth uses this internally for sign-out and session management calls. +- `APP_URL` — the **frontend** URL (`https://foundry.sandboxagent.dev`). + +Troubleshooting: + +- **"GitHub App not installed"** — The GitHub App must be manually installed on each org. Sign-in does not auto-install it. Go to the GitHub App settings → Install App tab. The sign-in flow can only detect existing installations, not create them. 
+- **Webhooks not arriving** — Check the GitHub App → Advanced tab for delivery history. If deliveries show 500, the webhook secret likely doesn't match `GITHUB_WEBHOOK_SECRET`. Test with: `echo -n '{"test":true}' | openssl dgst -sha256 -hmac "$SECRET"` and curl the endpoint with the computed signature.
+- **Deleting all actors wipes GitHub App installation state.** After a full actor reset, you must trigger a webhook (e.g. redeliver from GitHub App Advanced tab, or re-install the app) to repopulate installation records.
+
+## Railway Logs
+
+- Production Foundry Railway logs can be read from a linked checkout with `railway logs --deployment --lines 200`.
+- Production deploys should go through `git push` to the deployment branch/workflow. Do not use `railway up` for Foundry deploys.
+- If Railway logs fail because the checkout is not linked to the correct Railway project/service/environment, run:
+ `railway link --project 33e3e2df-32c5-41c5-a4af-dca8654acb1d --environment cf387142-61fd-4668-8cf7-b3559e0983cb --service 91c7e450-d6d2-481a-b2a4-0a916f4160fc`
+- That links this directory to the `sandbox-agent` project, `production` environment, and `foundry-api` service.
+- Production proxy chain: `api.sandboxagent.dev` routes through Cloudflare → Fastly/Varnish → Railway. When debugging request duplication, timeouts, or retry behavior, check headers like `cf-ray`, `x-varnish`, `x-railway-edge`, and `cdn-loop` to identify which layer is involved.
+
+## Frontend + Client Boundary
+
+- Keep a browser-friendly GUI implementation aligned with the TUI interaction model wherever possible.
+- Do not import `rivetkit` directly in CLI or GUI packages. RivetKit client access must stay isolated inside `packages/client`.
+- All backend interaction (actor calls, metadata/health checks, backend HTTP endpoint access) must go through the dedicated client library in `packages/client`. 
+- Outside `packages/client`, do not call backend endpoints directly (for example `fetch(.../v1/rivet...)`), except in black-box E2E tests that intentionally exercise raw transport behavior. +- GUI state should update in realtime (no manual refresh buttons). Prefer RivetKit push reactivity and actor-driven events; do not add polling/refetch for normal product flows. +- Keep the mock workspace types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up. +- Keep frontend route/state coverage current in code and tests; there is no separate page-inventory doc to maintain. +- If Foundry uses a shared component from `@sandbox-agent/react`, make changes in `sdks/react` instead of copying or forking that component into Foundry. +- When changing shared React components in `sdks/react` for Foundry, verify they still work in the Sandbox Agent Inspector before finishing. +- When making UI changes, verify the live flow with the Chrome DevTools MCP or `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish. +- When asked for screenshots, capture all relevant affected screens and modal states, not just a single viewport. Include empty, populated, success, and blocked/error states when they are part of the changed flow. +- If a screenshot catches a transition frame, blank modal, or otherwise misleading state, retake it before reporting it. +- When verifying UI in the browser, attempt to sign in by navigating to `/signin` and clicking "Continue with GitHub". If the browser lands on the GitHub login page (github.com/login) and you don't have credentials, stop and ask the user to complete the sign-in. Do not assume the session is invalid just because you see the Foundry sign-in page — always attempt the OAuth flow first. 
+ +## Realtime Data Architecture + +### Core pattern: fetch initial state + subscribe to deltas + +All client data flows follow the same pattern: + +1. **Connect** to the actor via WebSocket. +2. **Fetch initial state** via an action call to get the current materialized snapshot. +3. **Subscribe to events** on the connection. Events carry **full replacement payloads** for the changed entity (not empty notifications, not patches — the complete new state of the thing that changed). +4. **Unsubscribe** after a 30-second grace period when interest ends (screen navigation, component unmount). The grace period prevents thrashing during screen transitions and React double-renders. + +Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or full-snapshot re-fetches on every mutation. Every mutation broadcasts the new absolute state of the changed entity to connected clients. + +### Materialized state in coordinator actors + +- **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors. +- **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it. +- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. 
Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). +- There is no fan-out on the read path. The organization actor owns all task summaries locally. + +### Subscription manager + +The subscription manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It: + +- **Deduplicates** — multiple subscribers to the same topic share one connection and one cached state. +- **Grace period (30s)** — when the last subscriber leaves, the connection and state stay alive for 30 seconds before teardown. This keeps data warm for back-navigation and prevents thrashing. +- **Exposes a single hook** — `useSubscription(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional subscription). +- **Shared harness, separate implementations** — the `SubscriptionManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both. 
+ +### Topics + +Each topic maps to one actor connection and one event stream: + +| Topic | Actor | Event | Data | +|---|---|---|---| +| `app` | Organization `"app"` | `appUpdated` | Auth, orgs, onboarding | +| `organization` | Organization `{organizationId}` | `organizationUpdated` | Repo catalog, task summaries, repo summaries | +| `task` | Task `{organizationId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree | +| `session` | Task `{organizationId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state | +| `sandboxProcesses` | SandboxInstance | `processesUpdated` | Process list | + +The client subscribes to `app` always, `organization` when entering an organization, `task` when viewing a task, and `session` when viewing a specific session. At most 4 actor connections at a time (app + organization + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID. + +### Rules + +- Do not add `useQuery` with `refetchInterval` for data that should be push-based. +- Do not broadcast empty notification events. Events must carry the full new state of the changed entity. +- Do not re-fetch full snapshots after mutations. The mutation triggers a server-side broadcast with the new entity state; the client replaces it in local state. +- All event subscriptions go through the subscription manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns. +- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent organization actor, which broadcasts to organization subscribers. +- Comment architecture-related code: add doc comments explaining the materialized state pattern, why deltas flow the way they do, and the relationship between parent/child actor broadcasts. New contributors should understand the data flow from comments alone. 
+ +## Sandbox Architecture + +- Structurally, the system supports multiple sandboxes per task, but in practice there is exactly one active sandbox per task. Design features assuming one sandbox per task. If multi-sandbox is needed in the future, extend at that time. +- Each task has a **primary user** (owner) whose GitHub OAuth credentials are injected into the sandbox for git operations. The owner swaps when a different user sends a message. See `.context/proposal-task-owner-git-auth.md` for the full design. +- **Security: OAuth token scope.** The user's GitHub OAuth token has `repo` scope, granting full control of all private repositories the user has access to. When the user is the active task owner, their token is injected into the sandbox. This means the agent can read/write ANY repo the user has access to, not just the task's target repo. This is the standard trade-off for OAuth-based git integrations (same as GitHub Codespaces, Gitpod). The user consents to `repo` scope at sign-in time. Credential files in the sandbox are `chmod 600` and overwritten on owner swap. +- All git operations in the sandbox must be auto-authenticated. Never configure git to prompt for credentials (no interactive `GIT_ASKPASS` prompts). Use a credential store file that is pre-populated with the active owner's token. +- All git operation errors (push 401, clone failure, branch protection rejection) must surface in the UI with actionable context. Never silently swallow git errors. + +## Git State Policy + +- The backend stores zero git state. No local clones, no refs, no working trees, and no git-spice. +- Repository metadata (branches, default branch, pull requests) comes from GitHub API data and webhook events already flowing into the system. +- All git operations that require a working tree run inside the task's sandbox via `executeInSandbox()`. +- Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. 
If you need git data, either read stored GitHub metadata or run the command inside a sandbox. +- The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. + +## React Hook Dependency Safety + +- **Never use unstable references as `useEffect`/`useMemo`/`useCallback` dependencies.** React compares dependencies by reference, not value. Expressions like `?? []`, `?? {}`, `.map(...)`, `.filter(...)`, or object/array literals create new references every render, causing infinite re-render loops when used as dependencies. +- If the upstream value may be `undefined`/`null` and you need a fallback, either: + - Use the raw upstream value as the dependency and apply the fallback inside the effect body: `useEffect(() => { doThing(value ?? []); }, [value]);` + - Derive a stable primitive key: `const key = JSON.stringify(value ?? []);` then depend on `key` + - Memoize: `const stable = useMemo(() => value ?? [], [value]);` +- When reviewing code, treat any `?? []`, `?? {}`, or inline `.map()/.filter()` in a dependency array as a bug. + +## UI System + +- Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation. +- The full `BaseUI` reference for available components and guidance on animations, customization, composition, and forms is at `https://base-ui.com/llms.txt`. +- Prefer existing `BaseUI` components and composition patterns whenever possible instead of building custom controls from scratch. +- Reuse the established Foundry theme/token layer for colors, typography, spacing, and surfaces instead of introducing ad hoc visual values. +- If the same UI pattern is shared with the Inspector or other consumers, prefer extracting or reusing it through `@sandbox-agent/react` rather than duplicating it in Foundry. 
+- If a requested UI cannot be implemented cleanly with an existing `BaseUI` component, stop and ask the user whether they are sure they want to diverge from the system. +- In that case, recommend the closest existing `BaseUI` components or compositions that could satisfy the need before proposing custom UI work. +- Only introduce custom UI primitives when `BaseUI` and existing Foundry patterns are not sufficient, or when the user explicitly confirms they want the divergence. +- **Styletron atomic CSS rule:** Never mix CSS shorthand properties with their longhand equivalents in the same style object (including nested pseudo-selectors like `:hover`), or in a base styled component whose consumers override with longhand via `$style`. This includes `padding`/`paddingLeft`, `margin`/`marginTop`, `background`/`backgroundColor`, `border`/`borderLeft`, etc. Styletron generates independent atomic classes for shorthand and longhand, so they conflict unpredictably. Use `backgroundColor: "transparent"` instead of `background: "none"` for button resets. Always use longhand properties when any side may be overridden individually. + +## Runtime Policy + +- Runtime is Bun-native. +- Use Bun for CLI/backend execution paths and process spawning. +- Do not add Node compatibility fallbacks for OpenTUI/runtime execution. + +## Defensive Error Handling + +- Write code defensively: validate assumptions at boundaries and state transitions. +- If the system reaches an unexpected state, raise an explicit error with actionable context. +- Do not fail silently, swallow errors, or auto-ignore inconsistent data. +- Prefer fail-fast behavior over hidden degradation when correctness is uncertain. +- **Never use bare `catch {}` or `catch { }` blocks.** Every catch must at minimum log the error with `logActorWarning` or `console.warn`. Silent catches hide bugs and make debugging impossible. If a catch is intentionally degrading (e.g. 
returning empty data when a sandbox is expired), it must still log so operators can see what happened. Use `catch (error) { logActorWarning(..., { error: resolveErrorMessage(error) }); }` or equivalent. + +## RivetKit Dependency Policy + +For all Rivet/RivetKit implementation: + +1. Use SQLite + Drizzle for persistent state. +2. SQLite is **per actor instance** (per actor key), not a shared backend-global database: + - Each actor instance gets its own SQLite DB. + - Schema design should assume a single actor instance owns the entire DB. + - Do not add `organizationId`/`repoId`/`taskId` columns just to "namespace" rows for a given actor instance; use actor state and/or the actor key instead. + - Example: the `task` actor instance already represents `(organizationId, repoId, taskId)`, so its SQLite tables should not need those columns for primary keys. +3. Do not use backend-global SQLite singletons; database access must go through actor `db` providers (`c.db`). +4. The default dependency source for RivetKit is the published `rivetkit` package so monorepo installs and CI remain self-contained. + +## Rivet Routing + +- Mount RivetKit directly on `/v1/rivet` via `registry.handler(c.req.raw)`. +- Do not add an extra proxy or manager-specific route layer in the backend. +- Let RivetKit own metadata/public endpoint behavior for `/v1/rivet`. + +## Organization + Actor Rules + +- Everything is scoped to an organization. +- Organization resolution order: `--organization` flag -> config default -> `"default"`. +- `ControlPlaneActor` is replaced by `OrganizationActor` (organization coordinator). +- Every actor key must be prefixed with organization namespace (`["org", organizationId, ...]`). +- CLI/TUI/GUI must use `@sandbox-agent/foundry-client` (`packages/client`) for backend access; `rivetkit/client` imports are only allowed inside `packages/client`. +- Do not add custom backend REST endpoints (no `/v1/*` shim layer). 
+- We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them. +- Keep strict single-writer ownership: each table/row has exactly one actor writer. +- Parent actors (`organization`, `task`, `sandbox-instance`) use command-only loops with no timeout. +- Periodic syncing lives in dedicated child actors with one timeout cadence each. +- **Task actors must be created lazily** — never during sync or bulk operations. PR sync writes virtual entries to the org's local `taskIndex`/`taskSummaries` tables. The task actor is created on first user interaction via `getOrCreate`. See `packages/backend/CLAUDE.md` "Lazy Task Actor Creation" for details. +- Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. +- Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. +- `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`. +- Never self-send with `wait: true` from inside a workflow handler — the workflow processes one message at a time, so the handler would deadlock waiting for the new message to be dequeued. +- Read paths must not force refresh/sync work inline. Serve the latest cached projection, mark staleness explicitly, and trigger background refresh separately when needed. +- If a workflow needs to resume after some external work completes, model that as workflow state plus follow-up messages/events instead of holding the original request open. +- No retries: never add retry loops (`withRetries`, `setTimeout` retry, exponential backoff) anywhere in the codebase. If an operation fails, surface the error immediately. 
If a dependency is not ready yet, model that explicitly with workflow state and resume from a push/event instead of polling or retry loops. +- Never throw errors that expect the caller to retry (e.g. `throw new Error("... retry shortly")`). If a dependency is not ready, write the current state to the DB with an appropriate pending status, enqueue the async work, and return successfully. Let the client observe the pending → ready transition via push events. +- Action return contract: every action that creates a resource must write the resource record to the DB before returning, so the client can immediately query/render it. The record may have a pending status, but it must exist. Never return an ID that doesn't yet have a corresponding DB row. + +### Action handler responsiveness + +Action handlers must return fast. The pattern: + +1. **Creating an entity** — `wait: true` is fine. Do the DB write, return the ID/record. The caller needs the ID to proceed. The record may have a pending status; that's expected. +2. **Enqueuing work** (sending a message, triggering a sandbox operation, starting a sync) — `wait: false`. Write any precondition state to the DB synchronously, enqueue the work, and return. The client observes progress via push events on the relevant topic (session status, task status, etc.). +3. **Validating preconditions** — check state synchronously in the action handler *before* enqueuing. If a precondition isn't met (e.g. session not ready, task not initialized), throw an error immediately. Do not implicitly provision missing dependencies or poll for readiness inside the action handler. It is the client's responsibility to ensure preconditions are met before calling the action. + +Examples: +- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events. 
+- `sendWorkspaceMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. +- `createWorkspaceSession` → `wait: true` (returns `{ sessionId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. + +Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise. + +### Timeout policy + +All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any `wait: true` send is **10 seconds** (`10_000`). If an operation cannot reliably complete within 10 seconds, it must be restructured: write the initial record to the DB, return it to the caller, and continue the work asynchronously with `wait: false`. The client observes completion via push events. + +`wait: false` sends do not need a timeout (the enqueue is instant; the work runs in the workflow loop with its own step-level timeouts). + +### Task creation: resolve metadata before creating the actor + +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the organization actor *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. + +- Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. +- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index. +- The organization actor owns the task index and reads GitHub-backed default branch metadata from the github-data actor. Resolve the branch name there without local git fetches. +- Do not defer naming to a background provision workflow. 
Do not poll for names to become available. +- The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. +- Actor handle policy: +- Prefer explicit `get` or explicit `create` based on workflow intent; do not default to `getOrCreate`. +- Use `get`/`getForId` when the actor is expected to already exist; if missing, surface an explicit `Actor not found` error with recovery context. +- Use create semantics only on explicit provisioning/create paths where creating a new actor instance is intended. +- `getOrCreate` is a last resort for create paths when an explicit create API is unavailable; never use it in read/command paths. +- For long-lived cross-actor links (for example sandbox/session runtime access), persist actor identity (`actorId`) and keep a fallback lookup path by actor id. +- RivetKit actor `c.state` is durable, but in Docker it is stored under `/root/.local/share/rivetkit`. If that path is not persisted, actor state-derived indexes can be lost after container recreation even when other data still exists. +- Workflow history divergence policy: +- Production: never auto-delete actor state to resolve `HistoryDivergedError`; ship explicit workflow migrations (`ctx.removed(...)`, step compatibility). +- Development: manual local state reset is allowed as an operator recovery path when migrations are not yet available. +- Storage rule of thumb: +- Put simple metadata in `c.state` (KV state): small scalars and identifiers like `{ taskId }`, `{ repoId }`, booleans, counters, timestamps, status strings. +- If it grows beyond trivial (arrays, maps, histories, query/filter needs, relational consistency), use SQLite + Drizzle in `c.db`. + +## Testing Policy + +- Never use vitest mocks (`vi.mock`, `vi.spyOn`, `vi.fn`). Instead, define driver interfaces for external I/O and pass test implementations via the actor runtime context. 
+- All external service calls (git CLI, GitHub CLI, sandbox-agent HTTP, tmux) must go through the `BackendDriver` interface on the runtime context. +- Integration tests use `setupTest()` from `rivetkit/test` and are gated behind `HF_ENABLE_ACTOR_INTEGRATION_TESTS=1`. +- End-to-end testing must run against the dev backend started via `docker compose -f compose.dev.yaml up` (host -> container). Do not run E2E against an in-process test runtime. + - E2E tests should talk to the backend over HTTP (default `http://127.0.0.1:7741/v1/rivet`) and use real GitHub repos/PRs. + - For Foundry live verification, use `rivet-dev/sandbox-agent-testing` as the default testing repo unless the task explicitly says otherwise. + - Secrets (e.g. `OPENAI_API_KEY`, `GITHUB_TOKEN`/`GH_TOKEN`) must be provided via environment variables, never hardcoded in the repo. + - `~/misc/env.txt` and `~/misc/the-foundry.env` contain the expected local OpenAI + GitHub OAuth/App config for dev. + - For local GitHub webhook development, use the configured Smee proxy (`SMEE_URL`) to forward deliveries into `POST /v1/webhooks/github`. Check `.env` / `foundry/.env` if you need the current channel URL. + - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the organization and that webhook delivery is enabled and healthy. Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. + - Do not assume `gh auth token` is sufficient for Foundry task provisioning against private repos. Sandbox/bootstrap git clone, push, and PR flows require a repo-capable `GITHUB_TOKEN`/`GH_TOKEN` in the backend container. + - Preferred product behavior for organizations is to mint a GitHub App installation token from the organization installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. 
+- Treat client E2E tests in `packages/client/test` as the primary end-to-end source of truth for product behavior. +- Keep backend tests small and targeted. Only retain backend-only tests for invariants or persistence rules that are not well-covered through client E2E. +- Do not keep large browser E2E suites around in a broken state. If a frontend browser E2E is not maintained and producing signal, remove it until it can be replaced with a reliable test. + +## Config + +- Keep config path at `~/.config/foundry/config.toml`. +- Evolve properties in place; do not move config location. + +## Project Guidance + +Project-specific guidance lives in `README.md`, `CONTRIBUTING.md`, and the relevant files under `research/`. + +Keep those updated when: + +- Commands change +- Configuration options change +- Architecture changes +- Plugins/providers change +- Actor ownership changes + +## Friction Logs + +Track friction at: + +- `research/friction/rivet.mdx` +- `research/friction/sandbox-agent.mdx` +- `research/friction/sandboxes.mdx` +- `research/friction/general.mdx` + +Category mapping: + +- `rivet`: Rivet/RivetKit runtime, actor model, queues, keys +- `sandbox-agent`: sandbox-agent SDK/API behavior +- `sandboxes`: provider implementations (worktree/daytona/etc) +- `general`: everything else + +Each entry must include: + +- Date (`YYYY-MM-DD`) +- Commit SHA (or `uncommitted`) +- What you were implementing +- Friction/issue +- Attempted fix/workaround and outcome + +## Audit Log Events + +Log notable workflow changes to `events` so the audit log remains complete: + +- create +- attach +- push/sync/merge +- archive/kill +- status transitions +- PR state transitions + +When adding new task/workspace commands, always add a corresponding audit log event. + +## Validation After Changes + +Always run and fix failures: + +```bash +pnpm -w typecheck +pnpm -w build +pnpm -w test +``` + +After making code changes, always update the dev server before declaring the work complete. 
If the dev stack is running through Docker Compose, restart or recreate the relevant dev services so the running app reflects the latest code. diff --git a/foundry/CONTRIBUTING.md b/foundry/CONTRIBUTING.md new file mode 100644 index 0000000..29875a9 --- /dev/null +++ b/foundry/CONTRIBUTING.md @@ -0,0 +1,64 @@ +# Contributing + +## Development Setup + +1. Clone: + +```bash +git clone https://github.com/rivet-dev/sandbox-agent.git +cd sandbox-agent/foundry +``` + +2. Install dependencies: + +```bash +pnpm install +``` + +3. Build all packages: + +```bash +pnpm -w build +``` + +## Package Layout + +- `packages/shared`: contracts/schemas +- `packages/backend`: RivetKit actors + DB + providers + integrations +- `packages/cli`: `hf` and `hf tui` (OpenTUI) + +## Local RivetKit Dependency + +Build local RivetKit before backend changes that depend on Rivet internals: + +```bash +cd ../rivet +pnpm build -F rivetkit + +cd /path/to/sandbox-agent/foundry +just sync-rivetkit +``` + +## Validation + +Run before opening a PR: + +```bash +pnpm -w typecheck +pnpm -w build +pnpm -w test +``` + +## Dev Backend (Docker Compose) + +Start the dev backend (hot reload via `bun --watch`) and Vite frontend via Docker Compose: + +```bash +just foundry-dev +``` + +Stop it: + +```bash +just foundry-dev-down +``` diff --git a/foundry/FOUNDRY-CHANGES.md b/foundry/FOUNDRY-CHANGES.md new file mode 100644 index 0000000..2bd76d2 --- /dev/null +++ b/foundry/FOUNDRY-CHANGES.md @@ -0,0 +1,1456 @@ +# Foundry Planned Changes + +## How to use this document + +Work through items checking boxes as you go. Some items have dependencies — do not start an item until its dependencies are checked off. After each item, run `pnpm -w typecheck && pnpm -w build && pnpm -w test` to validate. If an item includes a "CLAUDE.md update" section, apply it in the same change. Commit after each item passes validation. + +## Progress Log + +- 2026-03-14 10: Initial architecture mapping complete. 
+ - Confirmed the current hot spots match the spec: `auth-user` is still mutation-by-action, `history` is still a separate actor with an `append` action wrapper, organization still owns `taskLookup`/`taskSummaries`, and the `Workbench*` surface is still shared across backend/client/frontend. + - Started foundational rename and migration planning for items `1`, `6`, and `25` because they drive most of the later fallout. +- 2026-03-14 11: Audit-log rename slice landed. + - Renamed the backend actor from `history` to `audit-log`, switched the queue name to `auditLog.command.append`, and removed the `append` action wrapper. + - Updated task/repository/organization call sites to send directly to the audit-log queue or read through the renamed audit-log handle. +- 2026-03-14 12: Foundational naming and dead-surface cleanup landed. + - Renamed the backend auth actor surface from `authUser` to `user`, including actor registration, key helpers, handles, and Better Auth service routing. + - Deleted the dead `getTaskEnriched` / `enrichTaskRecord` fan-out path and changed organization task reads to go straight to the task actor. + - Renamed admin-only GitHub rebuild/reload actions with the `admin*` prefix across backend, client, and frontend. + - Collapsed organization realtime to full-snapshot `organizationUpdated` events and aligned task events to `type: "taskUpdated"`. +- 2026-03-14 13: Task schema migration cleanup landed. + - Removed the task actor's runtime `CREATE TABLE IF NOT EXISTS` / `ALTER TABLE` helpers from `task/workbench.ts` and `task/workflow/init.ts`. + - Updated the checked-in task migration artifacts so the schema-defined task/session/runtime columns are created directly by migrations. +- 2026-03-14 14: Item 3 blocker documented. + - The spec's requested literal singleton `CHECK (id = 1)` on the Better Auth `user` table conflicts with the existing Better Auth adapter contract, which relies on external string `user.id`. 
+ - Proceeding safely will require a design adjustment for that table rather than a straight mechanical migration. +- 2026-03-14 15: Better Auth mapping comments landed. + - Added Better Auth vs custom Foundry table/action comments in the user and organization actor schema/action surfaces so the adapter-constrained paths are explicit. +- 2026-03-15 09: Branch rename surface deleted and stale organization subscription fixed. + - Removed the remaining branch-rename surface from the client, mock backend, frontend UI, and repository action layer. There are no remaining `renameBranch` / `renameWorkbenchBranch` references in Foundry. + - Fixed the remote backend client to listen for `organizationUpdated` on the organization connection instead of the dead `workspaceUpdated` event name. +- 2026-03-15 10: Backend workspace rename landed. + - Renamed the backend task UI/workflow surface from `workbench` to `workspace`, including the task actor file, queue topic family, organization proxy actions, and the task session table name (`task_workspace_sessions`). + - Backend actor code no longer contains `Workbench` / `workbench` references, so the remaining shared/client/frontend rename can align to a stable backend target. +- 2026-03-15 11: Default model moved to user-scoped app state. + - Removed `defaultModel` from the organization schema/snapshot and stored it on the user profile instead, exposed through the app snapshot as a user preference. + - Wired `setAppDefaultModel` through the backend/app clients and changed the model picker to persist the starred/default model instead of resetting local React state on reload. +- 2026-03-15 11: Workspace surface completed across Foundry packages. + - Renamed the shared/client/frontend surface from `Workbench` to `Workspace`, including `workspace.ts`, workspace client/model files, DTO/type names, backend-client method names, frontend view-model imports, and the affected e2e/test files. 
+ - Verified that Foundry backend/shared/client/frontend packages no longer contain `Workbench` / `workbench` references. +- 2026-03-15 11: Singleton constraints tightened where safe. + - Added `CHECK (id = 1)` enforcement for `github_meta`, `repo_meta`, `organization_profile`, and `user_profiles`, and updated the affected code paths/migrations to use row id `1`. + - The Better Auth `user` table remains blocked by the adapter contract, so item `3` is still open overall. +- 2026-03-14 12: Confirmed blocker for later user-table singleton work. + - Item `3` conflicts with the current Better Auth adapter contract for the `user` table: the adapter depends on the external string `user.id`, while the spec also asks for a literal singleton `CHECK (id = 1)` on that same table. + - That cannot be applied mechanically without redesigning the Better Auth adapter contract or introducing a separate surrogate identity column. I have not forced that change yet. +- 2026-03-15 13: Task/repository durable-state cleanup and auth-scoped workspace reads landed. + - Removed the remaining task/repository actor durable-state duplication: task `createState` now holds only `(organizationId, repoId, taskId)`, repository `createState` now holds only `(organizationId, repoId)`, task initialization seeds SQLite from the initialize queue payload, and task record reads fetch `repoRemote` through repository metadata instead of stale actor state. + - Removed the repository creation-time `remoteUrl` dependency from actor handles/callers and changed repository metadata to backfill/persist `remoteUrl` from GitHub data when needed. + - Wired Better Auth session ids through the remote client workspace/task-detail reads and through the task workflow queue handlers so user-scoped workspace state is no longer dropped on the floor by the organization/task proxy path. +- 2026-03-15 14: Coordinator routing boundary tightened. 
+ - Removed the organization actor's fallback `taskId -> repoId` scan across repositories; task proxy actions now require `repoId` and route directly to the repository/task coordinator path the client already uses. + - Updated backend architecture notes to reflect the live repo-owned task projection (`tasks`) and the removal of the old organization-owned `taskLookup` / `taskSummaries` indexes. +- 2026-03-15 15: Workspace session-selection and dead task-status cleanup landed. + - Surfaced viewer-scoped `activeSessionId` through workspace task summary/detail DTOs, threaded it through the backend/client/mock surfaces, and added a dedicated workspace `select_session` mutation so session-tab selection now persists in `user_task_state` instead of living only in frontend local state. + - Removed dead task `diffStat` and sandbox `statusMessage` fields from the live workspace/task contracts and backend writes, and updated stale frontend/mock/e2e consumers to stop reading them. +- 2026-03-15 16: GitHub sync progress is now live on the organization topic. + - Added persisted GitHub sync phase/generation/progress fields to the github-data actor meta row and the organization profile projection, and exposed them through `organizationUpdated` snapshots so workspace consumers no longer wait on stale app-topic state during repo imports. + - Chunked branch and pull-request fetches by repository batches, added generation markers to imported GitHub rows, switched sync refreshes to upsert+sweep instead of delete-then-replace, and updated the workspace shell/dev panel to show live sync phase progress from the organization subscription. +- 2026-03-15 17: Foundry-local model lists now route through shared Sandbox Agent config resources. + - Removed the remaining duplicated hardcoded model tables from the frontend/client workspace view-model layer and switched backend default-model / agent-inference fallbacks to the shared catalog helpers in `shared/src/models.ts`. 
+ - Updated mock/default app state to stop seeding deleted `claude-sonnet-4` / `claude-opus-4` ids, and aligned the user-profile default-model migration fallback with the shared catalog default. +- 2026-03-15 17: Shared model catalog moved off the old fixed union. + - Replaced the shared `WorkspaceModelId` closed union with string ids, introduced a shared model catalog derived from the sandbox-agent agent-config resources, and switched the client/frontend picker label helpers to consume that catalog instead of maintaining separate hardcoded `MODEL_GROUPS` arrays. + - Updated backend default-model and model→agent fallback logic to use the shared catalog/default id, and relaxed e2e env parsing so new sandbox-agent model ids can flow through without patching Foundry first. +- 2026-03-15 18: Workspace task status collapsed to a single live field. + - Removed the duplicate `runtimeStatus` field from workspace task/detail DTOs and all current backend/client/frontend consumers, so workspace task `status` is now the only task-state field on that surface. + - Removed the remaining synthetic `"new"` task status from the live workspace path; mock task creation now starts in the first concrete init state instead of exposing a frontend-only status. +- 2026-03-15 19: GitHub sync now persists branch and PR batches as they are fetched. + - The branch and pull-request phases now upsert each fetched repository batch immediately and only sweep stale rows after the phase completes, instead of buffering the full dataset in memory until the end of the sync. + - This aligns chunked progress reporting with chunked persistence and tightens recovery behavior for large repository imports. +- 2026-03-15 20: Repository-owned task projection artifacts are now aligned with runtime. + - Removed the last stale `task_lookup` Drizzle artifacts from the organization actor so the checked-in schema snapshots match the live repository-owned `tasks` projection. 
+ - There are no remaining org/repo runtime references to the old org-side task lookup table. +- 2026-03-15 21: Legacy task/runtime fields are fully gone from the live Foundry surface. + - Confirmed the old task-table/runtime fields from item `21` are removed across backend/shared/client/frontend, and renamed the last leftover `agentTypeForModel()` helper to the neutral `sandboxAgentIdForModel()`. + - Deleted the final dead frontend diff-stat formatter/test that only referenced already-removed task diff state. +- 2026-03-15 22: Task status tracking is now fully collapsed to the canonical task status enum. + - With the earlier backend `statusMessage` removal plus this turn's workspace contract cleanup, the workspace/task surface now derives all task status UI from the canonical backend `status` enum. + - There are no remaining live workspace `runtimeStatus` or synthetic `"new"` task-state branches. +- 2026-03-15 23: Per-user workspace UI state is fully sourced from the user actor overlay. + - Confirmed the shared task actor no longer stores per-user `activeSessionId`, unread, or draft columns; those values are persisted in `user_task_state` and only projected back into workspace DTOs for the current viewer. + - The remaining active-session/unread/draft references in client/frontend code are consumer fields of that user-scoped overlay, not shared task-actor storage. +- 2026-03-15 24: Subscription topics are now fully normalized to single-snapshot events. + - Confirmed the shared realtime contracts now expose one full replacement event per topic (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`, `processesUpdated`) with matching wire event names and type fields. + - The client subscription manager already treats organization/task topics as full-snapshot refreshes, so there are no remaining multi-variant organization events or `taskDetailUpdated` name mismatches in live code. +- 2026-03-15 25: Sidebar PR/task split dead branches trimmed further. 
+ - Removed the remaining dead `pr:`-id sidebar branch and switched the workspace sidebar to the real `pullRequest.isDraft` field instead of stale `pullRequest.status` reads. + - This does not finish item `15`, but it reduces the remaining synthetic PR/task split surface in the frontend. +- 2026-03-15 26: User-actor mutations now flow through a dedicated workflow queue. + - Added [user/workflow.ts](/home/nathan/sandbox-agent/foundry/packages/backend/src/actors/user/workflow.ts) plus shared query helpers, wired the user actor up with explicit queue names, and moved auth/profile/session/task-state mutations behind workflow handlers instead of direct action bodies. +- 2026-03-15 27: Organization GitHub/shell/billing mutations now route through workflow queues. + - Added shared organization queue definitions in `organization/queues.ts`, taught the organization workflow to handle the remaining GitHub projection, org-profile, and billing mutation commands, and switched the app-shell, Better Auth, GitHub-data actor, and org-isolation test to send queue messages instead of calling direct org mutation actions. + - Deleted the dead organization shell mutation actions that no longer had callers (`applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`), which moves items `4`, `10`, and `12` forward even though the broader org action split is still open. +- 2026-03-15 28: Organization action split trimmed more of the monolith and removed dead event types. + - Moved `starSandboxAgentRepo` into `organization/actions/onboarding.ts` and the admin GitHub reload actions into `organization/actions/github.ts`, so `organization/actions.ts` is carrying fewer unrelated app-shell responsibilities. + - Deleted the dead backend-only `actors/events.ts` type file after confirming nothing in Foundry still imports those old task/PR event interfaces. 
+- 2026-03-15 29: Repo overview branch rows now carry a single PR object. + - Replaced the repo-overview branch DTO's scalar PR fields (`prNumber`, `prState`, `prUrl`, `reviewStatus`, `reviewer`) with `pullRequest: WorkspacePullRequestSummary | null`, and updated repository overview assembly plus the organization dashboard to consume that unified PR shape. + - This does not finish item `15`, but it removes another synthetic PR-only read surface and makes the repo overview align better with the task summary PR model. +- 2026-03-15 30: Repo overview stopped falling back to raw GitHub PR rows. + - Changed repository overview assembly to read PR metadata only from the repo-owned task projection instead of rejoining live GitHub PR rows on read, so the dashboard is one step closer to treating PRs as task data rather than a separate UI entity. +- 2026-03-15 31: GitHub organization-shell repair now uses the org workflow queue. + - Converted `syncOrganizationShellFromGithub` from a direct org action into a workflow-backed mutation command and updated the GitHub org sync path to send `organization.command.github.organization_shell.sync_from_github` instead of calling the action directly. + - Updated Better Auth adapter writes and task user-overlay writes to send directly to the user workflow queue, which partially lands item `4` and sets up item `11` for the user actor. +- 2026-03-15 27: Workflow layout standardized and queue-only write paths expanded. + - Split the remaining inline actor workflows into dedicated files for `audit-log`, `repository`, `github-data`, and `organization`, and moved user read actions into `user/actions/*` with Better Auth-prefixed action names. + - Removed the task actor's public mutation action wrappers entirely, moved organization/repository/github-data/task coordination onto direct queue sends, and made repository metadata reads stop mutating `repo_meta` on cache misses. 
+- 2026-03-15 28: PR-only admin/UI seams trimmed and PR branches now claim real tasks. + - Removed the remaining dedicated "reload pull requests" / "reload pull request" admin hooks from the backend/client/frontend surfaces and deleted the sidebar PR-only context action. + - Repository PR refresh now lazily creates a branch-owned task when a pull request arrives for an unclaimed branch, so PR-only branches stop living purely as a side table in GitHub sync flows. +- 2026-03-15 29: Organization Better Auth writes now use workflow queues. + - Split the organization actor's Better Auth routing and verification reads into `organization/actions/better-auth.ts`, moved `APP_SHELL_ORGANIZATION_ID` to `organization/constants.ts`, and renamed the org Better Auth read surface to the `betterAuth*` form. + - Added dedicated organization workflow queue handlers for session/email/account index writes plus verification CRUD, and updated `services/better-auth.ts` to send those mutations directly to organization queues instead of calling mutation actions. +- 2026-03-15 30: Shared model routing metadata is now centralized. + - Extended the shared model catalog with explicit `agentKind` and `sandboxAgentId` metadata, changed `WorkspaceAgentKind` to a dynamic string, and switched backend task session creation to resolve sandbox agent ids through the shared catalog instead of hardcoded `Codex` vs `Claude` branching. + - Updated the mock app/workspace and frontend model picker/new-task flows to consume the shared catalog/default model instead of forcing stale `Claude`/`Codex` fallbacks or a baked-in `gpt-5.3-codex` create-task default. +- 2026-03-15 31: Dead GitHub-data PR reload surface removed and fixture PR shapes aligned. + - Deleted the unused GitHub-data `reloadPullRequest` workflow command plus the dead `listOpenPullRequests` / `getPullRequestForBranch` action surface that no longer has live Foundry callers. 
+ - Fixed the stale client `workspace-model.ts` pull-request fixtures to use the live `WorkspacePullRequestSummary` shape, which removes the last targeted client type errors in the touched slice. +- 2026-03-15 32: Organization action splitting continued past Better Auth. + - Moved the app snapshot/default-model/org-profile actions into `organization/actions/organization.ts`, onboarding actions into `organization/actions/onboarding.ts`, and app-level GitHub token/import actions into `organization/actions/github.ts`, then composed those files at the actor boundary. + - `organization/app-shell.ts` now exports shared helpers for those domains and no longer directly defines the moved action handlers, shrinking the remaining monolith and advancing item `10`. +- 2026-03-15 33: Task PR detail now reads the repository-owned task projection. + - Removed duplicate scalar PR fields from `TaskRecord` and `WorkspaceTaskDetail`, switched the remaining frontend/client consumers to the canonical `pullRequest` object, and trimmed stale mock/test scaffolding that still populated those dead fields. + - Replaced the task actor's PR lookup path with a repository projection read (`getProjectedTaskSummary`) so task detail/summary no longer ask the repo actor to re-query GitHub PR rows by branch. +- 2026-03-15 34: Workspace model catalogs now come from the live sandbox-agent API. + - Added a shared normalizer for `/v1/agents?config=true` payloads, exposed sandbox-scoped `listWorkspaceModelGroups()` from the task sandbox actor, and switched backend workspace session creation to resolve sandbox agent ids from the live sandbox catalog instead of only the checked-in default tables. + - Updated the frontend workspace model picker to query the active sandbox for model groups and use that live catalog for labels/options, while keeping the shared default catalog only as a fallback when no sandbox is available yet or the sandbox-agent connection is unavailable. 
+- 2026-03-15 35: Backend-only organization snapshot refresh is now queue-backed. + - Added `organization.command.snapshot.broadcast` to the organization workflow, switched repository and app-import callers to send that queue message instead of calling the organization actor's `refreshOrganizationSnapshot` action directly, and removed the direct action wrapper. + - Deleted the dead `adminReconcileWorkspaceState` organization action/interface entry after confirming nothing in Foundry still calls it. +- 2026-03-15 36: Dead backend actor export cleanup continued. + - Removed the stale `export * from "./events.js"` line from `backend/src/actors/index.ts`, which was left behind after deleting the dead backend event type file. + - This keeps the backend actor barrel aligned with the live file set and advances the final dead-code/event audit. +- 2026-03-15 34: Item 17 removed from this checklist; do not leave started items half-finished. + - By request, item `17` (`Type all actor context parameters — remove c: any`) is deferred out of this Foundry task and should not block completion here. + - Process note for the remaining checklist work: once an item is started, finish that item to completion before opening a different partial seam. Item `15` is the current priority under that rule. +- 2026-03-15 35: Task/PR unification now routes live PR changes through repository-owned task summaries only. + - GitHub PR sync and webhook handling now send concrete PR summaries directly to the repository coordinator, which lazily creates a real branch-owned task when needed and persists PR metadata on the task projection instead of re-querying raw `github_pull_requests` rows from repository reads. + - Cleared the last stale scalar PR test references (`prUrl`, `reviewStatus`, `reviewer`) so the remaining Foundry surfaces consistently use the canonical `pullRequest` object. 
+- 2026-03-15 36: Organization action entrypoints are now fully organized under `actions/`, and the public mutation surface is queue-only. + - Moved organization task/workspace proxy actions plus `createTaskMutation` into `organization/actions/tasks.ts`, added `organization/actions/app.ts` so every composed org action bundle now lives under `organization/actions/*`, and removed dead `app-shell` exports that no longer had external callers. + - Audited the remaining public organization actor actions and confirmed the write paths go through organization/repository/task/github-data workflow queues instead of direct mutation actions, which closes item `4` and item `10`. +- 2026-03-15 37: Organization dead-code audit completed. + - Removed the leftover exported-only Better Auth predicate helper from `organization/actions/better-auth.ts`; it is now module-private because nothing outside that file uses it. + - Audited the remaining organization actor surface and confirmed the live public reads/writes still in use are the composed `actions/*` bundles plus workflow mutation helpers. There are no remaining dead org action exports from the pre-refactor monolith. +- 2026-03-15 38: Final dead-event and dead-surface audit completed for the in-scope Foundry refactor. + - Confirmed the live Foundry realtime topics each have a single event type (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`), and the deleted legacy event names (`workspaceUpdated`, `taskSummaryUpdated`, `taskDetailUpdated`, `pullRequestUpdated`, `pullRequestRemoved`) no longer exist in live Foundry code. + - Re-audited the major removed compatibility seams (`Workbench`, branch rename, PR-only sidebar ids, duplicate runtime task status, `getTaskEnriched`, organization-owned task lookup tables) and found no remaining live references beyond expected domain strings like GitHub webhook event names or CLI `pr` labels. 
+- 2026-03-15 39: Item 15 was finished for real by moving PR ownership into the task actor. + - Added task-local `pull_request_json` storage, switched task detail/summary reads to the task DB, and added `task.command.pull_request.sync` so GitHub/repository flows update PR metadata through the task coordinator instead of overlaying it in the repository projection. + - The mock right sidebar now trusts the canonical `task.pullRequest.url` field instead of rebuilding a PR URL from repo name + PR number. +- 2026-03-15 40: Better Auth user singleton constraint is now enforced without breaking the adapter contract. + - The user actor's `user` table now uses an integer singleton primary key with `CHECK (id = 1)` plus a separate `auth_user_id` column for Better Auth's external string identity. + - Updated the user actor query/join/mutation helpers so Better Auth still reads and writes logical `user.id` as the external string id while SQLite enforces the singleton row invariant locally. + +No backwards compatibility — delete old code, don't deprecate. If something is removed, remove it everywhere (backend, client, shared types, frontend, tests, mocks). + +### Suggested execution order (respects dependencies) + +**Wave 1 — no dependencies, can be done in any order:** +1, 2, 3, 4, 5, 6, 13, 16, 20, 21, 23, 25 + +**Wave 2 — depends on wave 1:** +7 (after 1), 9 (after 13), 10 (after 1+6), 11 (after 4), 22 (after 1), 24 (after 21), 26 (after 25) + +**Wave 3 — depends on wave 2:** +8 (after 7+25), 12 (after 10), 15 (after 9+13), 19 (after 21+24) + +**Wave 4 — depends on wave 3:** +14 (after 15) + +**Final:** +18 (after everything), final audit pass (after everything) + +### Index + +- [x] 1. Rename Auth User actor → User actor +- [x] 2. Add Better Auth mapping comments to user/org actor tables +- [x] 3. Enforce `id = 1` CHECK constraint on single-row tables +- [x] 4. Move all mutation actions to queue messages +- [x] 5. Migrate task actor raw SQL to Drizzle migrations +- [x] 6. 
Rename History actor → Audit Log actor +- [x] 7. Move starred/default model to user actor settings *(depends on: 1)* +- [x] 8. Replace hardcoded model/agent lists with sandbox-agent API data *(depends on: 7, 25)* +- [x] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table *(depends on: 13)* +- [x] 10. Reorganize user and org actor actions into `actions/` folders *(depends on: 1, 6)* +- [x] 11. Standardize workflow file structure across all actors *(depends on: 4)* +- [x] 12. Audit and remove dead code in organization actor *(depends on: 10)* +- [x] 13. Enforce coordinator pattern and fix ownership violations +- [x] 14. Standardize one event per subscription topic *(depends on: 15)* +- [x] 15. Unify tasks and pull requests — PRs are just task data *(depends on: 9, 13)* +- [x] 16. Chunk GitHub data sync and publish progress +- [x] 18. Final pass: remove all dead code *(depends on: all other items)* +- [x] 19. Remove duplicate data between `c.state` and SQLite *(depends on: 21, 24)* +- [x] 20. Prefix admin/recovery actions with `admin` +- [x] 21. Remove legacy/session-scoped fields from task table +- [x] 22. Move per-user UI state from task actor to user actor *(depends on: 1)* +- [x] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) +- [x] 24. Clean up task status tracking *(depends on: 21)* +- [x] 25. Remove "Workbench" prefix from all types, functions, files, tables +- [x] 26. Delete branch rename (branches immutable after creation) *(depends on: 25)* +- [x] Final audit pass: dead events scan *(depends on: all other items)* + +Deferred follow-up outside this checklist: + +- 17. Type all actor context parameters — remove `c: any` *(removed from this task's scope by request)* + +--- + +## [x] 1. Rename Auth User actor → User actor + +**Rationale:** The actor is already a single per-user actor storing all user data. The "Auth" prefix is unnecessary. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/`** → rename directory to `user/` + - `index.ts` — rename export `authUser` → `user`, display name `"Auth User"` → `"User"` + - `db/schema.ts`, `db/db.ts`, `db/migrations.ts`, `db/drizzle.config.ts` — update any auth-prefixed references +- **`foundry/packages/backend/src/actors/keys.ts`** — `authUserKey()` → `userKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateAuthUser` → `getOrCreateUser`, `getAuthUser` → `getUser`, `selfAuthUser` → `selfUser` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all `authUser` references +- **Action names** — consider dropping "Auth" prefix from `createAuthRecord`, `findOneAuthRecord`, `updateAuthRecord`, `deleteAuthRecord`, `countAuthRecords`, etc. + +--- + +## [x] 2. Add Better Auth mapping comments to user/org actor tables, actions, and queues + +**Rationale:** The user and organization actors contain a mix of Better Auth-driven and custom Foundry code. Tables, actions, and queues that exist to serve Better Auth's adapter need comments so developers know which pieces are constrained by Better Auth's schema/contract and which are ours to change freely. + +### Table mapping + +| Actor | Table | Better Auth? 
| Notes | +|---|---|---|---| +| user | `user` | Yes — 1:1 `user` model | All fields from Better Auth | +| user | `session` | Yes — 1:1 `session` model | All fields from Better Auth | +| user | `account` | Yes — 1:1 `account` model | All fields from Better Auth | +| user | `user_profiles` | No — custom Foundry | GitHub login, role, eligible orgs, starter repo status | +| user | `session_state` | No — custom Foundry | Active organization per session | +| org | `auth_verification` | Yes — Better Auth `verification` model | Lives on org actor because verification happens before user exists | +| org | `auth_session_index` | No — custom routing index | Maps session tokens → user actor IDs for Better Auth adapter routing | +| org | `auth_email_index` | No — custom routing index | Maps emails → user actor IDs for Better Auth adapter routing | +| org | `auth_account_index` | No — custom routing index | Maps OAuth accounts → user actor IDs for Better Auth adapter routing | + +### Action/queue mapping (user actor) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| `createAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to create user/session/account records | +| `findOneAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter for single-record lookups with joins | +| `findManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for multi-record queries | +| `updateAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to update records | +| `updateManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk updates | +| `deleteAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to delete records | +| `deleteManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk deletes | +| `countAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for count queries | +| `getAppAuthState` | No — custom Foundry | Aggregates auth state for frontend consumption | +| `upsertUserProfile` | No — custom Foundry | Manages Foundry-specific user profile data | +| `upsertSessionState` | No — custom Foundry | Manages Foundry-specific session state | + +### Action/queue mapping (organization actor app-shell) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| App-shell auth index CRUD actions | Yes — Better Auth adapter routing | Maintain lookup indexes so the adapter can route by session/email/account to the correct user actor | +| `auth_verification` CRUD | Yes — Better Auth `verification` model | Used for email verification and password resets | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add doc comments to each table: + - `user`, `session`, `account`: "Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database" + - `user_profiles`, `session_state`: "Custom Foundry table — not part of Better Auth" +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — add doc comments to each action/queue: + - Better Auth adapter actions: "Better Auth adapter — called by the Better Auth adapter in better-auth.ts. Schema constrained by Better Auth." + - Custom actions: "Custom Foundry action — not part of Better Auth" +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — add doc comments to `auth_verification` (Better Auth core), and the three index tables (Better Auth adapter routing) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — add doc comments to auth index actions marking them as Better Auth adapter routing infrastructure + +--- + +## [x] 3. Enforce `id = 1` CHECK constraint on all single-row actor tables + +**Rationale:** When an actor instance represents a single entity, tables that hold exactly one row should enforce this at the DB level with a `CHECK (id = 1)` constraint. The task actor already does this correctly; other actors don't. 
+ +### Tables needing the constraint + +| Actor | Table | Current enforcement | Fix needed | +|---|---|---|---| +| auth-user (→ user) | `user` | None | Add `CHECK (id = 1)`, use integer PK | +| auth-user (→ user) | `user_profiles` | None | Add `CHECK (id = 1)`, use integer PK | +| github-data | `github_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| organization | `organization_profile` | None | Add `CHECK (id = 1)`, use integer PK | +| repository | `repo_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| task | `task` | CHECK constraint | Already correct | +| task | `task_runtime` | CHECK constraint | Already correct | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — change `user` and `user_profiles` tables to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — update queries to use `id = 1` pattern +- **`foundry/packages/backend/src/services/better-auth.ts`** — update adapter to use fixed `id = 1` +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add CHECK constraint to `github_meta` (already uses `id=1` in code) +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — change `organization_profile` to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update queries to use `id = 1` +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add CHECK constraint to `repo_meta` (already uses `id=1` in code) +- All affected actors — regenerate `db/migrations.ts` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Single-row tables (tables that hold exactly one record per actor instance, e.g. metadata or profile tables) must use an integer primary key with a `CHECK (id = 1)` constraint to enforce the singleton invariant at the database level. 
Follow the pattern established in the task actor's `task` and `task_runtime` tables." + +--- + +## [x] 4. Move all mutation actions to queue messages + +**Rationale:** Actions should be read-only (queries). All mutations (INSERT/UPDATE/DELETE) should go through queue messages processed by workflow handlers. This ensures single-writer consistency and aligns with the actor model. No actor currently does this correctly — the history actor has the mutation in the workflow handler, but the `append` action wraps a `wait: true` queue send, which is the same anti-pattern (callers should send to the queue directly). + +### Violations by actor + +**User actor (auth-user)** — `auth-user/index.ts` — 7 mutation actions: +- `createAuthRecord` (INSERT, line 164) +- `updateAuthRecord` (UPDATE, line 205) +- `updateManyAuthRecords` (UPDATE, line 219) +- `deleteAuthRecord` (DELETE, line 234) +- `deleteManyAuthRecords` (DELETE, line 243) +- `upsertUserProfile` (UPSERT, line 283) +- `upsertSessionState` (UPSERT, line 331) + +**GitHub Data actor** — `github-data/index.ts` — 7 mutation actions: +- `fullSync` (batch INSERT/DELETE/UPDATE, line 686) +- `reloadOrganization` (batch, line 690) +- `reloadAllPullRequests` (batch, line 694) +- `reloadRepository` (INSERT/UPDATE, line 698) +- `reloadPullRequest` (INSERT/DELETE/UPDATE, line 763) +- `clearState` (batch DELETE, line 851) +- `handlePullRequestWebhook` (INSERT/UPDATE/DELETE, line 879) + +**Organization actor — `actions.ts`** — 5 mutation actions: +- `applyTaskSummaryUpdate` (UPSERT, line 464) +- `removeTaskSummary` (DELETE, line 476) +- `applyGithubRepositoryProjection` (UPSERT, line 521) +- `applyGithubDataProjection` (INSERT/UPDATE/DELETE, line 547) +- `recordGithubWebhookReceipt` (UPDATE, line 620) + +**Organization actor — `app-shell.ts`** — 37 mutation actions: + +Better Auth index mutations (11): +- `authUpsertSessionIndex` (UPSERT) +- `authDeleteSessionIndex` (DELETE) +- `authUpsertEmailIndex` (UPSERT) +- `authDeleteEmailIndex`
(DELETE) +- `authUpsertAccountIndex` (UPSERT) +- `authDeleteAccountIndex` (DELETE) +- `authCreateVerification` (INSERT) +- `authUpdateVerification` (UPDATE) +- `authUpdateManyVerification` (UPDATE) +- `authDeleteVerification` (DELETE) +- `authDeleteManyVerification` (DELETE) + +Organization profile/state mutations (13): +- `updateOrganizationShellProfile` (UPDATE on organizationProfile) +- `markOrganizationSyncStarted` (UPDATE on organizationProfile) +- `applyOrganizationSyncCompleted` (UPDATE on organizationProfile) +- `markOrganizationSyncFailed` (UPDATE on organizationProfile) +- `applyOrganizationStripeCustomer` (UPDATE on organizationProfile) +- `applyOrganizationStripeSubscription` (UPSERT on organizationProfile) +- `applyOrganizationFreePlan` (UPDATE on organizationProfile) +- `setOrganizationBillingPaymentMethod` (UPDATE on organizationProfile) +- `setOrganizationBillingStatus` (UPDATE on organizationProfile) +- `upsertOrganizationInvoice` (UPSERT on invoices) +- `recordOrganizationSeatUsage` (UPSERT on seatAssignments) +- `applyGithubInstallationCreated` (UPDATE on organizationProfile) +- `applyGithubInstallationRemoved` (UPDATE on organizationProfile) + +App-level mutations that delegate + mutate (13): +- `skipAppStarterRepo` (calls upsertUserProfile) +- `starAppStarterRepo` (calls upsertUserProfile + child mutation) +- `selectAppOrganization` (calls setActiveOrganization) +- `triggerAppRepoImport` (calls markOrganizationSyncStarted) +- `createAppCheckoutSession` (calls applyOrganizationFreePlan + applyOrganizationStripeCustomer) +- `finalizeAppCheckoutSession` (calls applyOrganizationStripeCustomer) +- `cancelAppScheduledRenewal` (calls setOrganizationBillingStatus) +- `resumeAppSubscription` (calls setOrganizationBillingStatus) +- `recordAppSeatUsage` (calls recordOrganizationSeatUsage) +- `handleAppStripeWebhook` (calls multiple org mutations) +- `handleAppGithubWebhook` (calls org mutations + github-data mutations) +- `syncOrganizationShellFromGithub`
(multiple DB operations) +- `applyGithubRepositoryChanges` (calls applyGithubRepositoryProjection) + +**Task actor workbench** — `task/workbench.ts` — 14 mutation actions: +- `renameWorkbenchTask` (UPDATE, line 970) +- `renameWorkbenchBranch` (UPDATE, line 988) +- `createWorkbenchSession` (INSERT, line 1039) +- `renameWorkbenchSession` (UPDATE, line 1125) +- `setWorkbenchSessionUnread` (UPDATE, line 1136) +- `updateWorkbenchDraft` (UPDATE, line 1143) +- `changeWorkbenchModel` (UPDATE, line 1152) +- `sendWorkbenchMessage` (UPDATE, line 1205) +- `stopWorkbenchSession` (UPDATE, line 1255) +- `syncWorkbenchSessionStatus` (UPDATE, line 1265) +- `closeWorkbenchSession` (UPDATE, line 1331) +- `markWorkbenchUnread` (UPDATE, line 1363) +- `publishWorkbenchPr` (UPDATE, line 1375) +- `revertWorkbenchFile` (UPDATE, line 1403) + +**Repository actor** — `repository/actions.ts` — 5 mutation actions/helpers: +- `createTask` → calls `createTaskMutation()` (INSERT on taskIndex + creates task actor) +- `registerTaskBranch` → calls `registerTaskBranchMutation()` (INSERT/UPDATE on taskIndex) +- `reinsertTaskIndexRow()` (INSERT/UPDATE, called from `getTaskEnriched`) +- `deleteStaleTaskIndexRow()` (DELETE) +- `persistRemoteUrl()` (INSERT/UPDATE on repoMeta, called from `getRepoOverview`) + +### History (audit log) actor — `append` action must also be removed + +The history actor's workflow handler is correct (mutation in queue handler), but the `append` action (line 77) is a `wait: true` wrapper around the queue send — same anti-pattern. Delete the `append` action. Callers (the `appendHistory()` helper in `task/workflow/common.ts`) should send directly to the `auditLog.command.append` queue with `wait: false` (audit log writes are fire-and-forget, no need to block the caller). 
+ +### Reference patterns (queue handlers only, no action wrappers) +- **Task actor core** — initialize, attach, push, sync, merge, archive, kill all use queue messages directly + +### Migration approach + +This is NOT about wrapping queue sends inside actions. The mutation actions must be **removed entirely** and replaced with queue messages that callers (including `packages/client`) send directly. + +Each actor needs: +1. Define queue message types for each mutation +2. Move mutation logic from action handlers into workflow/queue handlers +3. **Delete the mutation actions** — do not wrap them +4. Update `packages/client` to send queue messages directly to the actor instead of calling the old action +5. Update any inter-actor callers (e.g. `better-auth.ts`, `app-shell.ts`, other actors) to send queue messages instead of calling actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Actions must be read-only. All database mutations (INSERT, UPDATE, DELETE, UPSERT) must be queue messages processed by workflow handlers. Callers (client, other actors, services) send messages directly to the queue — do not wrap queue sends inside actions. Follow the pattern established in the task workflow actor's queue handlers." + +--- + +## [ ] 5. Migrate task actor raw SQL to Drizzle migrations + +**Rationale:** The task actor uses raw `db.execute()` with `ALTER TABLE ... ADD COLUMN` in `workbench.ts` and `workflow/init.ts` instead of proper Drizzle migrations. All actor DBs should use the standard Drizzle migration pattern. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/workbench.ts`** (lines 24-56) — remove `ALTER TABLE` raw SQL, add columns to `db/schema.ts` and generate a proper migration +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** (lines 12-15) — same treatment +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — add the missing columns that are currently added via `ALTER TABLE` +- **`foundry/packages/backend/src/actors/task/db/migrations.ts`** — regenerate with new migration + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor databases must use Drizzle ORM with proper schema definitions and generated migrations. No raw SQL (`db.execute()`, `ALTER TABLE`, etc.). Schema changes must go through `schema.ts` + migration generation." + +--- + +## [ ] 6. Rename History actor → Audit Log actor + +**Rationale:** The actor functions as a comprehensive audit log tracking task lifecycle events. "Audit Log" better describes its purpose. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/history/`** → rename directory to `audit-log/` + - `index.ts` — rename export `history` → `auditLog`, display name `"History"` → `"Audit Log"`, queue `history.command.append` → `auditLog.command.append` + - Internal types: `HistoryInput` → `AuditLogInput`, `AppendHistoryCommand` → `AppendAuditLogCommand`, `ListHistoryParams` → `ListAuditLogParams` +- **`foundry/packages/backend/src/actors/keys.ts`** — `historyKey()` → `auditLogKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateHistory` → `getOrCreateAuditLog`, `selfHistory` → `selfAuditLog` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/shared/src/contracts.ts`** — `HistoryEvent` → `AuditLogEvent` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — `history()` action → `auditLog()`, update imports +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `getOrCreateHistory` calls +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — `appendHistory()` → `appendAuditLog()` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — update imports and calls + +### Coverage gaps to fix + +The audit log only covers 9 of ~24 significant events (37.5%). The entire `task/workbench.ts` file has zero logging. 
Add audit log calls for: + +**High priority (missing lifecycle events):** +- `task.switch` — in `task/workflow/index.ts` handleSwitchActivity +- `task.session.created` — in `task/workbench.ts` createWorkbenchSession +- `task.session.closed` — in `task/workbench.ts` closeWorkbenchSession +- `task.session.stopped` — in `task/workbench.ts` stopWorkbenchSession + +**Medium priority (missing user actions):** +- `task.session.renamed` — renameWorkbenchSession +- `task.message.sent` — sendWorkbenchMessage +- `task.model.changed` — changeWorkbenchModel +- `task.title.changed` — renameWorkbenchTask +- `task.branch.renamed` — renameWorkbenchBranch +- `task.pr.published` — publishWorkbenchPr +- `task.file.reverted` — revertWorkbenchFile + +**Low priority / debatable:** +- `task.draft.updated`, `task.session.unread`, `task.derived.refreshed`, `task.transcript.refreshed` + +### CLAUDE.md updates needed + +- **`foundry/packages/backend/CLAUDE.md`** — rename `HistoryActor` → `AuditLogActor` in actor hierarchy, add maintenance rule: "Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of all significant operations." +- **`foundry/CLAUDE.md`** — rename "History Events" section → "Audit Log Events", update the list to include all events above, add note: "When adding new task/workbench commands, always add a corresponding audit log event." + +--- + +## [ ] 7. Move starred/default model to user actor settings + +**Dependencies:** item 1 + +**Rationale:** The starred/default model preference is currently broken — the frontend stores it in local React state that resets on reload. The org actor's `organizationProfile` table has a `defaultModel` column but there's no action to update it and it's the wrong scope anyway. This is a per-user preference, not an org setting. 
+ +### Current state (broken) + +- **Frontend** (`mock-layout.tsx` line 313) — `useState("claude-sonnet-4")` — local state, lost on reload +- **Model picker UI** (`model-picker.tsx`) — has star icons + `onSetDefault` callback, but it only updates local state +- **Org actor** (`organization/db/schema.ts` line 43) — `defaultModel` column exists but nothing writes to it +- **No backend persistence** — starred model is not saved anywhere + +### Changes needed + +1. **Add `user_settings` table to user actor** (or add `defaultModel` column to `user_profiles`): + - `defaultModel` (text) — the user's starred/preferred model + - File: `foundry/packages/backend/src/actors/auth-user/db/schema.ts` + +2. **Add queue message to user actor** to update the default model: + - File: `foundry/packages/backend/src/actors/auth-user/index.ts` + +3. **Remove `defaultModel` from org actor** `organizationProfile` table (wrong scope): + - File: `foundry/packages/backend/src/actors/organization/db/schema.ts` + +4. **Update frontend** to read starred model from user settings (via `app` subscription) and send queue message on star click: + - File: `foundry/packages/frontend/src/components/mock-layout/model-picker.tsx` + - File: `foundry/packages/frontend/src/components/mock-layout.tsx` + +5. **Update shared types** — move `defaultModel` from `FoundryOrganizationSettings` to user settings type: + - File: `foundry/packages/shared/src/app-shell.ts` + +6. **Update client** to send the queue message to user actor: + - File: `foundry/packages/client/` + +--- + +## [ ] 8. Replace hardcoded model/agent lists with sandbox-agent API data + +**Dependencies:** items 7, 25 + +**Rationale:** The frontend hardcodes 8 models in a static list and ignores the sandbox-agent API's `GET /v1/agents` endpoint which already exposes the full agent config — models, modes, and reasoning/thought levels per agent. The frontend should consume this API 1:1 instead of maintaining its own stale copy. 
+ +### Current state (hardcoded) + +- **`foundry/packages/frontend/src/components/mock-layout/view-model.ts`** (lines 20-39) — hardcoded `MODEL_GROUPS` with 8 models +- **`foundry/packages/client/src/workbench-model.ts`** (lines 18-37) — identical hardcoded `MODEL_GROUPS` copy +- **`foundry/packages/shared/src/workbench.ts`** (lines 5-13) — `WorkbenchModelId` hardcoded union type +- No modes or thought/reasoning levels exposed in UI at all +- No API calls to discover available models + +### What the sandbox-agent API already provides (`GET /v1/agents`) + +Per agent, the API returns: +- **models** — full list with display names (Claude: 4, Codex: 6, Cursor: 35+, OpenCode: 239) +- **modes** — execution modes (Claude: 5, Codex: 3, OpenCode: 2) +- **thought_level** — reasoning levels (Codex: low/medium/high/xhigh, Mock: low/medium/high) +- **capabilities** — plan_mode, reasoning, status support +- **credentialsAvailable** / **installed** — agent availability + +### Changes needed + +1. **Remove hardcoded model lists** from: + - `foundry/packages/frontend/src/components/mock-layout/view-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/client/src/workbench-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/shared/src/workbench.ts` — replace `WorkbenchModelId` union type with `string` (dynamic from API) + +2. **Backend: fetch and cache agent config from sandbox-agent API** + - Add an action or startup flow that calls `GET /v1/agents?config=true` on the sandbox-agent API + - Cache the result (agent list + models + modes + thought levels) in the appropriate actor + - Expose it to the frontend via the existing subscription/event system + +3. 
**Frontend: consume API-driven config** + - Model picker reads available models from backend-provided agent config, not hardcoded list + - Expose modes selector per agent + - Expose thought/reasoning level selector for agents that support it (Codex, Mock) + - Group models by agent as the API does (not by arbitrary provider grouping) + +4. **Update shared types** — make model/mode/thought_level types dynamic strings rather than hardcoded unions: + - `foundry/packages/shared/src/workbench.ts` + +5. **No backwards compatibility needed** — we're cleaning up, not preserving old behavior + +--- + +## [ ] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table on org actor + +**Dependencies:** item 13 + +**Rationale:** `taskLookup` (taskId → repoId) is a strict subset of `taskSummaries` (which also has repoId + title, status, branch, PR, sessions). There's no reason for two tables with the same primary key. Flatten into one `tasks` table. + +### Current state + +- **`taskLookup`** — `taskId` (PK), `repoId` — used only for taskId → repoId resolution +- **`taskSummaries`** — `taskId` (PK), `repoId`, `title`, `status`, `repoName`, `updatedAtMs`, `branch`, `pullRequestJson`, `sessionsSummaryJson` — materialized sidebar data + +### Changes needed + +1. **Merge into single `tasks` table** in `foundry/packages/backend/src/actors/organization/db/schema.ts`: + - Drop `taskLookup` table + - Rename `taskSummaries` → `tasks` + - Keep all columns from `taskSummaries` (already includes `repoId`) + +2. **Update all references**: + - `foundry/packages/backend/src/actors/organization/actions.ts` — replace `taskLookup` queries with `tasks` table lookups + - `foundry/packages/backend/src/actors/organization/app-shell.ts` — if it references either table + - Any imports of the old table names from schema + +3. **Regenerate migrations** — `foundry/packages/backend/src/actors/organization/db/migrations.ts` + +--- + +## [x] 10. 
Reorganize user and organization actor actions into `actions/` folders + +**Dependencies:** items 1, 6 + +**Rationale:** Both actors cram too many concerns into single files. The organization actor has `app-shell.ts` (1,947 lines) + `actions.ts` mixing Better Auth, Stripe, GitHub, onboarding, workbench proxying, and org state. The user actor mixes Better Auth adapter CRUD with custom Foundry actions. Split into `actions/` folders grouped by domain, with `betterAuth` prefix on all Better Auth actions. + +### User actor → `user/actions/` + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthCreateRecord`, `betterAuthFindOneRecord`, `betterAuthFindManyRecords`, `betterAuthUpdateRecord`, `betterAuthUpdateManyRecords`, `betterAuthDeleteRecord`, `betterAuthDeleteManyRecords`, `betterAuthCountRecords` + all helper functions (`tableFor`, `columnFor`, `normalizeValue`, `clauseToExpr`, `buildWhere`, `applyJoinToRow`, `applyJoinToRows`) | Currently in `index.ts` | +| `actions/user.ts` | `getAppAuthState`, `upsertUserProfile`, `upsertSessionState` | Currently in `index.ts` | + +### Organization actor → `organization/actions/` + +**Delete `app-shell.ts`** — split its ~50 actions + helpers across these files: + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthFindSessionIndex`, `betterAuthUpsertSessionIndex`, `betterAuthDeleteSessionIndex`, `betterAuthFindEmailIndex`, `betterAuthUpsertEmailIndex`, `betterAuthDeleteEmailIndex`, `betterAuthFindAccountIndex`, `betterAuthUpsertAccountIndex`, `betterAuthDeleteAccountIndex`, `betterAuthCreateVerification`, `betterAuthFindOneVerification`, `betterAuthFindManyVerification`, `betterAuthUpdateVerification`, `betterAuthUpdateManyVerification`, `betterAuthDeleteVerification`, `betterAuthDeleteManyVerification`, `betterAuthCountVerification` + auth clause builder helpers | Currently in `app-shell.ts` | +| `actions/stripe.ts` | `createAppCheckoutSession`, 
`finalizeAppCheckoutSession`, `createAppBillingPortalSession`, `cancelAppScheduledRenewal`, `resumeAppSubscription`, `recordAppSeatUsage`, `handleAppStripeWebhook`, `applyOrganizationStripeCustomer`, `applyOrganizationStripeSubscription`, `applyOrganizationFreePlan`, `setOrganizationBillingPaymentMethod`, `setOrganizationBillingStatus`, `upsertOrganizationInvoice`, `recordOrganizationSeatUsage` | Currently in `app-shell.ts` | +| `actions/github.ts` | `resolveAppGithubToken`, `beginAppGithubInstall`, `triggerAppRepoImport`, `handleAppGithubWebhook`, `syncOrganizationShellFromGithub`, `syncGithubOrganizations`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`, `reloadGithubOrganization`, `reloadGithubPullRequests`, `reloadGithubRepository`, `reloadGithubPullRequest`, `applyGithubRepositoryProjection`, `applyGithubDataProjection`, `recordGithubWebhookReceipt`, `refreshTaskSummaryForGithubBranch` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/onboarding.ts` | `skipAppStarterRepo`, `starAppStarterRepo`, `starSandboxAgentRepo`, `selectAppOrganization` | Currently in `app-shell.ts` | +| `actions/organization.ts` | `getAppSnapshot`, `getOrganizationShellState`, `getOrganizationShellStateIfInitialized`, `updateOrganizationShellProfile`, `updateAppOrganizationProfile`, `markOrganizationSyncStarted`, `applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `useOrganization`, `getOrganizationSummary`, `reconcileWorkbenchState` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/tasks.ts` | `createTask`, `createWorkbenchTask`, `listTasks`, `getTask`, `switchTask`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `applyOpenPullRequestUpdate`, `removeOpenPullRequest`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` | Currently in `actions.ts` | +| `actions/workbench.ts` | `markWorkbenchUnread`, `renameWorkbenchTask`, 
`renameWorkbenchBranch`, `createWorkbenchSession`, `renameWorkbenchSession`, `setWorkbenchSessionUnread`, `updateWorkbenchDraft`, `changeWorkbenchModel`, `sendWorkbenchMessage`, `stopWorkbenchSession`, `closeWorkbenchSession`, `publishWorkbenchPr`, `revertWorkbenchFile` | Currently in `actions.ts` (proxy calls to task actor) | +| `actions/repos.ts` | `listRepos`, `getRepoOverview` | Currently in `actions.ts` | +| `actions/history.ts` | `history` (→ `auditLog` after rename) | Currently in `actions.ts` | + +Also move: +- `APP_SHELL_ORGANIZATION_ID` constant → `organization/constants.ts` +- `runOrganizationWorkflow` → `organization/workflow.ts` +- Private helpers (`buildAppSnapshot`, `assertAppOrganization`, `collectAllTaskSummaries`, etc.) → colocate with the action file that uses them + +### Files to update + +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all action name references to use `betterAuth` prefix +- **`foundry/packages/backend/src/actors/organization/index.ts`** — import and spread action objects from `actions/` files instead of `app-shell.ts` + `actions.ts` +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** (or `user/index.ts`) — import actions from `actions/` files + +--- + +## [ ] 11. Standardize workflow file structure across all actors + +**Dependencies:** item 4 + +**Rationale:** Workflow logic is inconsistently placed — inline in `index.ts`, in `actions.ts`, or in a `workflow/` directory. Standardize: every actor with a workflow gets a `workflow.ts` file. If the workflow is large, use `workflow/{index,...}.ts`. 
### Changes per actor + +| Actor | Current location | New location | Notes | +|---|---|---|---| +| user (auth-user) | None | `workflow.ts` (new) | Needs a workflow for mutations (item 4) | +| github-data | Inline in `index.ts` (~57 lines) | `workflow.ts` | Extract `runGithubDataWorkflow` + handler | +| history (→ audit-log) | Inline in `index.ts` (~18 lines) | `workflow.ts` | Extract `runHistoryWorkflow` + `appendHistoryRow` | +| organization | In `actions.ts` (~51 lines) | `workflow.ts` | Extract `runOrganizationWorkflow` + queue handlers | +| repository | In `actions.ts` (~42 lines) | `workflow.ts` | Extract `runRepositoryWorkflow` + queue handlers | +| task | `workflow/` directory (926 lines) | `workflow/` directory — already correct | Keep as-is: `workflow/index.ts`, `workflow/queue.ts`, `workflow/common.ts`, `workflow/init.ts`, `workflow/commands.ts`, `workflow/push.ts` | +| sandbox | None (wrapper) | N/A | No custom workflow needed | + +### Pattern + +- **Small workflows** (< ~200 lines): single `workflow.ts` file +- **Large workflows** (> ~200 lines): `workflow/index.ts` holds the main loop, other files hold step groups: + - `workflow/index.ts` — main loop + handler dispatch + - `workflow/queue.ts` — queue name definitions (if many) + - `workflow/{group}.ts` — step/activity functions grouped by domain + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Every actor with a message queue must have its workflow logic in a dedicated `workflow.ts` file (or `workflow/index.ts` for complex actors). Do not inline workflow logic in `index.ts` or `actions.ts`. Actions are read-only handlers; workflow handlers process queue messages and perform mutations." + +--- + +## [ ] 12. Audit and remove dead code in organization actor + +**Dependencies:** item 10 + +**Rationale:** The organization actor has ~50+ actions across `app-shell.ts` and `actions.ts`. Likely some are unused or vestigial.
Audit all actions and queues for dead code and remove anything that has no callers. + +### Scope + +- All actions in `organization/actions.ts` and `organization/app-shell.ts` +- All queue message types and their handlers +- Helper functions that may no longer be called +- Shared types in `packages/shared` that only served removed actions + +### Approach + +- Trace each action/queue from caller → handler to confirm it's live +- Remove any action with no callers (client, other actors, services, HTTP endpoints) +- Remove any queue handler with no senders +- Remove associated types and helpers + +--- + +## [ ] 13. Enforce coordinator pattern and fix ownership violations + +**Rationale:** The actor hierarchy follows a coordinator pattern: org → repo → task → session. The coordinator owns the index/summary of its children, handles create/destroy, and children push updates up to their coordinator. Several violations exist where levels are skipped. + +### Coordinator hierarchy (add to CLAUDE.md) + +``` +Organization (coordinator for repos) +├── Repository (coordinator for tasks) +│ └── Task (coordinator for sessions) +│ └── Session +``` + +**Rules:** +- The coordinator owns the index/summary table for its direct children +- The coordinator handles create/destroy of its direct children +- Children push summary updates UP to their direct coordinator (not skipping levels) +- Read paths go through the coordinator, not direct cross-level access +- No backwards compatibility needed — we're cleaning up + +### Violations to fix + +#### V1: Task index tables on wrong actor (HIGH) + +`taskLookup` and `taskSummaries` (item 9 merges these into `tasks`) are on the **organization** actor but should be on the **repository** actor, since repo is the coordinator for tasks. 
+ +**Fix:** +- Move the merged `tasks` table (from item 9) to `repository/db/schema.ts` +- Repository owns task summaries, not organization +- Organization gets a `repoSummaries` table instead (repo count, latest activity, etc.) — the repo pushes its summary up to org + +#### V2: Tasks push summaries directly to org, skipping repo (HIGH) + +Task actors call `organization.applyTaskSummaryUpdate()` directly (line 464 in `actions.ts`), bypassing the repository coordinator. + +**Fix:** +- Task pushes summary to `repository.applyTaskSummaryUpdate()` instead +- Repository updates its `tasks` table, then pushes a repo summary up to organization +- Organization never receives task-level updates directly + +#### V3: Org resolves taskId → repoId from its own table (MEDIUM) + +`resolveRepoId(c, taskId)` in `organization/actions.ts` queries `taskLookup` directly. Used by `switchTask`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` (7 actions). + +**Fix:** +- Remove `resolveRepoId()` from org actor +- Org must know the `repoId` from the caller (frontend already knows which repo a task belongs to) or query the repo actor +- Update all 7 proxy actions to require `repoId` in their input instead of looking it up + +#### V4: Duplicate task creation bookkeeping at org level (MEDIUM) + +`createTaskMutation` in org actor calls `repository.createTask()`, then independently inserts `taskLookup` and seeds `taskSummaries`. Repository already inserts its own `taskIndex` row. 
+ +**Fix:** +- Org calls `repository.createTask()` — that's it +- Repository handles all task index bookkeeping internally +- Repository pushes the new task summary back up to org as part of its repo summary update + +### Files to change + +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — remove `taskLookup` and `taskSummaries`, add `repoSummaries` if needed +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add merged `tasks` table (task summaries) +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove `resolveRepoId()`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `refreshTaskSummaryForGithubBranch`; update proxy actions to require `repoId` in input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — add `applyTaskSummaryUpdate` action (receives from task), push repo summary to org +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — change summary push target from org → repo +- **`foundry/packages/shared/src/contracts.ts`** — update input types to include `repoId` where needed +- **`foundry/packages/client/`** — update calls to pass `repoId` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add coordinator pattern rules: + ``` + ## Coordinator Pattern + + The actor hierarchy follows a strict coordinator pattern: + - Organization = coordinator for repositories + - Repository = coordinator for tasks + - Task = coordinator for sessions + + Rules: + - Each coordinator owns the index/summary table for its direct children. + - Only the coordinator handles create/destroy of its direct children. + - Children push summary updates to their direct coordinator only (never skip levels). + - Cross-level access (e.g. org directly querying task state) is not allowed — go through the coordinator. + - Proxy actions at higher levels (e.g. org.pushTask) must delegate to the correct coordinator, not bypass it. 
+ ``` + +--- + +--- + +## [ ] 14. Standardize one event per subscription topic across all actors + +**Dependencies:** item 15 + +**Rationale:** Each subscription topic should have exactly one event type carrying the full replacement snapshot. The organization topic currently violates this with 7 subtypes. Additionally, event naming is inconsistent across actors. Standardize all of them. + +### Current state + +| Topic | Wire event name | Event type field | Subtypes | Issue | +|---|---|---|---|---| +| `app` | `appUpdated` | `type: "appUpdated"` | 1 | Name is fine | +| `organization` | `organizationUpdated` | 7 variants | **7** | Needs consolidation | +| `task` | `taskUpdated` | `type: "taskDetailUpdated"` | 1 | Wire name ≠ type name | +| `session` | `sessionUpdated` | `type: "sessionUpdated"` | 1 | Fine | +| `sandboxProcesses` | `processesUpdated` | `type: "processesUpdated"` | 1 | Fine | + +### Target state + +Every topic gets exactly one event. Wire event name = type field = `{topic}Updated`. Each carries the full snapshot for that topic. + +| Topic | Event name | Payload | +|---|---|---| +| `app` | `appUpdated` | `FoundryAppSnapshot` | +| `organization` | `organizationUpdated` | `OrganizationSummarySnapshot` | +| `task` | `taskUpdated` | `WorkbenchTaskDetail` | +| `session` | `sessionUpdated` | `WorkbenchSessionDetail` | +| `sandboxProcesses` | `processesUpdated` | `SandboxProcessSnapshot[]` | + +### Organization — consolidate 7 subtypes into 1 + +Remove the discriminated union. Replace all 7 subtypes: +- `taskSummaryUpdated`, `taskRemoved`, `repoAdded`, `repoUpdated`, `repoRemoved`, `pullRequestUpdated`, `pullRequestRemoved` + +With a single `organizationUpdated` event carrying the full `OrganizationSummarySnapshot`. The client replaces its cached state — same pattern as every other topic. + +### Task — fix event type name mismatch + +Wire event is `taskUpdated` but the type field says `taskDetailUpdated`. Rename to `taskUpdated` everywhere for consistency. 
+ +### Files to change + +- **`foundry/packages/shared/src/realtime-events.ts`** — replace `OrganizationEvent` union with single event type; rename `TaskEvent.type` from `taskDetailUpdated` → `taskUpdated` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update all 7 `c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", ... })` calls to emit single event with full snapshot +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same for any broadcasts here +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename `taskDetailUpdated` → `taskUpdated` in broadcast calls +- **`foundry/packages/client/src/subscription/topics.ts`** — simplify `applyEvent` for organization topic (no more discriminated union handling); update task event type name +- **`foundry/packages/client/src/subscription/mock-manager.ts`** — update mock event handling +- **`foundry/packages/frontend/`** — update any direct references to event type names + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Each subscription topic must have exactly one event type. The event carries the full replacement snapshot for that topic — no discriminated unions, no partial patches, no subtypes. Event name must match the pattern `{topic}Updated` (e.g. `organizationUpdated`, `taskUpdated`). When state changes, broadcast the full snapshot; the client replaces its cached state." + +--- + +## [x] 15. Unify tasks and pull requests — PRs are just task data + +**Dependencies:** items 9, 13 + +**Rationale:** From the client's perspective, tasks and PRs are the same thing — a branch with work on it. The frontend already merges them into one sorted list, converting PRs to synthetic task objects with `pr:{prId}` IDs. The distinction is artificial. A "task" should represent any branch, and the task actor lazily wraps it. PR metadata is just data the task holds. 
+ +### Current state (separate entities) + +- **Tasks**: stored in task actor SQLite, surfaced via `WorkbenchTaskSummary`, events via `taskSummaryUpdated` +- **PRs**: stored in GitHub data actor (`githubPullRequests` table), surfaced via `WorkbenchOpenPrSummary`, events via `pullRequestUpdated`/`pullRequestRemoved` +- **Frontend hack**: converts PRs to fake task objects with `pr:{prId}` IDs, merges into one list +- **Filtering logic**: org actor silently swallows `pullRequestUpdated` if a task claims the same branch — fragile coupling +- **Two separate types**: `WorkbenchTaskSummary` and `WorkbenchOpenPrSummary` with overlapping fields + +### Target state (unified) + +- **One entity**: a "task" represents a branch. Task actors are lazily created when needed (user creates one, or a PR arrives for an unclaimed branch). +- **PR data lives on the task**: the task actor stores PR metadata (number, title, state, url, isDraft, authorLogin, etc.) as part of its state, not as a separate entity +- **One type**: `WorkbenchTaskSummary` includes full PR fields (nullable). No separate `WorkbenchOpenPrSummary`. +- **One event**: `organizationUpdated` carries task summaries that include PR data. No separate PR events. +- **No synthetic IDs**: every item in the sidebar is a real task with a real taskId + +### Changes needed + +1. **Remove `WorkbenchOpenPrSummary` type** from `packages/shared/src/workbench.ts` — merge its fields into `WorkbenchTaskSummary` +2. **Expand task's `pullRequest` field** from `{ number, status }` to full PR metadata (number, title, state, url, headRefName, baseRefName, isDraft, authorLogin, updatedAtMs) +3. **Remove `openPullRequests` from `OrganizationSummarySnapshot`** — all items are tasks now +4. **Remove PR-specific events** from `realtime-events.ts`: `pullRequestUpdated`, `pullRequestRemoved` +5. **Remove PR-specific actions** from organization actor: `applyOpenPullRequestUpdate`, `removeOpenPullRequest` +6. 
**Remove branch-claiming filter logic** in org actor (the `if task claims branch, skip PR` check) +7. **GitHub data actor PR sync**: when PRs arrive (webhook or sync), create/update a task for that branch lazily via the repository coordinator +8. **Task actor**: store PR metadata in its DB (new columns or table), update when GitHub data pushes changes +9. **Frontend**: remove `toOpenPrTaskModel` conversion, remove `pr:` ID prefix hack, remove separate `openPullRequests` state — sidebar is just tasks +10. **Repository actor**: when a PR arrives for a branch with no task, lazily create a task actor for it (lightweight, no sandbox needed) + +### Implications for coordinator pattern (item 13) + +This reinforces: repo is the coordinator for tasks. When GitHub data detects a new PR for a branch, it tells the repo coordinator, which creates/updates the task. The task holds the PR data and pushes its summary to the repo coordinator. + +### No backwards compatibility needed + +The old PR-specific types, events, and actions (`WorkbenchOpenPrSummary`, `pullRequestUpdated`/`pullRequestRemoved`, the synthetic `pr:` IDs) are removed outright — no migration shims, no dual-write period. + +Separately, the `authSessionIndex`, `authEmailIndex`, `authAccountIndex`, and `authVerification` tables stay on the org actor. They're routing indexes needed by the Better Auth adapter to resolve user identity before the user actor can be accessed (e.g. session token → userId lookup). Already covered in item 2 for adding comments explaining this. + +--- + +## [ ] 16. Chunk GitHub data sync and publish progress + +**Rationale:** `runFullSync` in the github-data actor fetches everything at once (all repos, branches, members, PRs), replaces all tables atomically, and has a 5-minute timeout. For large orgs this will timeout or lose all data mid-sync (replace pattern deletes everything first). Needs to be chunked with incremental progress. + +### Current state (broken for large orgs) + +- `runFullSync()` (`github-data/index.ts` line 486-538): + 1. Fetches ALL repos, branches, members, PRs in 4 sequential calls + 2. `replaceRepositories/Branches/Members/PullRequests` — deletes all rows then inserts all new rows + 3. 
Single 5-minute timeout wraps the entire operation + 4. No progress reporting to the client — just "Syncing GitHub data..." → "Synced N repositories" + 5. If it fails mid-sync, data is partially deleted with no recovery + +### Changes needed + +1. **Chunk the sync by repository** — sync repos first (paginated from GitHub API), then for each repo chunk, sync its branches and PRs. Members can be a separate chunk. + +2. **Incremental upsert, not replace** — don't delete-then-insert. Use upsert per row so partial sync doesn't lose data. Mark rows with a sync generation ID; after full sync completes, delete rows from previous generations. + +3. **Run in a loop, not a single step** — each chunk is a separate workflow step with its own timeout. If one chunk fails, previous chunks are persisted. + +4. **Publish progress per chunk** — after each chunk completes: + - Update `github_meta` with progress (e.g. `syncedRepos: 15/42`) + - Push progress to the organization actor + - Organization broadcasts to clients so the UI shows progress (e.g. "Syncing repositories... 15/42") + +5. **Initial sync uses the same chunked approach** — `github-data-initial-sync` step should kick off the chunked loop, not call `runFullSync` directly + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`**: + - Refactor `runFullSync` into chunked loop + - Replace `replaceRepositories/Branches/Members/PullRequests` with upsert + generation sweep + - Add progress metadata to `github_meta` table + - Publish progress to org actor after each chunk +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add sync generation column to all tables, add progress fields to `github_meta` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** (or `app-shell.ts`) — handle sync progress updates and broadcast to clients +- **`foundry/packages/shared/src/app-shell.ts`** — add sync progress fields to `FoundryGithubState` (e.g. 
`syncProgress: { current: number; total: number } | null`) +- **`foundry/packages/frontend/`** — show sync progress in UI (e.g. "Syncing repositories... 15/42") + +--- + +--- + +# Deferred follow-up outside this task + +## 17. Type all actor context parameters — remove `c: any` + +**Rationale:** 272+ instances of `c: any`, `ctx: any`, `loopCtx: any` across all actor code. This eliminates type safety for DB access, state access, broadcasts, and queue operations. All context parameters should use RivetKit's proper context types. + +### Scope (by file, approximate count) + +| File | `any` contexts | +|---|---| +| `organization/app-shell.ts` | ~108 | +| `organization/actions.ts` | ~56 | +| `task/workbench.ts` | ~53 | +| `github-data/index.ts` | ~23 | +| `repository/actions.ts` | ~22 | +| `sandbox/index.ts` | ~21 | +| `handles.ts` | ~19 | +| `task/workflow/commands.ts` | ~10 | +| `task/workflow/init.ts` | ~4 | +| `auth-user/index.ts` | ~2 | +| `history/index.ts` | ~2 | +| `task/workflow/index.ts` | ~2 | +| `task/workflow/common.ts` | ~2 | +| `task/workflow/push.ts` | ~1 | +| `polling.ts` | ~1 | + +### Changes needed + +1. **Determine correct RivetKit context types** — check RivetKit exports for `ActionContext`, `ActorContextOf`, `WorkflowContext`, `LoopContext`, or equivalent. Reference `polling.ts` which already defines typed contexts (`PollingActorContext`, `WorkflowPollingActorContext`). + +2. **Define per-actor context types** — each actor has its own state shape and DB schema, so the context type should be specific (e.g. `ActionContext` or similar). + +3. **Replace all `c: any`** with the proper typed context across every file listed above. + +4. **Type workflow/loop contexts** — `ctx: any` in workflow functions and `loopCtx: any` in loop callbacks need proper types too. + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor context parameters (`c`, `ctx`, `loopCtx`) must be properly typed using RivetKit's context types. 
Never use `any` for actor contexts. Each actor should define or derive its context type from the actor definition." + +--- + +## [ ] 18. Final pass: remove all dead code + +**Dependencies:** all other items (do this last, after 17) + +**Rationale:** After completing all changes above, many actions, queues, SQLite tables, workflow steps, shared types, and helper functions will be orphaned. Do a full scan to find and remove everything that's dead. + +### Scope + +Scan the entire foundry codebase for: +- **Dead actions** — actions with no callers (client, other actors, services, HTTP endpoints) +- **Dead queues** — queue message types with no senders +- **Dead SQLite tables** — tables with no reads or writes +- **Dead workflow steps** — step names that are no longer referenced +- **Dead shared types** — types in `packages/shared` that are no longer imported +- **Dead helper functions** — private functions with no callers +- **Dead imports** — unused imports across all files + +### When to do this + +After all items 1–17 are complete. Not before — removing code while other items are in progress will create conflicts. + +--- + +## [ ] 19. Remove duplicate data between `c.state` and SQLite + +**Dependencies:** items 21, 24 + +**Rationale:** Several actors store the same data in both `c.state` (RivetKit durable state) and their SQLite tables. Mutable fields that exist in both can silently diverge — `c.state` becomes stale when the SQLite copy is updated. Per the existing CLAUDE.md rule, `c.state` should hold only small scalars/identifiers; anything queryable or mutable belongs in SQLite. + +### Duplicates found + +**Task actor** — `c.state` (`createState` in `task/index.ts` lines 124-139) vs `task`/`taskRuntime` tables: + +| Field | In SQLite? | Mutable? 
| Verdict | +|---|---|---|---| +| `organizationId` | No | No | **KEEP** — identity field | +| `repoId` | No | No | **KEEP** — identity field | +| `taskId` | No | No | **KEEP** — identity field | +| `repoRemote` | No (but org `repos` table has it) | No | **DELETE** — not needed on task, read from repo/org | +| `branchName` | Yes (`task.branch_name`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `title` | Yes (`task.title`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `task` (description) | Yes (`task.task`) | No | **REMOVE from c.state** — redundant | +| `sandboxProviderId` | Yes (`task.sandbox_provider_id`) | No | **REMOVE from c.state** — redundant | +| `agentType` | Yes (`task.agent_type`) | Yes | **DELETE entirely** — session-specific (item 21) | +| `explicitTitle` | No | No | **MOVE to SQLite** — creation metadata | +| `explicitBranchName` | No | No | **MOVE to SQLite** — creation metadata | +| `initialPrompt` | No | No | **DELETE entirely** — dead code, session-specific (item 21) | +| `initialized` | No | Yes | **DELETE entirely** — dead code, `status` already tracks init progress | +| `previousStatus` | No | No | **DELETE entirely** — never set, never read | + +**Repository actor** — `c.state` (`createState` in `repository/index.ts`) vs `repoMeta` table: + +| Field | Mutable? | Risk | +|---|---|---| +| `remoteUrl` | No | Low — redundant but safe | + +### Fix + +Remove all duplicated fields from `c.state`. Keep only identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). Read mutable data from SQLite. + +**Task actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + taskId: input.taskId, +}) +``` + +Fields already in SQLite (`branchName`, `title`, `task`, `sandboxProviderId`) — remove from `c.state`, read from SQLite only. 
Fields not yet in SQLite (`explicitTitle`, `explicitBranchName`) — add to `task` table, remove from `c.state`. Dead code to delete entirely: `agentType`, `initialPrompt` (item 21), `initialized`, `previousStatus`, `repoRemote`. + +**Repository actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, +}) +``` + +`remoteUrl` is removed from repo actor `c.state` entirely. The repo actor reads `remoteUrl` from its own `repoMeta` SQLite table when needed. The org actor already stores `remoteUrl` in its `repos` table (source of truth from GitHub data). The `getOrCreateRepository()` helper in `handles.ts` currently requires `remoteUrl` as a parameter and passes it as `createWithInput` — this parameter must be removed. Every call site in `organization/actions.ts` and `organization/app-shell.ts` currently does a DB lookup for `remoteUrl` just to pass it to `getOrCreateRepository()` — all of those lookups go away. On actor creation, the repo actor should populate its `repoMeta.remoteUrl` by querying the org actor or github-data actor, not by receiving it as a create input. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/index.ts`** — trim `createState`, update all `c.state.*` reads for removed fields to read from SQLite instead +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/task/workflow/*.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/repository/index.ts`** — trim `createState`, remove `remoteUrl` from input type +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update all `c.state.remoteUrl` reads to query `repoMeta` table; remove `persistRemoteUrl()` helper +- **`foundry/packages/backend/src/actors/handles.ts`** — remove `remoteUrl` parameter from `getOrCreateRepository()` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove all `remoteUrl` lookups done solely to pass to `getOrCreateRepository()` (~10 call sites) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same cleanup for app-shell call sites + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Never duplicate data between `c.state` and SQLite. `c.state` holds only immutable identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). All mutable data and anything queryable must live exclusively in SQLite. If a field can change after actor creation, it must not be in `c.state`." + +--- + +## [ ] 20. Prefix all admin/recovery actions with `admin` + +**Rationale:** Several actions are admin-only recovery/rebuild operations but their names don't distinguish them from normal product flows. Prefix with `admin` so it's immediately clear these are not part of regular user flows. 
+ +### Actions to rename + +**Organization actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `reconcileWorkbenchState` | `adminReconcileWorkbenchState` | Full fan-out rebuild of task summary projection | +| `reloadGithubOrganization` | `adminReloadGithubOrganization` | Manual trigger to refetch all org GitHub data | +| `reloadGithubPullRequests` | `adminReloadGithubPullRequests` | Manual trigger to refetch all PR data | +| `reloadGithubRepository` | `adminReloadGithubRepository` | Manual trigger to refetch single repo | +| `reloadGithubPullRequest` | `adminReloadGithubPullRequest` | Manual trigger to refetch single PR | + +**GitHub Data actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `fullSync` | `adminFullSync` | Full replace of all GitHub data — recovery operation | +| `reloadOrganization` | `adminReloadOrganization` | Triggers full sync manually | +| `reloadAllPullRequests` | `adminReloadAllPullRequests` | Triggers full sync manually | +| `clearState` | `adminClearState` | Deletes all GitHub data — recovery from lost access | + +**NOT renamed** (these are triggered by webhooks/normal flows, not manual admin actions): +- `reloadRepository` — called by push/create/delete webhooks (incremental, normal flow) +- `reloadPullRequest` — called by PR webhooks (incremental, normal flow) +- `handlePullRequestWebhook` — webhook handler (normal flow) +- `syncGithubOrganizations` — called during OAuth callback (normal flow, though also used for repair) + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`** — rename actions +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — rename actions +- **`foundry/packages/client/src/backend-client.ts`** — update method names +- **`foundry/packages/frontend/`** — update any references to renamed actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Admin-only actions (recovery, rebuild, 
manual resync, state reset) must be prefixed with `admin` (e.g. `adminReconcileState`, `adminClearState`). This makes it clear they are not part of normal product flows and should not be called from regular client code paths." + +--- + +## [ ] 21. Remove legacy/session-scoped fields from task table + +**Rationale:** The `task` table has fields that either belong on the session, are redundant with data from other actors, or are dead code from the removed local git clone. These should be cleaned up. + +### Fields to remove from `task` table and `c.state` + +**`agentType`** — Legacy from when task = 1 session. Only used for `defaultModelForAgent(c.state.agentType)` to pick the default model when creating a new session. Sessions already have their own `model` column in `taskWorkbenchSessions`. The default model for new sessions should come from user settings (starred model stored in the user actor — NOTE: the previous cross-reference to "item 16" is stale, since item 16 now covers GitHub sync chunking; confirm the correct user-settings item number). Remove `agentType` from task table, `c.state`, `createState`, `TaskRecord`, and all `defaultModelForAgent()` call sites. Replace with user settings lookup. + +**`initialPrompt`** — Stored on `c.state` at task creation but **never read anywhere**. Completely dead code. This is also session-specific, not task-specific — the initial prompt belongs on the first session, not the task. Remove from `c.state`, `createState` input type, and `CreateTaskCommand`/`CreateTaskInput` types. Remove from `repository/actions.ts` create flow. + +**`prSubmitted`** — Redundant boolean set when `submitPullRequest` runs. PR state already flows from GitHub webhooks → github-data actor → branch name lookup. This boolean can go stale (PR closed and reopened, PR deleted, etc.). Remove entirely — PR existence is derivable from github-data by branch name (already how `enrichTaskRecord` and `buildTaskSummary` work). + +### Dead fields on `taskRuntime` table + +**`provisionStage`** — Values: `"queued"`, `"ready"`, `"error"`. 
Redundant with `status` — `init_complete` implies ready, `error` implies error. Never read in business logic. Delete. + +**`provisionStageUpdatedAt`** — Timestamp for `provisionStage` changes. Never read anywhere. Delete. + +### Dead fields on `TaskRecord` (in `workflow/common.ts`) + +These are always hardcoded to `null` — remnants of the removed local git clone: + +- `diffStat` — was populated from `branches` table (deleted) +- `hasUnpushed` — was populated from `branches` table (deleted) +- `conflictsWithMain` — was populated from `branches` table (deleted) +- `parentBranch` — was populated from `branches` table (deleted) + +Remove from `TaskRecord` type, `getCurrentRecord()`, and all consumers (contracts, mock client, tests, frontend). + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `agentType` and `prSubmitted` columns from `task` table; remove `provisionStage` and `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `agentType`, `initialPrompt`, `initialized`, `previousStatus`, `repoRemote` from `createState` and input type +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `defaultModelForAgent()`, `agentTypeForModel()`, update session creation to use user settings for default model; remove `prSubmitted` set in `submitPullRequest` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `getCurrentRecord()` and `TaskRecord` construction +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `agentType` from task row inserts +- **`foundry/packages/shared/src/contracts.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `prUrl`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `TaskRecord` schema (note: `prUrl` and `prAuthor` should stay if still populated by `enrichTaskRecord`, or 
move to the unified task/PR model from item 15) +- **`foundry/packages/client/src/mock/backend-client.ts`** — update mock to remove dead fields +- **`foundry/packages/client/test/view-model.test.ts`** — update test fixtures +- **`foundry/packages/frontend/src/features/tasks/model.test.ts`** — update test fixtures +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove any references to `agentType` in task creation input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `enrichTaskRecord()` to stop setting dead fields + +--- + +## [ ] 22. Move per-user UI state from task actor to user actor + +**Dependencies:** item 1 + +**Rationale:** The task actor stores UI-facing state that is user-specific, not task-global. With multiplayer (multiple users viewing the same task), this breaks — each user has their own active session, their own unread state, their own drafts. These must live on the user actor, keyed by `(taskId, sessionId)`, not on the shared task actor. + +### Per-user state currently on the task actor (wrong) + +**`taskRuntime.activeSessionId`** — Which session the user is "looking at." Used to: +- Determine which session's status drives the task-level status (running/idle) — this is wrong, the task status should reflect ALL sessions, not one user's active tab +- Return a "current" session in `attachTask` responses — this is per-user +- Migration path for legacy single-session tasks in `ensureWorkbenchSeeded` + +This should move to the user actor as `activeSessionId` per `(userId, taskId)`. + +**`taskWorkbenchSessions.unread`** — Per-user unread state stored globally on the session. If user A reads a session, user B's unread state is also cleared. Move to user actor keyed by `(userId, taskId, sessionId)`. + +**`taskWorkbenchSessions.draftText` / `draftAttachmentsJson` / `draftUpdatedAt`** — Per-user draft state stored globally. If user A starts typing a draft, it overwrites user B's draft. 
Move to user actor keyed by `(userId, taskId, sessionId)`. + +### What stays on the task actor (correct — task-global state) + +- `taskRuntime.activeSandboxId` — which sandbox is running (global to the task) +- `taskRuntime.activeSwitchTarget` / `activeCwd` — sandbox connection state (global) +- `taskRuntime.statusMessage` — provisioning/runtime status (global) +- `taskWorkbenchSessions.model` — which model the session uses (global) +- `taskWorkbenchSessions.status` — session runtime status (global) +- `taskWorkbenchSessions.transcriptJson` — session transcript (global) + +### Fix + +Add a `userTaskState` table to the user actor: + +```typescript +export const userTaskState = sqliteTable("user_task_state", { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), // per-user active tab + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), +}, (table) => ({ + pk: primaryKey(table.taskId, table.sessionId), +})); +``` + +Remove `activeSessionId` from `taskRuntime`. Remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions`. + +The task-level status should be derived from ALL sessions (e.g., task is "running" if ANY session is running), not from one user's `activeSessionId`. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add `userTaskState` table +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `activeSessionId` from `taskRuntime`; remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions` +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove all `activeSessionId` reads/writes; remove draft/unread mutation functions; task status derivation should check all sessions +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `activeSessionId` from `getCurrentRecord()` +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `activeSessionId` references in `attachTask` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `activeSessionId` initialization +- **`foundry/packages/client/`** — draft/unread/activeSession operations route to user actor instead of task actor +- **`foundry/packages/frontend/`** — update subscription to fetch per-user state from user actor + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Per-user UI state (active session tab, unread counts, draft text, draft attachments) must live on the user actor, not on shared task/session actors. Task actors hold only task-global state visible to all users. This is critical for multiplayer correctness — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts." + +--- + +## [ ] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) + +**Rationale:** `getTaskEnriched` is dead code with zero callers from the client. It's also the worst fan-out pattern in the codebase: org → repo actor → task actor (`.get()`) → github-data actor (`listPullRequestsForRepository` fetches ALL PRs, then `.find()`s by branch name). 
This is exactly the pattern the coordinator model eliminates — task detail comes from `getTaskDetail` on the task actor, sidebar data comes from materialized `taskSummaries` on the org actor. + +### What to delete + +- **`enrichTaskRecord()`** — `repository/actions.ts:117-143`. Fetches all PRs for a repo to find one by branch name. Dead code. +- **`getTaskEnriched` action** — `repository/actions.ts:432-450`. Only caller of `enrichTaskRecord`. Dead code. +- **`getTaskEnriched` org proxy** — `organization/actions.ts:838-849`. Only caller of the repo action. Dead code. +- **`GetTaskEnrichedCommand` type** — wherever defined. + +### Files to change + +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — delete `enrichTaskRecord()` and `getTaskEnriched` action +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — delete `getTaskEnriched` proxy action + +--- + +## [ ] 24. Clean up task status tracking + +**Dependencies:** item 21 + +**Rationale:** Task status tracking is spread across `c.state`, the `task` SQLite table, and the `taskRuntime` table with redundant and dead fields. Consolidate to a single `status` enum on the `task` table. Remove `statusMessage` — human-readable status text should be derived on the client from the `status` enum, not stored on the backend. + +### Fields to delete + +| Field | Location | Why | +|---|---|---| +| `initialized` | `c.state` | Dead code — never read. `status` already tracks init progress. | +| `previousStatus` | `c.state` | Dead code — never set, never read. | +| `statusMessage` | `taskRuntime` table | Client concern — the client should derive display text from the `status` enum. The backend should not store UI copy. | +| `provisionStage` | `taskRuntime` table | Redundant — `status` already encodes provision progress (`init_bootstrap_db` → `init_enqueue_provision` → `init_complete`). | +| `provisionStageUpdatedAt` | `taskRuntime` table | Dead — never read. 
| + +### What remains + +- **`status`** on the `task` table — the single canonical state machine enum. Values: `init_bootstrap_db`, `init_enqueue_provision`, `init_complete`, `running`, `idle`, `error`, `archive_*`, `kill_*`, `archived`, `killed`. + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `initialized`, `previousStatus` from `createState` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `statusMessage` parameter from `setTaskState()`, remove it from `getCurrentRecord()` query +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from taskRuntime inserts/updates; remove `ensureTaskRuntimeCacheColumns()` raw ALTER TABLE for these columns +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `statusMessage` from handler updates +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — remove `statusMessage` updates +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `statusMessage` from `buildTaskDetail()`, remove `ensureTaskRuntimeCacheColumns()` for these columns +- **`foundry/packages/shared/src/workbench.ts`** — remove `statusMessage` from `WorkbenchTaskDetail` +- **`foundry/packages/frontend/`** — derive display text from `status` enum instead of reading `statusMessage` + +--- + +## [ ] 25. Remove "Workbench" prefix from all types, functions, files, and tables + +**Rationale:** "Workbench" is not a real concept in the system. It's a namespace prefix applied to every type, function, file, and table name. The actual entities are Task, Session, Repository, Sandbox, Transcript, Draft, etc. — "Workbench" adds zero information and obscures what things actually are. 
+ +### Rename strategy + +Drop "Workbench" everywhere. If the result collides with an existing name (e.g., auth `Session`), use the domain prefix (e.g., `TaskSession` vs auth `Session`). + +### Type renames (`shared/src/workbench.ts`) + +| Before | After | +|---|---| +| `WorkbenchTaskStatus` | `TaskStatus` (already exists as base, merge) | +| `WorkbenchAgentKind` | `AgentKind` | +| `WorkbenchModelId` | `ModelId` | +| `WorkbenchSessionStatus` | `SessionStatus` | +| `WorkbenchTranscriptEvent` | `TranscriptEvent` | +| `WorkbenchComposerDraft` | `ComposerDraft` | +| `WorkbenchSessionSummary` | `SessionSummary` | +| `WorkbenchSessionDetail` | `SessionDetail` | +| `WorkbenchFileChange` | `FileChange` | +| `WorkbenchFileTreeNode` | `FileTreeNode` | +| `WorkbenchLineAttachment` | `LineAttachment` | +| `WorkbenchHistoryEvent` | `HistoryEvent` | +| `WorkbenchDiffLineKind` | `DiffLineKind` | +| `WorkbenchParsedDiffLine` | `ParsedDiffLine` | +| `WorkbenchPullRequestSummary` | `PullRequestSummary` | +| `WorkbenchOpenPrSummary` | `OpenPrSummary` | +| `WorkbenchSandboxSummary` | `SandboxSummary` | +| `WorkbenchTaskSummary` | `TaskSummary` | +| `WorkbenchTaskDetail` | `TaskDetail` | +| `WorkbenchRepositorySummary` | `RepositorySummary` | +| `WorkbenchSession` | `TaskSession` (avoids auth `Session` collision) | +| `WorkbenchTask` | `TaskSnapshot` (avoids `task` table collision) | +| `WorkbenchRepo` | `RepoSnapshot` | +| `WorkbenchRepositorySection` | `RepositorySection` | +| `TaskWorkbenchSnapshot` | `DashboardSnapshot` | +| `WorkbenchModelOption` | `ModelOption` | +| `WorkbenchModelGroup` | `ModelGroup` | +| `TaskWorkbenchSelectInput` | `SelectTaskInput` | +| `TaskWorkbenchCreateTaskInput` | `CreateTaskInput` | +| `TaskWorkbenchRenameInput` | `RenameTaskInput` | +| `TaskWorkbenchSendMessageInput` | `SendMessageInput` | +| `TaskWorkbenchSessionInput` | `SessionInput` | +| `TaskWorkbenchRenameSessionInput` | `RenameSessionInput` | +| `TaskWorkbenchChangeModelInput` | 
`ChangeModelInput` | +| `TaskWorkbenchUpdateDraftInput` | `UpdateDraftInput` | +| `TaskWorkbenchSetSessionUnreadInput` | `SetSessionUnreadInput` | +| `TaskWorkbenchDiffInput` | `DiffInput` | +| `TaskWorkbenchCreateTaskResponse` | `CreateTaskResponse` | +| `TaskWorkbenchAddSessionResponse` | `AddSessionResponse` | + +### File renames + +| Before | After | +|---|---| +| `shared/src/workbench.ts` | `shared/src/types.ts` (or split into `task.ts`, `session.ts`, etc.) | +| `backend/src/actors/task/workbench.ts` | `backend/src/actors/task/sessions.ts` (already planned in item 7) | +| `client/src/workbench-client.ts` | `client/src/task-client.ts` | +| `client/src/workbench-model.ts` | `client/src/model.ts` | +| `client/src/remote/workbench-client.ts` | `client/src/remote/task-client.ts` | +| `client/src/mock/workbench-client.ts` | `client/src/mock/task-client.ts` | + +### Table rename + +| Before | After | +|---|---| +| `task_workbench_sessions` | `task_sessions` | + +### Function renames (backend — drop "Workbench" infix) + +All functions in `backend/src/actors/task/workbench.ts`: +- `createWorkbenchSession` → `createSession` +- `closeWorkbenchSession` → `closeSession` +- `changeWorkbenchModel` → `changeModel` +- `sendWorkbenchMessage` → `sendMessage` +- `stopWorkbenchSession` → `stopSession` +- `renameWorkbenchBranch` → deleted (see item 26) +- `renameWorkbenchTask` → `renameTask` +- `renameWorkbenchSession` → `renameSession` +- `revertWorkbenchFile` → `revertFile` +- `publishWorkbenchPr` → `publishPr` +- `updateWorkbenchDraft` → `updateDraft` +- `setWorkbenchSessionUnread` → `setSessionUnread` +- `markWorkbenchUnread` → `markUnread` +- `syncWorkbenchSessionStatus` → `syncSessionStatus` +- `ensureWorkbenchSeeded` → `ensureSessionSeeded` + +### Queue/command type renames (backend) + +- `TaskWorkbenchValueCommand` → `TaskValueCommand` +- `TaskWorkbenchSessionTitleCommand` → `SessionTitleCommand` +- `TaskWorkbenchSessionUnreadCommand` → `SessionUnreadCommand` + +### Scope + 
+~420 occurrences across shared (35+ types), backend (200+ refs), client (324 refs), frontend (96 refs). Mechanical find-and-replace once the rename map is settled. + +### Files to change + +- **`foundry/packages/shared/src/workbench.ts`** — rename file, rename all exported types +- **`foundry/packages/shared/src/index.ts`** — update re-export path +- **`foundry/packages/shared/src/app-shell.ts`** — update `WorkbenchModelId` → `ModelId` import +- **`foundry/packages/shared/src/realtime-events.ts`** — update all `Workbench*` type imports +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename file + all functions +- **`foundry/packages/backend/src/actors/task/index.ts`** — update imports and action registrations +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — rename `taskWorkbenchSessions` → `taskSessions` +- **`foundry/packages/backend/src/actors/task/workflow/`** — update all workbench references +- **`foundry/packages/backend/src/actors/organization/`** — update type imports and action names +- **`foundry/packages/backend/src/actors/repository/`** — update type imports +- **`foundry/packages/client/src/`** — rename files + update all type/function references +- **`foundry/packages/frontend/src/`** — update all type imports + +### CLAUDE.md update + +Update `foundry/packages/backend/CLAUDE.md` coordinator hierarchy diagram: `taskWorkbenchSessions` → `taskSessions`. + +--- + +## [ ] 26. Delete branch rename (branches immutable after creation) + +**Dependencies:** item 25 + +**Rationale:** Branch name is assigned once at task creation and never changes. Branch rename is unused in the frontend UI and SDK, adds ~80 lines of code, and creates a transactional consistency risk (git rename succeeds but index update fails). 
+ +### Delete + +- **`task/workbench.ts`** — delete `renameWorkbenchBranch()` (~50 lines) +- **`task/index.ts`** — delete `renameWorkbenchBranch` action +- **`task/workflow/queue.ts`** — remove `"task.command.workbench.rename_branch"` queue type +- **`task/workflow/index.ts`** — remove `"task.command.workbench.rename_branch"` handler +- **`organization/actions.ts`** — delete `renameWorkbenchBranch` proxy action +- **`repository/actions.ts`** — delete `registerTaskBranch` action (only caller was rename flow) +- **`client/src/workbench-client.ts`** — remove `renameBranch` from interface +- **`client/src/remote/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/mock/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/backend-client.ts`** — delete `renameWorkbenchBranch` from interface + implementation +- **`client/src/mock/backend-client.ts`** — delete `renameWorkbenchBranch` implementation +- **`frontend/src/components/mock-layout.tsx`** — remove `renameBranch` from client interface, delete `onRenameBranch` callbacks and all `renameBranch` wiring (~8 refs) +- **`shared/src/workbench.ts`** — delete `TaskWorkbenchRenameInput` (if only used by branch rename; check if task title rename shares it) + +### Keep + +- `deriveFallbackTitle()` + `sanitizeBranchName()` + `resolveCreateFlowDecision()` — initial branch derivation at creation +- `registerTaskBranchMutation()` — used during task creation for `onBranch` path +- `renameWorkbenchTask()` — title rename is independent, stays +- `taskIndex` table — still the coordinator index for branch→task mapping + +--- + +## [ ] Final audit pass (run after all items above are complete) + +### Dead code scan + +Already tracked in item 18: once all changes are complete, do a full scan to find dead actions, queues, SQLite tables, and workflow steps that need to be removed. 
+ +### Dead events audit + +Scan all event types emitted by actors (in `packages/shared/src/realtime-events.ts` and anywhere actors call `c.broadcast()` or similar). Cross-reference against all client subscribers (in `packages/client/` and `packages/frontend/`). Remove any events that are emitted but never subscribed to by any client. This includes events that may have been superseded by the consolidated single-topic-per-actor pattern (item 14). diff --git a/foundry/README.md b/foundry/README.md new file mode 100644 index 0000000..47501ef --- /dev/null +++ b/foundry/README.md @@ -0,0 +1,24 @@ +# Foundry + +TypeScript organization task system powered by RivetKit actors, SQLite/Drizzle state, and OpenTUI. + +**Documentation**: see `../docs/` in the repository root + +## Quick Install + +```bash +curl -fsSL https://bun.sh/install | bash +pnpm install +pnpm -w build +``` + +## Repository Goals + +- **Simple**: There's one screen. It has everything you need. You can use it blindfolded. +- **Fast**: No waiting around. +- **Collaborative**: Built for fast moving teams that need code reviewed & shipped fast. +- **Pluggable**: Works for small side repositories to enterprise teams. + +## License + +MIT diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml new file mode 100644 index 0000000..7fa492d --- /dev/null +++ b/foundry/compose.dev.yaml @@ -0,0 +1,127 @@ +name: foundry + +services: + backend: + build: + context: .. 
+ dockerfile: foundry/docker/backend.dev.Dockerfile + image: foundry-backend-dev + working_dir: /app + env_file: + - path: .env + required: false + environment: + HF_BACKEND_HOST: "0.0.0.0" + HF_BACKEND_PORT: "7741" + RIVETKIT_STORAGE_PATH: "/root/.local/share/foundry/rivetkit" + RIVET_LOG_ERROR_STACK: "${RIVET_LOG_ERROR_STACK:-1}" + RIVET_LOG_LEVEL: "${RIVET_LOG_LEVEL:-debug}" + RIVET_LOG_TIMESTAMP: "${RIVET_LOG_TIMESTAMP:-1}" + FOUNDRY_LOG_LEVEL: "${FOUNDRY_LOG_LEVEL:-debug}" + # Pass through credentials needed for agent execution + PR creation in dev/e2e. + # Do not hardcode secrets; set these in your environment when starting compose. + ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY:-}" + CLAUDE_API_KEY: "${CLAUDE_API_KEY:-${ANTHROPIC_API_KEY:-}}" + OPENAI_API_KEY: "${OPENAI_API_KEY:-}" + # sandbox-agent codex plugin currently expects CODEX_API_KEY. Map from OPENAI_API_KEY for convenience. + CODEX_API_KEY: "${CODEX_API_KEY:-${OPENAI_API_KEY:-}}" + # Support either GITHUB_TOKEN or GITHUB_PAT in local env files. 
+ GITHUB_TOKEN: "${GITHUB_TOKEN:-${GITHUB_PAT:-}}" + GH_TOKEN: "${GH_TOKEN:-${GITHUB_TOKEN:-${GITHUB_PAT:-}}}" + APP_URL: "${APP_URL:-}" + BETTER_AUTH_URL: "${BETTER_AUTH_URL:-}" + BETTER_AUTH_SECRET: "${BETTER_AUTH_SECRET:-}" + GITHUB_CLIENT_ID: "${GITHUB_CLIENT_ID:-}" + GITHUB_CLIENT_SECRET: "${GITHUB_CLIENT_SECRET:-}" + GITHUB_REDIRECT_URI: "${GITHUB_REDIRECT_URI:-}" + GITHUB_APP_ID: "${GITHUB_APP_ID:-}" + GITHUB_APP_CLIENT_ID: "${GITHUB_APP_CLIENT_ID:-}" + GITHUB_APP_CLIENT_SECRET: "${GITHUB_APP_CLIENT_SECRET:-}" + GITHUB_APP_PRIVATE_KEY: "${GITHUB_APP_PRIVATE_KEY:-}" + GITHUB_WEBHOOK_SECRET: "${GITHUB_WEBHOOK_SECRET:-${GITHUB_APP_WEBHOOK_SECRET:-}}" + STRIPE_PUBLISHABLE_KEY: "${STRIPE_PUBLISHABLE_KEY:-}" + STRIPE_SECRET_KEY: "${STRIPE_SECRET_KEY:-}" + STRIPE_WEBHOOK_SECRET: "${STRIPE_WEBHOOK_SECRET:-}" + STRIPE_PRICE_TEAM: "${STRIPE_PRICE_TEAM:-}" + FOUNDRY_SANDBOX_PROVIDER: "${FOUNDRY_SANDBOX_PROVIDER:-local}" + HF_LOCAL_SANDBOX_IMAGE: "${HF_LOCAL_SANDBOX_IMAGE:-rivetdev/sandbox-agent:foundry-base-latest}" + E2B_API_KEY: "${E2B_API_KEY:-}" + E2B_TEMPLATE: "${E2B_TEMPLATE:-}" + HF_E2B_TEMPLATE: "${HF_E2B_TEMPLATE:-${E2B_TEMPLATE:-}}" + DAYTONA_ENDPOINT: "${DAYTONA_ENDPOINT:-}" + DAYTONA_API_KEY: "${DAYTONA_API_KEY:-}" + HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}" + HF_DAYTONA_API_KEY: "${HF_DAYTONA_API_KEY:-}" + ports: + - "6420:6420" + - "7741:7741" + volumes: + - "..:/app" + # Reuse the host Codex auth profile for local sandbox-agent Codex sessions in dev. + - "${HOME}/.codex:/root/.codex" + - "/var/run/docker.sock:/var/run/docker.sock" + # Keep backend dependency installs Linux-native instead of using host node_modules. 
+ - "foundry_backend_root_node_modules:/app/node_modules" + - "foundry_backend_backend_node_modules:/app/foundry/packages/backend/node_modules" + - "foundry_backend_shared_node_modules:/app/foundry/packages/shared/node_modules" + - "foundry_backend_typescript_node_modules:/app/sdks/typescript/node_modules" + - "foundry_backend_pnpm_store:/root/.local/share/pnpm/store" + # Persist RivetKit local storage across container restarts. + - "foundry_rivetkit_storage:/root/.local/share/foundry/rivetkit" + + frontend: + build: + context: .. + dockerfile: foundry/docker/frontend.dev.Dockerfile + working_dir: /app + depends_on: + - backend + environment: + HOME: "/tmp" + HF_BACKEND_HTTP: "http://backend:7741" + ports: + - "4173:4173" + volumes: + - "..:/app" + # Ensure logs in .foundry/ persist on the host even if we change source mounts later. + - "./.foundry:/app/foundry/.foundry" + # Use Linux-native repo dependencies inside the container instead of host node_modules. + - "foundry_node_modules:/app/node_modules" + - "foundry_client_node_modules:/app/foundry/packages/client/node_modules" + - "foundry_frontend_node_modules:/app/foundry/packages/frontend/node_modules" + - "foundry_shared_node_modules:/app/foundry/packages/shared/node_modules" + - "foundry_pnpm_store:/tmp/.local/share/pnpm/store" + + smee: + image: node:20-alpine + depends_on: + - backend + env_file: + - path: .env + required: false + environment: + SMEE_URL: "${SMEE_URL:-}" + SMEE_TARGET: "${SMEE_TARGET:-http://backend:7741/v1/webhooks/github}" + command: + - /bin/sh + - -lc + - | + if [ -z "$SMEE_URL" ]; then + echo "SMEE_URL is required for local GitHub webhook forwarding" >&2 + exit 1 + fi + exec npx --yes smee-client --url "$SMEE_URL" --target "$SMEE_TARGET" + restart: unless-stopped + +volumes: + foundry_backend_root_node_modules: {} + foundry_backend_backend_node_modules: {} + foundry_backend_shared_node_modules: {} + foundry_backend_typescript_node_modules: {} + foundry_backend_pnpm_store: {} + 
foundry_rivetkit_storage: {} + foundry_node_modules: {} + foundry_client_node_modules: {} + foundry_frontend_node_modules: {} + foundry_shared_node_modules: {} + foundry_pnpm_store: {} diff --git a/foundry/compose.mock.yaml b/foundry/compose.mock.yaml new file mode 100644 index 0000000..6c57875 --- /dev/null +++ b/foundry/compose.mock.yaml @@ -0,0 +1,29 @@ +name: foundry-mock + +services: + frontend: + build: + context: .. + dockerfile: foundry/docker/frontend.dev.Dockerfile + working_dir: /app + environment: + HOME: "/tmp" + FOUNDRY_FRONTEND_CLIENT_MODE: "mock" + ports: + - "4174:4174" + command: ["bash", "-lc", "pnpm install --force --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4174"] + volumes: + - "..:/app" + - "./.foundry:/app/foundry/.foundry" + - "mock_node_modules:/app/node_modules" + - "mock_client_node_modules:/app/foundry/packages/client/node_modules" + - "mock_frontend_node_modules:/app/foundry/packages/frontend/node_modules" + - "mock_shared_node_modules:/app/foundry/packages/shared/node_modules" + - "mock_pnpm_store:/tmp/.local/share/pnpm/store" + +volumes: + mock_node_modules: {} + mock_client_node_modules: {} + mock_frontend_node_modules: {} + mock_shared_node_modules: {} + mock_pnpm_store: {} diff --git a/foundry/compose.preview.yaml b/foundry/compose.preview.yaml new file mode 100644 index 0000000..aa43b52 --- /dev/null +++ b/foundry/compose.preview.yaml @@ -0,0 +1,40 @@ +name: foundry-preview + +services: + backend: + build: + context: .. 
+ dockerfile: foundry/docker/backend.preview.Dockerfile + image: foundry-backend-preview + environment: + HF_BACKEND_HOST: "0.0.0.0" + HF_BACKEND_PORT: "7841" + RIVETKIT_STORAGE_PATH: "/root/.local/share/foundry/rivetkit" + ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY:-}" + CLAUDE_API_KEY: "${CLAUDE_API_KEY:-${ANTHROPIC_API_KEY:-}}" + OPENAI_API_KEY: "${OPENAI_API_KEY:-}" + CODEX_API_KEY: "${CODEX_API_KEY:-${OPENAI_API_KEY:-}}" + GITHUB_TOKEN: "${GITHUB_TOKEN:-${GITHUB_PAT:-}}" + GH_TOKEN: "${GH_TOKEN:-${GITHUB_TOKEN:-${GITHUB_PAT:-}}}" + DAYTONA_ENDPOINT: "${DAYTONA_ENDPOINT:-}" + DAYTONA_API_KEY: "${DAYTONA_API_KEY:-}" + HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}" + HF_DAYTONA_API_KEY: "${HF_DAYTONA_API_KEY:-}" + ports: + - "7841:7841" + volumes: + - "${HOME}/.codex:/root/.codex" + - "foundry_preview_rivetkit_storage:/root/.local/share/foundry/rivetkit" + + frontend: + build: + context: .. + dockerfile: foundry/docker/frontend.preview.Dockerfile + image: foundry-frontend-preview + depends_on: + - backend + ports: + - "4273:4273" + +volumes: + foundry_preview_rivetkit_storage: {} diff --git a/foundry/docker/backend.Dockerfile b/foundry/docker/backend.Dockerfile new file mode 100644 index 0000000..ae14ddf --- /dev/null +++ b/foundry/docker/backend.Dockerfile @@ -0,0 +1,40 @@ +# syntax=docker/dockerfile:1.7 + +FROM node:22-bookworm-slim AS build +ENV PNPM_HOME=/pnpm +ENV PATH=$PNPM_HOME:$PATH +WORKDIR /app +RUN corepack enable && corepack prepare pnpm@10.28.2 --activate + +COPY . . 
+ +RUN pnpm install --frozen-lockfile +RUN pnpm --filter @sandbox-agent/foundry-shared build +RUN pnpm --filter acp-http-client build +RUN pnpm --filter @sandbox-agent/cli-shared build +RUN SKIP_OPENAPI_GEN=1 pnpm --filter sandbox-agent build +RUN pnpm --filter @sandbox-agent/foundry-backend build +RUN pnpm --filter @sandbox-agent/foundry-backend deploy --prod /out + +FROM oven/bun:1.2 AS runtime +ENV NODE_ENV=production +ENV HOME=/home/task +ENV RIVET_RUNNER_VERSION_FILE=/etc/foundry/rivet-runner-version +WORKDIR /app +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + gh \ + openssh-client \ + && rm -rf /var/lib/apt/lists/* +RUN addgroup --system --gid 1001 task \ + && adduser --system --uid 1001 --home /home/task --ingroup task task \ + && mkdir -p /home/task \ + && chown -R task:task /home/task /app +RUN mkdir -p /etc/foundry \ + && date +%s > /etc/foundry/rivet-runner-version +COPY --from=build /out ./ +USER task +EXPOSE 7741 +CMD ["bun", "dist/index.js", "start", "--host", "0.0.0.0"] diff --git a/foundry/docker/backend.dev.Dockerfile b/foundry/docker/backend.dev.Dockerfile new file mode 100644 index 0000000..c4b6c3a --- /dev/null +++ b/foundry/docker/backend.dev.Dockerfile @@ -0,0 +1,34 @@ +# syntax=docker/dockerfile:1.7 + +FROM oven/bun:1.3 + +ARG SANDBOX_AGENT_VERSION=0.3.0 + +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + git \ + gh \ + nodejs \ + npm \ + openssh-client \ + && rm -rf /var/lib/apt/lists/* + +RUN npm install -g pnpm@10.28.2 + +RUN curl -fsSL "https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh + +ENV PATH="/root/.local/bin:${PATH}" +ENV SANDBOX_AGENT_BIN="/root/.local/bin/sandbox-agent" +ENV RIVET_RUNNER_VERSION_FILE=/etc/foundry/rivet-runner-version +RUN mkdir -p /etc/foundry \ + && date +%s > /etc/foundry/rivet-runner-version + +WORKDIR /app + +# NOTE: Do NOT use `bun --hot` here. 
Bun's hot reloading re-initializes the +# server on a new port (e.g. 6421 instead of 6420) while the container still +# exposes the original port, breaking all client connections. Restart the +# backend container instead: `just foundry-dev-down && just foundry-dev` +CMD ["bash", "-lc", "git config --global --add safe.directory /app >/dev/null 2>&1 || true; pnpm install --frozen-lockfile --filter @sandbox-agent/foundry-backend... && exec bun foundry/packages/backend/src/index.ts start --host 0.0.0.0 --port 7741"] diff --git a/foundry/docker/backend.preview.Dockerfile b/foundry/docker/backend.preview.Dockerfile new file mode 100644 index 0000000..91cd7c7 --- /dev/null +++ b/foundry/docker/backend.preview.Dockerfile @@ -0,0 +1,36 @@ +# syntax=docker/dockerfile:1.7 + +FROM oven/bun:1.3 + +ARG SANDBOX_AGENT_VERSION=0.3.0 + +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + git \ + gh \ + nodejs \ + npm \ + openssh-client \ + && npm install -g pnpm@10.28.2 \ + && rm -rf /var/lib/apt/lists/* + +RUN curl -fsSL "https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh + +ENV PATH="/root/.local/bin:${PATH}" +ENV SANDBOX_AGENT_BIN="/root/.local/bin/sandbox-agent" +ENV RIVET_RUNNER_VERSION_FILE=/etc/foundry/rivet-runner-version +RUN mkdir -p /etc/foundry \ + && date +%s > /etc/foundry/rivet-runner-version + +WORKDIR /workspace/quebec + +COPY quebec /workspace/quebec + +RUN pnpm install --frozen-lockfile +RUN pnpm --filter @sandbox-agent/foundry-shared build +RUN pnpm --filter @sandbox-agent/foundry-client build +RUN pnpm --filter @sandbox-agent/foundry-backend build + +CMD ["bash", "-lc", "git config --global --add safe.directory /workspace/quebec >/dev/null 2>&1 || true; exec bun packages/backend/dist/index.js start --host 0.0.0.0 --port 7841"] diff --git a/foundry/docker/foundry-base.Dockerfile b/foundry/docker/foundry-base.Dockerfile new file mode 100644 index 0000000..b4b9e26 --- /dev/null +++ 
b/foundry/docker/foundry-base.Dockerfile @@ -0,0 +1,190 @@ +# syntax=docker/dockerfile:1.10.0 +# +# Foundry base sandbox image. +# +# Builds sandbox-agent from source (reusing the upstream Dockerfile.full build +# stages) and layers Foundry-specific tooling on top: sudo, git, neovim, gh, +# node, bun, chromium, and agent-browser. +# +# Build: +# docker build --platform linux/amd64 \ +# -f foundry/docker/foundry-base.Dockerfile \ +# -t rivetdev/sandbox-agent:foundry-base- . +# +# Must be invoked from the repository root so the COPY . picks up the full +# source tree for the Rust + inspector build stages. + +# ============================================================================ +# Build inspector frontend +# ============================================================================ +FROM --platform=linux/amd64 node:22-alpine AS inspector-build +WORKDIR /app +RUN npm install -g pnpm + +COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ +COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ +COPY sdks/cli-shared/package.json ./sdks/cli-shared/ +COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ +COPY sdks/react/package.json ./sdks/react/ +COPY sdks/typescript/package.json ./sdks/typescript/ + +RUN pnpm install --filter @sandbox-agent/inspector... 
+ +COPY docs/openapi.json ./docs/ +COPY sdks/cli-shared ./sdks/cli-shared +COPY sdks/acp-http-client ./sdks/acp-http-client +COPY sdks/react ./sdks/react +COPY sdks/typescript ./sdks/typescript + +RUN cd sdks/cli-shared && pnpm exec tsup +RUN cd sdks/acp-http-client && pnpm exec tsup +RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup +RUN cd sdks/react && pnpm exec tsup + +COPY frontend/packages/inspector ./frontend/packages/inspector +RUN cd frontend/packages/inspector && pnpm exec vite build + +# ============================================================================ +# AMD64 Builder - sandbox-agent static binary +# ============================================================================ +FROM --platform=linux/amd64 rust:1.88.0 AS builder + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && apt-get install -y \ + musl-tools \ + musl-dev \ + llvm-14-dev \ + libclang-14-dev \ + clang-14 \ + libssl-dev \ + pkg-config \ + ca-certificates \ + g++ \ + g++-multilib \ + git \ + curl \ + wget && \ + rm -rf /var/lib/apt/lists/* + +RUN wget -q https://github.com/cross-tools/musl-cross/releases/latest/download/x86_64-unknown-linux-musl.tar.xz && \ + tar -xf x86_64-unknown-linux-musl.tar.xz -C /opt/ && \ + rm x86_64-unknown-linux-musl.tar.xz && \ + rustup target add x86_64-unknown-linux-musl + +ENV PATH="/opt/x86_64-unknown-linux-musl/bin:$PATH" \ + LIBCLANG_PATH=/usr/lib/llvm-14/lib \ + CLANG_PATH=/usr/bin/clang-14 \ + CC_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-gcc \ + CXX_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-g++ \ + AR_x86_64_unknown_linux_musl=x86_64-unknown-linux-musl-ar \ + CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=x86_64-unknown-linux-musl-gcc \ + CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true + +ENV SSL_VER=1.1.1w +RUN wget https://www.openssl.org/source/openssl-$SSL_VER.tar.gz && \ + tar -xzf openssl-$SSL_VER.tar.gz && \ + cd openssl-$SSL_VER && \ + ./Configure no-shared no-async --prefix=/musl 
--openssldir=/musl/ssl linux-x86_64 && \ + make -j$(nproc) && \ + make install_sw && \ + cd .. && \ + rm -rf openssl-$SSL_VER* + +ENV OPENSSL_DIR=/musl \ + OPENSSL_INCLUDE_DIR=/musl/include \ + OPENSSL_LIB_DIR=/musl/lib \ + PKG_CONFIG_ALLOW_CROSS=1 \ + RUSTFLAGS="-C target-feature=+crt-static -C link-arg=-static-libgcc" + +WORKDIR /build +COPY . . + +COPY --from=inspector-build /app/frontend/packages/inspector/dist ./frontend/packages/inspector/dist + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-agent --release --target x86_64-unknown-linux-musl -j4 && \ + cp target/x86_64-unknown-linux-musl/release/sandbox-agent /sandbox-agent + +# ============================================================================ +# Runtime - Foundry base sandbox image +# ============================================================================ +FROM --platform=linux/amd64 node:22-bookworm-slim + +ENV DEBIAN_FRONTEND=noninteractive + +# --- System packages -------------------------------------------------------- +RUN apt-get update && apt-get install -y --no-install-recommends \ + bash \ + ca-certificates \ + curl \ + git \ + gnupg \ + neovim \ + sudo \ + unzip \ + wget \ + # Chromium and its runtime deps + chromium \ + fonts-liberation \ + libasound2 \ + libatk-bridge2.0-0 \ + libatk1.0-0 \ + libcups2 \ + libdbus-1-3 \ + libdrm2 \ + libgbm1 \ + libgtk-3-0 \ + libnspr4 \ + libnss3 \ + libx11-xcb1 \ + libxcomposite1 \ + libxdamage1 \ + libxrandr2 \ + xdg-utils \ + && rm -rf /var/lib/apt/lists/* + +# --- GitHub CLI (gh) ------------------------------------------------------- +RUN curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) 
signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" \
+ > /etc/apt/sources.list.d/github-cli.list \
+ && apt-get update && apt-get install -y gh \
+ && rm -rf /var/lib/apt/lists/*
+
+# --- Bun --------------------------------------------------------------------
+RUN curl -fsSL https://bun.sh/install | bash \
+ && mv /root/.bun/bin/bun /usr/local/bin/bun \
+ && ln -sf /usr/local/bin/bun /usr/local/bin/bunx \
+ && rm -rf /root/.bun
+
+# --- sandbox-agent binary (from local build) --------------------------------
+COPY --from=builder /sandbox-agent /usr/local/bin/sandbox-agent
+RUN chmod +x /usr/local/bin/sandbox-agent
+
+# --- sandbox user with passwordless sudo ------------------------------------
+RUN useradd -m -s /bin/bash sandbox \
+ && echo "sandbox ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/sandbox \
+ && chmod 0440 /etc/sudoers.d/sandbox
+
+USER sandbox
+WORKDIR /home/sandbox
+
+# Point Chromium/Playwright at the system binary
+ENV CHROME_PATH=/usr/bin/chromium
+ENV CHROMIUM_PATH=/usr/bin/chromium
+ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
+ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
+
+# --- Install all sandbox-agent agents + agent-browser -----------------------
+RUN sandbox-agent install-agent --all
+RUN sudo npm install -g agent-browser
+
+EXPOSE 2468
+
+ENTRYPOINT ["sandbox-agent"]
+CMD ["server", "--host", "0.0.0.0", "--port", "2468"]
diff --git a/foundry/docker/frontend-caddy-entrypoint.sh b/foundry/docker/frontend-caddy-entrypoint.sh
new file mode 100644
index 0000000..44a42ad
--- /dev/null
+++ b/foundry/docker/frontend-caddy-entrypoint.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+set -eu
+
+escape_js() {
+ printf '%s' "${1:-}" | sed 's/\\/\\\\/g; s/"/\\"/g'
+}
+
+normalize_backend_endpoint() {
+ case "${1:-}" in
+ */api/rivet)
+ printf '%s/v1/rivet' "${1%/api/rivet}"
+ ;;
+ *)
+ printf '%s' "${1:-}"
+ ;;
+ esac
+}
+
+cat > /srv/__foundry_runtime_config.js < searches for existing functionality, creates plan 
asking clarifying questions +- automatically check off of todo list when done +- fix opencode path, cannot find config file +- unread indicator + - add inbox that is the source of truth for this + - show this on hf above everything else +- sync command +- refactor sessions: ~/.claude/plans/sleepy-frolicking-nest.md +- keep switch active after archive +- add an icon if there are merge conflicts +- add `hf -` +- ask -> do research in a codebase +- todo list integrations (linear, github, etc) + - show issues due soon in switch + - search issues from cli + - create issues from cli +- keep tmux window name in sync with the agent status +- move all tools (github, graphite, git) to tools/ folder +- show git tree +- editor plugins + - vs code + - tmux + - zed + - opencode web +- have hf switch periodically refresh on agent status +- add new columns + - model (for the agent) +- todo list & plan management -> with simplenote sync +- sqlite (global) +- list of all global task repos +- heartbeat status to tell openclaw what it needs to send you +- sandbox agent sdk support +- serve command to run server +- multi-repo support (list for all repos) +- pluggable notification system +- cron jobs +- sandbox support + - auto-boot sandboxes for prs +- menubar +- notes integration + +## cool details + +- automatically uses your opencode theme +- auto symlink target/node_modules/etc +- auto-archives tasks when closed +- shows agent status in the tmux window name diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md new file mode 100644 index 0000000..f7e054d --- /dev/null +++ b/foundry/packages/backend/CLAUDE.md @@ -0,0 +1,338 @@ +# Backend Notes + +## Actor Hierarchy + +Keep the backend actor tree aligned with this shape unless we explicitly decide to change it: + +```text +OrganizationActor (direct coordinator for tasks) +├─ AuditLogActor (organization-scoped global feed) +├─ GithubDataActor +├─ TaskActor(task) +│ ├─ taskSessions → session
metadata/transcripts +│ └─ taskSandboxes → sandbox instance index +└─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N +``` + +## Coordinator Pattern + +Actors follow a coordinator pattern where each coordinator is responsible for: +1. **Index tables** — keeping a local SQLite index/summary of its child actors' data +2. **Create/destroy** — handling lifecycle of child actors +3. **Routing** — resolving lookups to the correct child actor + +Children push updates **up** to their direct coordinator only. Coordinators broadcast changes to connected clients. This keeps the read path local (no fan-out to children). + +### Coordinator hierarchy and index tables + +```text +OrganizationActor (coordinator for tasks + auth users) +│ +│ Index tables: +│ ├─ taskIndex → TaskActor index (taskId → repoId + branchName) +│ ├─ taskSummaries → TaskActor materialized sidebar projection +│ ├─ authSessionIndex → UserActor index (session token → userId) +│ ├─ authEmailIndex → UserActor index (email → userId) +│ └─ authAccountIndex → UserActor index (OAuth account → userId) +│ +├─ TaskActor (coordinator for sessions + sandboxes) +│ │ +│ │ Index tables: +│ │ ├─ taskWorkspaceSessions → Session index (session metadata + transcript) +│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) +│ │ +│ └─ SandboxInstanceActor (leaf) +│ +├─ AuditLogActor (organization-scoped audit log, not a coordinator) +└─ GithubDataActor (GitHub API cache, not a coordinator) +``` + +When adding a new index table, annotate it in the schema file with a doc comment identifying it as a coordinator index and which child actor it indexes (see existing examples). + +## GitHub Sync Data Model + +The GithubDataActor syncs **repositories** and **pull requests** from GitHub, not branches. We only need repos (to know which repos exist and their metadata) and PRs (to lazily populate virtual tasks in the sidebar). 
Branch data is not synced because we only create tasks from PRs or fresh user-initiated creation, never from bare branches. Generated branch names for new tasks are treated as unique enough to skip conflict detection against remote branches. + +Tasks are either: +1. **Created fresh** by the user (no PR yet, branch name generated from task description) +2. **Lazily populated from pull requests** during PR sync (virtual task entries in org tables, no actor spawned) + +## Lazy Task Actor Creation — CRITICAL + +**Task actors must NEVER be created during GitHub sync or bulk operations.** Creating hundreds of task actors simultaneously causes OOM crashes. An org can have 200+ PRs; spawning an actor per PR kills the process. + +### The two creation points + +There are exactly **two** places that may create a task actor: + +1. **`createTaskMutation`** in `task-mutations.ts` — the only backend code that calls `getOrCreateTask`. Triggered by explicit user action ("New Task" button). One actor at a time. + +2. **`backend-client.ts` client helper** — calls `client.task.getOrCreate(...)`. This is the lazy materialization point: when a user clicks a virtual task in the sidebar, the client creates the actor, and it self-initializes in `getCurrentRecord()` (`workflow/common.ts`) by reading branch/title from the org's `getTaskIndexEntry` action. + +### The rule + +**Never use `getOrCreateTask` inside a sync loop, webhook handler, or any bulk operation.** That's what caused the OOM — 186 actors spawned simultaneously during PR sync. + +`getOrCreateTask` IS allowed in: +- `createTaskMutation` — explicit user "New Task" action +- `requireWorkspaceTask` — user-initiated actions (createSession, sendMessage, etc.)
that may hit a virtual task +- `getTask` action on the org — called by sandbox actor and client, needs to materialize virtual tasks +- `backend-client.ts` client helper — lazy materialization when user views a task + +### Virtual tasks (PR-driven) + +During PR sync, `refreshTaskSummaryForBranchMutation` is called for every changed PR (via github-data's `emitPullRequestChangeEvents`). It writes **virtual task entries** to the org actor's local `taskIndex` + `taskSummaries` tables only. No task actor is spawned. No cross-actor calls to task actors. + +When the user interacts with a virtual task (clicks it, creates a session): +1. Client or org actor calls `getOrCreate` on the task actor key → actor is created with empty DB +2. Any action on the actor calls `getCurrentRecord()` → sees empty DB → reads branch/title from org's `getTaskIndexEntry` → calls `initBootstrapDbActivity` + `initCompleteActivity` → task is now real + +### Call sites to watch + +- `refreshTaskSummaryForBranchMutation` — called in bulk during sync. Must ONLY write to org local tables. Never create task actors or call task actor actions. +- `emitPullRequestChangeEvents` in github-data — iterates all changed PRs. Must remain fire-and-forget with no actor fan-out. + +## Queue vs Action Decision Framework + +The default is a direct action. Use a queue only if the answer to one or more of these questions is **yes**. + +Actions are pure RPCs with no DB overhead on send — fast, but if the call fails the operation is lost. Queues persist the message to the database on send, guaranteeing it will be processed even if the target actor is busy, slow, or recovering. The tradeoff: queues add write overhead and serialize processing. + +### 1. Does this operation coordinate multi-step work? + +Does it involve external I/O (sandbox API, GitHub API, agent process management) or state machine transitions where interleaving would corrupt state? 
This is different from database-level serialization — a simple read-then-write on SQLite can use a transaction. The queue is for ordering operations that span DB writes + external I/O. + +**Queue examples:** +- `workspace.send_message` — sends to sandbox agent, writes session status, does owner-swap. Multi-step with external I/O. +- `push` / `sync` / `merge` — git operations in sandbox that must not interleave. +- `createTask` — read-then-write across task index + actor creation. Returns result, so `wait: true`. + +**Action examples:** +- `billing.stripe_customer.apply` — single column upsert, no external I/O. +- `workspace.update_draft` — writes draft text, no coordination with sandbox ops. +- `workspace.rename_task` — updates title column, queue handlers don't touch title. + +### 2. Must this message be processed no matter what? + +Is this a cross-actor fire-and-forget where the caller won't retry and data loss is unacceptable? A queue persists the message — if the target is down, it waits. An action RPC that fails is gone. + +**Queue examples:** +- `audit.append` — caller must never be affected by audit failures, and audit entries must not be lost. +- `applyTaskSummaryUpdate` — task actor pushes summary to org and moves on. Won't retry if org is busy. +- `refreshTaskSummaryForBranch` — webhook-driven, won't be redelivered for the same event. + +**Action examples:** +- `billing.invoice.upsert` — Stripe retries handle failures externally. No durability need on our side. +- `workspace.mark_unread` — UI convenience state. Acceptable to lose on transient failure. +- `github.webhook_receipt.record` — timestamp columns with no downstream effects. + +### Once on a queue: wait or fire-and-forget? + +If the caller needs a return value, use `wait: true`. If the UI updates via push events, use `wait: false`. + +Full migration plan: `QUEUE_TO_ACTION_MIGRATION.md`. 
+ +## Ownership Rules + +- `OrganizationActor` is the organization coordinator, direct coordinator for tasks, and lookup/index owner. It owns the task index, task summaries, and repo catalog. +- `AuditLogActor` is organization-scoped. There is one organization-level audit log feed. +- `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized. +- `TaskActor` can have many sessions. +- `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time. +- Session unread state and draft prompts are backend-owned workspace state, not frontend-local state. +- Branch names are immutable after task creation. Do not implement branch-rename flows. +- `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. +- The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. +- When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. +- Only a coordinator creates/destroys its children. Do not create child actors from outside the coordinator. +- Children push state changes up to their direct coordinator only. Task actors push summary updates directly to the organization actor. +- Read paths must use the coordinator's local index tables. Do not fan out to child actors on the hot read path. +- Never build "enriched" read actions that chain through multiple actors (e.g., coordinator → child actor → sibling actor). If data from multiple actors is needed for a read, it should already be materialized in the coordinator's index tables via push updates. If it's not there, fix the write path to push it — do not add a fan-out read path. 
+ +## Drizzle Migration Maintenance + +After changing any actor's `db/schema.ts`, you **must** regenerate the corresponding migration so the runtime creates the tables that match the schema. Forgetting this step causes `no such table` errors at runtime. + +1. **Generate a new drizzle migration.** Run from `packages/backend`: + ```bash + npx drizzle-kit generate --config=./src/actors//db/drizzle.config.ts + ``` + If the interactive prompt is unavailable (e.g. in a non-TTY), manually create a new `.sql` file under `./src/actors//db/drizzle/` and add the corresponding entry to `meta/_journal.json`. + +2. **Regenerate the compiled `migrations.ts`.** Run from the foundry root: + ```bash + npx tsx packages/backend/src/actors/_scripts/generate-actor-migrations.ts + ``` + +3. **Verify insert/upsert calls.** Every column with `.notNull()` (and no `.default(...)`) must be provided a value in all `insert()` and `onConflictDoUpdate()` calls. Missing a NOT NULL column causes a runtime constraint violation, not a type error. + +4. **Nuke RivetKit state in dev** after migration changes to start fresh: + ```bash + docker compose -f compose.dev.yaml down + docker volume rm foundry_foundry_rivetkit_storage + docker compose -f compose.dev.yaml up -d + ``` + +Actors with drizzle migrations: `organization`, `audit-log`, `task`. Other actors (`user`, `github-data`) use inline migrations without drizzle. + +## Workflow Step Nesting — FORBIDDEN + +**Never call `c.step()` / `ctx.step()` from inside another step's `run` callback.** RivetKit workflow steps cannot be nested. Doing so causes the runtime error: *"Cannot start a new workflow entry while another is in progress."* + +This means: +- Functions called from within a step `run` callback must NOT use `c.step()`, `c.loop()`, `c.sleep()`, or `c.queue.next()`. +- If a mutation function needs to be called both from a step and standalone, it must only do plain DB/API work — no workflow primitives. 
The workflow step wrapping belongs in the workflow file, not in the mutation. +- Helper wrappers that conditionally call `c.step()` (like a `runSyncStep` pattern) are dangerous — if the caller is already inside a step, the nested `c.step()` will crash at runtime with no compile-time warning. + +**Rule of thumb:** Workflow primitives (`step`, `loop`, `sleep`, `queue.next`) may only appear at the top level of a workflow function or inside a `loop` callback — never inside a step's `run`. + +## SQLite Constraints + +- Single-row tables must use an integer primary key with `CHECK (id = 1)` to enforce the singleton invariant at the database level. +- Follow the task actor pattern for metadata/profile rows and keep the fixed row id in code as `1`, not a string sentinel. + +## Multiplayer Correctness + +Per-user UI state must live on the user actor, not on shared task/session actors. This is critical for multiplayer — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts. + +**Per-user state (user actor):** active session tab, unread counts, draft text, draft attachments. Keyed by `(userId, taskId, sessionId)`. + +**Task-global state (task actor):** session transcript, session model, session runtime status, sandbox identity, task status, branch name, PR state. These are shared across all users viewing the task — that is correct behavior. + +Do not store per-user preferences, selections, or ephemeral UI state on shared actors. If a field's value should differ between two users looking at the same task, it belongs on the user actor. + +## Audit Log Maintenance + +Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of significant operations. 
+ +## Debugging Actors + +### RivetKit Inspector UI + +The RivetKit inspector UI at `http://localhost:6420/ui/` is the most reliable way to debug actor state in local development. The inspector HTTP API (`/inspector/workflow-history`) has a known bug where it returns empty `{}` even when the workflow has entries — always cross-check with the UI. + +**Useful inspector URL pattern:** +``` +http://localhost:6420/ui/?u=http%3A%2F%2F127.0.0.1%3A6420&ns=default&r=default&n=[%22%22]&actorId=&tab= +``` + +Tabs: `workflow`, `database`, `state`, `queue`, `connections`, `metadata`. + +**To find actor IDs:** +```bash +curl -s 'http://127.0.0.1:6420/actors?name=organization' +``` + +**To query actor DB via bun (inside container):** +```bash +docker compose -f compose.dev.yaml exec -T backend bun -e ' + var Database = require("bun:sqlite"); + var db = new Database("/root/.local/share/foundry/rivetkit/databases/.db", { readonly: true }); + console.log(JSON.stringify(db.query("SELECT name FROM sqlite_master WHERE type=?").all("table"))); +' +``` + +**To call actor actions via inspector:** +```bash +curl -s -X POST 'http://127.0.0.1:6420/gateway//inspector/action/' \ + -H 'Content-Type: application/json' -d '{"args":[{}]}' +``` + +### Known inspector API bugs + +- `GET /inspector/workflow-history` may return `{"history":{}}` even when workflow has run. Use the UI's Workflow tab instead. +- `GET /inspector/queue` is reliable for checking pending messages. +- `GET /inspector/state` is reliable for checking actor state. + +## Inbox & Notification System + +The user actor owns two per-user systems: a **task feed** (sidebar ordering) and **notifications** (discrete events). These are distinct concepts that share a common "bump" mechanism. + +### Core distinction: bumps vs. notifications + +A **bump** updates the task's position in the user's sidebar feed. A **notification** is a discrete event entry shown in the notification panel. 
Every notification also triggers a bump, but not every bump creates a notification. + +| Event | Bumps task? | Creates notification? | +|-------|-------------|----------------------| +| User sends a message | Yes | No | +| User opens/clicks a task | Yes | No | +| User creates a session | Yes | No | +| Agent finishes responding | Yes | Yes | +| PR review requested | Yes | Yes | +| PR merged | Yes | Yes | +| PR comment added | Yes | Yes | +| Agent error/needs input | Yes | Yes | + +### Recipient resolution + +Notifications and bumps go to the **task owner** only. Each task has exactly one owner at a time (the user who last sent a message or explicitly took ownership). This is an acceptable race condition — it rarely makes sense for two users to work on the same task simultaneously, and ownership transfer is explicit. + +The system supports multiplayer (multiple users can view the same task), but the notification/bump target is always the single current owner. Each user has their own independent notification and unread state on their own user actor. + +### Tables (on user actor) + +Two new tables: + +- **`userTaskFeed`** — one row per task. Tracks `bumpedAtMs` and `bumpReason` for sidebar sort order. Does NOT denormalize task content (title, repo, etc.) — the frontend queries the org actor for task content and uses the feed only for ordering/filtering. +- **`userNotifications`** — discrete notification entries with `type`, `message`, `read` state, and optional `sessionId`. Retention: notifications are retained for a configurable number of days after being marked read, then cleaned up. + +### Queue commands (user actor workflow) + +- `user.bump_task` — upserts `userTaskFeed` row, no notification created. Used for user-initiated actions (send message, open task, create session). +- `user.notify` — inserts `userNotifications` row AND upserts `userTaskFeed` (auto-bump). Used for system events (agent finished, PR review requested). 
+- `user.mark_read` — marks notifications read for a given `(taskId, sessionId?)`. Also updates `userTaskState.unread` for the session. + +### Data flow + +Task actor (or org actor) resolves the current task owner, then sends to the owner's user actor queue: +1. `user.notify(...)` for notification-worthy events (auto-bumps the feed) +2. `user.bump_task(...)` for non-notification bumps (send message, open task) + +The user actor processes the queue message, writes to its local tables, and broadcasts a `userFeedUpdated` event to connected clients. + +### Sidebar architecture change + +The left sidebar changes from showing the repo/PR tree to showing **recent tasks** ordered by `userTaskFeed.bumpedAtMs`. Two new buttons at the top of the sidebar: +- **All Repositories** — navigates to a page showing the current repo + PR list (preserving existing functionality) +- **Notifications** — navigates to a page showing the full notification list + +The sidebar reads from two sources: +- **User actor** (`userTaskFeed`) — provides sort order and "which tasks are relevant to this user" +- **Org actor** (`taskSummaries`) — provides task content (title, status, branch, PR state, session summaries) + +The frontend merges these: org snapshot gives task data, user feed gives sort order. Uses the existing subscription system (`useSubscription`) for both initial state fetch and streaming updates. + +### `updatedAtMs` column semantics + +The org actor's `taskSummaries.updatedAtMs` and the user actor's `userTaskFeed.bumpedAtMs` serve different purposes: +- `taskSummaries.updatedAtMs` — updated by task actor push. Reflects the last time the task's global state changed (any mutation, any user). Used for "All Repositories" / "All Tasks" views. +- `userTaskFeed.bumpedAtMs` — updated by bump/notify commands. Reflects the last time this specific user's attention was drawn to this task. Used for the per-user sidebar sort. + +Add doc comments on both columns clarifying the update source. 
+ +### Unread semantics + +Each user has independent unread state. The existing `userTaskState` table tracks per-`(taskId, sessionId)` unread state. When the user clicks a session: +1. `userTaskState.unread` is set to 0 for that session +2. All `userNotifications` rows matching `(taskId, sessionId)` are marked `read = 1` + +These two unread systems must stay in sync via the `user.mark_read` queue command. + +## Better Auth: Actions, Not Queues + +All Better Auth adapter operations (verification CRUD, session/email/account index mutations, and user-actor auth record mutations) are exposed as **actions**, not queue commands. This is an intentional exception to the normal pattern of using queues for mutations. + +**Why:** The org actor's workflow queue is shared with GitHub sync, webhook processing, task mutations, and billing — 20+ queue names processed sequentially. During the OAuth callback, Better Auth needs to read/write verification records and upsert session/account indexes. If any long-running queue handler (e.g., a GitHub sync step) is ahead in the queue, auth operations time out (10s), `expectQueueResponse` throws a regular `Error`, and Better Auth's `parseState` catches it as a non-`StateError` → redirects to `?error=please_restart_the_process`. + +**Why it's safe:** Auth operations are simple SQLite reads/writes scoped to a single actor instance with no cross-actor side effects. They don't need workflow replay semantics or sequential ordering guarantees relative to other queue commands. + +**Rule:** Never move Better Auth operations back to queue commands. If new auth-related mutations are added, expose them as actions on the relevant actor. + +## Maintenance + +- Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change. +- If the real actor tree diverges from this document, update this document in the same change. 
+- When adding, removing, or renaming coordinator index tables, update the hierarchy diagram above in the same change. +- When adding a new coordinator index table in a schema file, add a doc comment identifying which child actor it indexes (pattern: `/** Coordinator index of {ChildActor} instances. ... */`). diff --git a/foundry/packages/backend/package.json b/foundry/packages/backend/package.json new file mode 100644 index 0000000..562bab7 --- /dev/null +++ b/foundry/packages/backend/package.json @@ -0,0 +1,37 @@ +{ + "name": "@sandbox-agent/foundry-backend", + "version": "0.1.0", + "private": true, + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsup src/index.ts --format esm", + "db:generate": "find src/actors -name drizzle.config.ts -exec pnpm exec drizzle-kit generate --config {} \\; && \"$HOME/.bun/bin/bun\" src/actors/_scripts/generate-actor-migrations.ts", + "typecheck": "tsc --noEmit", + "test": "$HOME/.bun/bin/bun x vitest run", + "start": "bun dist/index.js start" + }, + "dependencies": { + "@e2b/code-interpreter": "^2.3.3", + "@hono/node-server": "^1.19.7", + "@hono/node-ws": "^1.3.0", + "@iarna/toml": "^2.2.5", + "@sandbox-agent/foundry-shared": "workspace:*", + "better-auth": "^1.5.5", + "dockerode": "^4.0.9", + "drizzle-kit": "^0.31.8", + "drizzle-orm": "^0.44.5", + "hono": "^4.11.9", + "pino": "^10.3.1", + "rivetkit": "https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a", + "sandbox-agent": "workspace:*", + "uuid": "^13.0.0", + "ws": "^8.19.0", + "zod": "^4.1.5" + }, + "devDependencies": { + "@types/bun": "^1.3.9", + "tsup": "^8.5.0" + } +} diff --git a/foundry/packages/backend/src/actors/_scripts/generate-actor-migrations.ts b/foundry/packages/backend/src/actors/_scripts/generate-actor-migrations.ts new file mode 100644 index 0000000..6b74dd3 --- /dev/null +++ b/foundry/packages/backend/src/actors/_scripts/generate-actor-migrations.ts @@ -0,0 +1,138 @@ +import { mkdir, readdir, readFile, rm, 
writeFile } from "node:fs/promises"; +import { dirname, join, resolve } from "node:path"; +import { createErrorContext, createFoundryLogger } from "@sandbox-agent/foundry-shared"; + +type Journal = { + entries?: Array<{ + idx: number; + when: number; + tag: string; + breakpoints?: boolean; + version?: string; + }>; +}; + +const logger = createFoundryLogger({ + service: "foundry-backend-migrations", +}); + +function padMigrationKey(idx: number): string { + return `m${String(idx).padStart(4, "0")}`; +} + +function escapeTemplateLiteral(value: string): string { + return value.replace(/`/g, "\\`").replace(/\$\{/g, "\\${"); +} + +async function fileExists(path: string): Promise { + try { + await readFile(path); + return true; + } catch { + return false; + } +} + +async function walkDirectories(root: string, onDir: (dir: string) => Promise): Promise { + const entries = await readdir(root, { withFileTypes: true }); + await onDir(root); + for (const entry of entries) { + if (!entry.isDirectory()) continue; + if (entry.name === "node_modules" || entry.name === "dist" || entry.name.startsWith(".")) { + continue; + } + await walkDirectories(join(root, entry.name), onDir); + } +} + +async function generateOne(drizzleDir: string): Promise { + const metaDir = resolve(drizzleDir, "meta"); + const journalPath = resolve(metaDir, "_journal.json"); + if (!(await fileExists(journalPath))) { + return; + } + + const drizzleEntries = (await readdir(drizzleDir, { withFileTypes: true })) + .filter((entry) => entry.isFile() && entry.name.endsWith(".sql")) + .map((entry) => entry.name) + .sort(); + + if (drizzleEntries.length === 0) { + return; + } + + const journalRaw = await readFile(journalPath, "utf8"); + const journal = JSON.parse(journalRaw) as Journal; + const entries = journal.entries ?? 
[]; + + const sqlByKey = new Map(); + for (const entry of entries) { + const file = drizzleEntries[entry.idx]; + if (!file) { + throw new Error(`Missing migration SQL file for idx=${entry.idx} in ${drizzleDir}`); + } + const sqlPath = resolve(drizzleDir, file); + const sqlRaw = await readFile(sqlPath, "utf8"); + sqlByKey.set(padMigrationKey(entry.idx), sqlRaw); + } + + const migrationsObjectLines: string[] = []; + for (const entry of entries) { + const key = padMigrationKey(entry.idx); + const sql = sqlByKey.get(key); + if (!sql) continue; + migrationsObjectLines.push(` ${key}: \`${escapeTemplateLiteral(sql)}\`,`); + } + + const banner = `// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. +`; + + const journalLiteral = JSON.stringify( + { + entries: entries.map((entry) => ({ + idx: entry.idx, + when: entry.when, + tag: entry.tag, + breakpoints: Boolean(entry.breakpoints), + })), + }, + null, + 2, + ); + + const outPath = resolve(drizzleDir, "..", "migrations.ts"); + const content = `${banner} +const journal = ${journalLiteral} as const; + +export default { + journal, + migrations: { +${migrationsObjectLines.join("\n")} + } as const +}; +`; + + await mkdir(dirname(outPath), { recursive: true }); + await writeFile(outPath, content, "utf8"); + + // drizzle-kit generates a JS helper file by default; delete to keep TS-only sources. 
+ await rm(resolve(drizzleDir, "migrations.js"), { force: true }); +} + +async function main(): Promise { + const packageRoot = resolve(import.meta.dirname, "..", "..", ".."); // packages/backend + const actorsRoot = resolve(packageRoot, "src", "actors"); + + await walkDirectories(actorsRoot, async (dir) => { + if (dir.endsWith(`${join("db", "drizzle")}`)) { + await generateOne(dir); + } + }); +} + +main().catch((error: unknown) => { + logger.error(createErrorContext(error), "generate_actor_migrations_failed"); + process.exitCode = 1; +}); diff --git a/foundry/packages/backend/src/actors/audit-log/db/db.ts b/foundry/packages/backend/src/actors/audit-log/db/db.ts new file mode 100644 index 0000000..d808ec0 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const auditLogDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts new file mode 100644 index 0000000..da5e904 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/audit-log/db/drizzle", + schema: "./src/actors/audit-log/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql new file mode 100644 index 0000000..697aac9 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql @@ -0,0 +1,8 @@ +CREATE TABLE `events` ( + `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + `task_id` text, + `branch_name` text, + `kind` text NOT NULL, + `payload_json` text NOT NULL, + `created_at` 
integer NOT NULL +); diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql new file mode 100644 index 0000000..9ada559 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql @@ -0,0 +1 @@ +ALTER TABLE `events` ADD COLUMN `repo_id` text; diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..afaadc4 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json @@ -0,0 +1,70 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "e592c829-141f-4740-88b7-09cf957a4405", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "events": { + "name": "events", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "branch_name": { + "name": "branch_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "kind": { + "name": "kind", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "payload_json": { + "name": "payload_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git 
a/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000..cf2910c --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json @@ -0,0 +1,77 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "a1b2c3d4-0001-4000-8000-000000000001", + "prevId": "e592c829-141f-4740-88b7-09cf957a4405", + "tables": { + "events": { + "name": "events", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "repo_id": { + "name": "repo_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "task_id": { + "name": "task_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "branch_name": { + "name": "branch_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "kind": { + "name": "kind", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "payload_json": { + "name": "payload_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json new file mode 100644 index 0000000..0393be2 --- /dev/null +++ 
b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json @@ -0,0 +1,20 @@ +{ + "version": "7", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1773376223815, + "tag": "0000_fluffy_kid_colt", + "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773376223816, + "tag": "0001_add_repo_id", + "breakpoints": true + } + ] +} diff --git a/foundry/packages/backend/src/actors/audit-log/db/migrations.ts b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts new file mode 100644 index 0000000..5bf9b5a --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts @@ -0,0 +1,37 @@ +// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. + +const journal = { + entries: [ + { + idx: 0, + when: 1773376223815, + tag: "0000_fluffy_kid_colt", + breakpoints: true, + }, + { + idx: 1, + when: 1773376223816, + tag: "0001_add_repo_id", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`events\` ( + \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, + \`task_id\` text, + \`branch_name\` text, + \`kind\` text NOT NULL, + \`payload_json\` text NOT NULL, + \`created_at\` integer NOT NULL +); +`, + m0001: `ALTER TABLE \`events\` ADD COLUMN \`repo_id\` text; +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/audit-log/db/schema.ts b/foundry/packages/backend/src/actors/audit-log/db/schema.ts new file mode 100644 index 0000000..d275dd4 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/schema.ts @@ -0,0 +1,12 @@ +import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; + +export const events = sqliteTable("events", { + id: integer("id").primaryKey({ autoIncrement: true }), + repoId: text("repo_id"), + taskId: text("task_id"), + branchName: 
text("branch_name"), + kind: text("kind").notNull(), + // Structured by the audit-log event kind definitions in application code. + payloadJson: text("payload_json").notNull(), + createdAt: integer("created_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/audit-log/index.ts b/foundry/packages/backend/src/actors/audit-log/index.ts new file mode 100644 index 0000000..db32829 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/index.ts @@ -0,0 +1,180 @@ +// @ts-nocheck +import { and, desc, eq } from "drizzle-orm"; +import { actor, queue } from "rivetkit"; +import { workflow, Loop } from "rivetkit/workflow"; +import type { AuditLogEvent } from "@sandbox-agent/foundry-shared"; +import { selfAuditLog } from "../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { auditLogDb } from "./db/db.js"; +import { events } from "./db/schema.js"; + +export interface AuditLogInput { + organizationId: string; +} + +export interface AppendAuditLogCommand { + kind: string; + repoId?: string; + taskId?: string; + branchName?: string; + payload: Record; +} + +export interface ListAuditLogParams { + repoId?: string; + branch?: string; + taskId?: string; + limit?: number; +} + +// --------------------------------------------------------------------------- +// Queue names +// --------------------------------------------------------------------------- + +const AUDIT_LOG_QUEUE_NAMES = ["auditLog.command.append"] as const; + +type AuditLogQueueName = (typeof AUDIT_LOG_QUEUE_NAMES)[number]; + +function auditLogWorkflowQueueName(name: AuditLogQueueName): AuditLogQueueName { + return name; +} + +// --------------------------------------------------------------------------- +// Mutation functions +// --------------------------------------------------------------------------- + +async function appendMutation(c: any, body: AppendAuditLogCommand): Promise<{ ok: true }> { + const now = Date.now(); + await c.db + 
.insert(events) + .values({ + repoId: body.repoId ?? null, + taskId: body.taskId ?? null, + branchName: body.branchName ?? null, + kind: body.kind, + payloadJson: JSON.stringify(body.payload), + createdAt: now, + }) + .run(); + return { ok: true }; +} + +// --------------------------------------------------------------------------- +// Workflow command loop +// --------------------------------------------------------------------------- + +type AuditLogWorkflowHandler = (loopCtx: any, body: any) => Promise; + +const AUDIT_LOG_COMMAND_HANDLERS: Record = { + "auditLog.command.append": async (c, body) => appendMutation(c, body), +}; + +async function runAuditLogWorkflow(ctx: any): Promise { + await ctx.loop("audit-log-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-audit-log-command", { + names: [...AUDIT_LOG_QUEUE_NAMES], + completable: true, + }); + + if (!msg) { + return Loop.continue(undefined); + } + + const handler = AUDIT_LOG_COMMAND_HANDLERS[msg.name as AuditLogQueueName]; + if (!handler) { + logActorWarning("auditLog", "unknown audit-log command", { command: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }).catch(() => {}); + return Loop.continue(undefined); + } + + try { + // Wrap in a step so c.state and c.db are accessible inside mutation functions. 
+ const result = await loopCtx.step({ + name: msg.name, + timeout: 60_000, + run: async () => handler(loopCtx, msg.body), + }); + await msg.complete(result); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("auditLog", "audit-log workflow command failed", { + command: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + +// --------------------------------------------------------------------------- +// Actor definition +// --------------------------------------------------------------------------- + +/** + * Organization-scoped audit log. One per org, not one per repo. + * + * The org is the coordinator for all tasks across repos, and we frequently need + * to query the full audit trail across repos (e.g. org-wide activity feed, + * compliance). A per-repo audit log would require fan-out reads every time. + * Keeping it org-scoped gives us a single queryable feed with optional repoId + * filtering when callers want a narrower view. 
+ */ +export const auditLog = actor({ + db: auditLogDb, + queues: Object.fromEntries(AUDIT_LOG_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Audit Log", + icon: "database", + }, + createState: (_c, input: AuditLogInput) => ({ + organizationId: input.organizationId, + }), + actions: { + // Mutation — self-send to queue for workflow history + async append(c: any, body: AppendAuditLogCommand): Promise<{ ok: true }> { + const self = selfAuditLog(c); + await self.send(auditLogWorkflowQueueName("auditLog.command.append"), body, { wait: false }); + return { ok: true }; + }, + + // Read — direct action (no queue) + async list(c, params?: ListAuditLogParams): Promise { + const whereParts = []; + if (params?.repoId) { + whereParts.push(eq(events.repoId, params.repoId)); + } + if (params?.taskId) { + whereParts.push(eq(events.taskId, params.taskId)); + } + if (params?.branch) { + whereParts.push(eq(events.branchName, params.branch)); + } + + const base = c.db + .select({ + id: events.id, + repoId: events.repoId, + taskId: events.taskId, + branchName: events.branchName, + kind: events.kind, + payloadJson: events.payloadJson, + createdAt: events.createdAt, + }) + .from(events); + + const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base) + .orderBy(desc(events.createdAt)) + .limit(params?.limit ?? 100) + .all(); + + return rows.map((row) => ({ + ...row, + organizationId: c.state.organizationId, + repoId: row.repoId ?? 
null, + })); + }, + }, + run: workflow(runAuditLogWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/context.ts b/foundry/packages/backend/src/actors/context.ts new file mode 100644 index 0000000..3554a96 --- /dev/null +++ b/foundry/packages/backend/src/actors/context.ts @@ -0,0 +1,42 @@ +import type { AppConfig } from "@sandbox-agent/foundry-shared"; +import type { BackendDriver } from "../driver.js"; +import type { NotificationService } from "../notifications/index.js"; +import type { AppShellServices } from "../services/app-shell-runtime.js"; + +let runtimeConfig: AppConfig | null = null; +let notificationService: NotificationService | null = null; +let runtimeDriver: BackendDriver | null = null; +let appShellServices: AppShellServices | null = null; + +export function initActorRuntimeContext(config: AppConfig, notifications?: NotificationService, driver?: BackendDriver, appShell?: AppShellServices): void { + runtimeConfig = config; + notificationService = notifications ?? null; + runtimeDriver = driver ?? null; + appShellServices = appShell ?? 
null; +} + +export function getActorRuntimeContext(): { + config: AppConfig; + notifications: NotificationService | null; + driver: BackendDriver; + appShell: AppShellServices; +} { + if (!runtimeConfig) { + throw new Error("Actor runtime context not initialized"); + } + + if (!runtimeDriver) { + throw new Error("Actor runtime context missing driver"); + } + + if (!appShellServices) { + throw new Error("Actor runtime context missing app shell services"); + } + + return { + config: runtimeConfig, + notifications: notificationService, + driver: runtimeDriver, + appShell: appShellServices, + }; +} diff --git a/foundry/packages/backend/src/actors/github-data/db/db.ts b/foundry/packages/backend/src/actors/github-data/db/db.ts new file mode 100644 index 0000000..00e5a11 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const githubDataDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts new file mode 100644 index 0000000..10e3804 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -0,0 +1,114 @@ +const journal = { + entries: [ + { + idx: 0, + when: 1773446400000, + tag: "0000_github_data", + breakpoints: true, + }, + { + idx: 1, + when: 1773810002000, + tag: "0001_default_branch", + breakpoints: true, + }, + { + idx: 2, + when: 1773810300000, + tag: "0002_github_branches", + breakpoints: true, + }, + { + idx: 3, + when: 1773907200000, + tag: "0003_sync_progress", + breakpoints: true, + }, + { + idx: 4, + when: 1773993600000, + tag: "0004_drop_github_branches", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`github_meta\` ( + \`id\` integer PRIMARY KEY NOT NULL, + 
\`connected_account\` text NOT NULL, + \`installation_status\` text NOT NULL, + \`sync_status\` text NOT NULL, + \`installation_id\` integer, + \`last_sync_label\` text NOT NULL, + \`last_sync_at\` integer, + \`updated_at\` integer NOT NULL, + CONSTRAINT \`github_meta_singleton_id_check\` CHECK(\`id\` = 1) +); +--> statement-breakpoint +CREATE TABLE \`github_repositories\` ( + \`repo_id\` text PRIMARY KEY NOT NULL, + \`full_name\` text NOT NULL, + \`clone_url\` text NOT NULL, + \`private\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`github_members\` ( + \`member_id\` text PRIMARY KEY NOT NULL, + \`login\` text NOT NULL, + \`display_name\` text NOT NULL, + \`email\` text, + \`role\` text, + \`state\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`github_pull_requests\` ( + \`pr_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`repo_full_name\` text NOT NULL, + \`number\` integer NOT NULL, + \`title\` text NOT NULL, + \`body\` text, + \`state\` text NOT NULL, + \`url\` text NOT NULL, + \`head_ref_name\` text NOT NULL, + \`base_ref_name\` text NOT NULL, + \`author_login\` text, + \`is_draft\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +`, + m0001: `ALTER TABLE \`github_repositories\` ADD \`default_branch\` text NOT NULL DEFAULT 'main'; +`, + m0002: `CREATE TABLE \`github_branches\` ( + \`branch_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text NOT NULL, + \`commit_sha\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +`, + m0003: `ALTER TABLE \`github_meta\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`sync_phase\` text; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`processed_repository_count\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`total_repository_count\` integer NOT 
NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_repositories\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_members\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_pull_requests\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_branches\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +`, + m0004: `DROP TABLE IF EXISTS \`github_branches\`; +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts new file mode 100644 index 0000000..94b4edc --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -0,0 +1,59 @@ +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; + +export const githubMeta = sqliteTable( + "github_meta", + { + id: integer("id").primaryKey(), + connectedAccount: text("connected_account").notNull(), + installationStatus: text("installation_status").notNull(), + syncStatus: text("sync_status").notNull(), + installationId: integer("installation_id"), + lastSyncLabel: text("last_sync_label").notNull(), + lastSyncAt: integer("last_sync_at"), + syncGeneration: integer("sync_generation").notNull(), + syncPhase: text("sync_phase"), + processedRepositoryCount: integer("processed_repository_count").notNull(), + totalRepositoryCount: integer("total_repository_count").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("github_meta_singleton_id_check", sql`${table.id} = 1`)], +); + +export const githubRepositories = sqliteTable("github_repositories", { + repoId: text("repo_id").notNull().primaryKey(), + fullName: text("full_name").notNull(), + cloneUrl: text("clone_url").notNull(), + private: integer("private").notNull(), + defaultBranch: text("default_branch").notNull(), + 
syncGeneration: integer("sync_generation").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubMembers = sqliteTable("github_members", { + memberId: text("member_id").notNull().primaryKey(), + login: text("login").notNull(), + displayName: text("display_name").notNull(), + email: text("email"), + role: text("role"), + state: text("state").notNull(), + syncGeneration: integer("sync_generation").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubPullRequests = sqliteTable("github_pull_requests", { + prId: text("pr_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + repoFullName: text("repo_full_name").notNull(), + number: integer("number").notNull(), + title: text("title").notNull(), + body: text("body"), + state: text("state").notNull(), + url: text("url").notNull(), + headRefName: text("head_ref_name").notNull(), + baseRefName: text("base_ref_name").notNull(), + authorLogin: text("author_login"), + isDraft: integer("is_draft").notNull(), + syncGeneration: integer("sync_generation").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts new file mode 100644 index 0000000..d19732a --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -0,0 +1,1010 @@ +// @ts-nocheck +import { eq, inArray } from "drizzle-orm"; +import { actor, queue } from "rivetkit"; +import { workflow, Loop } from "rivetkit/workflow"; +import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../context.js"; +import { getOrCreateOrganization, getTask } from "../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { taskWorkflowQueueName } from "../task/workflow/queue.js"; +import { repoIdFromRemote } from "../../services/repo.js"; +import { 
resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { organizationWorkflowQueueName } from "../organization/queues.js"; +import { githubDataDb } from "./db/db.js"; +import { githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; + +const META_ROW_ID = 1; +const SYNC_REPOSITORY_BATCH_SIZE = 10; + +type GithubSyncPhase = "discovering_repositories" | "syncing_repositories" | "syncing_members" | "syncing_pull_requests"; + +interface GithubDataInput { + organizationId: string; +} + +interface GithubMemberRecord { + id: string; + login: string; + name: string; + email?: string | null; + role?: string | null; + state?: string | null; +} + +interface GithubRepositoryRecord { + fullName: string; + cloneUrl: string; + private: boolean; + defaultBranch: string; +} + +interface GithubPullRequestRecord { + repoId: string; + repoFullName: string; + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + updatedAt: number; +} + +interface FullSyncInput { + connectedAccount?: string | null; + installationStatus?: FoundryOrganization["github"]["installationStatus"]; + installationId?: number | null; + githubLogin?: string | null; + kind?: FoundryOrganization["kind"] | null; + accessToken?: string | null; + label?: string | null; +} + +interface ClearStateInput { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + installationId: number | null; + label: string; +} + +// Queue names for github-data actor +export const GITHUB_DATA_QUEUE_NAMES = [ + "githubData.command.syncRepos", + "githubData.command.handlePullRequestWebhook", + "githubData.command.clearState", +] as const; + +type GithubDataQueueName = (typeof GITHUB_DATA_QUEUE_NAMES)[number]; + +export function githubDataWorkflowQueueName(name: GithubDataQueueName): GithubDataQueueName { + 
return name; +} + +interface PullRequestWebhookInput { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + installationId: number | null; + repository: { + fullName: string; + cloneUrl: string; + private: boolean; + }; + pullRequest: { + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + merged?: boolean; + }; +} + +interface GithubMetaState { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + syncStatus: FoundryOrganization["github"]["syncStatus"]; + installationId: number | null; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; + syncPhase: GithubSyncPhase | null; + processedRepositoryCount: number; + totalRepositoryCount: number; +} + +function normalizePrStatus(input: { state: string; isDraft?: boolean; merged?: boolean }): "OPEN" | "DRAFT" | "CLOSED" | "MERGED" { + const state = input.state.trim().toUpperCase(); + if (input.merged || state === "MERGED") return "MERGED"; + if (state === "CLOSED") return "CLOSED"; + return input.isDraft ? "DRAFT" : "OPEN"; +} + +function pullRequestSummaryFromRow(row: any) { + return { + prId: row.prId, + repoId: row.repoId, + repoFullName: row.repoFullName, + number: row.number, + status: Boolean(row.isDraft) ? "draft" : "ready", + title: row.title, + state: row.state, + url: row.url, + headRefName: row.headRefName, + baseRefName: row.baseRefName, + authorLogin: row.authorLogin ?? 
null, + isDraft: Boolean(row.isDraft), + updatedAtMs: row.updatedAt, + }; +} + +function chunkItems(items: T[], size: number): T[][] { + if (items.length === 0) { + return []; + } + const chunks: T[][] = []; + for (let index = 0; index < items.length; index += size) { + chunks.push(items.slice(index, index + size)); + } + return chunks; +} + +export async function readMeta(c: any): Promise { + const row = await c.db.select().from(githubMeta).where(eq(githubMeta.id, META_ROW_ID)).get(); + return { + connectedAccount: row?.connectedAccount ?? "", + installationStatus: (row?.installationStatus ?? "install_required") as FoundryOrganization["github"]["installationStatus"], + syncStatus: (row?.syncStatus ?? "pending") as FoundryOrganization["github"]["syncStatus"], + installationId: row?.installationId ?? null, + lastSyncLabel: row?.lastSyncLabel ?? "Waiting for first import", + lastSyncAt: row?.lastSyncAt ?? null, + syncGeneration: row?.syncGeneration ?? 0, + syncPhase: (row?.syncPhase ?? null) as GithubSyncPhase | null, + processedRepositoryCount: row?.processedRepositoryCount ?? 0, + totalRepositoryCount: row?.totalRepositoryCount ?? 
0, + }; +} + +async function writeMeta(c: any, patch: Partial) { + const current = await readMeta(c); + const next = { + ...current, + ...patch, + }; + await c.db + .insert(githubMeta) + .values({ + id: META_ROW_ID, + connectedAccount: next.connectedAccount, + installationStatus: next.installationStatus, + syncStatus: next.syncStatus, + installationId: next.installationId, + lastSyncLabel: next.lastSyncLabel, + lastSyncAt: next.lastSyncAt, + syncGeneration: next.syncGeneration, + syncPhase: next.syncPhase, + processedRepositoryCount: next.processedRepositoryCount, + totalRepositoryCount: next.totalRepositoryCount, + updatedAt: Date.now(), + }) + .onConflictDoUpdate({ + target: githubMeta.id, + set: { + connectedAccount: next.connectedAccount, + installationStatus: next.installationStatus, + syncStatus: next.syncStatus, + installationId: next.installationId, + lastSyncLabel: next.lastSyncLabel, + lastSyncAt: next.lastSyncAt, + syncGeneration: next.syncGeneration, + syncPhase: next.syncPhase, + processedRepositoryCount: next.processedRepositoryCount, + totalRepositoryCount: next.totalRepositoryCount, + updatedAt: Date.now(), + }, + }) + .run(); + return next; +} + +async function publishSyncProgress(c: any, patch: Partial): Promise { + const meta = await writeMeta(c, patch); + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.github.sync_progress.apply"), + { + connectedAccount: meta.connectedAccount, + installationStatus: meta.installationStatus, + installationId: meta.installationId, + syncStatus: meta.syncStatus, + lastSyncLabel: meta.lastSyncLabel, + lastSyncAt: meta.lastSyncAt, + syncGeneration: meta.syncGeneration, + syncPhase: meta.syncPhase, + processedRepositoryCount: meta.processedRepositoryCount, + totalRepositoryCount: meta.totalRepositoryCount, + }, + { wait: false }, + ); + return meta; +} + +async function getOrganizationContext(c: any, 
overrides?: FullSyncInput) { + // Try to read the org profile for fallback values, but don't require it. + // Webhook-triggered syncs can arrive before the user signs in and creates the + // org profile row. The webhook callers already pass the necessary overrides + // (connectedAccount, installationId, githubLogin, kind), so we can proceed + // without the profile as long as overrides cover the required fields. + const organizationHandle = await getOrCreateOrganization(c, c.state.organizationId); + const organizationState = await organizationHandle.getOrganizationShellStateIfInitialized({}); + + // If the org profile doesn't exist and overrides don't provide enough context, fail. + if (!organizationState && !overrides?.connectedAccount) { + throw new Error(`Organization ${c.state.organizationId} is not initialized and no override context was provided`); + } + + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + return { + kind: overrides?.kind ?? organizationState?.snapshot.kind, + githubLogin: overrides?.githubLogin ?? organizationState?.githubLogin, + connectedAccount: overrides?.connectedAccount ?? organizationState?.snapshot.github.connectedAccount ?? organizationState?.githubLogin, + installationId: overrides?.installationId ?? organizationState?.githubInstallationId ?? null, + installationStatus: + overrides?.installationStatus ?? + organizationState?.snapshot.github.installationStatus ?? + (organizationState?.snapshot.kind === "personal" ? "connected" : "reconnect_required"), + accessToken: overrides?.accessToken ?? auth?.githubToken ?? null, + }; +} + +async function upsertRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number, syncGeneration: number) { + for (const repository of repositories) { + await c.db + .insert(githubRepositories) + .values({ + repoId: repoIdFromRemote(repository.cloneUrl), + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration, + updatedAt, + }, + }) + .run(); + } +} + +async function sweepRepositories(c: any, syncGeneration: number) { + const rows = await c.db.select({ repoId: githubRepositories.repoId, syncGeneration: githubRepositories.syncGeneration }).from(githubRepositories).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).run(); + } +} + +async function upsertMembers(c: any, members: GithubMemberRecord[], updatedAt: number, syncGeneration: number) { + for (const member of members) { + await c.db + .insert(githubMembers) + .values({ + memberId: member.id, + login: member.login, + displayName: member.name || member.login, + email: member.email ?? null, + role: member.role ?? null, + state: member.state ?? "active", + syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubMembers.memberId, + set: { + login: member.login, + displayName: member.name || member.login, + email: member.email ?? null, + role: member.role ?? null, + state: member.state ?? 
"active", + syncGeneration, + updatedAt, + }, + }) + .run(); + } +} + +async function sweepMembers(c: any, syncGeneration: number) { + const rows = await c.db.select({ memberId: githubMembers.memberId, syncGeneration: githubMembers.syncGeneration }).from(githubMembers).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubMembers).where(eq(githubMembers.memberId, row.memberId)).run(); + } +} + +async function upsertPullRequests(c: any, pullRequests: GithubPullRequestRecord[], syncGeneration: number) { + for (const pullRequest of pullRequests) { + await c.db + .insert(githubPullRequests) + .values({ + prId: `${pullRequest.repoId}#${pullRequest.number}`, + repoId: pullRequest.repoId, + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 1 : 0, + syncGeneration, + updatedAt: pullRequest.updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + repoId: pullRequest.repoId, + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 
1 : 0, + syncGeneration, + updatedAt: pullRequest.updatedAt, + }, + }) + .run(); + } +} + +async function sweepPullRequests(c: any, syncGeneration: number) { + const rows = await c.db.select({ prId: githubPullRequests.prId, syncGeneration: githubPullRequests.syncGeneration }).from(githubPullRequests).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, row.prId)).run(); + } +} + +async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string, pullRequest: ReturnType | null) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + if (!repositoryRecord) { + return; + } + const organization = await getOrCreateOrganization(c, c.state.organizationId); + void organization + .send( + organizationWorkflowQueueName("organization.command.refreshTaskSummaryForBranch"), + { repoId, branchName, pullRequest, repoName: repositoryRecord.fullName ?? 
undefined }, + { wait: false }, + ) + .catch(() => {}); +} + +async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: any[]) { + const beforeById = new Map(beforeRows.map((row) => [row.prId, row])); + const afterById = new Map(afterRows.map((row) => [row.prId, row])); + + for (const [prId, row] of afterById) { + const previous = beforeById.get(prId); + const changed = + !previous || + previous.title !== row.title || + previous.state !== row.state || + previous.url !== row.url || + previous.headRefName !== row.headRefName || + previous.baseRefName !== row.baseRefName || + previous.authorLogin !== row.authorLogin || + previous.isDraft !== row.isDraft || + previous.updatedAt !== row.updatedAt; + if (!changed) { + continue; + } + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, pullRequestSummaryFromRow(row)); + } + + for (const [prId, row] of beforeById) { + if (afterById.has(prId)) { + continue; + } + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, null); + } +} + +async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).get(); + if (!repositoryRecord) { + return; + } + const organization = await getOrCreateOrganization(c, c.state.organizationId); + const match = await organization.findTaskForBranch({ + repoId: row.repoId, + branchName: row.headRefName, + }); + if (!match?.taskId) { + return; + } + try { + const task = getTask(c, c.state.organizationId, row.repoId, match.taskId); + void task.send(taskWorkflowQueueName("task.command.archive"), { reason: `PR ${String(row.state).toLowerCase()}` }, { wait: false }).catch(() => {}); + } catch { + // Best-effort only. Task summary refresh will still clear the PR state. 
+ } +} + +async function resolveRepositories(c: any, context: Awaited>): Promise { + const { appShell } = getActorRuntimeContext(); + if (context.kind === "personal") { + if (!context.accessToken) { + return []; + } + return await appShell.github.listUserRepositories(context.accessToken); + } + + if (context.installationId != null) { + try { + return await appShell.github.listInstallationRepositories(context.installationId); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + + if (!context.accessToken) { + return []; + } + + return (await appShell.github.listUserRepositories(context.accessToken)).filter((repository) => repository.fullName.startsWith(`${context.githubLogin}/`)); +} + +async function resolveMembers(c: any, context: Awaited>): Promise { + const { appShell } = getActorRuntimeContext(); + if (context.kind === "personal") { + return []; + } + if (context.installationId != null) { + try { + return await appShell.github.listInstallationMembers(context.installationId, context.githubLogin); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + if (!context.accessToken) { + return []; + } + return await appShell.github.listOrganizationMembers(context.accessToken, context.githubLogin); +} + +async function listPullRequestsForRepositories( + context: Awaited>, + repositories: GithubRepositoryRecord[], +): Promise { + const { appShell } = getActorRuntimeContext(); + if (repositories.length === 0) { + return []; + } + + let pullRequests: Array<{ + repoFullName: string; + cloneUrl: string; + number: number; + title: string; + body?: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin?: string | null; + isDraft?: boolean; + merged?: boolean; + }> = []; + + if (context.installationId != null) { + try { + pullRequests = await appShell.github.listInstallationPullRequestsForRepositories(context.installationId, repositories); + } catch (error) { + if 
(!context.accessToken) { + throw error; + } + } + } + + if (pullRequests.length === 0 && context.accessToken) { + pullRequests = await appShell.github.listPullRequestsForUserRepositories(context.accessToken, repositories); + } + + return pullRequests.map((pullRequest) => ({ + repoId: repoIdFromRemote(pullRequest.cloneUrl), + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: normalizePrStatus(pullRequest), + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: Boolean(pullRequest.isDraft), + updatedAt: Date.now(), + })); +} + +async function readAllPullRequestRows(c: any) { + return await c.db.select().from(githubPullRequests).all(); +} + +/** Config returned by fullSyncSetup, passed to subsequent sync phases. */ +export interface FullSyncConfig { + syncGeneration: number; + startedAt: number; + totalRepositoryCount: number; + connectedAccount: string; + installationStatus: string; + installationId: number | null; + beforePrRows: any[]; +} + +async function readRepositoriesFromDb(c: any): Promise { + const rows = await c.db.select().from(githubRepositories).all(); + return rows.map((r: any) => ({ + fullName: r.fullName, + cloneUrl: r.cloneUrl, + private: Boolean(r.private), + defaultBranch: r.defaultBranch, + })); +} + +/** + * Phase 1: Discover repositories and persist them. + * Returns the config needed by all subsequent phases, or null if nothing to do. 
+ */ +export async function fullSyncSetup(c: any, input: FullSyncInput = {}): Promise { + const startedAt = Date.now(); + const beforePrRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + const context = await getOrganizationContext(c, input); + const syncGeneration = currentMeta.syncGeneration + 1; + + await publishSyncProgress(c, { + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + syncStatus: "syncing", + lastSyncLabel: input.label?.trim() || "Syncing GitHub data...", + syncGeneration, + syncPhase: "discovering_repositories", + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + const repositories = await resolveRepositories(c, context); + const totalRepositoryCount = repositories.length; + + await publishSyncProgress(c, { + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + syncStatus: "syncing", + lastSyncLabel: totalRepositoryCount > 0 ? `Importing ${totalRepositoryCount} repositories...` : "No repositories available", + syncGeneration, + syncPhase: "syncing_repositories", + processedRepositoryCount: totalRepositoryCount, + totalRepositoryCount, + }); + + await upsertRepositories(c, repositories, startedAt, syncGeneration); + + return { + syncGeneration, + startedAt, + totalRepositoryCount, + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + beforePrRows, + }; +} + +/** + * Phase 2: Resolve, upsert, and sweep members. 
+ */ +export async function fullSyncMembers(c: any, config: FullSyncConfig): Promise { + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: "Syncing GitHub members...", + syncGeneration: config.syncGeneration, + syncPhase: "syncing_members", + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); + + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const members = await resolveMembers(c, context); + await upsertMembers(c, members, config.startedAt, config.syncGeneration); + await sweepMembers(c, config.syncGeneration); +} + +/** + * Phase 3 (per-batch): Fetch and upsert pull requests for one batch of repos. + * Returns true when all batches have been processed. 
+ */ +export async function fullSyncPullRequestBatch(c: any, config: FullSyncConfig, batchIndex: number): Promise { + const repos = await readRepositoriesFromDb(c); + const batches = chunkItems(repos, SYNC_REPOSITORY_BATCH_SIZE); + if (batchIndex >= batches.length) return true; + + const batch = batches[batchIndex]!; + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const batchPRs = await listPullRequestsForRepositories(context, batch); + await upsertPullRequests(c, batchPRs, config.syncGeneration); + + const processedCount = Math.min((batchIndex + 1) * SYNC_REPOSITORY_BATCH_SIZE, repos.length); + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: `Synced pull requests for ${processedCount} of ${repos.length} repositories`, + syncGeneration: config.syncGeneration, + syncPhase: "syncing_pull_requests", + processedRepositoryCount: processedCount, + totalRepositoryCount: repos.length, + }); + + return false; +} + +/** + * Phase 4: Sweep stale data, publish final state, emit PR change events. + */ +export async function fullSyncFinalize(c: any, config: FullSyncConfig): Promise { + await sweepPullRequests(c, config.syncGeneration); + await sweepRepositories(c, config.syncGeneration); + + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "synced", + lastSyncLabel: config.totalRepositoryCount > 0 ? 
`Synced ${config.totalRepositoryCount} repositories` : "No repositories available", + lastSyncAt: config.startedAt, + syncGeneration: config.syncGeneration, + syncPhase: null, + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, config.beforePrRows, afterRows); +} + +/** + * Error handler: publish error sync state when a full sync fails. + */ +/** + * Single-shot full sync: runs all phases (setup, branches, members, PRs, finalize) + * using native JS loops. This must NOT use workflow primitives (step/loop/sleep) + * because it runs inside a workflow step. See workflow.ts for context on why + * sub-loops cause HistoryDivergedError. + */ +export async function runFullSync(c: any, input: FullSyncInput = {}): Promise { + const config = await fullSyncSetup(c, input); + + // Members + await fullSyncMembers(c, config); + + // Pull requests — native loop over batches + for (let i = 0; ; i++) { + const done = await fullSyncPullRequestBatch(c, config, i); + if (done) break; + } + + // Finalize + await fullSyncFinalize(c, config); +} + +export async function fullSyncError(c: any, error: unknown): Promise { + const currentMeta = await readMeta(c); + const message = error instanceof Error ? 
error.message : "GitHub import failed"; + await publishSyncProgress(c, { + connectedAccount: currentMeta.connectedAccount, + installationStatus: currentMeta.installationStatus, + installationId: currentMeta.installationId, + syncStatus: "error", + lastSyncLabel: message, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); +} + +// --------------------------------------------------------------------------- +// Workflow command loop +// --------------------------------------------------------------------------- + +type GithubDataWorkflowHandler = (loopCtx: any, body: any) => Promise; + +const GITHUB_DATA_COMMAND_HANDLERS: Record = { + "githubData.command.syncRepos": async (c, body) => { + try { + await runFullSync(c, body); + return { ok: true }; + } catch (error) { + try { + await fullSyncError(c, error); + } catch { + /* best effort */ + } + throw error; + } + }, + "githubData.command.handlePullRequestWebhook": async (c, body) => { + await handlePullRequestWebhookMutation(c, body); + return { ok: true }; + }, + "githubData.command.clearState": async (c, body) => { + await clearStateMutation(c, body); + return { ok: true }; + }, +}; + +async function runGithubDataWorkflow(ctx: any): Promise { + await ctx.loop("github-data-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-github-data-command", { + names: [...GITHUB_DATA_QUEUE_NAMES], + completable: true, + }); + + if (!msg) { + return Loop.continue(undefined); + } + + const handler = GITHUB_DATA_COMMAND_HANDLERS[msg.name as GithubDataQueueName]; + if (!handler) { + logActorWarning("github-data", "unknown github-data command", { command: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }).catch(() => {}); + return Loop.continue(undefined); + } + + try { + // Wrap in a step so c.state and c.db are accessible inside mutation functions. 
+ const result = await loopCtx.step({ + name: msg.name, + timeout: 10 * 60_000, + run: async () => handler(loopCtx, msg.body), + }); + await msg.complete(result); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("github-data", "github-data workflow command failed", { + command: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + +export const githubData = actor({ + db: githubDataDb, + queues: Object.fromEntries(GITHUB_DATA_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "GitHub Data", + icon: "github", + actionTimeout: 10 * 60_000, + }, + createState: (_c, input: GithubDataInput) => ({ + organizationId: input.organizationId, + }), + actions: { + async getSummary(c) { + const repositories = await c.db.select().from(githubRepositories).all(); + const members = await c.db.select().from(githubMembers).all(); + const pullRequests = await c.db.select().from(githubPullRequests).all(); + return { + ...(await readMeta(c)), + repositoryCount: repositories.length, + memberCount: members.length, + pullRequestCount: pullRequests.length, + }; + }, + + async listRepositories(c) { + const rows = await c.db.select().from(githubRepositories).all(); + return rows.map((row) => ({ + repoId: row.repoId, + fullName: row.fullName, + cloneUrl: row.cloneUrl, + private: Boolean(row.private), + defaultBranch: row.defaultBranch, + })); + }, + + async getRepository(c, input: { repoId: string }) { + const row = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!row) { + return null; + } + return { + repoId: row.repoId, + fullName: row.fullName, + cloneUrl: row.cloneUrl, + private: Boolean(row.private), + defaultBranch: row.defaultBranch, + }; + }, + + async listOpenPullRequests(c) { + const rows = await c.db + .select() + .from(githubPullRequests) + .where(inArray(githubPullRequests.state, 
["OPEN", "DRAFT"])) + .all(); + return rows.map((row) => pullRequestSummaryFromRow(row)); + }, + }, + run: workflow(runGithubDataWorkflow), +}); + +export async function clearStateMutation(c: any, input: ClearStateInput) { + const beforeRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubRepositories).run(); + await c.db.delete(githubMembers).run(); + await writeMeta(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "pending", + lastSyncLabel: input.label, + lastSyncAt: null, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + await emitPullRequestChangeEvents(c, beforeRows, []); +} + +export async function handlePullRequestWebhookMutation(c: any, input: PullRequestWebhookInput) { + const beforeRows = await readAllPullRequestRows(c); + const repoId = repoIdFromRemote(input.repository.cloneUrl); + const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + const updatedAt = Date.now(); + const currentMeta = await readMeta(c); + const state = normalizePrStatus(input.pullRequest); + const prId = `${repoId}#${input.pullRequest.number}`; + + await c.db + .insert(githubRepositories) + .values({ + repoId, + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? 
input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + + if (state === "CLOSED" || state === "MERGED") { + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); + } else { + await c.db + .insert(githubPullRequests) + .values({ + prId, + repoId, + repoFullName: input.repository.fullName, + number: input.pullRequest.number, + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 
1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + } + + await publishSyncProgress(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "synced", + lastSyncLabel: "GitHub webhook received", + lastSyncAt: updatedAt, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + if (state === "CLOSED" || state === "MERGED") { + const previous = beforeRows.find((row) => row.prId === prId); + if (previous) { + await autoArchiveTaskForClosedPullRequest(c, { + ...previous, + state, + }); + } + } +} diff --git a/foundry/packages/backend/src/actors/github-data/workflow.ts b/foundry/packages/backend/src/actors/github-data/workflow.ts new file mode 100644 index 0000000..11ece75 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/workflow.ts @@ -0,0 +1,73 @@ +// @ts-nocheck +import { logActorWarning, resolveErrorMessage } from "../logging.js"; + +// Dynamic imports to break circular dependency: index.ts imports workflow.ts, +// and workflow.ts needs functions from index.ts. +async function getIndexModule() { + return await import("./index.js"); +} + +export const GITHUB_DATA_QUEUE_NAMES = [ + "githubData.command.syncRepos", + "githubData.command.handlePullRequestWebhook", + "githubData.command.clearState", +] as const; + +export type GithubDataQueueName = (typeof GITHUB_DATA_QUEUE_NAMES)[number]; + +export function githubDataWorkflowQueueName(name: GithubDataQueueName): GithubDataQueueName { + return name; +} + +/** + * Plain run handler (no workflow engine). Drains the queue using `c.queue.iter()` + * with completable messages. This avoids the RivetKit bug where actors created + * from another actor's workflow context never start their `run: workflow(...)`. 
+ */ +export async function runGithubDataCommandLoop(c: any): Promise { + for await (const msg of c.queue.iter({ names: [...GITHUB_DATA_QUEUE_NAMES], completable: true })) { + try { + if (msg.name === "githubData.command.syncRepos") { + try { + const { runFullSync } = await getIndexModule(); + await runFullSync(c, msg.body); + await msg.complete({ ok: true }); + } catch (error) { + const { fullSyncError } = await getIndexModule(); + try { + await fullSyncError(c, error); + } catch { + /* best effort */ + } + const message = error instanceof Error ? error.message : String(error); + await msg.complete({ error: message }).catch(() => {}); + } + continue; + } + + if (msg.name === "githubData.command.handlePullRequestWebhook") { + const { handlePullRequestWebhookMutation } = await getIndexModule(); + await handlePullRequestWebhookMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === "githubData.command.clearState") { + const { clearStateMutation } = await getIndexModule(); + await clearStateMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + logActorWarning("githubData", "unknown queue message", { queueName: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("githubData", "github-data command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + } +} diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts new file mode 100644 index 0000000..5aa5715 --- /dev/null +++ b/foundry/packages/backend/src/actors/handles.ts @@ -0,0 +1,85 @@ +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey, userKey } from "./keys.js"; + +export function actorClient(c: any) { + return c.client(); +} + +export async function getOrCreateOrganization(c: any, organizationId: 
string) { + return await actorClient(c).organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, + }); +} + +export async function getOrCreateUser(c: any, userId: string) { + return await actorClient(c).user.getOrCreate(userKey(userId), { + createWithInput: { userId }, + }); +} + +export function getUser(c: any, userId: string) { + return actorClient(c).user.get(userKey(userId)); +} + +export function getTask(c: any, organizationId: string, repoId: string, taskId: string) { + return actorClient(c).task.get(taskKey(organizationId, repoId, taskId)); +} + +export async function getOrCreateTask(c: any, organizationId: string, repoId: string, taskId: string, createWithInput: Record) { + return await actorClient(c).task.getOrCreate(taskKey(organizationId, repoId, taskId), { + createWithInput, + }); +} + +export async function getOrCreateAuditLog(c: any, organizationId: string) { + return await actorClient(c).auditLog.getOrCreate(auditLogKey(organizationId), { + createWithInput: { + organizationId, + }, + }); +} + +export async function getOrCreateGithubData(c: any, organizationId: string) { + return await actorClient(c).githubData.getOrCreate(githubDataKey(organizationId), { + createWithInput: { + organizationId, + }, + }); +} + +export function getGithubData(c: any, organizationId: string) { + return actorClient(c).githubData.get(githubDataKey(organizationId)); +} + +export function getTaskSandbox(c: any, organizationId: string, sandboxId: string) { + return actorClient(c).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); +} + +export async function getOrCreateTaskSandbox(c: any, organizationId: string, sandboxId: string, createWithInput?: Record) { + return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(organizationId, sandboxId), { + createWithInput, + }); +} + +export function selfAuditLog(c: any) { + return actorClient(c).auditLog.getForId(c.actorId); +} + +export function selfTask(c: any) { + return 
actorClient(c).task.getForId(c.actorId); +} + +export function selfOrganization(c: any) { + return actorClient(c).organization.getForId(c.actorId); +} + +export function selfUser(c: any) { + return actorClient(c).user.getForId(c.actorId); +} + +export function selfGithubData(c: any) { + return actorClient(c).githubData.getForId(c.actorId); +} + +export function selfTaskSandbox(c: any) { + return actorClient(c).taskSandbox.getForId(c.actorId); +} diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts new file mode 100644 index 0000000..74ede4a --- /dev/null +++ b/foundry/packages/backend/src/actors/index.ts @@ -0,0 +1,38 @@ +import { user } from "./user/index.js"; +import { setup } from "rivetkit"; +import { githubData } from "./github-data/index.js"; +import { task } from "./task/index.js"; +import { auditLog } from "./audit-log/index.js"; +import { taskSandbox } from "./sandbox/index.js"; +import { organization } from "./organization/index.js"; +import { logger } from "../logging.js"; +import { resolveRunnerVersion } from "../config/runner-version.js"; + +const runnerVersion = resolveRunnerVersion(); + +export const registry = setup({ + serverless: { + basePath: "/v1/rivet", + }, + runner: { version: runnerVersion }, + logging: { + baseLogger: logger, + }, + use: { + user, + organization, + task, + taskSandbox, + auditLog, + githubData, + }, +}); + +export * from "./context.js"; +export * from "./audit-log/index.js"; +export * from "./user/index.js"; +export * from "./github-data/index.js"; +export * from "./task/index.js"; +export * from "./keys.js"; +export * from "./sandbox/index.js"; +export * from "./organization/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts b/foundry/packages/backend/src/actors/keys.ts new file mode 100644 index 0000000..03bd014 --- /dev/null +++ b/foundry/packages/backend/src/actors/keys.ts @@ -0,0 +1,26 @@ +export type ActorKey = string[]; + +export function 
organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; +} + +export function userKey(userId: string): ActorKey { + return ["org", "app", "user", userId]; +} + +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "task", repoId, taskId]; +} + +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; +} + +/** One audit log per org (not per repo) — see audit-log/index.ts for rationale. */ +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; +} + +export function githubDataKey(organizationId: string): ActorKey { + return ["org", organizationId, "github-data"]; +} diff --git a/foundry/packages/backend/src/actors/logging.ts b/foundry/packages/backend/src/actors/logging.ts new file mode 100644 index 0000000..a61685f --- /dev/null +++ b/foundry/packages/backend/src/actors/logging.ts @@ -0,0 +1,43 @@ +import { logger } from "../logging.js"; + +export function resolveErrorMessage(error: unknown): string { + if (error instanceof Error) { + let msg = error.message; + if (error.cause) { + msg += ` [cause: ${resolveErrorMessage(error.cause)}]`; + } + return msg; + } + return String(error); +} + +export function isActorNotFoundError(error: unknown): boolean { + return resolveErrorMessage(error).includes("Actor not found:"); +} + +export function resolveErrorStack(error: unknown): string | undefined { + if (error instanceof Error && typeof error.stack === "string") { + return error.stack; + } + return undefined; +} + +export function logActorInfo(scope: string, message: string, context?: Record): void { + logger.info( + { + scope, + ...(context ?? {}), + }, + message, + ); +} + +export function logActorWarning(scope: string, message: string, context?: Record): void { + logger.warn( + { + scope, + ...(context ?? 
{}), + }, + message, + ); +} diff --git a/foundry/packages/backend/src/actors/organization/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts new file mode 100644 index 0000000..2298cd9 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -0,0 +1,253 @@ +// @ts-nocheck +import { desc, eq } from "drizzle-orm"; +import type { + RepoRecord, + WorkspaceRepositorySummary, + WorkspaceTaskSummary, + OrganizationEvent, + OrganizationGithubSummary, + OrganizationSummarySnapshot, + OrganizationUseInput, +} from "@sandbox-agent/foundry-shared"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { getOrCreateGithubData } from "../handles.js"; +import { organizationProfile, taskSummaries } from "./db/schema.js"; +import { organizationAppActions } from "./actions/app.js"; +import { organizationBetterAuthActions } from "./actions/better-auth.js"; +import { organizationOnboardingActions } from "./actions/onboarding.js"; +import { organizationGithubActions } from "./actions/github.js"; +import { organizationShellActions } from "./actions/organization.js"; +import { organizationTaskActions } from "./actions/tasks.js"; +import { updateOrganizationShellProfileMutation } from "./app-shell.js"; + +interface OrganizationState { + organizationId: string; +} + +const ORGANIZATION_PROFILE_ROW_ID = 1; + +function assertOrganization(c: { state: OrganizationState }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} command=${organizationId}`); + } +} + +function repoLabelFromRemote(remoteUrl: string): string { + try { + const url = new URL(remoteUrl.startsWith("http") ? remoteUrl : `https://${remoteUrl}`); + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length >= 2) { + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/, "")}`; + } + } catch { + // ignore + } + + return remoteUrl; +} + +function buildGithubSummary(profile: any, importedRepoCount: number): OrganizationGithubSummary { + return { + connectedAccount: profile?.githubConnectedAccount ?? "", + installationStatus: profile?.githubInstallationStatus ?? "install_required", + syncStatus: profile?.githubSyncStatus ?? "pending", + importedRepoCount, + lastSyncLabel: profile?.githubLastSyncLabel ?? "Waiting for first import", + lastSyncAt: profile?.githubLastSyncAt ?? null, + lastWebhookAt: profile?.githubLastWebhookAt ?? null, + lastWebhookEvent: profile?.githubLastWebhookEvent ?? "", + syncGeneration: profile?.githubSyncGeneration ?? 0, + syncPhase: profile?.githubSyncPhase ?? null, + processedRepositoryCount: profile?.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: profile?.githubTotalRepositoryCount ?? 0, + }; +} + +/** + * Reads the organization sidebar snapshot from local tables only — no fan-out + * to child actors. Task summaries are organization-owned and updated via push + * from task actors. 
+ */ +async function getOrganizationSummarySnapshot(c: any): Promise { + const profile = await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)).get(); + + // Fetch repos + open PRs from github-data actor (single actor, not fan-out) + let repoRows: Array<{ repoId: string; fullName: string; cloneUrl: string; private: boolean; defaultBranch: string }> = []; + let openPullRequests: any[] = []; + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + [repoRows, openPullRequests] = await Promise.all([githubData.listRepositories({}), githubData.listOpenPullRequests({})]); + } catch { + // github-data actor may not exist yet + } + + const summaryRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + const summaries = summaryRows.map((row) => ({ + id: row.taskId, + repoId: row.repoId, + title: row.title, + status: row.status, + repoName: row.repoName, + updatedAtMs: row.updatedAtMs, + branch: row.branch ?? null, + pullRequest: row.pullRequestJson + ? (() => { + try { + return JSON.parse(row.pullRequestJson); + } catch { + return null; + } + })() + : null, + sessionsSummary: row.sessionsSummaryJson + ? 
(() => { + try { + return JSON.parse(row.sessionsSummaryJson); + } catch { + return []; + } + })() + : [], + })); + + return { + organizationId: c.state.organizationId, + github: buildGithubSummary(profile, repoRows.length), + repos: repoRows + .map((repo) => { + const repoTasks = summaries.filter((t) => t.repoId === repo.repoId); + const latestTaskMs = repoTasks.reduce((latest, t) => Math.max(latest, t.updatedAtMs), 0); + return { + id: repo.repoId, + label: repoLabelFromRemote(repo.cloneUrl), + taskCount: repoTasks.length, + latestActivityMs: latestTaskMs || Date.now(), + }; + }) + .sort((a, b) => b.latestActivityMs - a.latestActivityMs), + taskSummaries: summaries, + openPullRequests, + }; +} + +export async function refreshOrganizationSnapshotMutation(c: any): Promise { + c.broadcast("organizationUpdated", { + type: "organizationUpdated", + snapshot: await getOrganizationSummarySnapshot(c), + } satisfies OrganizationEvent); +} + +export const organizationActions = { + ...organizationBetterAuthActions, + ...organizationGithubActions, + ...organizationOnboardingActions, + ...organizationShellActions, + ...organizationAppActions, + ...organizationTaskActions, + async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> { + assertOrganization(c, input.organizationId); + return { organizationId: c.state.organizationId }; + }, + + async listRepos(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + remoteUrl: row.cloneUrl, + createdAt: row.updatedAt ?? Date.now(), + updatedAt: row.updatedAt ?? 
Date.now(), + })); + } catch { + return []; + } + }, + + async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); + return await getOrganizationSummarySnapshot(c); + }, + + // updateShellProfile stays as a direct action — called with await from HTTP handler where the user can retry + async updateShellProfile(c: any, input: { displayName?: string; slug?: string; primaryDomain?: string }): Promise { + await updateOrganizationShellProfileMutation(c, input); + }, +}; + +export async function applyGithubSyncProgressMutation( + c: any, + input: { + connectedAccount: string; + installationStatus: string; + installationId: number | null; + syncStatus: string; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; + syncPhase: string | null; + processedRepositoryCount: number; + totalRepositoryCount: number; + }, +): Promise { + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubConnectedAccount: input.connectedAccount, + githubInstallationStatus: input.installationStatus, + githubSyncStatus: input.syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: input.lastSyncLabel, + githubLastSyncAt: input.lastSyncAt, + githubSyncGeneration: input.syncGeneration, + githubSyncPhase: input.syncPhase, + githubProcessedRepositoryCount: input.processedRepositoryCount, + githubTotalRepositoryCount: input.totalRepositoryCount, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); + + await refreshOrganizationSnapshotMutation(c); +} + +export async function recordGithubWebhookReceiptMutation( + c: any, + input: { + organizationId: string; + event: string; + action?: string | null; + receivedAt?: number; + }, 
+): Promise { + assertOrganization(c, input.organizationId); + + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubLastWebhookAt: input.receivedAt ?? Date.now(), + githubLastWebhookEvent: input.action ? `${input.event}.${input.action}` : input.event, + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/actions/app.ts b/foundry/packages/backend/src/actors/organization/actions/app.ts new file mode 100644 index 0000000..d3cc329 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/app.ts @@ -0,0 +1 @@ +export { organizationAppActions } from "../app-shell.js"; diff --git a/foundry/packages/backend/src/actors/organization/actions/better-auth.ts b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts new file mode 100644 index 0000000..060ceed --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts @@ -0,0 +1,360 @@ +import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm"; +import { authAccountIndex, authEmailIndex, authSessionIndex, authVerification } from "../db/schema.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../constants.js"; + +function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); + } +} + +function organizationAuthColumn(table: any, field: string): any { + const column = table[field]; + if (!column) { + throw new Error(`Unknown auth table field: ${field}`); + } + return column; +} + +function normalizeAuthValue(value: 
unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeAuthValue(entry)); + } + return value; +} + +function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { + const column = organizationAuthColumn(table, clause.field); + const value = normalizeAuthValue(clause.value); + switch (clause.operator) { + case "ne": + return value === null ? isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? isNull(column) : eq(column, value as any); + } +} + +function organizationBetterAuthWhere(table: any, clauses: any[] | undefined): any { + if (!clauses || clauses.length === 0) { + return undefined; + } + let expr = organizationAuthClause(table, clauses[0]); + for (const clause of clauses.slice(1)) { + const next = organizationAuthClause(table, clause); + expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); + } + return expr; +} + +export async function betterAuthUpsertSessionIndexMutation(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authSessionIndex) + .values({ + sessionId: input.sessionId, + sessionToken: input.sessionToken, + userId: input.userId, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authSessionIndex.sessionId, + set: { + sessionToken: input.sessionToken, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get(); +} + +export async function betterAuthDeleteSessionIndexMutation(c: any, input: { sessionId?: string; sessionToken?: string }) { + assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + await c.db.delete(authSessionIndex).where(predicate!).run(); +} + +export async function betterAuthUpsertEmailIndexMutation(c: any, input: { email: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authEmailIndex) + .values({ + email: input.email, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authEmailIndex.email, + set: { + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); +} + +export async function betterAuthDeleteEmailIndexMutation(c: any, input: { email: string }) { + assertAppOrganization(c); + await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); +} + +export async function betterAuthUpsertAccountIndexMutation(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authAccountIndex) + .values({ + id: input.id, + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authAccountIndex.id, + set: { + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); +} + +export async function betterAuthDeleteAccountIndexMutation(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); + return; + } + if (input.providerId && input.accountId) { + 
await c.db + .delete(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .run(); + } +} + +export async function betterAuthCreateVerificationMutation(c: any, input: { data: Record }) { + assertAppOrganization(c); + + await c.db + .insert(authVerification) + .values(input.data as any) + .run(); + return await c.db + .select() + .from(authVerification) + .where(eq(authVerification.id, input.data.id as string)) + .get(); +} + +export async function betterAuthUpdateVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return null; + } + await c.db + .update(authVerification) + .set(input.update as any) + .where(predicate) + .run(); + return await c.db.select().from(authVerification).where(predicate).get(); +} + +export async function betterAuthUpdateManyVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + await c.db + .update(authVerification) + .set(input.update as any) + .where(predicate) + .run(); + const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get(); + return row?.value ?? 
0; +} + +export async function betterAuthDeleteVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return; + } + await c.db.delete(authVerification).where(predicate).run(); +} + +export async function betterAuthDeleteManyVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + const rows = await c.db.select().from(authVerification).where(predicate).all(); + await c.db.delete(authVerification).where(predicate).run(); + return rows.length; +} + +// Exception to the CLAUDE.md queue-for-mutations rule: Better Auth adapter operations +// use direct actions even for mutations. Better Auth runs during OAuth callbacks on the +// HTTP request path, not through the normal organization lifecycle. Routing through the +// queue adds multiple sequential round-trips (each with actor wake-up + step overhead) +// that cause 30-second OAuth callbacks and proxy retry storms. These mutations are simple +// SQLite upserts/deletes with no cross-actor coordination or broadcast side effects. 
+export const organizationBetterAuthActions = { + // --- Mutation actions (called by the Better Auth adapter in better-auth.ts) --- + async betterAuthUpsertSessionIndex(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { + return await betterAuthUpsertSessionIndexMutation(c, input); + }, + async betterAuthDeleteSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { + await betterAuthDeleteSessionIndexMutation(c, input); + }, + async betterAuthUpsertEmailIndex(c: any, input: { email: string; userId: string }) { + return await betterAuthUpsertEmailIndexMutation(c, input); + }, + async betterAuthDeleteEmailIndex(c: any, input: { email: string }) { + await betterAuthDeleteEmailIndexMutation(c, input); + }, + async betterAuthUpsertAccountIndex(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { + return await betterAuthUpsertAccountIndexMutation(c, input); + }, + async betterAuthDeleteAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + await betterAuthDeleteAccountIndexMutation(c, input); + }, + async betterAuthCreateVerification(c: any, input: { data: Record }) { + return await betterAuthCreateVerificationMutation(c, input); + }, + async betterAuthUpdateVerification(c: any, input: { where: any[]; update: Record }) { + return await betterAuthUpdateVerificationMutation(c, input); + }, + async betterAuthUpdateManyVerification(c: any, input: { where: any[]; update: Record }) { + return await betterAuthUpdateManyVerificationMutation(c, input); + }, + async betterAuthDeleteVerification(c: any, input: { where: any[] }) { + await betterAuthDeleteVerificationMutation(c, input); + }, + async betterAuthDeleteManyVerification(c: any, input: { where: any[] }) { + return await betterAuthDeleteManyVerificationMutation(c, input); + }, + + // --- Read actions --- + async betterAuthFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { + 
assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? [{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return null; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + return await c.db.select().from(authSessionIndex).where(predicate!).get(); + }, + + async betterAuthFindEmailIndex(c: any, input: { email: string }) { + assertAppOrganization(c); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); + }, + + async betterAuthFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); + } + if (!input.providerId || !input.accountId) { + return null; + } + return await c.db + .select() + .from(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .get(); + }, + + async betterAuthFindOneVerification(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; + }, + + async betterAuthFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + let query = c.db.select().from(authVerification); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = organizationAuthColumn(authVerification, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + return await query.all(); + }, + + async betterAuthCountVerification(c: any, input: { where?: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(authVerification).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/github.ts b/foundry/packages/backend/src/actors/organization/actions/github.ts new file mode 100644 index 0000000..43818c0 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/github.ts @@ -0,0 +1,80 @@ +import { desc } from "drizzle-orm"; +import type { FoundryAppSnapshot } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { githubDataWorkflowQueueName } from "../../github-data/index.js"; +import { authSessionIndex } from "../db/schema.js"; +import { assertAppOrganization, buildAppSnapshot, requireEligibleOrganization, requireSignedInSession } from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; +import { organizationWorkflowQueueName } from "../queues.js"; + +export const organizationGithubActions = { + async resolveAppGithubToken( + c: any, + input: { organizationId: string; requireRepoScope?: boolean }, + ): Promise<{ accessToken: string; scopes: string[] } | null> { + assertAppOrganization(c); + const auth = getBetterAuthService(); + const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); + + for 
(const row of rows) { + const authState = await auth.getAuthState(row.sessionId); + if (authState?.sessionState?.activeOrganizationId !== input.organizationId) { + continue; + } + + const token = await auth.getAccessTokenForSession(row.sessionId); + if (!token?.accessToken) { + continue; + } + + const scopes = token.scopes; + if (input.requireRepoScope !== false && scopes.length > 0 && !scopes.some((scope) => scope === "repo" || scope.startsWith("repo:"))) { + continue; + } + + return { + accessToken: token.accessToken, + scopes, + }; + } + + return null; + }, + + async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + + const githubData = await getOrCreateGithubData(c, input.organizationId); + const summary = await githubData.getSummary({}); + if (summary.syncStatus === "syncing") { + return await buildAppSnapshot(c, input.sessionId); + } + + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.shell.sync_started.mark"), + { label: "Importing repository catalog..." }, + { wait: false }, + ); + await organizationHandle.send(organizationWorkflowQueueName("organization.command.snapshot.broadcast"), {}, { wait: false }); + + void githubData + .send(githubDataWorkflowQueueName("githubData.command.syncRepos"), { label: "Importing repository catalog..." }, { wait: false }) + .catch(() => {}); + + return await buildAppSnapshot(c, input.sessionId); + }, + + async adminReloadGithubOrganization(c: any): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.send(githubDataWorkflowQueueName("githubData.command.syncRepos"), { label: "Reloading GitHub organization..." 
}, { wait: false }); + }, + + async adminReloadGithubRepository(c: any, _input: { repoId: string }): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.send(githubDataWorkflowQueueName("githubData.command.syncRepos"), { label: "Reloading repository..." }, { wait: false }); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/onboarding.ts b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts new file mode 100644 index 0000000..22153f4 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts @@ -0,0 +1,82 @@ +import { randomUUID } from "node:crypto"; +import type { FoundryAppSnapshot, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { + assertAppOrganization, + buildAppSnapshot, + getOrganizationState, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; + +const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; + +export const organizationOnboardingActions = { + async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "skipped", + starterRepoSkippedAt: Date.now(), + starterRepoStarredAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + 
requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.starSandboxAgentRepo({ + organizationId: input.organizationId, + }); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "starred", + starterRepoStarredAt: Date.now(), + starterRepoSkippedAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); + await getOrCreateGithubData(c, input.organizationId); + return await buildAppSnapshot(c, input.sessionId); + }, + + async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.kind !== "organization") { + return { + url: `${appShell.appUrl}/organizations/${input.organizationId}`, + }; + } + return { + url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), + }; + }, + + async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { + const { driver } = getActorRuntimeContext(); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.starRepository(SANDBOX_AGENT_REPO, { + githubToken: auth?.githubToken ?? 
null, + }); + return { + repo: SANDBOX_AGENT_REPO, + starredAt: Date.now(), + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/organization.ts b/foundry/packages/backend/src/actors/organization/actions/organization.ts new file mode 100644 index 0000000..9e1cbd6 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/organization.ts @@ -0,0 +1,53 @@ +import type { FoundryAppSnapshot, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getOrCreateOrganization } from "../../handles.js"; +import { + assertAppOrganization, + assertOrganizationShell, + buildAppSnapshot, + buildOrganizationState, + buildOrganizationStateIfInitialized, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; + +export const organizationShellActions = { + async getAppSnapshot(c: any, input: { sessionId: string }): Promise { + return await buildAppSnapshot(c, input.sessionId); + }, + + async setAppDefaultModel(c: any, input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + defaultModel: input.defaultModel, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async updateAppOrganizationProfile( + c: any, + input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, + ): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.updateShellProfile({ + displayName: input.displayName, + slug: input.slug, + primaryDomain: input.primaryDomain, + }); + 
return await buildAppSnapshot(c, input.sessionId); + }, + + async getOrganizationShellState(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationState(c); + }, + + async getOrganizationShellStateIfInitialized(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationStateIfInitialized(c); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts new file mode 100644 index 0000000..3affccd --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts @@ -0,0 +1,478 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; +import type { + RepoOverview, + SandboxProviderId, + TaskRecord, + TaskSummary, + WorkspacePullRequestSummary, + WorkspaceSessionSummary, + WorkspaceTaskSummary, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getGithubData, getOrCreateAuditLog, getOrCreateTask, getTask } from "../../handles.js"; +// task actions called directly (no queue) +import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../../services/create-flow.js"; +// actions return directly (no queue response unwrapping) +import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { taskWorkflowQueueName } from "../../task/workflow/queue.js"; +import { expectQueueResponse } from "../../../services/queue.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; + +interface CreateTaskCommand { + repoId: string; + task: string; + sandboxProviderId: SandboxProviderId; + explicitTitle: string | null; + explicitBranchName: string | null; + onBranch: string | null; +} 
+ +interface RegisterTaskBranchCommand { + repoId: string; + taskId: string; + branchName: string; +} + +function isStaleTaskReferenceError(error: unknown): boolean { + const message = resolveErrorMessage(error); + return isActorNotFoundError(error) || message.startsWith("Task not found:"); +} + +function parseJsonValue(value: string | null | undefined, fallback: T): T { + if (!value) { + return fallback; + } + + try { + return JSON.parse(value) as T; + } catch { + return fallback; + } +} + +function taskSummaryRowFromSummary(taskSummary: WorkspaceTaskSummary) { + return { + taskId: taskSummary.id, + repoId: taskSummary.repoId, + title: taskSummary.title, + status: taskSummary.status, + repoName: taskSummary.repoName, + updatedAtMs: taskSummary.updatedAtMs, + branch: taskSummary.branch, + pullRequestJson: JSON.stringify(taskSummary.pullRequest), + sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary), + primaryUserLogin: taskSummary.primaryUserLogin ?? null, + primaryUserAvatarUrl: taskSummary.primaryUserAvatarUrl ?? null, + }; +} + +export function taskSummaryFromRow(repoId: string, row: any): WorkspaceTaskSummary { + return { + id: row.taskId, + repoId, + title: row.title, + status: row.status, + repoName: row.repoName, + updatedAtMs: row.updatedAtMs, + branch: row.branch ?? null, + pullRequest: parseJsonValue(row.pullRequestJson, null), + sessionsSummary: parseJsonValue(row.sessionsSummaryJson, []), + primaryUserLogin: row.primaryUserLogin ?? null, + primaryUserAvatarUrl: row.primaryUserAvatarUrl ?? 
null, + }; +} + +export async function upsertTaskSummary(c: any, taskSummary: WorkspaceTaskSummary): Promise { + await c.db + .insert(taskSummaries) + .values(taskSummaryRowFromSummary(taskSummary)) + .onConflictDoUpdate({ + target: taskSummaries.taskId, + set: taskSummaryRowFromSummary(taskSummary), + }) + .run(); +} + +async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { + try { + await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); + } catch { + // Best effort cleanup only. + } +} + +async function listKnownTaskBranches(c: any, repoId: string): Promise { + const rows = await c.db + .select({ branchName: taskIndex.branchName }) + .from(taskIndex) + .where(and(eq(taskIndex.repoId, repoId), isNotNull(taskIndex.branchName))) + .all(); + return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); +} + +async function resolveGitHubRepository(c: any, repoId: string) { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getRepository({ repoId }).catch(() => null); +} + +async function resolveRepositoryRemoteUrl(c: any, repoId: string): Promise { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = repository?.cloneUrl?.trim(); + if (!remoteUrl) { + throw new Error(`Missing remote URL for repo ${repoId}`); + } + return remoteUrl; +} + +/** + * The ONLY backend code path that creates a task actor via getOrCreateTask. + * Called when a user explicitly creates a new task (not during sync/webhooks). + * + * All other code must use getTask (handles.ts) which calls .get() and will + * error if the actor doesn't exist. Virtual tasks created during PR sync + * are materialized lazily by the client's getOrCreate in backend-client.ts. + * + * NEVER call this from a sync loop or webhook handler. 
+ */ +export async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { + const organizationId = c.state.organizationId; + const repoId = cmd.repoId; + await resolveRepositoryRemoteUrl(c, repoId); + const onBranch = cmd.onBranch?.trim() || null; + const taskId = randomUUID(); + let initialBranchName: string | null = null; + let initialTitle: string | null = null; + + if (onBranch) { + initialBranchName = onBranch; + initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); + + await registerTaskBranchMutation(c, { + repoId, + taskId, + branchName: onBranch, + }); + } else { + const reservedBranches = await listKnownTaskBranches(c, repoId); + const resolved = resolveCreateFlowDecision({ + task: cmd.task, + explicitTitle: cmd.explicitTitle ?? undefined, + explicitBranchName: cmd.explicitBranchName ?? undefined, + localBranches: [], + taskBranches: reservedBranches, + }); + + initialBranchName = resolved.branchName; + initialTitle = resolved.title; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + repoId, + branchName: resolved.branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoNothing() + .run(); + } + + let taskHandle: Awaited>; + try { + taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { + organizationId, + repoId, + taskId, + }); + } catch (error) { + if (initialBranchName) { + await deleteStaleTaskIndexRow(c, taskId); + } + throw error; + } + + const created = expectQueueResponse( + await taskHandle.send( + taskWorkflowQueueName("task.command.initialize"), + { + sandboxProviderId: cmd.sandboxProviderId, + branchName: initialBranchName, + title: initialTitle, + task: cmd.task, + }, + { wait: true, timeout: 10_000 }, + ), + ); + + try { + await upsertTaskSummary(c, await taskHandle.getTaskSummary({})); + await refreshOrganizationSnapshotMutation(c); + } catch (error) { + logActorWarning("organization", "failed seeding task summary after task creation", { 
+ organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } + + const auditLog = await getOrCreateAuditLog(c, organizationId); + void auditLog.append({ + kind: "task.created", + repoId, + taskId, + payload: { + repoId, + sandboxProviderId: cmd.sandboxProviderId, + }, + }); + + try { + const taskSummary = await taskHandle.getTaskSummary({}); + await upsertTaskSummary(c, taskSummary); + } catch (error) { + logActorWarning("organization", "failed seeding organization task projection", { + organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } + + return created; +} + +export async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string }> { + const branchName = cmd.branchName.trim(); + if (!branchName) { + throw new Error("branchName is required"); + } + + const existingOwner = await c.db + .select({ taskId: taskIndex.taskId }) + .from(taskIndex) + .where(and(eq(taskIndex.branchName, branchName), eq(taskIndex.repoId, cmd.repoId), ne(taskIndex.taskId, cmd.taskId))) + .get(); + + if (existingOwner) { + let ownerMissing = false; + try { + await getTask(c, c.state.organizationId, cmd.repoId, existingOwner.taskId).get(); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + ownerMissing = true; + await deleteStaleTaskIndexRow(c, existingOwner.taskId); + } else { + throw error; + } + } + if (!ownerMissing) { + throw new Error(`branch is already assigned to a different task: ${branchName}`); + } + } + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId: cmd.taskId, + repoId: cmd.repoId, + branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); + + return { branchName }; +} + +export async function applyTaskSummaryUpdateMutation(c: any, input: { taskSummary: WorkspaceTaskSummary }): Promise { + await upsertTaskSummary(c, 
input.taskSummary); + await refreshOrganizationSnapshotMutation(c); +} + +export async function removeTaskSummaryMutation(c: any, input: { taskId: string }): Promise { + await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); + await refreshOrganizationSnapshotMutation(c); +} + +/** + * Called for every changed PR during sync and on webhook PR events. + * Runs in a bulk loop — MUST NOT create task actors or make cross-actor calls + * to task actors. Only writes to the org's local taskIndex/taskSummaries tables. + * Task actors are created lazily when the user views the task. + */ +export async function refreshTaskSummaryForBranchMutation( + c: any, + input: { repoId: string; branchName: string; pullRequest?: WorkspacePullRequestSummary | null; repoName?: string }, +): Promise { + const pullRequest = input.pullRequest ?? null; + let rows = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, input.branchName), eq(taskSummaries.repoId, input.repoId))) + .all(); + + if (rows.length === 0 && pullRequest) { + // Create a virtual task entry in the org's local tables only. + // No task actor is spawned — it will be created lazily when the user + // clicks on the task in the sidebar (the "materialize" path). + const taskId = randomUUID(); + const now = Date.now(); + const title = pullRequest.title?.trim() || input.branchName; + const repoName = input.repoName ?? `${c.state.organizationId}/${input.repoId}`; + + await c.db + .insert(taskIndex) + .values({ taskId, repoId: input.repoId, branchName: input.branchName, createdAt: now, updatedAt: now }) + .onConflictDoNothing() + .run(); + + await c.db + .insert(taskSummaries) + .values({ + taskId, + repoId: input.repoId, + title, + status: "init_complete", + repoName, + updatedAtMs: pullRequest.updatedAtMs ?? 
now, + branch: input.branchName, + pullRequestJson: JSON.stringify(pullRequest), + sessionsSummaryJson: "[]", + }) + .onConflictDoNothing() + .run(); + + rows = [{ taskId }]; + } else { + // Update PR data on existing task summaries locally. + // If a real task actor exists, also notify it. + for (const row of rows) { + // Update the local summary with the new PR data + await c.db + .update(taskSummaries) + .set({ + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAtMs: pullRequest?.updatedAtMs ?? Date.now(), + }) + .where(eq(taskSummaries.taskId, row.taskId)) + .run(); + + // Best-effort notify the task actor if it exists (fire-and-forget) + try { + const task = getTask(c, c.state.organizationId, input.repoId, row.taskId); + void task.syncPullRequest({ pullRequest }).catch(() => {}); + } catch { + // Task actor doesn't exist yet — that's fine, it's virtual + } + } + } + + await refreshOrganizationSnapshotMutation(c); +} + +export async function listTaskSummariesForRepo(c: any, repoId: string, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId, + taskId: row.taskId, + branchName: row.branch ?? null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listAllTaskSummaries(c: any, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + taskId: row.taskId, + branchName: row.branch ?? 
null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listWorkspaceTaskSummaries(c: any): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows.map((row) => taskSummaryFromRow(row.repoId, row)); +} + +export async function getRepoOverviewFromOrg(c: any, repoId: string): Promise { + const now = Date.now(); + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + const taskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).all(); + + const branches = taskRows + .filter((row: any) => row.branch) + .map((row: any) => { + const pr = parseJsonValue(row.pullRequestJson, null); + return { + branchName: row.branch!, + commitSha: "", + taskId: row.taskId, + taskTitle: row.title ?? null, + taskStatus: row.status ?? null, + pullRequest: pr, + ciStatus: null, + updatedAt: Math.max(row.updatedAtMs ?? 0, pr?.updatedAtMs ?? 0, now), + }; + }) + .sort((a: any, b: any) => b.updatedAt - a.updatedAt); + + return { + organizationId: c.state.organizationId, + repoId, + remoteUrl, + baseRef: repository?.defaultBranch ?? null, + fetchedAt: now, + branches, + }; +} + +export async function getRepositoryMetadataFromOrg( + c: any, + repoId: string, +): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + return { + defaultBranch: repository?.defaultBranch ?? null, + fullName: repository?.fullName ?? 
null, + remoteUrl, + }; +} + +export async function findTaskForBranch(c: any, repoId: string, branchName: string): Promise<{ taskId: string | null }> { + const row = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, branchName), eq(taskSummaries.repoId, repoId))) + .get(); + return { taskId: row?.taskId ?? null }; +} diff --git a/foundry/packages/backend/src/actors/organization/actions/tasks.ts b/foundry/packages/backend/src/actors/organization/actions/tasks.ts new file mode 100644 index 0000000..80bb2f9 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/tasks.ts @@ -0,0 +1,377 @@ +// @ts-nocheck +import { desc, eq } from "drizzle-orm"; +import type { + AuditLogEvent, + CreateTaskInput, + HistoryQueryInput, + ListTasksInput, + RepoOverview, + SwitchResult, + TaskRecord, + TaskSummary, + TaskWorkspaceChangeModelInput, + TaskWorkspaceChangeOwnerInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getOrCreateAuditLog, getOrCreateTask, getTask as getTaskHandle } from "../../handles.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { taskWorkflowQueueName } from "../../task/workflow/queue.js"; +import { expectQueueResponse } from "../../../services/queue.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { + createTaskMutation, + getRepoOverviewFromOrg, + getRepositoryMetadataFromOrg, + findTaskForBranch, + listTaskSummariesForRepo, + listAllTaskSummaries, +} from "./task-mutations.js"; + +function 
assertOrganization(c: { state: { organizationId: string } }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} command=${organizationId}`); + } +} + +/** + * Look up the repoId for a task from the local task index. + * Used when callers (e.g. sandbox actor) only have taskId but need repoId + * to construct the task actor key. + */ +async function resolveTaskRepoId(c: any, taskId: string): Promise { + const row = await c.db.select({ repoId: taskIndex.repoId }).from(taskIndex).where(eq(taskIndex.taskId, taskId)).get(); + if (!row) { + throw new Error(`Task ${taskId} not found in task index`); + } + return row.repoId; +} + +/** + * Get or lazily create a task actor for a user-initiated action. + * Uses getOrCreate because the user may be interacting with a virtual task + * (PR-driven) that has no actor yet. The task actor self-initializes in + * getCurrentRecord() from the org's getTaskIndexEntry data. + * + * This is safe because requireWorkspaceTask is only called from user-initiated + * actions (createSession, sendMessage, etc.), never from sync loops. + * See CLAUDE.md "Lazy Task Actor Creation". + */ +async function requireWorkspaceTask(c: any, repoId: string, taskId: string) { + return getOrCreateTask(c, c.state.organizationId, repoId, taskId, { + organizationId: c.state.organizationId, + repoId, + taskId, + }); +} + +interface GetTaskInput { + organizationId: string; + repoId: string; + taskId: string; +} + +interface TaskProxyActionInput extends GetTaskInput { + reason?: string; +} + +interface RepoOverviewInput { + organizationId: string; + repoId: string; +} + +export { createTaskMutation }; + +export const organizationTaskActions = { + async createTask(c: any, input: CreateTaskInput): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + const sandboxProviderId = input.sandboxProviderId ?? 
defaultSandboxProviderId(config); + + // Self-call: call the mutation directly since we're inside the org actor + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.task, + sandboxProviderId, + explicitTitle: input.explicitTitle ?? null, + explicitBranchName: input.explicitBranchName ?? null, + onBranch: input.onBranch ?? null, + }); + }, + + async materializeTask(c: any, input: { organizationId: string; repoId: string; virtualTaskId: string }): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + // Self-call: call the mutation directly + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.virtualTaskId, + sandboxProviderId: defaultSandboxProviderId(config), + explicitTitle: null, + explicitBranchName: null, + onBranch: null, + }); + }, + + async createWorkspaceTask(c: any, input: TaskWorkspaceCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { + const created = await organizationTaskActions.createTask(c, { + organizationId: c.state.organizationId, + repoId: input.repoId, + task: input.task, + ...(input.title ? { explicitTitle: input.title } : {}), + ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? 
{ explicitBranchName: input.branch } : {}), + }); + + const task = await requireWorkspaceTask(c, input.repoId, created.taskId); + void task + .send( + taskWorkflowQueueName("task.command.workspace.create_session_and_send"), + { + model: input.model, + text: input.task, + authSessionId: input.authSessionId, + }, + { wait: false }, + ) + .catch(() => {}); + + return { taskId: created.taskId }; + }, + + async markWorkspaceUnread(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.markUnread({ authSessionId: input.authSessionId }); + }, + + async renameWorkspaceTask(c: any, input: TaskWorkspaceRenameInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameTask({ value: input.value }); + }, + + async createWorkspaceSession(c: any, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + return expectQueueResponse( + await task.send( + taskWorkflowQueueName("task.command.workspace.create_session"), + { + ...(input.model ? { model: input.model } : {}), + ...(input.authSessionId ? 
{ authSessionId: input.authSessionId } : {}), + }, + { wait: true, timeout: 10_000 }, + ), + ); + }, + + async renameWorkspaceSession(c: any, input: TaskWorkspaceRenameSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameSession({ sessionId: input.sessionId, title: input.title }); + }, + + async selectWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.selectSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }); + }, + + async setWorkspaceSessionUnread(c: any, input: TaskWorkspaceSetSessionUnreadInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.setSessionUnread({ sessionId: input.sessionId, unread: input.unread, authSessionId: input.authSessionId }); + }, + + async updateWorkspaceDraft(c: any, input: TaskWorkspaceUpdateDraftInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .updateDraft({ + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + }, + + async changeWorkspaceModel(c: any, input: TaskWorkspaceChangeModelInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.changeModel({ sessionId: input.sessionId, model: input.model, authSessionId: input.authSessionId }); + }, + + async sendWorkspaceMessage(c: any, input: TaskWorkspaceSendMessageInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .send( + taskWorkflowQueueName("task.command.workspace.send_message"), + { + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }, + { wait: false }, + ) + .catch(() => {}); + }, + + async stopWorkspaceSession(c: any, 
input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .send(taskWorkflowQueueName("task.command.workspace.stop_session"), { sessionId: input.sessionId, authSessionId: input.authSessionId }, { wait: false }) + .catch(() => {}); + }, + + async closeWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .send(taskWorkflowQueueName("task.command.workspace.close_session"), { sessionId: input.sessionId, authSessionId: input.authSessionId }, { wait: false }) + .catch(() => {}); + }, + + async publishWorkspacePr(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.send(taskWorkflowQueueName("task.command.workspace.publish_pr"), {}, { wait: false }).catch(() => {}); + }, + + async changeWorkspaceTaskOwner(c: any, input: TaskWorkspaceChangeOwnerInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.send( + taskWorkflowQueueName("task.command.workspace.change_owner"), + { + primaryUserId: input.targetUserId, + primaryGithubLogin: input.targetUserName, + primaryGithubEmail: input.targetUserEmail, + primaryGithubAvatarUrl: null, + }, + { wait: false }, + ); + }, + + async revertWorkspaceFile(c: any, input: TaskWorkspaceDiffInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.send(taskWorkflowQueueName("task.command.workspace.revert_file"), input, { wait: false }).catch(() => {}); + }, + + async getRepoOverview(c: any, input: RepoOverviewInput): Promise { + assertOrganization(c, input.organizationId); + + return await getRepoOverviewFromOrg(c, input.repoId); + }, + + async listTasks(c: any, input: ListTasksInput): Promise { + assertOrganization(c, input.organizationId); + if (input.repoId) { + return await 
listTaskSummariesForRepo(c, input.repoId, true); + } + return await listAllTaskSummaries(c, true); + }, + + async switchTask(c: any, input: { repoId: string; taskId: string }): Promise { + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + const record = await h.get(); + const switched = expectQueueResponse<{ switchTarget: string | null }>( + await h.send(taskWorkflowQueueName("task.command.switch"), {}, { wait: true, timeout: 10_000 }), + ); + return { + organizationId: c.state.organizationId, + taskId: input.taskId, + sandboxProviderId: record.sandboxProviderId, + switchTarget: switched.switchTarget, + }; + }, + + async auditLog(c: any, input: HistoryQueryInput): Promise { + assertOrganization(c, input.organizationId); + const auditLog = await getOrCreateAuditLog(c, c.state.organizationId); + return await auditLog.list({ + repoId: input.repoId, + branch: input.branch, + taskId: input.taskId, + limit: input.limit ?? 20, + }); + }, + + async getTask(c: any, input: GetTaskInput): Promise { + assertOrganization(c, input.organizationId); + // Resolve repoId from local task index if not provided (e.g. sandbox actor only has taskId) + const repoId = input.repoId || (await resolveTaskRepoId(c, input.taskId)); + // Use getOrCreate — the task may be virtual (PR-driven, no actor yet). + // The task actor self-initializes in getCurrentRecord(). 
+ const handle = await getOrCreateTask(c, c.state.organizationId, repoId, input.taskId, { + organizationId: c.state.organizationId, + repoId, + taskId: input.taskId, + }); + return await handle.get(); + }, + + async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + return expectQueueResponse(await h.send(taskWorkflowQueueName("task.command.attach"), { reason: input.reason }, { wait: true, timeout: 10_000 })); + }, + + async pushTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.send(taskWorkflowQueueName("task.command.push"), { reason: input.reason }, { wait: false }).catch(() => {}); + }, + + async syncTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.send(taskWorkflowQueueName("task.command.sync"), { reason: input.reason }, { wait: false }).catch(() => {}); + }, + + async mergeTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.send(taskWorkflowQueueName("task.command.merge"), { reason: input.reason }, { wait: false }).catch(() => {}); + }, + + async archiveTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.send(taskWorkflowQueueName("task.command.archive"), { reason: input.reason }, { wait: false }).catch(() => {}); + }, + + async killTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, 
input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.send(taskWorkflowQueueName("task.command.kill"), { reason: input.reason }, { wait: false }).catch(() => {}); + }, + + async getRepositoryMetadata(c: any, input: { repoId: string }): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + return await getRepositoryMetadataFromOrg(c, input.repoId); + }, + + async findTaskForBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { + return await findTaskForBranch(c, input.repoId, input.branchName); + }, + + /** + * Lightweight read of task index + summary data. Used by the task actor + * to self-initialize when lazily materialized from a virtual task. + * Does NOT trigger materialization — no circular dependency. + */ + async getTaskIndexEntry(c: any, input: { taskId: string }): Promise<{ branchName: string | null; title: string | null } | null> { + const idx = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(eq(taskIndex.taskId, input.taskId)).get(); + const summary = await c.db.select({ title: taskSummaries.title }).from(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).get(); + if (!idx && !summary) return null; + return { + branchName: idx?.branchName ?? null, + title: summary?.title ?? 
null, + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/app-shell.ts b/foundry/packages/backend/src/actors/organization/app-shell.ts new file mode 100644 index 0000000..ed1005a --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -0,0 +1,1455 @@ +import { desc, eq } from "drizzle-orm"; +import { randomUUID } from "node:crypto"; +import type { + FoundryAppSnapshot, + FoundryBillingPlanId, + FoundryBillingState, + FoundryOrganization, + FoundryOrganizationMember, + FoundryUser, + UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, +} from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../context.js"; +import { getOrCreateGithubData, getOrCreateOrganization, selfOrganization } from "../handles.js"; +import { GitHubAppError } from "../../services/app-github.js"; +import { getBetterAuthService } from "../../services/better-auth.js"; +import { repoLabelFromRemote } from "../../services/repo.js"; +import { logger } from "../../logging.js"; +import { githubDataWorkflowQueueName } from "../github-data/index.js"; +import { organizationWorkflowQueueName } from "./queues.js"; +import { invoices, organizationMembers, organizationProfile, seatAssignments, stripeLookup } from "./db/schema.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./constants.js"; + +const githubWebhookLogger = logger.child({ + scope: "github-webhook", +}); + +const PROFILE_ROW_ID = 1; + +function roundDurationMs(start: number): number { + return Math.round((performance.now() - start) * 100) / 100; +} + +export function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); + } +} + +export function assertOrganizationShell(c: any): void { + if (c.state.organizationId === 
APP_SHELL_ORGANIZATION_ID) { + throw new Error("Organization action cannot run on the reserved app organization"); + } +} + +function slugify(value: string): string { + return value + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+|-+$/g, ""); +} + +function personalOrganizationId(login: string): string { + return `personal-${slugify(login)}`; +} + +function organizationOrganizationId(kind: FoundryOrganization["kind"], login: string): string { + return kind === "personal" ? personalOrganizationId(login) : slugify(login); +} + +function parseEligibleOrganizationIds(value: string): string[] { + try { + const parsed = JSON.parse(value); + if (!Array.isArray(parsed)) { + return []; + } + return parsed.filter((entry): entry is string => typeof entry === "string" && entry.length > 0); + } catch { + return []; + } +} + +function encodeEligibleOrganizationIds(value: string[]): string { + return JSON.stringify([...new Set(value)]); +} + +function errorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); +} + +function seatsIncludedForPlan(planId: FoundryBillingPlanId): number { + switch (planId) { + case "free": + return 1; + case "team": + return 5; + } +} + +function stripeStatusToBillingStatus(stripeStatus: string, cancelAtPeriodEnd: boolean): FoundryBillingState["status"] { + if (cancelAtPeriodEnd) { + return "scheduled_cancel"; + } + if (stripeStatus === "trialing") { + return "trialing"; + } + if (stripeStatus === "past_due" || stripeStatus === "unpaid" || stripeStatus === "incomplete") { + return "past_due"; + } + return "active"; +} + +function formatUnixDate(value: number): string { + return new Date(value * 1000).toISOString().slice(0, 10); +} + +function legacyRepoImportStatusToGithubSyncStatus(value: string | null | undefined): FoundryOrganization["github"]["syncStatus"] { + switch (value) { + case "ready": + return "synced"; + case "importing": + return "syncing"; + default: + return "pending"; + } +} + +function stringFromMetadata(metadata: unknown, key: string): string | null { + if (!metadata || typeof metadata !== "object") { + return null; + } + const value = (metadata as Record)[key]; + return typeof value === "string" && value.length > 0 ? value : null; +} + +function stripeWebhookSubscription(event: any) { + const object = event.data.object as Record; + const items = (object.items as { data?: Array> } | undefined)?.data ?? []; + const price = items[0]?.price as Record | undefined; + return { + id: typeof object.id === "string" ? object.id : "", + customerId: typeof object.customer === "string" ? object.customer : "", + priceId: typeof price?.id === "string" ? price.id : null, + status: typeof object.status === "string" ? object.status : "active", + cancelAtPeriodEnd: object.cancel_at_period_end === true, + currentPeriodEnd: typeof object.current_period_end === "number" ? object.current_period_end : null, + trialEnd: typeof object.trial_end === "number" ? 
object.trial_end : null, + defaultPaymentMethodLabel: "Payment method on file", + }; +} + +// sendOrganizationCommand removed — org actions called directly + +export async function getOrganizationState(organization: any) { + return await organization.getOrganizationShellState({}); +} + +async function getOrganizationStateIfInitialized(organization: any) { + return await organization.getOrganizationShellStateIfInitialized({}); +} + +async function listSnapshotOrganizations(c: any, sessionId: string, organizationIds: string[]) { + const results = await Promise.all( + organizationIds.map(async (organizationId) => { + const organizationStartedAt = performance.now(); + try { + const organization = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationStateIfInitialized(organization); + if (!organizationState) { + logger.warn( + { + sessionId, + actorOrganizationId: c.state.organizationId, + organizationId, + durationMs: roundDurationMs(organizationStartedAt), + }, + "build_app_snapshot_organization_uninitialized", + ); + return { organizationId, snapshot: null, status: "uninitialized" as const }; + } + logger.info( + { + sessionId, + actorOrganizationId: c.state.organizationId, + organizationId, + durationMs: roundDurationMs(organizationStartedAt), + }, + "build_app_snapshot_organization_completed", + ); + return { organizationId, snapshot: organizationState.snapshot, status: "ok" as const }; + } catch (error) { + const message = errorMessage(error); + if (!message.includes("Actor not found")) { + logger.error( + { + sessionId, + actorOrganizationId: c.state.organizationId, + organizationId, + durationMs: roundDurationMs(organizationStartedAt), + errorMessage: message, + errorStack: error instanceof Error ? 
error.stack : undefined, + }, + "build_app_snapshot_organization_failed", + ); + throw error; + } + logger.info( + { + sessionId, + actorOrganizationId: c.state.organizationId, + organizationId, + durationMs: roundDurationMs(organizationStartedAt), + }, + "build_app_snapshot_organization_missing", + ); + return { organizationId, snapshot: null, status: "missing" as const }; + } + }), + ); + + return { + organizations: results.map((result) => result.snapshot).filter((organization): organization is FoundryOrganization => organization !== null), + uninitializedOrganizationIds: results.filter((result) => result.status === "uninitialized").map((result) => result.organizationId), + }; +} + +export async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { + assertAppOrganization(c); + const startedAt = performance.now(); + const auth = getBetterAuthService(); + let authState = await auth.getAuthState(sessionId); + // Inline fallback: if the user is signed in but has no eligible organizations yet + // (e.g. first load after OAuth callback), sync GitHub orgs before building the snapshot. + if (authState?.user && parseEligibleOrganizationIds(authState.profile?.eligibleOrganizationIdsJson ?? "[]").length === 0) { + const token = await auth.getAccessTokenForSession(sessionId); + if (token?.accessToken) { + logger.info({ sessionId }, "build_app_snapshot_sync_orgs"); + await syncGithubOrganizations(c, { sessionId, accessToken: token.accessToken }); + authState = await auth.getAuthState(sessionId); + } else { + logger.warn({ sessionId }, "build_app_snapshot_no_access_token"); + } + } + + const session = authState?.session ?? null; + const user = authState?.user ?? null; + const profile = authState?.profile ?? null; + const currentSessionState = authState?.sessionState ?? null; + const githubAccount = authState?.accounts?.find((account: any) => account.providerId === "github") ?? 
null; + const eligibleOrganizationIds = parseEligibleOrganizationIds(profile?.eligibleOrganizationIdsJson ?? "[]"); + + logger.info( + { + sessionId, + organizationId: c.state.organizationId, + eligibleOrganizationCount: eligibleOrganizationIds.length, + eligibleOrganizationIds, + }, + "build_app_snapshot_started", + ); + + let { organizations, uninitializedOrganizationIds } = await listSnapshotOrganizations(c, sessionId, eligibleOrganizationIds); + + if (allowOrganizationRepair && uninitializedOrganizationIds.length > 0) { + const token = await auth.getAccessTokenForSession(sessionId); + if (token?.accessToken) { + logger.info( + { + sessionId, + organizationId: c.state.organizationId, + organizationIds: uninitializedOrganizationIds, + }, + "build_app_snapshot_repairing_organizations", + ); + await syncGithubOrganizationsInternal(c, { sessionId, accessToken: token.accessToken }, { broadcast: false }); + return await buildAppSnapshot(c, sessionId, false); + } + logger.warn( + { + sessionId, + organizationId: c.state.organizationId, + organizationIds: uninitializedOrganizationIds, + }, + "build_app_snapshot_repair_skipped_no_access_token", + ); + } + + const currentUser: FoundryUser | null = user + ? { + id: profile?.githubAccountId ?? githubAccount?.accountId ?? user.id, + name: user.name, + email: user.email, + githubLogin: profile?.githubLogin ?? "", + roleLabel: profile?.roleLabel ?? "GitHub user", + eligibleOrganizationIds, + defaultModel: profile?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + } + : null; + + const activeOrganizationId = + currentUser && + currentSessionState?.activeOrganizationId && + organizations.some((organization) => organization.id === currentSessionState.activeOrganizationId) + ? currentSessionState.activeOrganizationId + : currentUser && organizations.length === 1 + ? (organizations[0]?.id ?? null) + : null; + + const snapshot: FoundryAppSnapshot = { + auth: { + status: currentUser ? 
"signed_in" : "signed_out", + currentUserId: currentUser?.id ?? null, + }, + activeOrganizationId, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: profile?.starterRepoStatus ?? "pending", + starredAt: profile?.starterRepoStarredAt ?? null, + skippedAt: profile?.starterRepoSkippedAt ?? null, + }, + }, + users: currentUser ? [currentUser] : [], + organizations, + }; + + logger.info( + { + sessionId, + organizationId: c.state.organizationId, + eligibleOrganizationCount: eligibleOrganizationIds.length, + organizationCount: organizations.length, + durationMs: roundDurationMs(startedAt), + }, + "build_app_snapshot_completed", + ); + + return snapshot; +} + +export async function requireSignedInSession(c: any, sessionId: string) { + const auth = getBetterAuthService(); + const authState = await auth.getAuthState(sessionId); + const user = authState?.user ?? null; + const profile = authState?.profile ?? null; + const githubAccount = authState?.accounts?.find((account: any) => account.providerId === "github") ?? null; + if (!authState?.session || !user?.email) { + throw new Error("User must be signed in"); + } + const token = await auth.getAccessTokenForSession(sessionId); + return { + ...authState.session, + authUserId: user.id, + currentUserId: profile?.githubAccountId ?? githubAccount?.accountId ?? user.id, + currentUserName: user.name, + currentUserEmail: user.email, + currentUserGithubLogin: profile?.githubLogin ?? "", + currentUserRoleLabel: profile?.roleLabel ?? "GitHub user", + eligibleOrganizationIdsJson: profile?.eligibleOrganizationIdsJson ?? "[]", + githubAccessToken: token?.accessToken ?? null, + githubScope: (token?.scopes ?? []).join(","), + starterRepoStatus: profile?.starterRepoStatus ?? "pending", + starterRepoStarredAt: profile?.starterRepoStarredAt ?? null, + starterRepoSkippedAt: profile?.starterRepoSkippedAt ?? 
null, + }; +} + +export function requireEligibleOrganization(session: any, organizationId: string): void { + const eligibleOrganizationIds = parseEligibleOrganizationIds(session.eligibleOrganizationIdsJson); + if (!eligibleOrganizationIds.includes(organizationId)) { + throw new Error(`Organization ${organizationId} is not available in this app session`); + } +} + +async function upsertStripeLookupEntries(c: any, organizationId: string, customerId: string | null, subscriptionId: string | null): Promise { + assertAppOrganization(c); + const now = Date.now(); + for (const lookupKey of [customerId ? `customer:${customerId}` : null, subscriptionId ? `subscription:${subscriptionId}` : null]) { + if (!lookupKey) { + continue; + } + await c.db + .insert(stripeLookup) + .values({ + lookupKey, + organizationId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: stripeLookup.lookupKey, + set: { + organizationId, + updatedAt: now, + }, + }) + .run(); + } +} + +async function findOrganizationIdForStripeEvent(c: any, customerId: string | null, subscriptionId: string | null): Promise { + assertAppOrganization(c); + const customerLookup = customerId + ? await c.db + .select({ organizationId: stripeLookup.organizationId }) + .from(stripeLookup) + .where(eq(stripeLookup.lookupKey, `customer:${customerId}`)) + .get() + : null; + if (customerLookup?.organizationId) { + return customerLookup.organizationId; + } + + const subscriptionLookup = subscriptionId + ? await c.db + .select({ organizationId: stripeLookup.organizationId }) + .from(stripeLookup) + .where(eq(stripeLookup.lookupKey, `subscription:${subscriptionId}`)) + .get() + : null; + return subscriptionLookup?.organizationId ?? 
null; +} + +async function safeListOrganizations(accessToken: string): Promise { + const { appShell } = getActorRuntimeContext(); + try { + return await appShell.github.listOrganizations(accessToken); + } catch (error) { + if (error instanceof GitHubAppError && error.status === 403) { + return []; + } + throw error; + } +} + +async function safeListInstallations(accessToken: string): Promise { + const { appShell } = getActorRuntimeContext(); + try { + return await appShell.github.listInstallations(accessToken); + } catch (error) { + if (error instanceof GitHubAppError && (error.status === 403 || error.status === 404)) { + return []; + } + throw error; + } +} + +/** + * Slow path: list GitHub orgs + installations, sync each org organization, + * and update the session's eligible organization list. Called from the + * workflow queue so it runs in the background after the callback has + * already returned a redirect to the browser. + */ +export async function syncGithubOrganizations(c: any, input: { sessionId: string; accessToken: string }): Promise { + await syncGithubOrganizationsInternal(c, input, { broadcast: true }); +} + +async function syncGithubOrganizationsInternal(c: any, input: { sessionId: string; accessToken: string }, options: { broadcast: boolean }): Promise { + assertAppOrganization(c); + const auth = getBetterAuthService(); + const { appShell } = getActorRuntimeContext(); + const { sessionId, accessToken } = input; + const authState = await auth.getAuthState(sessionId); + if (!authState?.user) { + throw new Error("User must be signed in"); + } + const viewer = await appShell.github.getViewer(accessToken); + const organizations = await safeListOrganizations(accessToken); + const installations = await safeListInstallations(accessToken); + const authUserId = authState.user.id; + const githubUserId = String(viewer.id); + + const linkedOrganizationIds: string[] = []; + const accounts = [ + { + githubAccountId: viewer.id, + githubLogin: viewer.login, + 
githubAccountType: "User", + kind: "personal" as const, + displayName: viewer.name || viewer.login, + }, + ...organizations.map((organization) => ({ + githubAccountId: organization.id, + githubLogin: organization.login, + githubAccountType: "Organization", + kind: "organization" as const, + displayName: organization.name || organization.login, + })), + ]; + + for (const account of accounts) { + const organizationId = organizationOrganizationId(account.kind, account.githubLogin); + const installation = installations.find((candidate) => candidate.accountLogin === account.githubLogin) ?? null; + const organization = await getOrCreateOrganization(c, organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.github.organization_shell.sync_from_github"), + { + userId: githubUserId, + userName: viewer.name || viewer.login, + userEmail: viewer.email ?? `${viewer.login}@users.noreply.github.com`, + githubUserLogin: viewer.login, + githubAccountId: account.githubAccountId, + githubLogin: account.githubLogin, + githubAccountType: account.githubAccountType, + kind: account.kind, + displayName: account.displayName, + installationId: installation?.id ?? null, + appConfigured: appShell.github.isAppConfigured(), + }, + { wait: true, timeout: 10_000 }, + ); + linkedOrganizationIds.push(organizationId); + } + + const activeOrganizationId = + authState.sessionState?.activeOrganizationId && linkedOrganizationIds.includes(authState.sessionState.activeOrganizationId) + ? authState.sessionState.activeOrganizationId + : linkedOrganizationIds.length === 1 + ? (linkedOrganizationIds[0] ?? 
null) + : null; + + await auth.setActiveOrganization(sessionId, activeOrganizationId); + await auth.upsertUserProfile(authUserId, { + githubAccountId: String(viewer.id), + githubLogin: viewer.login, + roleLabel: "GitHub user", + eligibleOrganizationIdsJson: encodeEligibleOrganizationIds(linkedOrganizationIds), + }); + if (!options.broadcast) { + return; + } + c.broadcast("appUpdated", { + type: "appUpdated", + snapshot: await buildAppSnapshot(c, sessionId), + }); +} + +async function readOrganizationProfileRow(c: any) { + assertOrganizationShell(c); + return await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, PROFILE_ROW_ID)).get(); +} + +async function requireOrganizationProfileRow(c: any) { + const row = await readOrganizationProfileRow(c); + if (!row) { + throw new Error(`Organization profile is not initialized for organization ${c.state.organizationId}`); + } + return row; +} + +async function listOrganizationMembers(c: any): Promise { + assertOrganizationShell(c); + const rows = await c.db.select().from(organizationMembers).orderBy(organizationMembers.role, organizationMembers.name).all(); + return rows.map((row) => ({ + id: row.id, + name: row.name, + email: row.email, + role: row.role, + state: row.state, + })); +} + +async function listOrganizationSeatAssignments(c: any): Promise { + assertOrganizationShell(c); + const rows = await c.db.select({ email: seatAssignments.email }).from(seatAssignments).orderBy(seatAssignments.email).all(); + return rows.map((row) => row.email); +} + +async function listOrganizationInvoices(c: any): Promise { + assertOrganizationShell(c); + const rows = await c.db.select().from(invoices).orderBy(desc(invoices.issuedAt), desc(invoices.createdAt)).all(); + return rows.map((row) => ({ + id: row.id, + label: row.label, + issuedAt: row.issuedAt, + amountUsd: row.amountUsd, + status: row.status, + })); +} + +async function listOrganizationRepoCatalog(c: any): Promise { + assertOrganizationShell(c); + try { + 
const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => repoLabelFromRemote(row.cloneUrl)).sort((a: string, b: string) => a.localeCompare(b)); + } catch { + return []; + } +} + +export async function buildOrganizationState(c: any) { + const startedAt = performance.now(); + const row = await requireOrganizationProfileRow(c); + return await buildOrganizationStateFromRow(c, row, startedAt); +} + +export async function buildOrganizationStateIfInitialized(c: any) { + const startedAt = performance.now(); + const row = await readOrganizationProfileRow(c); + if (!row) { + return null; + } + return await buildOrganizationStateFromRow(c, row, startedAt); +} + +async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number) { + const repoCatalog = await listOrganizationRepoCatalog(c); + const members = await listOrganizationMembers(c); + const seatAssignmentEmails = await listOrganizationSeatAssignments(c); + const invoiceRows = await listOrganizationInvoices(c); + + const state = { + id: c.state.organizationId, + organizationId: c.state.organizationId, + kind: row.kind, + githubLogin: row.githubLogin, + githubInstallationId: row.githubInstallationId ?? null, + stripeCustomerId: row.stripeCustomerId ?? null, + stripeSubscriptionId: row.stripeSubscriptionId ?? null, + stripePriceId: row.stripePriceId ?? null, + billingPlanId: row.billingPlanId, + snapshot: { + id: c.state.organizationId, + organizationId: c.state.organizationId, + kind: row.kind, + settings: { + displayName: row.displayName, + slug: row.slug, + primaryDomain: row.primaryDomain, + seatAccrualMode: "first_prompt", + autoImportRepos: row.autoImportRepos === 1, + }, + github: { + connectedAccount: row.githubConnectedAccount, + installationStatus: row.githubInstallationStatus, + syncStatus: row.githubSyncStatus ?? 
legacyRepoImportStatusToGithubSyncStatus(row.repoImportStatus), + importedRepoCount: repoCatalog.length, + lastSyncLabel: row.githubLastSyncLabel, + lastSyncAt: row.githubLastSyncAt ?? null, + lastWebhookAt: row.githubLastWebhookAt ?? null, + lastWebhookEvent: row.githubLastWebhookEvent ?? "", + syncGeneration: row.githubSyncGeneration ?? 0, + syncPhase: row.githubSyncPhase ?? null, + processedRepositoryCount: row.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: row.githubTotalRepositoryCount ?? 0, + }, + billing: { + planId: row.billingPlanId, + status: row.billingStatus, + seatsIncluded: row.billingSeatsIncluded, + trialEndsAt: row.billingTrialEndsAt, + renewalAt: row.billingRenewalAt, + stripeCustomerId: row.stripeCustomerId ?? "", + paymentMethodLabel: row.billingPaymentMethodLabel, + invoices: invoiceRows, + }, + members, + seatAssignments: seatAssignmentEmails, + repoCatalog, + }, + }; + + logger.info( + { + organizationId: c.state.organizationId, + githubLogin: row.githubLogin, + repoCount: repoCatalog.length, + memberCount: members.length, + seatAssignmentCount: seatAssignmentEmails.length, + invoiceCount: invoiceRows.length, + durationMs: roundDurationMs(startedAt), + }, + "build_organization_state_completed", + ); + + return state; +} + +async function applySubscriptionState( + organization: any, + subscription: { + id: string; + customerId: string; + priceId: string | null; + status: string; + cancelAtPeriodEnd: boolean; + currentPeriodEnd: number | null; + trialEnd: number | null; + defaultPaymentMethodLabel: string; + }, + fallbackPlanId: FoundryBillingPlanId, +): Promise { + await organization.send( + organizationWorkflowQueueName("organization.command.billing.stripe_subscription.apply"), + { subscription, fallbackPlanId }, + { wait: true, timeout: 10_000 }, + ); +} + +export const organizationAppActions = { + async createAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): 
Promise<{ url: string }> { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + + if (input.planId === "free") { + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.free_plan.apply"), + { clearSubscription: false }, + { wait: true, timeout: 10_000 }, + ); + return { + url: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, + }; + } + + if (!appShell.stripe.isConfigured()) { + throw new Error("Stripe is not configured"); + } + + let customerId = organizationState.stripeCustomerId; + if (!customerId) { + customerId = ( + await appShell.stripe.createCustomer({ + organizationId: input.organizationId, + displayName: organizationState.snapshot.settings.displayName, + email: session.currentUserEmail, + }) + ).id; + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.stripe_customer.apply"), + { customerId }, + { wait: true, timeout: 10_000 }, + ); + await upsertStripeLookupEntries(c, input.organizationId, customerId, null); + } + + return { + url: await appShell.stripe + .createCheckoutSession({ + organizationId: input.organizationId, + customerId, + customerEmail: session.currentUserEmail, + planId: input.planId, + successUrl: `${appShell.apiUrl}/v1/billing/checkout/complete?organizationId=${encodeURIComponent( + input.organizationId, + )}&session_id={CHECKOUT_SESSION_ID}`, + cancelUrl: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, + }) + .then((checkout) => checkout.url), + }; + }, + + async finalizeAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; checkoutSessionId: string }): Promise<{ redirectTo: string }> 
{ + assertAppOrganization(c); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + const completion = await appShell.stripe.retrieveCheckoutCompletion(input.checkoutSessionId); + + if (completion.customerId) { + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.stripe_customer.apply"), + { customerId: completion.customerId }, + { wait: true, timeout: 10_000 }, + ); + } + await upsertStripeLookupEntries(c, input.organizationId, completion.customerId, completion.subscriptionId); + + if (completion.subscriptionId) { + const subscription = await appShell.stripe.retrieveSubscription(completion.subscriptionId); + await applySubscriptionState(organizationHandle, subscription, completion.planId ?? organizationState.billingPlanId); + } + + if (completion.paymentMethodLabel) { + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.payment_method.set"), + { label: completion.paymentMethodLabel }, + { wait: true, timeout: 10_000 }, + ); + } + + return { + redirectTo: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, + }; + }, + + async createAppBillingPortalSession(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (!organizationState.stripeCustomerId) { + throw new Error("Stripe customer is not available for this organization"); + } + const portal = await appShell.stripe.createPortalSession({ + customerId: 
organizationState.stripeCustomerId, + returnUrl: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, + }); + return { url: portal.url }; + }, + + async cancelAppScheduledRenewal(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, true); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? 
organizationState.stripeCustomerId, subscription.id); + } else { + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.status.set"), + { status: "scheduled_cancel" }, + { wait: true, timeout: 10_000 }, + ); + } + + return await buildAppSnapshot(c, input.sessionId); + }, + + async resumeAppSubscription(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, false); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? 
organizationState.stripeCustomerId, subscription.id); + } else { + await organizationHandle.send( + organizationWorkflowQueueName("organization.command.billing.status.set"), + { status: "active" }, + { wait: true, timeout: 10_000 }, + ); + } + + return await buildAppSnapshot(c, input.sessionId); + }, + + async recordAppSeatUsage(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.billing.seat_usage.record"), + { email: session.currentUserEmail }, + { wait: true, timeout: 10_000 }, + ); + return await buildAppSnapshot(c, input.sessionId); + }, + + async handleAppStripeWebhook(c: any, input: { payload: string; signatureHeader: string | null }): Promise<{ ok: true }> { + assertAppOrganization(c); + const { appShell } = getActorRuntimeContext(); + const event = appShell.stripe.verifyWebhookEvent(input.payload, input.signatureHeader); + + if (event.type === "checkout.session.completed") { + const object = event.data.object as Record; + const organizationId = + stringFromMetadata(object.metadata, "organizationId") ?? + (await findOrganizationIdForStripeEvent( + c, + typeof object.customer === "string" ? object.customer : null, + typeof object.subscription === "string" ? object.subscription : null, + )); + if (organizationId) { + const organization = await getOrCreateOrganization(c, organizationId); + if (typeof object.customer === "string") { + await organization.send( + organizationWorkflowQueueName("organization.command.billing.stripe_customer.apply"), + { customerId: object.customer }, + { wait: true, timeout: 10_000 }, + ); + } + await upsertStripeLookupEntries( + c, + organizationId, + typeof object.customer === "string" ? 
object.customer : null, + typeof object.subscription === "string" ? object.subscription : null, + ); + } + return { ok: true }; + } + + if (event.type === "customer.subscription.updated" || event.type === "customer.subscription.created") { + const subscription = stripeWebhookSubscription(event); + const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); + if (organizationId) { + const organizationHandle = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationState(organizationHandle); + await applySubscriptionState( + organizationHandle, + subscription, + appShell.stripe.planIdForPriceId(subscription.priceId ?? "") ?? organizationState.billingPlanId, + ); + await upsertStripeLookupEntries(c, organizationId, subscription.customerId, subscription.id); + } + return { ok: true }; + } + + if (event.type === "customer.subscription.deleted") { + const subscription = stripeWebhookSubscription(event); + const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); + if (organizationId) { + const organization = await getOrCreateOrganization(c, organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.billing.free_plan.apply"), + { clearSubscription: true }, + { wait: true, timeout: 10_000 }, + ); + } + return { ok: true }; + } + + if (event.type === "invoice.paid" || event.type === "invoice.payment_failed") { + const invoice = event.data.object as Record; + const organizationId = await findOrganizationIdForStripeEvent(c, typeof invoice.customer === "string" ? invoice.customer : null, null); + if (organizationId) { + const organization = await getOrCreateOrganization(c, organizationId); + const rawAmount = typeof invoice.amount_paid === "number" ? invoice.amount_paid : invoice.amount_due; + const amountUsd = Math.round((typeof rawAmount === "number" ? 
rawAmount : 0) / 100); + await organization.send( + organizationWorkflowQueueName("organization.command.billing.invoice.upsert"), + { + id: String(invoice.id), + label: typeof invoice.number === "string" ? `Invoice ${invoice.number}` : "Stripe invoice", + issuedAt: formatUnixDate(typeof invoice.created === "number" ? invoice.created : Math.floor(Date.now() / 1000)), + amountUsd: Number.isFinite(amountUsd) ? amountUsd : 0, + status: event.type === "invoice.paid" ? "paid" : "open", + }, + { wait: true, timeout: 10_000 }, + ); + } + } + + return { ok: true }; + }, + + async handleAppGithubWebhook(c: any, input: { payload: string; signatureHeader: string | null; eventHeader: string | null }): Promise<{ ok: true }> { + assertAppOrganization(c); + const { appShell } = getActorRuntimeContext(); + const { event, body } = appShell.github.verifyWebhookEvent(input.payload, input.signatureHeader, input.eventHeader); + + const accountLogin = body.installation?.account?.login ?? body.repository?.owner?.login ?? body.organization?.login ?? null; + const accountType = body.installation?.account?.type ?? (body.organization?.login ? "Organization" : null); + if (!accountLogin) { + githubWebhookLogger.info( + { + event, + action: body.action ?? null, + reason: "missing_installation_account", + }, + "ignored", + ); + return { ok: true }; + } + + const kind: FoundryOrganization["kind"] = accountType === "User" ? "personal" : "organization"; + const organizationId = organizationOrganizationId(kind, accountLogin); + const receivedAt = Date.now(); + const organization = await getOrCreateOrganization(c, organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.github.webhook_receipt.record"), + { organizationId, event, action: body.action ?? 
null, receivedAt }, + { wait: false }, + ); + const githubData = await getOrCreateGithubData(c, organizationId); + + if (event === "installation" && (body.action === "created" || body.action === "deleted" || body.action === "suspend" || body.action === "unsuspend")) { + githubWebhookLogger.info( + { + event, + action: body.action, + accountLogin, + organizationId, + }, + "installation_event", + ); + if (body.action === "deleted") { + await githubData.send( + githubDataWorkflowQueueName("githubData.command.clearState"), + { connectedAccount: accountLogin, installationStatus: "install_required", installationId: null, label: "GitHub App installation removed" }, + { wait: false }, + ); + } else if (body.action === "created") { + void githubData + .send( + githubDataWorkflowQueueName("githubData.command.syncRepos"), + { + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Syncing GitHub data from installation webhook...", + }, + { wait: false }, + ) + .catch(() => {}); + } else if (body.action === "suspend") { + await githubData.send( + githubDataWorkflowQueueName("githubData.command.clearState"), + { + connectedAccount: accountLogin, + installationStatus: "reconnect_required", + installationId: body.installation?.id ?? null, + label: "GitHub App installation suspended", + }, + { wait: false }, + ); + } else if (body.action === "unsuspend") { + void githubData + .send( + githubDataWorkflowQueueName("githubData.command.syncRepos"), + { + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after unsuspend...", + }, + { wait: false }, + ) + .catch(() => {}); + } + return { ok: true }; + } + + if (event === "installation_repositories") { + githubWebhookLogger.info( + { + event, + action: body.action ?? 
null, + accountLogin, + organizationId, + repositoriesAdded: body.repositories_added?.length ?? 0, + repositoriesRemoved: body.repositories_removed?.length ?? 0, + }, + "repository_membership_changed", + ); + void githubData + .send( + githubDataWorkflowQueueName("githubData.command.syncRepos"), + { + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after repository access change...", + }, + { wait: false }, + ) + .catch(() => {}); + return { ok: true }; + } + + if ( + event === "push" || + event === "pull_request" || + event === "pull_request_review" || + event === "pull_request_review_comment" || + event === "check_run" || + event === "check_suite" || + event === "status" || + event === "create" || + event === "delete" + ) { + const repoFullName = body.repository?.full_name; + if (repoFullName) { + githubWebhookLogger.info( + { + event, + action: body.action ?? null, + accountLogin, + organizationId, + repoFullName, + }, + "repository_event", + ); + if (event === "pull_request" && body.repository?.clone_url && body.pull_request) { + await githubData.send( + githubDataWorkflowQueueName("githubData.command.handlePullRequestWebhook"), + { + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + repository: { + fullName: body.repository.full_name, + cloneUrl: body.repository.clone_url, + private: Boolean(body.repository.private), + }, + pullRequest: { + number: body.pull_request.number, + status: body.pull_request.draft ? "draft" : "ready", + title: body.pull_request.title ?? "", + body: body.pull_request.body ?? null, + state: body.pull_request.state ?? "open", + url: body.pull_request.html_url ?? `https://github.com/${body.repository.full_name}/pull/${body.pull_request.number}`, + headRefName: body.pull_request.head?.ref ?? 
"", + baseRefName: body.pull_request.base?.ref ?? "", + authorLogin: body.pull_request.user?.login ?? null, + isDraft: Boolean(body.pull_request.draft), + merged: Boolean(body.pull_request.merged), + }, + }, + { wait: false }, + ); + } + } + return { ok: true }; + } + + githubWebhookLogger.info( + { + event, + action: body.action ?? null, + accountLogin, + organizationId, + }, + "unhandled_event", + ); + return { ok: true }; + }, +}; + +export async function syncOrganizationShellFromGithubMutation( + c: any, + input: { + userId: string; + userName: string; + userEmail: string; + githubUserLogin: string; + githubAccountId: string; + githubLogin: string; + githubAccountType: string; + kind: FoundryOrganization["kind"]; + displayName: string; + installationId: number | null; + appConfigured: boolean; + }, +): Promise<{ organizationId: string }> { + assertOrganizationShell(c); + const now = Date.now(); + const existing = await readOrganizationProfileRow(c); + const slug = existing?.slug ?? slugify(input.githubLogin); + const organizationId = organizationOrganizationId(input.kind, input.githubLogin); + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); + } + + const installationStatus = + input.kind === "personal" ? "connected" : input.installationId ? "connected" : input.appConfigured ? "install_required" : "reconnect_required"; + const syncStatus = existing?.githubSyncStatus ?? legacyRepoImportStatusToGithubSyncStatus(existing?.repoImportStatus); + const lastSyncLabel = + syncStatus === "synced" + ? existing.githubLastSyncLabel + : installationStatus === "connected" + ? "Waiting for first import" + : installationStatus === "install_required" + ? 
"GitHub App installation required" + : "GitHub App configuration incomplete"; + const hasStripeBillingState = Boolean(existing?.stripeCustomerId || existing?.stripeSubscriptionId || existing?.stripePriceId); + const defaultBillingPlanId = input.kind === "personal" || !hasStripeBillingState ? "free" : (existing?.billingPlanId ?? "team"); + const defaultSeatsIncluded = input.kind === "personal" || !hasStripeBillingState ? 1 : (existing?.billingSeatsIncluded ?? 5); + const defaultPaymentMethodLabel = + input.kind === "personal" + ? "No card required" + : hasStripeBillingState + ? (existing?.billingPaymentMethodLabel ?? "Payment method on file") + : "No payment method on file"; + + await c.db + .insert(organizationProfile) + .values({ + id: PROFILE_ROW_ID, + kind: input.kind, + githubAccountId: input.githubAccountId, + githubLogin: input.githubLogin, + githubAccountType: input.githubAccountType, + displayName: input.displayName, + slug, + defaultModel: existing?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + primaryDomain: existing?.primaryDomain ?? (input.kind === "personal" ? "personal" : `${slug}.github`), + autoImportRepos: existing?.autoImportRepos ?? 1, + repoImportStatus: existing?.repoImportStatus ?? "not_started", + githubConnectedAccount: input.githubLogin, + githubInstallationStatus: installationStatus, + githubSyncStatus: syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: lastSyncLabel, + githubLastSyncAt: existing?.githubLastSyncAt ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, + stripeCustomerId: existing?.stripeCustomerId ?? null, + stripeSubscriptionId: existing?.stripeSubscriptionId ?? null, + stripePriceId: existing?.stripePriceId ?? 
null, + billingPlanId: defaultBillingPlanId, + billingStatus: existing?.billingStatus ?? "active", + billingSeatsIncluded: defaultSeatsIncluded, + billingTrialEndsAt: existing?.billingTrialEndsAt ?? null, + billingRenewalAt: existing?.billingRenewalAt ?? null, + billingPaymentMethodLabel: defaultPaymentMethodLabel, + createdAt: existing?.createdAt ?? now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationProfile.id, + set: { + kind: input.kind, + githubAccountId: input.githubAccountId, + githubLogin: input.githubLogin, + githubAccountType: input.githubAccountType, + displayName: input.displayName, + githubConnectedAccount: input.githubLogin, + githubInstallationStatus: installationStatus, + githubSyncStatus: syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: lastSyncLabel, + githubLastSyncAt: existing?.githubLastSyncAt ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, + billingPlanId: defaultBillingPlanId, + billingSeatsIncluded: defaultSeatsIncluded, + billingPaymentMethodLabel: defaultPaymentMethodLabel, + updatedAt: now, + }, + }) + .run(); + + await c.db + .insert(organizationMembers) + .values({ + id: input.userId, + name: input.userName, + email: input.userEmail, + role: input.kind === "personal" ? "owner" : "admin", + state: "active", + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationMembers.id, + set: { + name: input.userName, + email: input.userEmail, + role: input.kind === "personal" ? "owner" : "admin", + state: "active", + updatedAt: now, + }, + }) + .run(); + + // Auto-trigger github-data sync when the org has a connected installation + // but hasn't synced yet. 
This handles the common case where a personal + // account or an org with an existing GitHub App installation signs in for + // the first time on a fresh DB — the installation webhook already fired + // before the org actor existed, so we kick off the sync here instead. + const needsInitialSync = installationStatus === "connected" && syncStatus === "pending"; + if (needsInitialSync) { + const githubData = await getOrCreateGithubData(c, organizationId); + void githubData + .send( + githubDataWorkflowQueueName("githubData.command.syncRepos"), + { + connectedAccount: input.githubLogin, + installationStatus: "connected", + installationId: input.installationId, + githubLogin: input.githubLogin, + kind: input.kind, + label: "Initial repository sync...", + }, + { wait: false }, + ) + .catch(() => {}); + } + + return { organizationId }; +} + +export async function updateOrganizationShellProfileMutation( + c: any, + input: Pick, +): Promise { + assertOrganizationShell(c); + const existing = await requireOrganizationProfileRow(c); + await c.db + .update(organizationProfile) + .set({ + displayName: input.displayName.trim() || existing.displayName, + slug: input.slug.trim() || existing.slug, + primaryDomain: input.primaryDomain.trim() || existing.primaryDomain, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function markOrganizationSyncStartedMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + githubSyncStatus: "syncing", + githubLastSyncLabel: input.label, + githubSyncPhase: "discovering_repositories", + githubProcessedRepositoryCount: 0, + githubTotalRepositoryCount: 0, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeCustomerMutation(c: any, input: { customerId: string }): Promise { + assertOrganizationShell(c); + await c.db + 
.update(organizationProfile) + .set({ + stripeCustomerId: input.customerId, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeSubscriptionMutation( + c: any, + input: { + subscription: { + id: string; + customerId: string; + priceId: string | null; + status: string; + cancelAtPeriodEnd: boolean; + currentPeriodEnd: number | null; + trialEnd: number | null; + defaultPaymentMethodLabel: string; + }; + fallbackPlanId: FoundryBillingPlanId; + }, +): Promise { + assertOrganizationShell(c); + const { appShell } = getActorRuntimeContext(); + const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? input.fallbackPlanId; + await c.db + .update(organizationProfile) + .set({ + stripeCustomerId: input.subscription.customerId || null, + stripeSubscriptionId: input.subscription.id || null, + stripePriceId: input.subscription.priceId, + billingPlanId: planId, + billingStatus: stripeStatusToBillingStatus(input.subscription.status, input.subscription.cancelAtPeriodEnd), + billingSeatsIncluded: seatsIncludedForPlan(planId), + billingTrialEndsAt: input.subscription.trialEnd ? new Date(input.subscription.trialEnd * 1000).toISOString() : null, + billingRenewalAt: input.subscription.currentPeriodEnd ? 
new Date(input.subscription.currentPeriodEnd * 1000).toISOString() : null, + billingPaymentMethodLabel: input.subscription.defaultPaymentMethodLabel || "Payment method on file", + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationFreePlanMutation(c: any, input: { clearSubscription: boolean }): Promise { + assertOrganizationShell(c); + const patch: Record = { + billingPlanId: "free", + billingStatus: "active", + billingSeatsIncluded: 1, + billingTrialEndsAt: null, + billingRenewalAt: null, + billingPaymentMethodLabel: "No card required", + updatedAt: Date.now(), + }; + if (input.clearSubscription) { + patch.stripeSubscriptionId = null; + patch.stripePriceId = null; + } + await c.db.update(organizationProfile).set(patch).where(eq(organizationProfile.id, PROFILE_ROW_ID)).run(); +} + +export async function setOrganizationBillingPaymentMethodMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingPaymentMethodLabel: input.label, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function setOrganizationBillingStatusMutation(c: any, input: { status: FoundryBillingState["status"] }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingStatus: input.status, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function upsertOrganizationInvoiceMutation( + c: any, + input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }, +): Promise { + assertOrganizationShell(c); + await c.db + .insert(invoices) + .values({ + id: input.id, + label: input.label, + issuedAt: input.issuedAt, + amountUsd: input.amountUsd, + status: input.status, + createdAt: Date.now(), + }) + .onConflictDoUpdate({ + target: 
invoices.id, + set: { + label: input.label, + issuedAt: input.issuedAt, + amountUsd: input.amountUsd, + status: input.status, + }, + }) + .run(); +} + +export async function recordOrganizationSeatUsageMutation(c: any, input: { email: string }): Promise { + assertOrganizationShell(c); + await c.db + .insert(seatAssignments) + .values({ + email: input.email, + createdAt: Date.now(), + }) + .onConflictDoNothing() + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/constants.ts b/foundry/packages/backend/src/actors/organization/constants.ts new file mode 100644 index 0000000..0b8e3c0 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/constants.ts @@ -0,0 +1 @@ +export const APP_SHELL_ORGANIZATION_ID = "app"; diff --git a/foundry/packages/backend/src/actors/organization/db/db.ts b/foundry/packages/backend/src/actors/organization/db/db.ts new file mode 100644 index 0000000..f7eb392 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const organizationDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts new file mode 100644 index 0000000..eb43667 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/organization/db/drizzle", + schema: "./src/actors/organization/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql new file mode 100644 index 0000000..80be04f --- /dev/null +++ 
b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql @@ -0,0 +1,92 @@ +CREATE TABLE `app_sessions` ( + `id` text PRIMARY KEY NOT NULL, + `current_user_id` text, + `current_user_name` text, + `current_user_email` text, + `current_user_github_login` text, + `current_user_role_label` text, + `eligible_organization_ids_json` text NOT NULL, + `active_organization_id` text, + `github_access_token` text, + `github_scope` text NOT NULL, + `starter_repo_status` text NOT NULL, + `starter_repo_starred_at` integer, + `starter_repo_skipped_at` integer, + `oauth_state` text, + `oauth_state_expires_at` integer, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `invoices` ( + `id` text PRIMARY KEY NOT NULL, + `label` text NOT NULL, + `issued_at` text NOT NULL, + `amount_usd` integer NOT NULL, + `status` text NOT NULL, + `created_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `organization_members` ( + `id` text PRIMARY KEY NOT NULL, + `name` text NOT NULL, + `email` text NOT NULL, + `role` text NOT NULL, + `state` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `organization_profile` ( + `id` text PRIMARY KEY NOT NULL, + `kind` text NOT NULL, + `github_account_id` text NOT NULL, + `github_login` text NOT NULL, + `github_account_type` text NOT NULL, + `display_name` text NOT NULL, + `slug` text NOT NULL, + `primary_domain` text NOT NULL, + `default_model` text NOT NULL, + `auto_import_repos` integer NOT NULL, + `repo_import_status` text NOT NULL, + `github_connected_account` text NOT NULL, + `github_installation_status` text NOT NULL, + `github_sync_status` text NOT NULL, + `github_installation_id` integer, + `github_last_sync_label` text NOT NULL, + `github_last_sync_at` integer, + `github_last_webhook_at` integer, + `github_last_webhook_event` text, + `github_sync_generation` integer NOT NULL, + `github_sync_phase` text, + 
`github_processed_repository_count` integer NOT NULL, + `github_total_repository_count` integer NOT NULL, + `stripe_customer_id` text, + `stripe_subscription_id` text, + `stripe_price_id` text, + `billing_plan_id` text NOT NULL, + `billing_status` text NOT NULL, + `billing_seats_included` integer NOT NULL, + `billing_trial_ends_at` text, + `billing_renewal_at` text, + `billing_payment_method_label` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `repos` ( + `repo_id` text PRIMARY KEY NOT NULL, + `remote_url` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `seat_assignments` ( + `email` text PRIMARY KEY NOT NULL, + `created_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `stripe_lookup` ( + `lookup_key` text PRIMARY KEY NOT NULL, + `organization_id` text NOT NULL, + `updated_at` integer NOT NULL +); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql new file mode 100644 index 0000000..fcd1b60 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql @@ -0,0 +1,50 @@ +CREATE TABLE IF NOT EXISTS `auth_session_index` ( + `session_id` text PRIMARY KEY NOT NULL, + `session_token` text NOT NULL, + `user_id` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS `auth_email_index` ( + `email` text PRIMARY KEY NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS `auth_account_index` ( + `id` text PRIMARY KEY NOT NULL, + `provider_id` text NOT NULL, + `account_id` text NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL +); +--> 
statement-breakpoint +CREATE TABLE IF NOT EXISTS `auth_verification` ( + `id` text PRIMARY KEY NOT NULL, + `identifier` text NOT NULL, + `value` text NOT NULL, + `expires_at` integer NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS `task_index` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `branch_name` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS `task_summaries` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `title` text NOT NULL, + `status` text NOT NULL, + `repo_name` text NOT NULL, + `updated_at_ms` integer NOT NULL, + `branch` text, + `pull_request_json` text, + `sessions_summary_json` text DEFAULT '[]' NOT NULL +); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..a29c546 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json @@ -0,0 +1,592 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "0bef30e4-148a-4fe1-b2ca-a9721893c3ac", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "app_sessions": { + "name": "app_sessions", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "current_user_id": { + "name": "current_user_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "current_user_name": { + "name": "current_user_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "current_user_email": { + "name": "current_user_email", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "current_user_github_login": { 
+ "name": "current_user_github_login", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "current_user_role_label": { + "name": "current_user_role_label", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "eligible_organization_ids_json": { + "name": "eligible_organization_ids_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "active_organization_id": { + "name": "active_organization_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_access_token": { + "name": "github_access_token", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_scope": { + "name": "github_scope", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "starter_repo_status": { + "name": "starter_repo_status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "starter_repo_starred_at": { + "name": "starter_repo_starred_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "starter_repo_skipped_at": { + "name": "starter_repo_skipped_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "oauth_state": { + "name": "oauth_state", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "oauth_state_expires_at": { + "name": "oauth_state_expires_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + 
"compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "invoices": { + "name": "invoices", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "label": { + "name": "label", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "issued_at": { + "name": "issued_at", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "amount_usd": { + "name": "amount_usd", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "organization_members": { + "name": "organization_members", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "role": { + "name": "role", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "state": { + "name": "state", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "organization_profile": { + 
"name": "organization_profile", + "columns": { + "id": { + "name": "id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "kind": { + "name": "kind", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_account_id": { + "name": "github_account_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_login": { + "name": "github_login", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_account_type": { + "name": "github_account_type", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "display_name": { + "name": "display_name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "slug": { + "name": "slug", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "primary_domain": { + "name": "primary_domain", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "default_model": { + "name": "default_model", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "auto_import_repos": { + "name": "auto_import_repos", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "repo_import_status": { + "name": "repo_import_status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_connected_account": { + "name": "github_connected_account", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_installation_status": { + "name": "github_installation_status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_sync_status": { + "name": "github_sync_status", + "type": "text", + "primaryKey": false, + "notNull": true, + 
"autoincrement": false + }, + "github_installation_id": { + "name": "github_installation_id", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_last_sync_label": { + "name": "github_last_sync_label", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_last_sync_at": { + "name": "github_last_sync_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_last_webhook_at": { + "name": "github_last_webhook_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_last_webhook_event": { + "name": "github_last_webhook_event", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_sync_generation": { + "name": "github_sync_generation", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_sync_phase": { + "name": "github_sync_phase", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_processed_repository_count": { + "name": "github_processed_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_total_repository_count": { + "name": "github_total_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "stripe_customer_id": { + "name": "stripe_customer_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "stripe_subscription_id": { + "name": "stripe_subscription_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "stripe_price_id": { + "name": "stripe_price_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "billing_plan_id": { + "name": "billing_plan_id", + "type": "text", 
+ "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "billing_status": { + "name": "billing_status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "billing_seats_included": { + "name": "billing_seats_included", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "billing_trial_ends_at": { + "name": "billing_trial_ends_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "billing_renewal_at": { + "name": "billing_renewal_at", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "billing_payment_method_label": { + "name": "billing_payment_method_label", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "repos": { + "name": "repos", + "columns": { + "repo_id": { + "name": "repo_id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "remote_url": { + "name": "remote_url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "seat_assignments": { + "name": "seat_assignments", 
+ "columns": { + "email": { + "name": "email", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "stripe_lookup": { + "name": "stripe_lookup", + "columns": { + "lookup_key": { + "name": "lookup_key", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "organization_id": { + "name": "organization_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json new file mode 100644 index 0000000..41ea23b --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json @@ -0,0 +1,20 @@ +{ + "version": "7", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1773376221152, + "tag": "0000_melted_viper", + "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773840000000, + "tag": "0001_add_auth_and_task_tables", + "breakpoints": true + } + ] +} diff --git a/foundry/packages/backend/src/actors/organization/db/migrations.ts b/foundry/packages/backend/src/actors/organization/db/migrations.ts new file mode 100644 index 0000000..2e8570b --- /dev/null +++ 
b/foundry/packages/backend/src/actors/organization/db/migrations.ts @@ -0,0 +1,180 @@ +// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. + +const journal = { + entries: [ + { + idx: 0, + when: 1773376221152, + tag: "0000_melted_viper", + breakpoints: true, + }, + { + idx: 1, + when: 1773840000000, + tag: "0001_add_auth_and_task_tables", + breakpoints: true, + }, + { + idx: 2, + when: 1773984000000, + tag: "0002_add_task_owner_columns", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`app_sessions\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`current_user_id\` text, + \`current_user_name\` text, + \`current_user_email\` text, + \`current_user_github_login\` text, + \`current_user_role_label\` text, + \`eligible_organization_ids_json\` text NOT NULL, + \`active_organization_id\` text, + \`github_access_token\` text, + \`github_scope\` text NOT NULL, + \`starter_repo_status\` text NOT NULL, + \`starter_repo_starred_at\` integer, + \`starter_repo_skipped_at\` integer, + \`oauth_state\` text, + \`oauth_state_expires_at\` integer, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`invoices\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`label\` text NOT NULL, + \`issued_at\` text NOT NULL, + \`amount_usd\` integer NOT NULL, + \`status\` text NOT NULL, + \`created_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`organization_members\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`email\` text NOT NULL, + \`role\` text NOT NULL, + \`state\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`organization_profile\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`kind\` text NOT NULL, + \`github_account_id\` text NOT NULL, + 
\`github_login\` text NOT NULL, + \`github_account_type\` text NOT NULL, + \`display_name\` text NOT NULL, + \`slug\` text NOT NULL, + \`primary_domain\` text NOT NULL, + \`default_model\` text NOT NULL, + \`auto_import_repos\` integer NOT NULL, + \`repo_import_status\` text NOT NULL, + \`github_connected_account\` text NOT NULL, + \`github_installation_status\` text NOT NULL, + \`github_sync_status\` text NOT NULL, + \`github_installation_id\` integer, + \`github_last_sync_label\` text NOT NULL, + \`github_last_sync_at\` integer, + \`github_last_webhook_at\` integer, + \`github_last_webhook_event\` text, + \`github_sync_generation\` integer NOT NULL, + \`github_sync_phase\` text, + \`github_processed_repository_count\` integer NOT NULL, + \`github_total_repository_count\` integer NOT NULL, + \`stripe_customer_id\` text, + \`stripe_subscription_id\` text, + \`stripe_price_id\` text, + \`billing_plan_id\` text NOT NULL, + \`billing_status\` text NOT NULL, + \`billing_seats_included\` integer NOT NULL, + \`billing_trial_ends_at\` text, + \`billing_renewal_at\` text, + \`billing_payment_method_label\` text NOT NULL, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`repos\` ( + \`repo_id\` text PRIMARY KEY NOT NULL, + \`remote_url\` text NOT NULL, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`seat_assignments\` ( + \`email\` text PRIMARY KEY NOT NULL, + \`created_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`stripe_lookup\` ( + \`lookup_key\` text PRIMARY KEY NOT NULL, + \`organization_id\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +`, + m0001: `CREATE TABLE IF NOT EXISTS \`auth_session_index\` ( + \`session_id\` text PRIMARY KEY NOT NULL, + \`session_token\` text NOT NULL, + \`user_id\` text NOT NULL, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint 
+CREATE TABLE IF NOT EXISTS \`auth_email_index\` ( + \`email\` text PRIMARY KEY NOT NULL, + \`user_id\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS \`auth_account_index\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`provider_id\` text NOT NULL, + \`account_id\` text NOT NULL, + \`user_id\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS \`auth_verification\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`identifier\` text NOT NULL, + \`value\` text NOT NULL, + \`expires_at\` integer NOT NULL, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS \`task_index\` ( + \`task_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS \`task_summaries\` ( + \`task_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`title\` text NOT NULL, + \`status\` text NOT NULL, + \`repo_name\` text NOT NULL, + \`updated_at_ms\` integer NOT NULL, + \`branch\` text, + \`pull_request_json\` text, + \`sessions_summary_json\` text DEFAULT '[]' NOT NULL +); +`, + m0002: `ALTER TABLE \`task_summaries\` ADD COLUMN \`primary_user_login\` text; +--> statement-breakpoint +ALTER TABLE \`task_summaries\` ADD COLUMN \`primary_user_avatar_url\` text; +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/organization/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts new file mode 100644 index 0000000..3978a5f --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -0,0 +1,160 @@ +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; + +// SQLite is per 
organization actor instance, so no organizationId column needed. + +/** + * Coordinator index of TaskActor instances. + * The organization actor is the direct coordinator for tasks (not a per-repo + * actor) because the sidebar needs to query all tasks across all repos on + * every snapshot. With many repos, fanning out to N repo actors on the hot + * read path is too expensive — owning the index here keeps that a single + * local table scan. Each row maps a taskId to its repo and immutable branch + * name. Used for branch conflict checking (scoped by repoId) and + * task-by-branch lookups. + */ +export const taskIndex = sqliteTable("task_index", { + taskId: text("task_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + branchName: text("branch_name"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** + * Organization-owned materialized task summary projection. + * Task actors push summary updates directly to the organization coordinator, + * which keeps this table local for fast list/lookups without fan-out. + * Same rationale as taskIndex: the sidebar repeatedly reads all tasks across + * all repos, so the org must own the materialized view to avoid O(repos) + * actor fan-out on the hot read path. 
+ */ +export const taskSummaries = sqliteTable("task_summaries", { + taskId: text("task_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + title: text("title").notNull(), + status: text("status").notNull(), + repoName: text("repo_name").notNull(), + updatedAtMs: integer("updated_at_ms").notNull(), + branch: text("branch"), + pullRequestJson: text("pull_request_json"), + sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"), + primaryUserLogin: text("primary_user_login"), + primaryUserAvatarUrl: text("primary_user_avatar_url"), +}); + +export const organizationProfile = sqliteTable( + "organization_profile", + { + id: integer("id").primaryKey(), + kind: text("kind").notNull(), + githubAccountId: text("github_account_id").notNull(), + githubLogin: text("github_login").notNull(), + githubAccountType: text("github_account_type").notNull(), + displayName: text("display_name").notNull(), + slug: text("slug").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + primaryDomain: text("primary_domain").notNull(), + autoImportRepos: integer("auto_import_repos").notNull(), + repoImportStatus: text("repo_import_status").notNull(), + githubConnectedAccount: text("github_connected_account").notNull(), + githubInstallationStatus: text("github_installation_status").notNull(), + githubSyncStatus: text("github_sync_status").notNull(), + githubInstallationId: integer("github_installation_id"), + githubLastSyncLabel: text("github_last_sync_label").notNull(), + githubLastSyncAt: integer("github_last_sync_at"), + githubLastWebhookAt: integer("github_last_webhook_at"), + githubLastWebhookEvent: text("github_last_webhook_event"), + githubSyncGeneration: integer("github_sync_generation").notNull(), + githubSyncPhase: text("github_sync_phase"), + githubProcessedRepositoryCount: integer("github_processed_repository_count").notNull(), + githubTotalRepositoryCount: 
integer("github_total_repository_count").notNull(), + stripeCustomerId: text("stripe_customer_id"), + stripeSubscriptionId: text("stripe_subscription_id"), + stripePriceId: text("stripe_price_id"), + billingPlanId: text("billing_plan_id").notNull(), + billingStatus: text("billing_status").notNull(), + billingSeatsIncluded: integer("billing_seats_included").notNull(), + billingTrialEndsAt: text("billing_trial_ends_at"), + billingRenewalAt: text("billing_renewal_at"), + billingPaymentMethodLabel: text("billing_payment_method_label").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("organization_profile_singleton_id_check", sql`${table.id} = 1`)], +); + +export const organizationMembers = sqliteTable("organization_members", { + id: text("id").notNull().primaryKey(), + name: text("name").notNull(), + email: text("email").notNull(), + role: text("role").notNull(), + state: text("state").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const seatAssignments = sqliteTable("seat_assignments", { + email: text("email").notNull().primaryKey(), + createdAt: integer("created_at").notNull(), +}); + +export const invoices = sqliteTable("invoices", { + id: text("id").notNull().primaryKey(), + label: text("label").notNull(), + issuedAt: text("issued_at").notNull(), + amountUsd: integer("amount_usd").notNull(), + status: text("status").notNull(), + createdAt: integer("created_at").notNull(), +}); + +/** + * Coordinator index of AuthUserActor instances — routes session token → userId. + * Better Auth adapter uses this to resolve which user actor to query + * before the user identity is known. 
+ */ +export const authSessionIndex = sqliteTable("auth_session_index", { + sessionId: text("session_id").notNull().primaryKey(), + sessionToken: text("session_token").notNull(), + userId: text("user_id").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** + * Coordinator index of AuthUserActor instances — routes email → userId. + * Better Auth adapter uses this to resolve which user actor to query. + */ +export const authEmailIndex = sqliteTable("auth_email_index", { + email: text("email").notNull().primaryKey(), + userId: text("user_id").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** + * Coordinator index of AuthUserActor instances — routes OAuth account → userId. + * Better Auth adapter uses this to resolve which user actor to query. + */ +export const authAccountIndex = sqliteTable("auth_account_index", { + id: text("id").notNull().primaryKey(), + providerId: text("provider_id").notNull(), + accountId: text("account_id").notNull(), + userId: text("user_id").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authVerification = sqliteTable("auth_verification", { + id: text("id").notNull().primaryKey(), + identifier: text("identifier").notNull(), + value: text("value").notNull(), + expiresAt: integer("expires_at").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const stripeLookup = sqliteTable("stripe_lookup", { + lookupKey: text("lookup_key").notNull().primaryKey(), + organizationId: text("organization_id").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/organization/index.ts b/foundry/packages/backend/src/actors/organization/index.ts new file mode 100644 index 0000000..9ceb27f --- /dev/null +++ 
b/foundry/packages/backend/src/actors/organization/index.ts @@ -0,0 +1,23 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import { organizationDb } from "./db/db.js"; +import { organizationActions } from "./actions.js"; +import { runOrganizationWorkflow } from "./workflow.js"; +import { ORGANIZATION_QUEUE_NAMES } from "./queues.js"; + +export const organization = actor({ + db: organizationDb, + queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Organization", + icon: "compass", + actionTimeout: 5 * 60_000, + }, + createState: (_c, organizationId: string) => ({ + organizationId, + }), + actions: { + ...organizationActions, + }, + run: workflow(runOrganizationWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/organization/queues.ts b/foundry/packages/backend/src/actors/organization/queues.ts new file mode 100644 index 0000000..2e67dc5 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/queues.ts @@ -0,0 +1,26 @@ +export const ORGANIZATION_QUEUE_NAMES = [ + "organization.command.createTask", + "organization.command.materializeTask", + "organization.command.applyTaskSummaryUpdate", + "organization.command.removeTaskSummary", + "organization.command.refreshTaskSummaryForBranch", + "organization.command.snapshot.broadcast", + "organization.command.syncGithubSession", + "organization.command.github.organization_shell.sync_from_github", + "organization.command.github.sync_progress.apply", + "organization.command.github.webhook_receipt.record", + "organization.command.shell.sync_started.mark", + "organization.command.billing.stripe_customer.apply", + "organization.command.billing.stripe_subscription.apply", + "organization.command.billing.free_plan.apply", + "organization.command.billing.payment_method.set", + "organization.command.billing.status.set", + "organization.command.billing.invoice.upsert", + 
"organization.command.billing.seat_usage.record", +] as const; + +export type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; + +export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { + return name; +} diff --git a/foundry/packages/backend/src/actors/organization/workflow.ts b/foundry/packages/backend/src/actors/organization/workflow.ts new file mode 100644 index 0000000..e62e80d --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/workflow.ts @@ -0,0 +1,164 @@ +// @ts-nocheck +/** + * Organization workflow — queue-based command loop. + * + * Mutations are dispatched through named queues and processed inside workflow + * steps so that every command appears in the RivetKit inspector's workflow + * history. Read actions remain direct (no queue). + * + * Callers send commands directly via `.send()` to the appropriate queue name. + */ +import { Loop } from "rivetkit/workflow"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { ORGANIZATION_QUEUE_NAMES, type OrganizationQueueName } from "./queues.js"; + +import { applyGithubSyncProgressMutation, recordGithubWebhookReceiptMutation, refreshOrganizationSnapshotMutation } from "./actions.js"; +import { + applyTaskSummaryUpdateMutation, + createTaskMutation, + refreshTaskSummaryForBranchMutation, + removeTaskSummaryMutation, +} from "./actions/task-mutations.js"; +import { + applyOrganizationFreePlanMutation, + applyOrganizationStripeCustomerMutation, + applyOrganizationStripeSubscriptionMutation, + markOrganizationSyncStartedMutation, + recordOrganizationSeatUsageMutation, + setOrganizationBillingPaymentMethodMutation, + setOrganizationBillingStatusMutation, + syncOrganizationShellFromGithubMutation, + upsertOrganizationInvoiceMutation, +} from "./app-shell.js"; + +// --------------------------------------------------------------------------- +// Workflow command loop — runs inside `run: 
workflow(runOrganizationWorkflow)` +// --------------------------------------------------------------------------- + +type WorkflowHandler = (loopCtx: any, body: any) => Promise<unknown>; + +/** + * Maps queue names to their mutation handlers. + * Each handler receives the workflow loop context and the message body, + * executes the mutation, and returns the result (which is sent back via + * msg.complete). + */ +const COMMAND_HANDLERS: Record<OrganizationQueueName, WorkflowHandler> = { + // Task mutations + "organization.command.createTask": async (c, body) => createTaskMutation(c, body), + "organization.command.materializeTask": async (c, body) => createTaskMutation(c, body), + "organization.command.applyTaskSummaryUpdate": async (c, body) => { + await applyTaskSummaryUpdateMutation(c, body); + return { ok: true }; + }, + "organization.command.removeTaskSummary": async (c, body) => { + await removeTaskSummaryMutation(c, body); + return { ok: true }; + }, + "organization.command.refreshTaskSummaryForBranch": async (c, body) => { + await refreshTaskSummaryForBranchMutation(c, body); + return { ok: true }; + }, + "organization.command.snapshot.broadcast": async (c, _body) => { + await refreshOrganizationSnapshotMutation(c); + return { ok: true }; + }, + "organization.command.syncGithubSession": async (c, body) => { + const { syncGithubOrganizations } = await import("./app-shell.js"); + await syncGithubOrganizations(c, body); + return { ok: true }; + }, + + // GitHub organization shell sync (stays on queue) + "organization.command.github.organization_shell.sync_from_github": async (c, body) => syncOrganizationShellFromGithubMutation(c, body), + + // GitHub sync progress + webhook receipt + "organization.command.github.sync_progress.apply": async (c, body) => { + await applyGithubSyncProgressMutation(c, body); + return { ok: true }; + }, + "organization.command.github.webhook_receipt.record": async (c, body) => { + await recordGithubWebhookReceiptMutation(c, body); + return { ok: true }; + },
"organization.command.shell.sync_started.mark": async (c, body) => { + await markOrganizationSyncStartedMutation(c, body); + return { ok: true }; + }, + + // Billing mutations + "organization.command.billing.stripe_customer.apply": async (c, body) => { + await applyOrganizationStripeCustomerMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.stripe_subscription.apply": async (c, body) => { + await applyOrganizationStripeSubscriptionMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.free_plan.apply": async (c, body) => { + await applyOrganizationFreePlanMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.payment_method.set": async (c, body) => { + await setOrganizationBillingPaymentMethodMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.status.set": async (c, body) => { + await setOrganizationBillingStatusMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.invoice.upsert": async (c, body) => { + await upsertOrganizationInvoiceMutation(c, body); + return { ok: true }; + }, + "organization.command.billing.seat_usage.record": async (c, body) => { + await recordOrganizationSeatUsageMutation(c, body); + return { ok: true }; + }, +}; + +export async function runOrganizationWorkflow(ctx: any): Promise { + await ctx.loop("organization-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-organization-command", { + names: [...ORGANIZATION_QUEUE_NAMES], + completable: true, + }); + + if (!msg) { + return Loop.continue(undefined); + } + + const handler = COMMAND_HANDLERS[msg.name as OrganizationQueueName]; + if (!handler) { + logActorWarning("organization", "unknown organization command", { command: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }).catch(() => {}); + return Loop.continue(undefined); + } + + try { + // Wrap in a step so c.state and c.db are accessible inside mutation 
functions. + const result = await loopCtx.step({ + name: msg.name, + timeout: 10 * 60_000, + run: async () => handler(loopCtx, msg.body), + }); + try { + await msg.complete(result); + } catch (completeError) { + logActorWarning("organization", "organization workflow failed completing response", { + command: msg.name, + error: resolveErrorMessage(completeError), + }); + } + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("organization", "organization workflow command failed", { + command: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} diff --git a/foundry/packages/backend/src/actors/polling.ts b/foundry/packages/backend/src/actors/polling.ts new file mode 100644 index 0000000..8de9c34 --- /dev/null +++ b/foundry/packages/backend/src/actors/polling.ts @@ -0,0 +1,189 @@ +import { Loop } from "rivetkit/workflow"; +import { normalizeMessages } from "../services/queue.js"; + +export interface PollingControlState { + intervalMs: number; + running: boolean; +} + +export interface PollingControlQueueNames { + start: string; + stop: string; + setInterval: string; + force: string; +} + +export interface PollingQueueMessage { + name: string; + body: unknown; + complete(response: unknown): Promise<void>; +} + +interface PollingActorContext<TState extends PollingControlState = PollingControlState> { + state: TState; + abortSignal: AbortSignal; + queue: { + nextBatch(options: { names: readonly string[]; timeout: number; count: number; completable: true }): Promise<unknown>; + }; +} + +interface RunPollingOptions<TState extends PollingControlState = PollingControlState> { + control: PollingControlQueueNames; + onPoll(c: PollingActorContext<TState>): Promise<void>; +} + +export async function runPollingControlLoop<TState extends PollingControlState>( + c: PollingActorContext<TState>, + options: RunPollingOptions<TState>, +): Promise<void> { + while (!c.abortSignal.aborted) { + const messages = normalizeMessages( + await c.queue.nextBatch({ + names: [options.control.start, options.control.stop, options.control.setInterval, options.control.force],
timeout: Math.max(500, c.state.intervalMs), + count: 16, + completable: true, + }), + ) as PollingQueueMessage[]; + + if (messages.length === 0) { + if (!c.state.running) { + continue; + } + await options.onPoll(c); + continue; + } + + for (const msg of messages) { + if (msg.name === options.control.start) { + c.state.running = true; + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.stop) { + c.state.running = false; + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.setInterval) { + const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs); + c.state.intervalMs = Number.isFinite(intervalMs) ? Math.max(500, intervalMs) : c.state.intervalMs; + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.force) { + await options.onPoll(c); + await msg.complete({ ok: true }); + } + } + } +} + +interface WorkflowPollingActorContext<TState extends PollingControlState = PollingControlState> { + state: TState; + loop(config: { name: string; historyEvery: number; historyKeep: number; run(ctx: WorkflowPollingActorContext<TState>): Promise<unknown> }): Promise<void>; +} + +interface WorkflowPollingQueueMessage extends PollingQueueMessage {} + +interface WorkflowPollingLoopContext<TState extends PollingControlState = PollingControlState> { + state: TState; + queue: { + nextBatch( + name: string, + options: { + names: readonly string[]; + timeout: number; + count: number; + completable: true; + }, + ): Promise<unknown>; + }; + step<T>( + nameOrConfig: + | string + | { + name: string; + timeout?: number; + run: () => Promise<T>; + }, + run?: () => Promise<T>, + ): Promise<T>; +} + +export async function runWorkflowPollingLoop( + ctx: any, + options: RunPollingOptions & { loopName: string }, +): Promise<void> { + await ctx.loop(options.loopName, async (loopCtx: WorkflowPollingLoopContext) => { + const control = await loopCtx.step("read-control-state", async () => ({ + intervalMs: Math.max(500, Number(loopCtx.state.intervalMs) || 500), + running: Boolean(loopCtx.state.running), + })); + + const messages =
normalizeMessages( + await loopCtx.queue.nextBatch("next-polling-control-batch", { + names: [options.control.start, options.control.stop, options.control.setInterval, options.control.force], + timeout: control.running ? control.intervalMs : 60_000, + count: 16, + completable: true, + }), + ) as WorkflowPollingQueueMessage[]; + + if (messages.length === 0) { + if (control.running) { + await loopCtx.step({ + name: "poll-tick", + timeout: 5 * 60_000, + run: async () => { + await options.onPoll(loopCtx as unknown as PollingActorContext); + }, + }); + } + return Loop.continue(undefined); + } + + for (const msg of messages) { + if (msg.name === options.control.start) { + await loopCtx.step("control-start", async () => { + loopCtx.state.running = true; + }); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.stop) { + await loopCtx.step("control-stop", async () => { + loopCtx.state.running = false; + }); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.setInterval) { + await loopCtx.step("control-set-interval", async () => { + const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs); + loopCtx.state.intervalMs = Number.isFinite(intervalMs) ? 
Math.max(500, intervalMs) : loopCtx.state.intervalMs; + }); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === options.control.force) { + await loopCtx.step({ + name: "control-force", + timeout: 5 * 60_000, + run: async () => { + await options.onPoll(loopCtx as unknown as PollingActorContext); + }, + }); + await msg.complete({ ok: true }); + } + } + + return Loop.continue(undefined); + }); +} diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts new file mode 100644 index 0000000..0444d9b --- /dev/null +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -0,0 +1,646 @@ +// @ts-nocheck +import { actor, queue } from "rivetkit"; +import { workflow, Loop } from "rivetkit/workflow"; +import { e2b, sandboxActor } from "rivetkit/sandbox"; +import { existsSync } from "node:fs"; +import Dockerode from "dockerode"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS, workspaceModelGroupsFromSandboxAgents, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; +import { SandboxAgent } from "sandbox-agent"; +import { getActorRuntimeContext } from "../context.js"; +import { organizationKey } from "../keys.js"; +import { selfTaskSandbox } from "../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { expectQueueResponse } from "../../services/queue.js"; +import { resolveSandboxProviderId } from "../../sandbox-config.js"; + +/** + * Default repo CWD inside the sandbox. The actual path is resolved dynamically + * via `$HOME/repo` because different sandbox providers run as different users + * (e.g. E2B uses `/home/user`, local Docker uses `/home/sandbox`). 
+ */ +const DEFAULT_SANDBOX_REPO_CWD = "/home/user/repo"; +const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:foundry-base-latest"; +const DEFAULT_LOCAL_SANDBOX_PORT = 2468; +const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); + +function parseTaskSandboxKey(key: readonly string[]): { organizationId: string; taskId: string } { + if (key.length !== 4 || key[0] !== "org" || key[2] !== "sandbox") { + throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`); + } + + return { + organizationId: key[1]!, + taskId: key[3]!, + }; +} + +function preferredDockerHost(): string { + if (process.env.FOUNDRY_DOCKER_HOST?.trim()) { + return process.env.FOUNDRY_DOCKER_HOST.trim(); + } + + return existsSync("/.dockerenv") ? "host.docker.internal" : "127.0.0.1"; +} + +function preferredPublicDockerHost(): string { + if (process.env.FOUNDRY_PUBLIC_SANDBOX_HOST?.trim()) { + return process.env.FOUNDRY_PUBLIC_SANDBOX_HOST.trim(); + } + + return "127.0.0.1"; +} + +function localSandboxAgentPort(): number { + const raw = process.env.FOUNDRY_LOCAL_SANDBOX_PORT?.trim() ?? process.env.HF_LOCAL_SANDBOX_PORT?.trim() ?? ""; + const parsed = Number(raw); + if (Number.isInteger(parsed) && parsed > 0 && parsed <= 65535) { + return parsed; + } + return DEFAULT_LOCAL_SANDBOX_PORT; +} + +function sandboxEnvPairs(): string[] { + const openAiApiKey = process.env.OPENAI_API_KEY; + const entries = [ + ["ANTHROPIC_API_KEY", process.env.ANTHROPIC_API_KEY], + ["CLAUDE_API_KEY", process.env.CLAUDE_API_KEY ?? process.env.ANTHROPIC_API_KEY], + ["OPENAI_API_KEY", openAiApiKey], + // Codex ACP prefers CODEX_API_KEY when present. In dev we want that to be the + // actual OpenAI API key, not an unrelated local Codex auth token. + ["CODEX_API_KEY", openAiApiKey ?? process.env.CODEX_API_KEY], + ["GH_TOKEN", process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN], + ["GITHUB_TOKEN", process.env.GITHUB_TOKEN ?? 
process.env.GH_TOKEN], + ["E2B_API_KEY", process.env.E2B_API_KEY], + ]; + + return entries + .filter((entry): entry is [string, string] => typeof entry[1] === "string" && entry[1].trim().length > 0) + .map(([key, value]) => `${key}=${value}`); +} + +function sandboxEnvObject(): Record { + return Object.fromEntries( + sandboxEnvPairs().map((entry) => { + const [key, ...rest] = entry.split("="); + return [key!, rest.join("=")]; + }), + ); +} + +function modeIdForAgent(agent?: string | null): string | null { + switch (agent) { + case "codex": + return "full-access"; + case "claude": + return "acceptEdits"; + default: + return null; + } +} + +async function getPublishedDockerPort(sandboxId: string, containerPort: number): Promise { + const info = await dockerClient.getContainer(sandboxId).inspect(); + const hostPort = info.NetworkSettings?.Ports?.[`${containerPort}/tcp`]?.[0]?.HostPort; + if (!hostPort) { + throw new Error(`docker sandbox-agent port ${containerPort} is not published`); + } + return Number(hostPort); +} + +function createLocalSandboxProvider(image: string): any { + const agentPort = localSandboxAgentPort(); + const backendHost = preferredDockerHost(); + const publicHost = preferredPublicDockerHost(); + + return { + name: "docker", + + async create(_context: any): Promise { + const container = await dockerClient.createContainer({ + Image: image, + Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)], + Env: sandboxEnvPairs(), + ExposedPorts: { + [`${agentPort}/tcp`]: {}, + }, + HostConfig: { + AutoRemove: true, + PortBindings: { + [`${agentPort}/tcp`]: [{ HostPort: "0" }], + }, + }, + }); + + await container.start(); + return container.id; + }, + + async destroy(sandboxId: string): Promise { + const container = dockerClient.getContainer(sandboxId); + try { + await container.stop({ t: 5 }); + } catch {} + try { + await container.remove({ force: true }); + } catch {} + }, + + async getUrl(sandboxId: string): Promise { + const 
hostPort = await getPublishedDockerPort(sandboxId, agentPort); + return `http://${publicHost}:${hostPort}`; + }, + + async connectAgent(sandboxId: string, connectOptions: any): Promise { + const hostPort = await getPublishedDockerPort(sandboxId, agentPort); + return await SandboxAgent.connect({ + baseUrl: `http://${backendHost}:${hostPort}`, + ...connectOptions, + }); + }, + }; +} + +function sanitizeActorResult(value: unknown, seen = new WeakSet()): unknown { + if (typeof value === "function" || value === undefined) { + return undefined; + } + + if (value && typeof value === "object") { + const maybeToRecord = (value as { toRecord?: unknown }).toRecord; + if (typeof maybeToRecord === "function") { + return sanitizeActorResult(maybeToRecord.call(value), seen); + } + } + + if (value === null || typeof value !== "object") { + return value; + } + + if (value instanceof Date) { + return value.toISOString(); + } + + if (Array.isArray(value)) { + return value.map((entry) => sanitizeActorResult(entry, seen)).filter((entry) => entry !== undefined); + } + + if (seen.has(value)) { + return undefined; + } + seen.add(value); + + const next: Record = {}; + for (const [key, entry] of Object.entries(value)) { + const sanitized = sanitizeActorResult(entry, seen); + if (sanitized !== undefined) { + next[key] = sanitized; + } + } + return next; +} + +const baseTaskSandbox = sandboxActor({ + createProvider: async (c) => { + const { config } = getActorRuntimeContext(); + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, + }); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); + + if (sandboxProviderId === "e2b") { + return e2b({ + create: () => ({ + template: config.sandboxProviders.e2b.template ?? 
"sandbox-agent-full-0.5.x", + envs: sandboxEnvObject(), + // TEMPORARY: Default E2B timeout is 5 minutes which is too short. + // Set to 1 hour as a stopgap. Remove this once the E2B provider in + // sandbox-agent uses betaCreate + autoPause (see + // .context/proposal-rivetkit-sandbox-resilience.md). At that point + // the provider handles timeout/pause lifecycle and this override is + // unnecessary. + timeoutMs: 60 * 60 * 1000, + }), + installAgents: ["claude", "codex"], + }); + } + + return createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + }, +}); + +async function broadcastProcesses(c: any, actions: Record Promise>): Promise { + try { + const listed = await actions.listProcesses(c); + c.broadcast("processesUpdated", { + type: "processesUpdated", + processes: listed.processes ?? [], + }); + } catch (error) { + // Process broadcasts are best-effort. Callers still receive the primary action result. + logActorWarning("taskSandbox", "broadcastProcesses failed", { + sandboxId: c.state?.sandboxId, + error: resolveErrorMessage(error), + }); + } +} + +async function providerForConnection(c: any): Promise { + if (c.state.sandboxDestroyed || !c.state.sandboxId) { + return null; + } + + if (c.vars.provider) { + return c.vars.provider; + } + + const providerFactory = baseTaskSandbox.config.actions as Record; + void providerFactory; + const { config } = getActorRuntimeContext(); + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, + }); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); + + const provider = + sandboxProviderId === "e2b" + ? e2b({ + create: () => ({ + template: config.sandboxProviders.e2b.template ?? 
"sandbox-agent-full-0.5.x", + envs: sandboxEnvObject(), + }), + installAgents: ["claude", "codex"], + }) + : createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + + c.vars.provider = provider; + return provider; +} + +async function listWorkspaceModelGroupsForSandbox(c: any): Promise { + const provider = await providerForConnection(c); + if (!provider || !c.state.sandboxId || typeof provider.connectAgent !== "function") { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } + + try { + const client = await provider.connectAgent(c.state.sandboxId, { + waitForHealth: { + timeoutMs: 15_000, + }, + }); + const listed = await client.listAgents({ config: true }); + const groups = workspaceModelGroupsFromSandboxAgents(Array.isArray(listed?.agents) ? listed.agents : []); + return groups.length > 0 ? groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } +} + +const baseActions = baseTaskSandbox.config.actions as Record Promise>; + +// --------------------------------------------------------------------------- +// Dynamic repo CWD resolution +// --------------------------------------------------------------------------- + +let cachedRepoCwd: string | null = null; + +/** + * Resolve the repo CWD inside the sandbox by querying `$HOME`. + * Different providers run as different users (E2B: `/home/user`, local Docker: + * `/home/sandbox`), so the path must be resolved dynamically. The result is + * cached for the lifetime of this sandbox actor instance. + */ +async function resolveRepoCwd(c: any): Promise { + if (cachedRepoCwd) return cachedRepoCwd; + + try { + const result = await baseActions.runProcess(c, { + command: "bash", + args: ["-lc", "echo $HOME"], + cwd: "/", + timeoutMs: 10_000, + }); + const home = (result.stdout ?? result.result ?? 
"").trim(); + if (home && home.startsWith("/")) { + cachedRepoCwd = `${home}/repo`; + return cachedRepoCwd; + } + } catch (error) { + logActorWarning("taskSandbox", "failed to resolve $HOME, using default", { + error: resolveErrorMessage(error), + }); + } + + cachedRepoCwd = DEFAULT_SANDBOX_REPO_CWD; + return cachedRepoCwd; +} + +// --------------------------------------------------------------------------- +// Queue names for sandbox actor +// --------------------------------------------------------------------------- + +const SANDBOX_QUEUE_NAMES = [ + "sandbox.command.createSession", + "sandbox.command.resumeOrCreateSession", + "sandbox.command.destroySession", + "sandbox.command.createProcess", + "sandbox.command.stopProcess", + "sandbox.command.killProcess", + "sandbox.command.deleteProcess", +] as const; + +type SandboxQueueName = (typeof SANDBOX_QUEUE_NAMES)[number]; + +function sandboxWorkflowQueueName(name: SandboxQueueName): SandboxQueueName { + return name; +} + +// --------------------------------------------------------------------------- +// Mutation handlers — executed inside the workflow command loop +// --------------------------------------------------------------------------- + +async function createSessionMutation(c: any, request: any): Promise { + const session = await baseActions.createSession(c, request); + const sessionId = typeof request?.id === "string" && request.id.length > 0 ? request.id : session?.id; + const modeId = modeIdForAgent(request?.agent); + if (sessionId && modeId) { + try { + await baseActions.rawSendSessionMethod(c, sessionId, "session/set_mode", { modeId }); + } catch { + // Session mode updates are best-effort. 
+ } + } + return sanitizeActorResult(session); +} + +async function resumeOrCreateSessionMutation(c: any, request: any): Promise { + return sanitizeActorResult(await baseActions.resumeOrCreateSession(c, request)); +} + +async function destroySessionMutation(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.destroySession(c, sessionId)); +} + +async function createProcessMutation(c: any, request: any): Promise { + const created = await baseActions.createProcess(c, request); + await broadcastProcesses(c, baseActions); + return created; +} + +async function runProcessMutation(c: any, request: any): Promise { + const result = await baseActions.runProcess(c, request); + await broadcastProcesses(c, baseActions); + return result; +} + +async function stopProcessMutation(c: any, processId: string, query?: any): Promise { + const stopped = await baseActions.stopProcess(c, processId, query); + await broadcastProcesses(c, baseActions); + return stopped; +} + +async function killProcessMutation(c: any, processId: string, query?: any): Promise { + const killed = await baseActions.killProcess(c, processId, query); + await broadcastProcesses(c, baseActions); + return killed; +} + +async function deleteProcessMutation(c: any, processId: string): Promise { + await baseActions.deleteProcess(c, processId); + await broadcastProcesses(c, baseActions); +} + +// --------------------------------------------------------------------------- +// Workflow command loop +// --------------------------------------------------------------------------- + +type SandboxWorkflowHandler = (loopCtx: any, body: any) => Promise; + +const SANDBOX_COMMAND_HANDLERS: Record = { + "sandbox.command.createSession": async (c, body) => createSessionMutation(c, body), + "sandbox.command.resumeOrCreateSession": async (c, body) => resumeOrCreateSessionMutation(c, body), + "sandbox.command.destroySession": async (c, body) => destroySessionMutation(c, body?.sessionId), + 
"sandbox.command.createProcess": async (c, body) => createProcessMutation(c, body), + "sandbox.command.stopProcess": async (c, body) => stopProcessMutation(c, body?.processId, body?.query), + "sandbox.command.killProcess": async (c, body) => killProcessMutation(c, body?.processId, body?.query), + "sandbox.command.deleteProcess": async (c, body) => { + await deleteProcessMutation(c, body?.processId); + return { ok: true }; + }, +}; + +async function runSandboxWorkflow(ctx: any): Promise { + await ctx.loop("sandbox-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-sandbox-command", { + names: [...SANDBOX_QUEUE_NAMES], + completable: true, + }); + + if (!msg) { + return Loop.continue(undefined); + } + + const handler = SANDBOX_COMMAND_HANDLERS[msg.name as SandboxQueueName]; + if (!handler) { + logActorWarning("taskSandbox", "unknown sandbox command", { command: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }).catch(() => {}); + return Loop.continue(undefined); + } + + try { + // Wrap in a step so c.state and c.db are accessible inside mutation functions. 
+ const result = await loopCtx.step({ + name: msg.name, + timeout: 10 * 60_000, + run: async () => handler(loopCtx, msg.body), + }); + try { + await msg.complete(result); + } catch (completeError) { + logActorWarning("taskSandbox", "sandbox workflow failed completing response", { + command: msg.name, + error: resolveErrorMessage(completeError), + }); + } + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("taskSandbox", "sandbox workflow command failed", { + command: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + +// --------------------------------------------------------------------------- +// Actor definition +// --------------------------------------------------------------------------- + +export const taskSandbox = actor({ + ...baseTaskSandbox.config, + queues: Object.fromEntries(SANDBOX_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + ...baseTaskSandbox.config.options, + actionTimeout: 10 * 60_000, + }, + actions: { + ...baseActions, + + // Read actions — direct (no queue) + async resumeSession(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.resumeSession(c, sessionId)); + }, + + async getSession(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.getSession(c, sessionId)); + }, + + async listSessions(c: any, query?: any): Promise { + return sanitizeActorResult(await baseActions.listSessions(c, query)); + }, + + async listProcesses(c: any): Promise { + try { + return await baseActions.listProcesses(c); + } catch (error) { + // Sandbox may be gone (E2B timeout, destroyed, etc.) 
— degrade to empty + logActorWarning("taskSandbox", "listProcesses failed, sandbox may be expired", { + sandboxId: c.state.sandboxId, + error: resolveErrorMessage(error), + }); + return { processes: [] }; + } + }, + + async sandboxAgentConnection(c: any): Promise<{ endpoint: string; token?: string }> { + const provider = await providerForConnection(c); + if (!provider || !c.state.sandboxId) { + return { endpoint: "mock://terminal-unavailable" }; + } + + try { + return { + endpoint: await provider.getUrl(c.state.sandboxId), + }; + } catch { + return { endpoint: "mock://terminal-unavailable" }; + } + }, + + async listWorkspaceModelGroups(c: any): Promise { + return await listWorkspaceModelGroupsForSandbox(c); + }, + + async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { + const { config } = getActorRuntimeContext(); + const { taskId } = parseTaskSandboxKey(c.key); + const at = Date.now(); + const sandboxProviderId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); + + if (c.state.sandboxDestroyed) { + return { sandboxProviderId, sandboxId: taskId, state: "destroyed", at }; + } + + if (!c.state.sandboxId) { + return { sandboxProviderId, sandboxId: taskId, state: "pending", at }; + } + + try { + const health = await baseActions.getHealth(c); + return { + sandboxProviderId, + sandboxId: taskId, + state: health.status === "ok" ? "running" : "degraded", + at, + }; + } catch { + return { + sandboxProviderId, + sandboxId: taskId, + state: "error", + at, + }; + } + }, + + async repoCwd(c: any): Promise<{ cwd: string }> { + const resolved = await resolveRepoCwd(c); + return { cwd: resolved }; + }, + + // Long-running action — kept as direct action to avoid blocking the + // workflow loop (prompt responses can take minutes). 
+ async sendPrompt(c: any, request: { sessionId: string; prompt: string }): Promise { + const text = typeof request?.prompt === "string" ? request.prompt.trim() : ""; + if (!text) { + return null; + } + + const session = await baseActions.resumeSession(c, request.sessionId); + if (!session || typeof session.prompt !== "function") { + throw new Error(`session '${request.sessionId}' not found`); + } + + return sanitizeActorResult(await session.prompt([{ type: "text", text }])); + }, + + // Mutation actions — self-send to queue for workflow history + async createSession(c: any, request: any): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse(await self.send(sandboxWorkflowQueueName("sandbox.command.createSession"), request ?? {}, { wait: true, timeout: 10_000 })); + }, + + async resumeOrCreateSession(c: any, request: any): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse( + await self.send(sandboxWorkflowQueueName("sandbox.command.resumeOrCreateSession"), request ?? {}, { wait: true, timeout: 10_000 }), + ); + }, + + async destroySession(c: any, sessionId: string): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse(await self.send(sandboxWorkflowQueueName("sandbox.command.destroySession"), { sessionId }, { wait: true, timeout: 10_000 })); + }, + + async createProcess(c: any, request: any): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse(await self.send(sandboxWorkflowQueueName("sandbox.command.createProcess"), request ?? 
{}, { wait: true, timeout: 10_000 })); + }, + + // runProcess kept as direct action — response can exceed 128KB queue limit + async runProcess(c: any, request: any): Promise { + const result = await baseActions.runProcess(c, request); + await broadcastProcesses(c, baseActions); + return result; + }, + + async stopProcess(c: any, processId: string, query?: any): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse( + await self.send(sandboxWorkflowQueueName("sandbox.command.stopProcess"), { processId, query }, { wait: true, timeout: 10_000 }), + ); + }, + + async killProcess(c: any, processId: string, query?: any): Promise { + const self = selfTaskSandbox(c); + return expectQueueResponse( + await self.send(sandboxWorkflowQueueName("sandbox.command.killProcess"), { processId, query }, { wait: true, timeout: 10_000 }), + ); + }, + + async deleteProcess(c: any, processId: string): Promise { + const self = selfTaskSandbox(c); + await self.send(sandboxWorkflowQueueName("sandbox.command.deleteProcess"), { processId }, { wait: false }); + }, + }, + run: workflow(runSandboxWorkflow), +}); + +export { DEFAULT_SANDBOX_REPO_CWD, resolveRepoCwd }; diff --git a/foundry/packages/backend/src/actors/task/db/db.ts b/foundry/packages/backend/src/actors/task/db/db.ts new file mode 100644 index 0000000..128f856 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const taskDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/task/db/drizzle.config.ts b/foundry/packages/backend/src/actors/task/db/drizzle.config.ts new file mode 100644 index 0000000..d022c36 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: 
"./src/actors/task/db/drizzle", + schema: "./src/actors/task/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql new file mode 100644 index 0000000..c6a346a --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql @@ -0,0 +1,49 @@ +CREATE TABLE `task` ( + `id` integer PRIMARY KEY NOT NULL, + `branch_name` text, + `title` text, + `task` text NOT NULL, + `sandbox_provider_id` text NOT NULL, + `status` text NOT NULL, + `pull_request_json` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL, + CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) +); +--> statement-breakpoint +CREATE TABLE `task_runtime` ( + `id` integer PRIMARY KEY NOT NULL, + `active_sandbox_id` text, + `active_switch_target` text, + `active_cwd` text, + `git_state_json` text, + `git_state_updated_at` integer, + `updated_at` integer NOT NULL, + CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) +); +--> statement-breakpoint +CREATE TABLE `task_sandboxes` ( + `sandbox_id` text PRIMARY KEY NOT NULL, + `sandbox_provider_id` text NOT NULL, + `sandbox_actor_id` text, + `switch_target` text NOT NULL, + `cwd` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_workspace_sessions` ( + `session_id` text PRIMARY KEY NOT NULL, + `sandbox_session_id` text, + `session_name` text NOT NULL, + `model` text NOT NULL, + `status` text DEFAULT 'ready' NOT NULL, + `error_message` text, + `transcript_json` text DEFAULT '[]' NOT NULL, + `transcript_updated_at` integer, + `created` integer DEFAULT 1 NOT NULL, + `closed` integer DEFAULT 0 NOT NULL, + `thinking_since_ms` integer, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); diff --git 
a/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..7397b89 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json @@ -0,0 +1,324 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "6daaa6d5-3280-46fe-9261-40cabeba1b49", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "task": { + "name": "task", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "branch_name": { + "name": "branch_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "title": { + "name": "title", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "task": { + "name": "task", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sandbox_provider_id": { + "name": "sandbox_provider_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "pull_request_json": { + "name": "pull_request_json", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": { + "task_singleton_id_check": { + "name": "task_singleton_id_check", + "value": "\"task\".\"id\" = 1" + } + } + }, + "task_runtime": { + "name": "task_runtime", + "columns": 
{ + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "active_sandbox_id": { + "name": "active_sandbox_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "active_switch_target": { + "name": "active_switch_target", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "active_cwd": { + "name": "active_cwd", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "git_state_json": { + "name": "git_state_json", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "git_state_updated_at": { + "name": "git_state_updated_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": { + "task_runtime_singleton_id_check": { + "name": "task_runtime_singleton_id_check", + "value": "\"task_runtime\".\"id\" = 1" + } + } + }, + "task_sandboxes": { + "name": "task_sandboxes", + "columns": { + "sandbox_id": { + "name": "sandbox_id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sandbox_provider_id": { + "name": "sandbox_provider_id", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "sandbox_actor_id": { + "name": "sandbox_actor_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "switch_target": { + "name": "switch_target", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "cwd": { + "name": "cwd", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + 
"created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_workspace_sessions": { + "name": "task_workspace_sessions", + "columns": { + "session_id": { + "name": "session_id", + "type": "text", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "sandbox_session_id": { + "name": "sandbox_session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "session_name": { + "name": "session_name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "model": { + "name": "model", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "status": { + "name": "status", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'ready'" + }, + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "transcript_json": { + "name": "transcript_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": "'[]'" + }, + "transcript_updated_at": { + "name": "transcript_updated_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 1 + }, + "closed": { + "name": "closed", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false, + "default": 0 + }, + "thinking_since_ms": { + "name": "thinking_since_ms", + "type": "integer", + 
"primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/task/db/drizzle/meta/_journal.json new file mode 100644 index 0000000..1f9e97e --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/drizzle/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "7", + "dialect": "sqlite", + "entries": [ + { + "idx": 0, + "version": "6", + "when": 1773376222525, + "tag": "0000_charming_maestro", + "breakpoints": true + } + ] +} diff --git a/foundry/packages/backend/src/actors/task/db/migrations.ts b/foundry/packages/backend/src/actors/task/db/migrations.ts new file mode 100644 index 0000000..61b0dff --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/migrations.ts @@ -0,0 +1,86 @@ +// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. 
+ +const journal = { + entries: [ + { + idx: 0, + when: 1773376222525, + tag: "0000_charming_maestro", + breakpoints: true, + }, + { + idx: 1, + when: 1773984000000, + tag: "0001_add_task_owner", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`task\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`branch_name\` text, + \`title\` text, + \`task\` text NOT NULL, + \`sandbox_provider_id\` text NOT NULL, + \`status\` text NOT NULL, + \`pull_request_json\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL, + CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) +); +--> statement-breakpoint +CREATE TABLE \`task_runtime\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`active_sandbox_id\` text, + \`active_switch_target\` text, + \`active_cwd\` text, + \`git_state_json\` text, + \`git_state_updated_at\` integer, + \`updated_at\` integer NOT NULL, + CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) +); +--> statement-breakpoint +CREATE TABLE \`task_sandboxes\` ( + \`sandbox_id\` text PRIMARY KEY NOT NULL, + \`sandbox_provider_id\` text NOT NULL, + \`sandbox_actor_id\` text, + \`switch_target\` text NOT NULL, + \`cwd\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`task_workspace_sessions\` ( + \`session_id\` text PRIMARY KEY NOT NULL, + \`sandbox_session_id\` text, + \`session_name\` text NOT NULL, + \`model\` text NOT NULL, + \`status\` text DEFAULT 'ready' NOT NULL, + \`error_message\` text, + \`transcript_json\` text DEFAULT '[]' NOT NULL, + \`transcript_updated_at\` integer, + \`created\` integer DEFAULT 1 NOT NULL, + \`closed\` integer DEFAULT 0 NOT NULL, + \`thinking_since_ms\` integer, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +`, + m0001: `CREATE TABLE \`task_owner\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`primary_user_id\` text, + 
\`primary_github_login\` text, + \`primary_github_email\` text, + \`primary_github_avatar_url\` text, + \`updated_at\` integer NOT NULL, + CONSTRAINT "task_owner_singleton_id_check" CHECK("task_owner"."id" = 1) +); +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts b/foundry/packages/backend/src/actors/task/db/schema.ts new file mode 100644 index 0000000..bdb7cf7 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -0,0 +1,88 @@ +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; + +// SQLite is per task actor instance, so these tables only ever store one row (id=1). +export const task = sqliteTable( + "task", + { + id: integer("id").primaryKey(), + branchName: text("branch_name"), + title: text("title"), + task: text("task").notNull(), + sandboxProviderId: text("sandbox_provider_id").notNull(), + status: text("status").notNull(), + pullRequestJson: text("pull_request_json"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("task_singleton_id_check", sql`${table.id} = 1`)], +); + +export const taskRuntime = sqliteTable( + "task_runtime", + { + id: integer("id").primaryKey(), + activeSandboxId: text("active_sandbox_id"), + activeSwitchTarget: text("active_switch_target"), + activeCwd: text("active_cwd"), + gitStateJson: text("git_state_json"), + gitStateUpdatedAt: integer("git_state_updated_at"), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)], +); + +/** + * Coordinator index of SandboxInstanceActor instances. + * Tracks all sandbox instances provisioned for this task. Only one + * is active at a time (referenced by taskRuntime.activeSandboxId). 
+ */ +export const taskSandboxes = sqliteTable("task_sandboxes", { + sandboxId: text("sandbox_id").notNull().primaryKey(), + sandboxProviderId: text("sandbox_provider_id").notNull(), + sandboxActorId: text("sandbox_actor_id"), + switchTarget: text("switch_target").notNull(), + cwd: text("cwd"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** + * Single-row table tracking the primary user (owner) of this task. + * The owner's GitHub OAuth credentials are injected into the sandbox + * for git operations. Updated when a different user sends a message. + */ +export const taskOwner = sqliteTable( + "task_owner", + { + id: integer("id").primaryKey(), + primaryUserId: text("primary_user_id"), + primaryGithubLogin: text("primary_github_login"), + primaryGithubEmail: text("primary_github_email"), + primaryGithubAvatarUrl: text("primary_github_avatar_url"), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("task_owner_singleton_id_check", sql`${table.id} = 1`)], +); + +/** + * Coordinator index of workspace sessions within this task. + * The task actor is the coordinator for sessions. Each row holds session + * metadata, model, status, transcript, and draft state. Sessions are + * sub-entities of the task — no separate session actor in the DB. 
+ */ +export const taskWorkspaceSessions = sqliteTable("task_workspace_sessions", { + sessionId: text("session_id").notNull().primaryKey(), + sandboxSessionId: text("sandbox_session_id"), + sessionName: text("session_name").notNull(), + model: text("model").notNull(), + status: text("status").notNull().default("ready"), + errorMessage: text("error_message"), + transcriptJson: text("transcript_json").notNull().default("[]"), + transcriptUpdatedAt: integer("transcript_updated_at"), + created: integer("created").notNull().default(1), + closed: integer("closed").notNull().default(0), + thinkingSinceMs: integer("thinking_since_ms"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts new file mode 100644 index 0000000..68bee1c --- /dev/null +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -0,0 +1,99 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import type { TaskRecord } from "@sandbox-agent/foundry-shared"; +import { taskDb } from "./db/db.js"; +import { getCurrentRecord } from "./workflow/common.js"; +import { + changeWorkspaceModel, + getSessionDetail, + getTaskDetail, + getTaskSummary, + markWorkspaceUnread, + refreshWorkspaceDerivedState, + refreshWorkspaceSessionTranscript, + renameWorkspaceSession, + renameWorkspaceTask, + selectWorkspaceSession, + setWorkspaceSessionUnread, + syncTaskPullRequest, + syncWorkspaceSessionStatus, + updateWorkspaceDraft, +} from "./workspace.js"; +import { runTaskWorkflow } from "./workflow/index.js"; +import { TASK_QUEUE_NAMES } from "./workflow/queue.js"; + +export interface TaskInput { + organizationId: string; + repoId: string; + taskId: string; +} + +export const task = actor({ + db: taskDb, + queues: Object.fromEntries(TASK_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Task", + icon: "wrench", 
+ actionTimeout: 10 * 60_000, + }, + createState: (_c, input: TaskInput) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + taskId: input.taskId, + }), + actions: { + async get(c): Promise { + return await getCurrentRecord(c); + }, + + async getTaskSummary(c) { + return await getTaskSummary(c); + }, + + async getTaskDetail(c, input?: { authSessionId?: string }) { + return await getTaskDetail(c, input?.authSessionId); + }, + + async getSessionDetail(c, input: { sessionId: string; authSessionId?: string }) { + return await getSessionDetail(c, input.sessionId, input.authSessionId); + }, + + // Direct actions migrated from queue: + async markUnread(c, input: { authSessionId?: string }) { + await markWorkspaceUnread(c, input?.authSessionId); + }, + async renameTask(c, input: { value: string }) { + await renameWorkspaceTask(c, input.value); + }, + async renameSession(c, input: { sessionId: string; title: string }) { + await renameWorkspaceSession(c, input.sessionId, input.title); + }, + async selectSession(c, input: { sessionId: string; authSessionId?: string }) { + await selectWorkspaceSession(c, input.sessionId, input?.authSessionId); + }, + async setSessionUnread(c, input: { sessionId: string; unread: boolean; authSessionId?: string }) { + await setWorkspaceSessionUnread(c, input.sessionId, input.unread, input?.authSessionId); + }, + async updateDraft(c, input: { sessionId: string; text: string; attachments: any[]; authSessionId?: string }) { + await updateWorkspaceDraft(c, input.sessionId, input.text, input.attachments, input?.authSessionId); + }, + async changeModel(c, input: { sessionId: string; model: string; authSessionId?: string }) { + await changeWorkspaceModel(c, input.sessionId, input.model, input?.authSessionId); + }, + async refreshSessionTranscript(c, input: { sessionId: string }) { + await refreshWorkspaceSessionTranscript(c, input.sessionId); + }, + async refreshDerived(c) { + await refreshWorkspaceDerivedState(c); + }, + async 
syncSessionStatus(c, input: { sessionId: string; status: "running" | "idle" | "error"; at: number }) { + await syncWorkspaceSessionStatus(c, input.sessionId, input.status, input.at); + }, + async syncPullRequest(c, input: { pullRequest: any }) { + await syncTaskPullRequest(c, input?.pullRequest ?? null); + }, + }, + run: workflow(runTaskWorkflow), +}); + +export { taskWorkflowQueueName } from "./workflow/index.js"; diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts new file mode 100644 index 0000000..7ba2d2b --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -0,0 +1,119 @@ +// @ts-nocheck +import { eq } from "drizzle-orm"; +import { getTaskSandbox } from "../../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { task as taskTable } from "../db/schema.js"; +import { TASK_ROW_ID, appendAuditLog, getCurrentRecord, setTaskState } from "./common.js"; +import { pushActiveBranchActivity } from "./push.js"; + +async function withTimeout(promise: Promise, timeoutMs: number, label: string): Promise { + let timer: ReturnType | undefined; + try { + return await Promise.race([ + promise, + new Promise((_resolve, reject) => { + timer = setTimeout(() => reject(new Error(`${label} timed out after ${timeoutMs}ms`)), timeoutMs); + }), + ]); + } finally { + if (timer) { + clearTimeout(timer); + } + } +} + +export async function handleAttachActivity(loopCtx: any, msg: any): Promise { + const record = await getCurrentRecord(loopCtx); + let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? ""; + const sessionId = msg.body?.sessionId ?? 
null; + + if (record.activeSandboxId) { + try { + const sandbox = getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId); + const connection = await sandbox.sandboxAgentConnection(); + if (typeof connection?.endpoint === "string" && connection.endpoint.length > 0) { + target = connection.endpoint; + } + } catch { + // Best effort; keep the last known switch target if the sandbox actor is unavailable. + } + } + + await appendAuditLog(loopCtx, "task.attach", { + target, + sessionId, + }); + + await msg.complete({ + target, + sessionId, + }); +} + +export async function handleSwitchActivity(loopCtx: any, msg: any): Promise { + const db = loopCtx.db; + const runtime = await db.select({ switchTarget: taskRuntime.activeSwitchTarget }).from(taskRuntime).where(eq(taskRuntime.id, TASK_ROW_ID)).get(); + + await msg.complete({ switchTarget: runtime?.switchTarget ?? "" }); +} + +export async function handlePushActivity(loopCtx: any, msg: any): Promise { + await pushActiveBranchActivity(loopCtx, { + reason: msg.body?.reason ?? null, + historyKind: "task.push", + }); + await msg.complete({ ok: true }); +} + +export async function handleSimpleCommandActivity(loopCtx: any, msg: any, historyKind: string): Promise { + await appendAuditLog(loopCtx, historyKind, { reason: msg.body?.reason ?? 
null }); + await msg.complete({ ok: true }); +} + +export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { + await setTaskState(loopCtx, "archive_stop_status_sync"); + const record = await getCurrentRecord(loopCtx); + + if (record.activeSandboxId) { + await setTaskState(loopCtx, "archive_release_sandbox"); + void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => { + logActorWarning("task.commands", "failed to release sandbox during archive", { + organizationId: loopCtx.state.organizationId, + repoId: loopCtx.state.repoId, + taskId: loopCtx.state.taskId, + sandboxId: record.activeSandboxId, + error: resolveErrorMessage(error), + }); + }); + } + + const db = loopCtx.db; + await setTaskState(loopCtx, "archive_finalize"); + await db.update(taskTable).set({ status: "archived", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run(); + + await appendAuditLog(loopCtx, "task.archive", { reason: msg.body?.reason ?? null }); + await msg.complete({ ok: true }); +} + +export async function killDestroySandboxActivity(loopCtx: any): Promise { + await setTaskState(loopCtx, "kill_destroy_sandbox"); + const record = await getCurrentRecord(loopCtx); + if (!record.activeSandboxId) { + return; + } + + await getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(); +} + +export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { + await setTaskState(loopCtx, "kill_finalize"); + const db = loopCtx.db; + await db.update(taskTable).set({ status: "killed", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run(); + + await appendAuditLog(loopCtx, "task.kill", { reason: msg.body?.reason ?? 
null }); + await msg.complete({ ok: true }); +} + +export async function handleGetActivity(loopCtx: any, msg: any): Promise { + await msg.complete(await getCurrentRecord(loopCtx)); +} diff --git a/foundry/packages/backend/src/actors/task/workflow/common.ts b/foundry/packages/backend/src/actors/task/workflow/common.ts new file mode 100644 index 0000000..cbe63e6 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/common.ts @@ -0,0 +1,201 @@ +// @ts-nocheck +import { eq } from "drizzle-orm"; +import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared"; +import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js"; +import { getOrCreateAuditLog, getOrCreateOrganization } from "../../handles.js"; +import { broadcastTaskUpdate } from "../workspace.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; + +export const TASK_ROW_ID = 1; + +export function collectErrorMessages(error: unknown): string[] { + if (error == null) { + return []; + } + + const out: string[] = []; + const seen = new Set(); + let current: unknown = error; + + while (current != null && !seen.has(current)) { + seen.add(current); + + if (current instanceof Error) { + const message = current.message?.trim(); + if (message) { + out.push(message); + } + current = (current as { cause?: unknown }).cause; + continue; + } + + if (typeof current === "string") { + const message = current.trim(); + if (message) { + out.push(message); + } + break; + } + + break; + } + + return out.filter((msg, index) => out.indexOf(msg) === index); +} + +export function resolveErrorDetail(error: unknown): string { + const messages = collectErrorMessages(error); + if (messages.length === 0) { + return String(error); + } + + const nonWorkflowWrapper = messages.find((msg) => !/^Step\s+"[^"]+"\s+failed\b/i.test(msg)); + return nonWorkflowWrapper ?? 
messages[0]!; +} + +export function buildAgentPrompt(task: string): string { + return task.trim(); +} + +export async function setTaskState(ctx: any, status: TaskStatus): Promise { + const now = Date.now(); + const db = ctx.db; + await db.update(taskTable).set({ status, updatedAt: now }).where(eq(taskTable.id, TASK_ROW_ID)).run(); + + await broadcastTaskUpdate(ctx); +} + +/** + * Read the task's current record from its local SQLite DB. + * If the task actor was lazily created (virtual task from PR sync) and has no + * DB rows yet, auto-initializes by reading branch/title from the org actor's + * getTaskIndexEntry. This is the self-initialization path for lazy task actors. + */ +export async function getCurrentRecord(ctx: any): Promise { + const db = ctx.db; + const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); + let row = await db + .select({ + branchName: taskTable.branchName, + title: taskTable.title, + task: taskTable.task, + sandboxProviderId: taskTable.sandboxProviderId, + status: taskTable.status, + pullRequestJson: taskTable.pullRequestJson, + activeSandboxId: taskRuntime.activeSandboxId, + createdAt: taskTable.createdAt, + updatedAt: taskTable.updatedAt, + }) + .from(taskTable) + .leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id)) + .where(eq(taskTable.id, TASK_ROW_ID)) + .get(); + + if (!row) { + // Virtual task — auto-initialize from org actor's task index data + let branchName: string | null = null; + let title = "Untitled"; + try { + const entry = await organization.getTaskIndexEntry({ taskId: ctx.state.taskId }); + branchName = entry?.branchName ?? null; + title = entry?.title ?? 
title; + } catch {} + + const { config } = getActorRuntimeContext(); + const { initBootstrapDbActivity, initCompleteActivity } = await import("./init.js"); + await initBootstrapDbActivity(ctx, { + sandboxProviderId: defaultSandboxProviderId(config), + branchName, + title, + task: title, + }); + await initCompleteActivity(ctx, { sandboxProviderId: defaultSandboxProviderId(config) }); + + // Re-read the row after initialization + const initialized = await db + .select({ + branchName: taskTable.branchName, + title: taskTable.title, + task: taskTable.task, + sandboxProviderId: taskTable.sandboxProviderId, + status: taskTable.status, + pullRequestJson: taskTable.pullRequestJson, + activeSandboxId: taskRuntime.activeSandboxId, + createdAt: taskTable.createdAt, + updatedAt: taskTable.updatedAt, + }) + .from(taskTable) + .leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id)) + .where(eq(taskTable.id, TASK_ROW_ID)) + .get(); + + if (!initialized) { + throw new Error(`Task not found after initialization: ${ctx.state.taskId}`); + } + + row = initialized; + } + + const repositoryMetadata = await organization.getRepositoryMetadata({ repoId: ctx.state.repoId }); + let pullRequest = null; + if (row.pullRequestJson) { + try { + pullRequest = JSON.parse(row.pullRequestJson); + } catch { + pullRequest = null; + } + } + + const sandboxes = await db + .select({ + sandboxId: taskSandboxes.sandboxId, + sandboxProviderId: taskSandboxes.sandboxProviderId, + sandboxActorId: taskSandboxes.sandboxActorId, + switchTarget: taskSandboxes.switchTarget, + cwd: taskSandboxes.cwd, + createdAt: taskSandboxes.createdAt, + updatedAt: taskSandboxes.updatedAt, + }) + .from(taskSandboxes) + .all(); + + return { + organizationId: ctx.state.organizationId, + repoId: ctx.state.repoId, + repoRemote: repositoryMetadata.remoteUrl, + taskId: ctx.state.taskId, + branchName: row.branchName, + title: row.title, + task: row.task, + sandboxProviderId: row.sandboxProviderId, + status: row.status, + activeSandboxId: 
row.activeSandboxId ?? null, + pullRequest, + sandboxes: sandboxes.map((sb) => ({ + sandboxId: sb.sandboxId, + sandboxProviderId: sb.sandboxProviderId, + sandboxActorId: sb.sandboxActorId ?? null, + switchTarget: sb.switchTarget, + cwd: sb.cwd ?? null, + createdAt: sb.createdAt, + updatedAt: sb.updatedAt, + })), + createdAt: row.createdAt, + updatedAt: row.updatedAt, + } as TaskRecord; +} + +export async function appendAuditLog(ctx: any, kind: string, payload: Record): Promise { + const row = await ctx.db.select({ branchName: taskTable.branchName }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get(); + const auditLog = await getOrCreateAuditLog(ctx, ctx.state.organizationId); + void auditLog.append({ + kind, + repoId: ctx.state.repoId, + taskId: ctx.state.taskId, + branchName: row?.branchName ?? null, + payload, + }); + + await broadcastTaskUpdate(ctx); +} diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts new file mode 100644 index 0000000..75b2da3 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -0,0 +1,185 @@ +// @ts-nocheck +/** + * Task workflow — queue-based command loop. + * + * Mutations are dispatched through named queues and processed inside the + * workflow command loop so that every command appears in the RivetKit + * inspector's workflow history. Read actions remain direct (no queue). + * + * Callers send commands directly via `.send(taskWorkflowQueueName(...), ...)`. 
+ */ +import { Loop } from "rivetkit/workflow"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { TASK_QUEUE_NAMES, type TaskQueueName, taskWorkflowQueueName } from "./queue.js"; +import { getCurrentRecord } from "./common.js"; +import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js"; +import { + handleArchiveActivity, + handleAttachActivity, + handlePushActivity, + handleSimpleCommandActivity, + handleSwitchActivity, + killDestroySandboxActivity, + killWriteDbActivity, +} from "./commands.js"; +import { + changeTaskOwnerManually, + closeWorkspaceSession, + createWorkspaceSession, + ensureWorkspaceSession, + publishWorkspacePr, + revertWorkspaceFile, + sendWorkspaceMessage, + stopWorkspaceSession, +} from "../workspace.js"; + +export { taskWorkflowQueueName } from "./queue.js"; + +// --------------------------------------------------------------------------- +// Workflow command loop — runs inside `run: workflow(runTaskWorkflow)` +// --------------------------------------------------------------------------- + +type WorkflowHandler = (loopCtx: any, msg: any) => Promise; + +const COMMAND_HANDLERS: Record = { + "task.command.initialize": async (loopCtx, msg) => { + await initBootstrapDbActivity(loopCtx, msg.body); + await initEnqueueProvisionActivity(loopCtx, msg.body); + const record = await getCurrentRecord(loopCtx); + await msg.complete(record); + }, + + "task.command.provision": async (loopCtx, msg) => { + try { + await initCompleteActivity(loopCtx, msg.body); + await msg.complete({ ok: true }); + } catch (error) { + await initFailedActivity(loopCtx, error, msg.body); + await msg.complete({ ok: false, error: resolveErrorMessage(error) }); + } + }, + + "task.command.attach": async (loopCtx, msg) => { + await handleAttachActivity(loopCtx, msg); + }, + + "task.command.switch": async (loopCtx, msg) => { + await handleSwitchActivity(loopCtx, msg); + }, + + 
"task.command.push": async (loopCtx, msg) => { + await handlePushActivity(loopCtx, msg); + }, + + "task.command.sync": async (loopCtx, msg) => { + await handleSimpleCommandActivity(loopCtx, msg, "task.sync"); + }, + + "task.command.merge": async (loopCtx, msg) => { + await handleSimpleCommandActivity(loopCtx, msg, "task.merge"); + }, + + "task.command.archive": async (loopCtx, msg) => { + await handleArchiveActivity(loopCtx, msg); + }, + + "task.command.kill": async (loopCtx, msg) => { + await killDestroySandboxActivity(loopCtx); + await killWriteDbActivity(loopCtx, msg); + }, + + "task.command.workspace.create_session": async (loopCtx, msg) => { + const result = await createWorkspaceSession(loopCtx, msg.body?.model, msg.body?.authSessionId); + await msg.complete(result); + }, + + "task.command.workspace.create_session_and_send": async (loopCtx, msg) => { + try { + const created = await createWorkspaceSession(loopCtx, msg.body?.model, msg.body?.authSessionId); + await sendWorkspaceMessage(loopCtx, created.sessionId, msg.body.text, [], msg.body?.authSessionId); + } catch (error) { + logActorWarning("task.workflow", "create_session_and_send failed", { + error: resolveErrorMessage(error), + }); + } + await msg.complete({ ok: true }); + }, + + "task.command.workspace.ensure_session": async (loopCtx, msg) => { + await ensureWorkspaceSession(loopCtx, msg.body.sessionId, msg.body?.model, msg.body?.authSessionId); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.send_message": async (loopCtx, msg) => { + await sendWorkspaceMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments, msg.body?.authSessionId); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.stop_session": async (loopCtx, msg) => { + await stopWorkspaceSession(loopCtx, msg.body.sessionId); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.close_session": async (loopCtx, msg) => { + await closeWorkspaceSession(loopCtx, msg.body.sessionId, 
msg.body?.authSessionId); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.publish_pr": async (loopCtx, msg) => { + await publishWorkspacePr(loopCtx); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.revert_file": async (loopCtx, msg) => { + await revertWorkspaceFile(loopCtx, msg.body.path); + await msg.complete({ ok: true }); + }, + + "task.command.workspace.change_owner": async (loopCtx, msg) => { + await changeTaskOwnerManually(loopCtx, { + primaryUserId: msg.body.primaryUserId, + primaryGithubLogin: msg.body.primaryGithubLogin, + primaryGithubEmail: msg.body.primaryGithubEmail, + primaryGithubAvatarUrl: msg.body.primaryGithubAvatarUrl ?? null, + }); + await msg.complete({ ok: true }); + }, +}; + +export async function runTaskWorkflow(ctx: any): Promise { + await ctx.loop("task-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-task-command", { + names: [...TASK_QUEUE_NAMES], + completable: true, + }); + + if (!msg) { + return Loop.continue(undefined); + } + + const handler = COMMAND_HANDLERS[msg.name as TaskQueueName]; + if (!handler) { + logActorWarning("task.workflow", "unknown task command", { command: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }).catch(() => {}); + return Loop.continue(undefined); + } + + try { + // Wrap in a step so c.state and c.db are accessible inside mutation functions. 
+ await loopCtx.step({ + name: msg.name, + timeout: 10 * 60_000, + run: async () => handler(loopCtx, msg), + }); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("task.workflow", "task workflow command failed", { + command: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts b/foundry/packages/backend/src/actors/task/workflow/init.ts new file mode 100644 index 0000000..ffdf1d4 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -0,0 +1,166 @@ +// @ts-nocheck +import { eq } from "drizzle-orm"; +import { getActorRuntimeContext } from "../../context.js"; +import { selfTask } from "../../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { taskWorkflowQueueName } from "./queue.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { task as taskTable, taskRuntime } from "../db/schema.js"; +import { TASK_ROW_ID, appendAuditLog, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; +// task actions called directly (no queue) + +export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { + const { config } = getActorRuntimeContext(); + const sandboxProviderId = body?.sandboxProviderId ?? defaultSandboxProviderId(config); + const task = body?.task; + if (typeof task !== "string" || task.trim().length === 0) { + throw new Error("task initialize requires the task prompt"); + } + const now = Date.now(); + + await loopCtx.db + .insert(taskTable) + .values({ + id: TASK_ROW_ID, + branchName: body?.branchName ?? null, + title: body?.title ?? 
null, + task, + sandboxProviderId, + status: "init_bootstrap_db", + pullRequestJson: null, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskTable.id, + set: { + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task, + sandboxProviderId, + status: "init_bootstrap_db", + pullRequestJson: null, + updatedAt: now, + }, + }) + .run(); + + await loopCtx.db + .insert(taskRuntime) + .values({ + id: TASK_ROW_ID, + activeSandboxId: null, + activeSwitchTarget: null, + activeCwd: null, + gitStateJson: null, + gitStateUpdatedAt: null, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskRuntime.id, + set: { + activeSandboxId: null, + activeSwitchTarget: null, + activeCwd: null, + updatedAt: now, + }, + }) + .run(); +} + +export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise { + await setTaskState(loopCtx, "init_enqueue_provision"); + + const self = selfTask(loopCtx); + try { + void self.send(taskWorkflowQueueName("task.command.provision"), body ?? {}, { wait: false }).catch(() => {}); + } catch (error) { + logActorWarning("task.init", "background provision command failed", { + organizationId: loopCtx.state.organizationId, + repoId: loopCtx.state.repoId, + taskId: loopCtx.state.taskId, + error: resolveErrorMessage(error), + }); + throw error; + } +} + +export async function initCompleteActivity(loopCtx: any, body: any): Promise { + const now = Date.now(); + const { config } = getActorRuntimeContext(); + const sandboxProviderId = body?.sandboxProviderId ?? 
defaultSandboxProviderId(config); + + await setTaskState(loopCtx, "init_complete"); + await loopCtx.db + .update(taskRuntime) + .set({ + updatedAt: now, + }) + .where(eq(taskRuntime.id, TASK_ROW_ID)) + .run(); + + await appendAuditLog(loopCtx, "task.initialized", { + payload: { sandboxProviderId }, + }); +} + +export async function initFailedActivity(loopCtx: any, error: unknown, body?: any): Promise { + const now = Date.now(); + const detail = resolveErrorDetail(error); + const messages = collectErrorMessages(error); + const { config } = getActorRuntimeContext(); + const sandboxProviderId = defaultSandboxProviderId(config); + const task = typeof body?.task === "string" ? body.task : null; + + await loopCtx.db + .insert(taskTable) + .values({ + id: TASK_ROW_ID, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? detail, + sandboxProviderId, + status: "error", + pullRequestJson: null, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskTable.id, + set: { + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? 
detail, + sandboxProviderId, + status: "error", + pullRequestJson: null, + updatedAt: now, + }, + }) + .run(); + + await loopCtx.db + .insert(taskRuntime) + .values({ + id: TASK_ROW_ID, + activeSandboxId: null, + activeSwitchTarget: null, + activeCwd: null, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskRuntime.id, + set: { + activeSandboxId: null, + activeSwitchTarget: null, + activeCwd: null, + updatedAt: now, + }, + }) + .run(); + + await appendAuditLog(loopCtx, "task.error", { + detail, + messages, + }); +} diff --git a/foundry/packages/backend/src/actors/task/workflow/push.ts b/foundry/packages/backend/src/actors/task/workflow/push.ts new file mode 100644 index 0000000..f15ab0b --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/push.ts @@ -0,0 +1,61 @@ +// @ts-nocheck +import { getTaskSandbox } from "../../handles.js"; +import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; +import { appendAuditLog, getCurrentRecord } from "./common.js"; + +export interface PushActiveBranchOptions { + reason?: string | null; + historyKind?: string; +} + +export async function pushActiveBranchActivity(loopCtx: any, options: PushActiveBranchOptions = {}): Promise { + const record = await getCurrentRecord(loopCtx); + const activeSandboxId = record.activeSandboxId; + const branchName = record.branchName; + + if (!activeSandboxId) { + throw new Error("cannot push: no active sandbox"); + } + if (!branchName) { + throw new Error("cannot push: task branch is not set"); + } + + const activeSandbox = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null; + const cwd = activeSandbox?.cwd ?? 
null; + if (!cwd) { + throw new Error("cannot push: active sandbox cwd is not set"); + } + + const script = [ + "set -euo pipefail", + `cd ${JSON.stringify(cwd)}`, + "git rev-parse --verify HEAD >/dev/null", + "git config credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'", + `git push -u origin ${JSON.stringify(branchName)}`, + ].join("; "); + + const sandbox = getTaskSandbox(loopCtx, loopCtx.state.organizationId, activeSandboxId); + const auth = await resolveOrganizationGithubAuth(loopCtx, loopCtx.state.organizationId); + const result = await sandbox.runProcess({ + command: "bash", + args: ["-lc", script], + cwd: "/", + env: auth?.githubToken + ? { + GH_TOKEN: auth.githubToken, + GITHUB_TOKEN: auth.githubToken, + } + : undefined, + timeoutMs: 5 * 60_000, + }); + + if ((result.exitCode ?? 0) !== 0) { + throw new Error(`git push failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); + } + + await appendAuditLog(loopCtx, options.historyKind ?? "task.push", { + reason: options.reason ?? 
null, + branchName, + sandboxId: activeSandboxId, + }); +} diff --git a/foundry/packages/backend/src/actors/task/workflow/queue.ts b/foundry/packages/backend/src/actors/task/workflow/queue.ts new file mode 100644 index 0000000..a49c39a --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workflow/queue.ts @@ -0,0 +1,26 @@ +export const TASK_QUEUE_NAMES = [ + "task.command.initialize", + "task.command.provision", + "task.command.attach", + "task.command.switch", + "task.command.push", + "task.command.sync", + "task.command.merge", + "task.command.archive", + "task.command.kill", + "task.command.workspace.create_session", + "task.command.workspace.create_session_and_send", + "task.command.workspace.ensure_session", + "task.command.workspace.send_message", + "task.command.workspace.stop_session", + "task.command.workspace.close_session", + "task.command.workspace.publish_pr", + "task.command.workspace.revert_file", + "task.command.workspace.change_owner", +] as const; + +export type TaskQueueName = (typeof TASK_QUEUE_NAMES)[number]; + +export function taskWorkflowQueueName(name: string): string { + return name; +} diff --git a/foundry/packages/backend/src/actors/task/workspace.ts b/foundry/packages/backend/src/actors/task/workspace.ts new file mode 100644 index 0000000..0856947 --- /dev/null +++ b/foundry/packages/backend/src/actors/task/workspace.ts @@ -0,0 +1,1651 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { basename } from "node:path"; +import { asc, eq } from "drizzle-orm"; +import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, + workspaceAgentForModel, + workspaceSandboxAgentIdForModel, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../context.js"; +import { getOrCreateOrganization, getOrCreateTaskSandbox, getOrCreateUser, getTaskSandbox, selfTask } from "../handles.js"; +import { logActorInfo, logActorWarning, resolveErrorMessage } from "../logging.js"; +import { 
resolveSandboxProviderId } from "../../sandbox-config.js"; +import { getBetterAuthService } from "../../services/better-auth.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { githubRepoFullNameFromRemote } from "../../services/repo.js"; +import { taskWorkflowQueueName } from "./workflow/queue.js"; +import { organizationWorkflowQueueName } from "../organization/queues.js"; + +import { task as taskTable, taskOwner, taskRuntime, taskSandboxes, taskWorkspaceSessions } from "./db/schema.js"; +import { getCurrentRecord } from "./workflow/common.js"; + +function emptyGitState() { + return { + fileChanges: [], + diffs: {}, + fileTree: [], + updatedAt: null as number | null, + }; +} + +const FALLBACK_MODEL = DEFAULT_WORKSPACE_MODEL_ID; + +function agentKindForModel(model: string) { + return workspaceAgentForModel(model); +} + +export function sandboxAgentIdForModel(model: string) { + return workspaceSandboxAgentIdForModel(model); +} + +async function resolveWorkspaceModelGroups(c: any): Promise { + try { + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + const groups = await sandbox.listWorkspaceModelGroups(); + return Array.isArray(groups) && groups.length > 0 ? groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } +} + +async function resolveSandboxAgentForModel(c: any, model: string): Promise { + const groups = await resolveWorkspaceModelGroups(c); + return workspaceSandboxAgentIdForModel(model, groups); +} + +function repoLabelFromRemote(remoteUrl: string): string { + const trimmed = remoteUrl.trim(); + try { + const url = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`); + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length >= 2) { + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/, "")}`; + } + } catch { + // ignore + } + + return basename(trimmed.replace(/\.git$/, "")); +} + +async function getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const organization = await getOrCreateOrganization(c, c.state.organizationId); + return await organization.getRepositoryMetadata({ repoId: c.state.repoId }); +} + +function parseDraftAttachments(value: string | null | undefined): Array { + if (!value) { + return []; + } + + try { + const parsed = JSON.parse(value) as unknown; + return Array.isArray(parsed) ? parsed : []; + } catch { + return []; + } +} + +function parseTranscript(value: string | null | undefined): Array { + if (!value) { + return []; + } + + try { + const parsed = JSON.parse(value) as unknown; + return Array.isArray(parsed) ? parsed : []; + } catch { + return []; + } +} + +function parseGitState(value: string | null | undefined): { fileChanges: Array; diffs: Record; fileTree: Array } { + if (!value) { + return emptyGitState(); + } + + try { + const parsed = JSON.parse(value) as { + fileChanges?: unknown; + diffs?: unknown; + fileTree?: unknown; + }; + return { + fileChanges: Array.isArray(parsed.fileChanges) ? parsed.fileChanges : [], + diffs: parsed.diffs && typeof parsed.diffs === "object" ? (parsed.diffs as Record) : {}, + fileTree: Array.isArray(parsed.fileTree) ? parsed.fileTree : [], + }; + } catch { + return emptyGitState(); + } +} + +async function readTaskOwner(c: any): Promise<{ + primaryUserId: string | null; + primaryGithubLogin: string | null; + primaryGithubEmail: string | null; + primaryGithubAvatarUrl: string | null; +} | null> { + const row = await c.db.select().from(taskOwner).where(eq(taskOwner.id, 1)).get(); + if (!row) { + return null; + } + return { + primaryUserId: row.primaryUserId ?? null, + primaryGithubLogin: row.primaryGithubLogin ?? null, + primaryGithubEmail: row.primaryGithubEmail ?? 
null, + primaryGithubAvatarUrl: row.primaryGithubAvatarUrl ?? null, + }; +} + +async function upsertTaskOwner( + c: any, + owner: { primaryUserId: string; primaryGithubLogin: string; primaryGithubEmail: string; primaryGithubAvatarUrl: string | null }, +): Promise { + const now = Date.now(); + await c.db + .insert(taskOwner) + .values({ + id: 1, + primaryUserId: owner.primaryUserId, + primaryGithubLogin: owner.primaryGithubLogin, + primaryGithubEmail: owner.primaryGithubEmail, + primaryGithubAvatarUrl: owner.primaryGithubAvatarUrl, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskOwner.id, + set: { + primaryUserId: owner.primaryUserId, + primaryGithubLogin: owner.primaryGithubLogin, + primaryGithubEmail: owner.primaryGithubEmail, + primaryGithubAvatarUrl: owner.primaryGithubAvatarUrl, + updatedAt: now, + }, + }) + .run(); +} + +/** + * Inject the user's GitHub OAuth token into the sandbox as a git credential store file. + * Also configures git user.name and user.email so commits are attributed correctly. + * The credential file is overwritten on each owner swap. + * + * Race condition note: If User A sends a message and the agent starts a long git operation, + * then User B triggers an owner swap, the in-flight git process still has User A's credentials + * (already read from the credential store). The next git operation uses User B's credentials. 
+ */ +async function injectGitCredentials(sandbox: any, login: string, email: string, token: string): Promise { + const script = [ + "set -euo pipefail", + `git config --global user.name ${JSON.stringify(login)}`, + `git config --global user.email ${JSON.stringify(email)}`, + `git config --global credential.helper 'store --file=$HOME/.git-token'`, + `printf '%s\\n' ${JSON.stringify(`https://${login}:${token}@github.com`)} > $HOME/.git-token`, + `chmod 600 $HOME/.git-token`, + ]; + const result = await sandbox.runProcess({ + command: "bash", + args: ["-lc", script.join("; ")], + cwd: "/", + timeoutMs: 30_000, + }); + if ((result.exitCode ?? 0) !== 0) { + logActorWarning("task", "git credential injection failed", { + exitCode: result.exitCode, + output: [result.stdout, result.stderr].filter(Boolean).join(""), + }); + } +} + +/** + * Resolves the current user's GitHub identity from their auth session. + * Returns null if the session is invalid or the user has no GitHub account. + */ +async function resolveGithubIdentity(authSessionId: string): Promise<{ + userId: string; + login: string; + email: string; + avatarUrl: string | null; + accessToken: string; +} | null> { + const authService = getBetterAuthService(); + const authState = await authService.getAuthState(authSessionId); + if (!authState?.user?.id) { + return null; + } + + const tokenResult = await authService.getAccessTokenForSession(authSessionId); + if (!tokenResult?.accessToken) { + return null; + } + + const githubAccount = authState.accounts?.find((account: any) => account.providerId === "github"); + if (!githubAccount) { + return null; + } + + // Resolve the GitHub login from the API since Better Auth only stores the + // numeric account ID, not the login username. + let login = authState.user.name ?? "unknown"; + let avatarUrl = authState.user.image ?? 
null; + try { + const resp = await fetch("https://api.github.com/user", { + headers: { + Authorization: `Bearer ${tokenResult.accessToken}`, + Accept: "application/vnd.github+json", + }, + }); + if (resp.ok) { + const ghUser = (await resp.json()) as { login?: string; avatar_url?: string }; + if (ghUser.login) { + login = ghUser.login; + } + if (ghUser.avatar_url) { + avatarUrl = ghUser.avatar_url; + } + } + } catch (error) { + console.warn("resolveGithubIdentity: failed to fetch GitHub user", error); + } + + return { + userId: authState.user.id, + login, + email: authState.user.email ?? `${githubAccount.accountId}@users.noreply.github.com`, + avatarUrl, + accessToken: tokenResult.accessToken, + }; +} + +/** + * Check if the task owner needs to swap, and if so, update the owner record + * and inject new git credentials into the sandbox. + * Returns true if an owner swap occurred. + */ +async function maybeSwapTaskOwner(c: any, authSessionId: string | null | undefined, sandbox: any | null): Promise { + if (!authSessionId) { + return false; + } + + const identity = await resolveGithubIdentity(authSessionId); + if (!identity) { + return false; + } + + const currentOwner = await readTaskOwner(c); + if (currentOwner?.primaryUserId === identity.userId) { + return false; + } + + await upsertTaskOwner(c, { + primaryUserId: identity.userId, + primaryGithubLogin: identity.login, + primaryGithubEmail: identity.email, + primaryGithubAvatarUrl: identity.avatarUrl, + }); + + if (sandbox) { + await injectGitCredentials(sandbox, identity.login, identity.email, identity.accessToken); + } + + return true; +} + +/** + * Manually change the task owner. Updates the owner record and broadcasts the + * change to subscribers. Git credentials are NOT injected here — they will be + * injected the next time the target user sends a message (auto-swap path). 
+ */ +export async function changeTaskOwnerManually( + c: any, + input: { primaryUserId: string; primaryGithubLogin: string; primaryGithubEmail: string; primaryGithubAvatarUrl: string | null }, +): Promise { + await upsertTaskOwner(c, input); + await broadcastTaskUpdate(c); +} + +export function shouldMarkSessionUnreadForStatus(meta: { thinkingSinceMs?: number | null }, status: "running" | "idle" | "error"): boolean { + if (status === "running") { + return false; + } + + // Only mark unread when we observe the transition out of an active thinking state. + // Repeated idle polls for an already-finished session must not flip unread back on. + return Boolean(meta.thinkingSinceMs); +} + +export function shouldRecreateSessionForModelChange(meta: { + status: "pending_provision" | "pending_session_create" | "ready" | "error"; + sandboxSessionId?: string | null; + created?: boolean; + transcript?: Array; +}): boolean { + if (meta.status !== "ready" || !meta.sandboxSessionId) { + return false; + } + + if (meta.created) { + return false; + } + + return !Array.isArray(meta.transcript) || meta.transcript.length === 0; +} + +async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise> { + const rows = await c.db.select().from(taskWorkspaceSessions).orderBy(asc(taskWorkspaceSessions.createdAt)).all(); + const mapped = rows.map((row: any) => ({ + ...row, + id: row.sessionId, + sessionId: row.sessionId, + sandboxSessionId: row.sandboxSessionId ?? null, + status: row.status ?? "ready", + errorMessage: row.errorMessage ?? null, + transcript: parseTranscript(row.transcriptJson), + transcriptUpdatedAt: row.transcriptUpdatedAt ?? 
null, + created: row.created === 1, + closed: row.closed === 1, + })); + + if (options?.includeClosed === true) { + return mapped; + } + + return mapped.filter((row: any) => row.closed !== true); +} + +async function nextSessionName(c: any): Promise { + const rows = await listSessionMetaRows(c, { includeClosed: true }); + return `Session ${rows.length + 1}`; +} + +async function readSessionMeta(c: any, sessionId: string): Promise { + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sessionId, sessionId)).get(); + + if (!row) { + return null; + } + + return { + ...row, + id: row.sessionId, + sessionId: row.sessionId, + sandboxSessionId: row.sandboxSessionId ?? null, + status: row.status ?? "ready", + errorMessage: row.errorMessage ?? null, + transcript: parseTranscript(row.transcriptJson), + transcriptUpdatedAt: row.transcriptUpdatedAt ?? null, + created: row.created === 1, + closed: row.closed === 1, + }; +} + +async function getUserTaskState(c: any, authSessionId?: string | null): Promise<{ activeSessionId: string | null; bySessionId: Map }> { + if (!authSessionId) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const user = await getOrCreateUser(c, userId); + const state = await user.getTaskState({ taskId: c.state.taskId }); + const bySessionId = new Map( + (state?.sessions ?? []).map((row: any) => [ + row.sessionId, + { + unread: Boolean(row.unread), + draftText: row.draftText ?? "", + draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), + draftUpdatedAtMs: row.draftUpdatedAt ?? null, + }, + ]), + ); + return { + activeSessionId: state?.activeSessionId ?? 
null, + bySessionId, + }; +} + +async function upsertUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string, patch: Record): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.upsertTaskState({ + taskId: c.state.taskId, + sessionId, + patch, + }); +} + +async function deleteUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.deleteTaskState({ + taskId: c.state.taskId, + sessionId, + }); +} + +async function resolveDefaultModel(c: any, authSessionId?: string | null): Promise { + if (!authSessionId) { + return FALLBACK_MODEL; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return FALLBACK_MODEL; + } + + const user = await getOrCreateUser(c, userId); + const userState = await user.getAppAuthState({ sessionId: authSessionId }); + return userState?.profile?.defaultModel ?? 
FALLBACK_MODEL; +} + +async function ensureSessionMeta( + c: any, + params: { + sessionId: string; + sandboxSessionId?: string | null; + model?: string; + authSessionId?: string | null; + sessionName?: string; + created?: boolean; + status?: "pending_provision" | "pending_session_create" | "ready" | "error"; + errorMessage?: string | null; + }, +): Promise { + const existing = await readSessionMeta(c, params.sessionId); + if (existing) { + return existing; + } + + const now = Date.now(); + const sessionName = params.sessionName ?? (await nextSessionName(c)); + const model = params.model ?? (await resolveDefaultModel(c, params.authSessionId)); + + await c.db + .insert(taskWorkspaceSessions) + .values({ + sessionId: params.sessionId, + sandboxSessionId: params.sandboxSessionId ?? null, + sessionName, + model, + status: params.status ?? "ready", + errorMessage: params.errorMessage ?? null, + transcriptJson: "[]", + transcriptUpdatedAt: null, + created: params.created === false ? 0 : 1, + closed: 0, + thinkingSinceMs: null, + createdAt: now, + updatedAt: now, + }) + .run(); + + return await readSessionMeta(c, params.sessionId); +} + +async function updateSessionMeta(c: any, sessionId: string, values: Record): Promise { + await ensureSessionMeta(c, { sessionId }); + await c.db + .update(taskWorkspaceSessions) + .set({ + ...values, + updatedAt: Date.now(), + }) + .where(eq(taskWorkspaceSessions.sessionId, sessionId)) + .run(); + return await readSessionMeta(c, sessionId); +} + +async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise { + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sandboxSessionId, sandboxSessionId)).get(); + if (!row) { + return null; + } + return await readSessionMeta(c, row.sessionId); +} + +async function requireReadySessionMeta(c: any, sessionId: string): Promise { + const meta = await readSessionMeta(c, sessionId); + if (!meta) { + throw new Error(`Unknown workspace 
session: ${sessionId}`); + } + if (meta.status !== "ready" || !meta.sandboxSessionId) { + throw new Error(meta.errorMessage ?? "This workspace session is still preparing"); + } + return meta; +} + +export function requireSendableSessionMeta(meta: any, sessionId: string): any { + if (!meta) { + throw new Error(`Unknown workspace session: ${sessionId}`); + } + if (meta.status !== "ready" || !meta.sandboxSessionId) { + throw new Error(`Session is not ready (status: ${meta.status}). Wait for session provisioning to complete.`); + } + return meta; +} + +function shellFragment(parts: string[]): string { + return parts.join(" && "); +} + +function stableSandboxId(c: any): string { + return c.state.taskId; +} + +async function getTaskSandboxRuntime( + c: any, + record: any, +): Promise<{ + sandbox: any; + sandboxId: string; + sandboxProviderId: string; + switchTarget: string; + cwd: string; +}> { + const { config } = getActorRuntimeContext(); + const sandboxId = stableSandboxId(c); + const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? null); + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {}); + const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; + const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; + + // Resolve the actual repo CWD from the sandbox's $HOME (differs by provider). + const repoCwdResult = await sandbox.repoCwd(); + const cwd = repoCwdResult?.cwd ?? "$HOME/repo"; + const now = Date.now(); + + await c.db + .insert(taskSandboxes) + .values({ + sandboxId, + sandboxProviderId, + sandboxActorId: typeof actorId === "string" ? actorId : null, + switchTarget, + cwd, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskSandboxes.sandboxId, + set: { + sandboxProviderId, + sandboxActorId: typeof actorId === "string" ? 
actorId : null, + switchTarget, + cwd, + updatedAt: now, + }, + }) + .run(); + + await c.db + .update(taskRuntime) + .set({ + activeSandboxId: sandboxId, + activeSwitchTarget: switchTarget, + activeCwd: cwd, + updatedAt: now, + }) + .where(eq(taskRuntime.id, 1)) + .run(); + + return { + sandbox, + sandboxId, + sandboxProviderId, + switchTarget, + cwd, + }; +} + +/** + * Track whether the sandbox repo has been fully prepared (cloned + fetched + checked out) + * for the current actor lifecycle. Subsequent calls can skip the expensive `git fetch` + * when `skipFetch` is true (used by sendWorkspaceMessage to avoid blocking on every prompt). + */ +let sandboxRepoPrepared = false; + +async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { skipFetchIfPrepared?: boolean; authSessionId?: string | null }): Promise { + if (!record.branchName) { + throw new Error("cannot prepare a sandbox repo before the task branch exists"); + } + + // If the repo was already prepared and the caller allows skipping fetch, just return. + // The clone, fetch, and checkout already happened on a prior call. + if (opts?.skipFetchIfPrepared && sandboxRepoPrepared) { + logActorInfo("task.sandbox", "ensureSandboxRepo skipped (already prepared)"); + return; + } + + const repoStart = performance.now(); + + const t0 = performance.now(); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + const metadata = await getRepositoryMetadata(c); + logActorInfo("task.sandbox", "resolveAuth+metadata", { durationMs: Math.round(performance.now() - t0) }); + + const baseRef = metadata.defaultBranch ?? "main"; + // Use $HOME inside the shell script so the path resolves correctly regardless + // of which user the sandbox runs as (E2B: "user", local Docker: "sandbox"). 
+ const script = [ + "set -euo pipefail", + 'REPO_DIR="$HOME/repo"', + 'mkdir -p "$HOME"', + "git config --global credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'", + `if [ ! -d "$REPO_DIR/.git" ]; then rm -rf "$REPO_DIR" && git clone ${JSON.stringify(metadata.remoteUrl)} "$REPO_DIR"; fi`, + 'cd "$REPO_DIR"', + "git fetch origin --prune", + `if git show-ref --verify --quiet refs/remotes/origin/${JSON.stringify(record.branchName).slice(1, -1)}; then target_ref=${JSON.stringify( + `origin/${record.branchName}`, + )}; else target_ref=${JSON.stringify(baseRef)}; fi`, + `git checkout -B ${JSON.stringify(record.branchName)} \"$target_ref\"`, + ]; + + const t1 = performance.now(); + const result = await sandbox.runProcess({ + command: "bash", + args: ["-lc", script.join("; ")], + cwd: "/", + env: auth?.githubToken + ? { + GH_TOKEN: auth.githubToken, + GITHUB_TOKEN: auth.githubToken, + } + : undefined, + timeoutMs: 5 * 60_000, + }); + logActorInfo("task.sandbox", "git clone/fetch/checkout", { + branch: record.branchName, + repo: metadata.remoteUrl, + durationMs: Math.round(performance.now() - t1), + }); + + if ((result.exitCode ?? 0) !== 0) { + throw new Error(`sandbox repo preparation failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); + } + + // On first repo preparation, inject the task owner's git credentials into the sandbox + // so that push/commit operations are authenticated and attributed to the correct user. 
+ if (!sandboxRepoPrepared && opts?.authSessionId) { + const t2 = performance.now(); + await maybeSwapTaskOwner(c, opts.authSessionId, sandbox); + logActorInfo("task.sandbox", "maybeSwapTaskOwner", { durationMs: Math.round(performance.now() - t2) }); + } + + sandboxRepoPrepared = true; + logActorInfo("task.sandbox", "ensureSandboxRepo complete", { totalDurationMs: Math.round(performance.now() - repoStart) }); +} + +async function executeInSandbox( + c: any, + params: { + sandboxId: string; + cwd: string; + command: string; + label: string; + }, +): Promise<{ exitCode: number; result: string }> { + const record = await ensureWorkspaceSeeded(c); + const runtime = await getTaskSandboxRuntime(c, record); + await ensureSandboxRepo(c, runtime.sandbox, record); + const response = await runtime.sandbox.runProcess({ + command: "bash", + args: ["-lc", shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command])], + cwd: "/", + timeoutMs: 5 * 60_000, + }); + + return { + exitCode: response.exitCode ?? 0, + result: [response.stdout, response.stderr].filter(Boolean).join(""), + }; +} + +function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> { + return output + .split("\n") + .map((line) => line.trimEnd()) + .filter(Boolean) + .map((line) => { + const status = line.slice(0, 2).trim(); + const rawPath = line.slice(3).trim(); + const path = rawPath.includes(" -> ") ? (rawPath.split(" -> ").pop() ?? rawPath) : rawPath; + const type = status.includes("D") ? "D" : status.includes("A") || status === "??" ? "A" : "M"; + return { path, type }; + }); +} + +function parseNumstat(output: string): Map { + const map = new Map(); + for (const line of output.split("\n")) { + const trimmed = line.trim(); + if (!trimmed) continue; + const [addedRaw, removedRaw, ...pathParts] = trimmed.split("\t"); + const path = pathParts.join("\t").trim(); + if (!path) continue; + map.set(path, { + added: Number.parseInt(addedRaw ?? 
"0", 10) || 0, + removed: Number.parseInt(removedRaw ?? "0", 10) || 0, + }); + } + return map; +} + +function buildFileTree(paths: string[]): Array { + const root = { + children: new Map(), + }; + + for (const path of paths) { + const parts = path.split("/").filter(Boolean); + let current = root; + let currentPath = ""; + + for (let index = 0; index < parts.length; index += 1) { + const part = parts[index]!; + currentPath = currentPath ? `${currentPath}/${part}` : part; + const isDir = index < parts.length - 1; + let node = current.children.get(part); + if (!node) { + node = { + name: part, + path: currentPath, + isDir, + children: isDir ? new Map() : undefined, + }; + current.children.set(part, node); + } else if (isDir && !(node.children instanceof Map)) { + node.children = new Map(); + } + current = node; + } + } + + function sortNodes(nodes: Iterable): Array { + return [...nodes] + .map((node) => + node.isDir + ? { + name: node.name, + path: node.path, + isDir: true, + children: sortNodes(node.children?.values?.() ?? []), + } + : { + name: node.name, + path: node.path, + isDir: false, + }, + ) + .sort((left, right) => { + if (left.isDir !== right.isDir) { + return left.isDir ? -1 : 1; + } + return left.path.localeCompare(right.path); + }); + } + + return sortNodes(root.children.values()); +} + +async function collectWorkspaceGitState(c: any, record: any) { + const activeSandboxId = record.activeSandboxId; + const activeSandbox = activeSandboxId != null ? ((record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === activeSandboxId) ?? null) : null; + const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? 
null; + if (!activeSandboxId || !cwd) { + return { + fileChanges: [], + diffs: {}, + fileTree: [], + }; + } + + const statusResult = await executeInSandbox(c, { + sandboxId: activeSandboxId, + cwd, + command: "git status --porcelain=v1 -uall", + label: "git status", + }); + if (statusResult.exitCode !== 0) { + return { + fileChanges: [], + diffs: {}, + fileTree: [], + }; + } + + const statusRows = parseGitStatus(statusResult.result); + const numstatResult = await executeInSandbox(c, { + sandboxId: activeSandboxId, + cwd, + command: "git diff --numstat", + label: "git diff numstat", + }); + const numstat = parseNumstat(numstatResult.result); + + const filesResult = await executeInSandbox(c, { + sandboxId: activeSandboxId, + cwd, + command: "git ls-files --cached --others --exclude-standard", + label: "git ls-files", + }); + const allPaths = filesResult.result + .split("\n") + .map((line) => line.trim()) + .filter(Boolean); + + const diffs: Record = {}; + for (const row of statusRows) { + const diffResult = await executeInSandbox(c, { + sandboxId: activeSandboxId, + cwd, + command: `git diff -- ${JSON.stringify(row.path)}`, + label: `git diff ${row.path}`, + }); + diffs[row.path] = diffResult.exitCode === 0 ? diffResult.result : ""; + } + + return { + fileChanges: statusRows.map((row) => { + const counts = numstat.get(row.path) ?? { added: 0, removed: 0 }; + return { + path: row.path, + added: counts.added, + removed: counts.removed, + type: row.type, + }; + }), + diffs, + fileTree: buildFileTree(allPaths), + }; +} + +async function readCachedGitState(c: any): Promise<{ fileChanges: Array; diffs: Record; fileTree: Array; updatedAt: number | null }> { + const row = await c.db + .select({ + gitStateJson: taskRuntime.gitStateJson, + gitStateUpdatedAt: taskRuntime.gitStateUpdatedAt, + }) + .from(taskRuntime) + .where(eq(taskRuntime.id, 1)) + .get(); + const parsed = parseGitState(row?.gitStateJson); + return { + ...parsed, + updatedAt: row?.gitStateUpdatedAt ?? 
null, + }; +} + +async function writeCachedGitState(c: any, gitState: { fileChanges: Array; diffs: Record; fileTree: Array }): Promise { + const now = Date.now(); + await c.db + .update(taskRuntime) + .set({ + gitStateJson: JSON.stringify(gitState), + gitStateUpdatedAt: now, + updatedAt: now, + }) + .where(eq(taskRuntime.id, 1)) + .run(); +} + +async function readSessionTranscript(c: any, record: any, sessionId: string) { + const sandboxId = record.activeSandboxId ?? stableSandboxId(c); + if (!sandboxId) { + return []; + } + + const sandbox = getTaskSandbox(c, c.state.organizationId, sandboxId); + const page = await sandbox.getEvents({ + sessionId, + limit: 100, + }); + return page.items.map((event: any) => ({ + id: event.id, + eventIndex: event.eventIndex, + sessionId: event.sessionId, + createdAt: event.createdAt, + connectionId: event.connectionId, + sender: event.sender, + payload: event.payload, + })); +} + +async function writeSessionTranscript(c: any, sessionId: string, transcript: Array): Promise { + await updateSessionMeta(c, sessionId, { + transcriptJson: JSON.stringify(transcript), + transcriptUpdatedAt: Date.now(), + }); +} + +function fireRefreshDerived(c: any): void { + const self = selfTask(c); + void self.refreshDerived({}).catch(() => {}); +} + +function fireRefreshSessionTranscript(c: any, sessionId: string): void { + const self = selfTask(c); + void self.refreshSessionTranscript({ sessionId }).catch(() => {}); +} + +async function enqueueWorkspaceEnsureSession(c: any, sessionId: string): Promise { + const self = selfTask(c); + await self.send(taskWorkflowQueueName("task.command.workspace.ensure_session" as any), { sessionId }, { wait: false }); +} + +function pendingWorkspaceSessionStatus(record: any): "pending_provision" | "pending_session_create" { + return record.activeSandboxId ? 
"pending_session_create" : "pending_provision"; +} + +async function maybeScheduleWorkspaceRefreshes(c: any, record: any, sessions: Array): Promise { + const gitState = await readCachedGitState(c); + if (record.activeSandboxId && !gitState.updatedAt) { + fireRefreshDerived(c); + } + + for (const session of sessions) { + if (session.closed || session.status !== "ready" || !session.sandboxSessionId || session.transcriptUpdatedAt) { + continue; + } + fireRefreshSessionTranscript(c, session.sandboxSessionId); + } +} + +function computeWorkspaceTaskStatus(record: any, sessions: Array) { + if (record.status && String(record.status).startsWith("init_")) { + return record.status; + } + if (record.status === "archived" || record.status === "killed") { + return record.status; + } + if (sessions.some((session) => session.closed !== true && session.thinkingSinceMs)) { + return "running"; + } + if (sessions.some((session) => session.closed !== true && session.status === "error")) { + return "error"; + } + return "idle"; +} + +export async function ensureWorkspaceSeeded(c: any): Promise { + return await getCurrentRecord(c); +} + +function buildSessionSummary(meta: any, userState?: any): any { + const derivedSandboxSessionId = meta.status === "ready" ? (meta.sandboxSessionId ?? null) : null; + const sessionStatus = + meta.status === "pending_provision" || meta.status === "pending_session_create" + ? meta.status + : meta.thinkingSinceMs + ? "running" + : meta.status === "error" + ? "error" + : meta.status === "ready" && derivedSandboxSessionId + ? "idle" + : "ready"; + let thinkingSinceMs = meta.thinkingSinceMs ?? 
null; + let unread = Boolean(userState?.unread); + if (thinkingSinceMs && sessionStatus !== "running") { + thinkingSinceMs = null; + unread = true; + } + + return { + id: meta.id, + sessionId: meta.sessionId, + sandboxSessionId: derivedSandboxSessionId, + sessionName: meta.sessionName, + agent: agentKindForModel(meta.model), + model: meta.model, + status: sessionStatus, + thinkingSinceMs: sessionStatus === "running" ? thinkingSinceMs : null, + unread, + created: Boolean(meta.created || derivedSandboxSessionId), + errorMessage: meta.errorMessage ?? null, + }; +} + +function buildSessionDetailFromMeta(meta: any, userState?: any): any { + const summary = buildSessionSummary(meta, userState); + return { + sessionId: meta.sessionId, + sandboxSessionId: summary.sandboxSessionId ?? null, + sessionName: summary.sessionName, + agent: summary.agent, + model: summary.model, + status: summary.status, + thinkingSinceMs: summary.thinkingSinceMs, + unread: summary.unread, + created: summary.created, + errorMessage: summary.errorMessage, + draft: { + text: userState?.draftText ?? "", + attachments: Array.isArray(userState?.draftAttachments) ? userState.draftAttachments : [], + updatedAtMs: userState?.draftUpdatedAtMs ?? null, + }, + transcript: meta.transcript ?? [], + }; +} + +/** + * Builds a WorkspaceTaskSummary from local task actor state. Task actors push + * this to the parent organization actor so organization sidebar reads stay local. 
+ */ +export async function buildTaskSummary(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); + const repositoryMetadata = await getRepositoryMetadata(c); + const sessions = await listSessionMetaRows(c); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const userTaskState = await getUserTaskState(c, authSessionId); + const taskStatus = computeWorkspaceTaskStatus(record, sessions); + const activeSessionId = + userTaskState.activeSessionId && sessions.some((meta) => meta.sessionId === userTaskState.activeSessionId) ? userTaskState.activeSessionId : null; + + const owner = await readTaskOwner(c); + + return { + id: c.state.taskId, + repoId: c.state.repoId, + title: record.title ?? "New Task", + status: taskStatus, + repoName: repoLabelFromRemote(repositoryMetadata.remoteUrl), + updatedAtMs: record.updatedAt, + branch: record.branchName, + pullRequest: record.pullRequest ?? null, + activeSessionId, + sessionsSummary: sessions.map((meta) => buildSessionSummary(meta, userTaskState.bySessionId.get(meta.sessionId))), + primaryUserLogin: owner?.primaryGithubLogin ?? null, + primaryUserAvatarUrl: owner?.primaryGithubAvatarUrl ?? null, + }; +} + +/** + * Builds a WorkspaceTaskDetail from local task actor state for direct task + * subscribers. This is a full replacement payload, not a patch. + */ +export async function buildTaskDetail(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); + const gitState = await readCachedGitState(c); + const sessions = await listSessionMetaRows(c); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const summary = await buildTaskSummary(c, authSessionId); + + return { + ...summary, + task: record.task, + fileChanges: gitState.fileChanges, + diffs: gitState.diffs, + fileTree: gitState.fileTree, + minutesUsed: 0, + sandboxes: await Promise.all( + (record.sandboxes ?? 
[]).map(async (sandbox: any) => { + let url: string | null = null; + if (sandbox.sandboxId) { + try { + const handle = getTaskSandbox(c, c.state.organizationId, sandbox.sandboxId); + const conn = await handle.sandboxAgentConnection(); + if (conn?.endpoint && !conn.endpoint.startsWith("mock://")) { + url = conn.endpoint; + } + } catch { + // Sandbox may not be running + } + } + return { + sandboxProviderId: sandbox.sandboxProviderId, + sandboxId: sandbox.sandboxId, + cwd: sandbox.cwd ?? null, + url, + }; + }), + ), + activeSandboxId: record.activeSandboxId ?? null, + }; +} + +/** + * Builds a WorkspaceSessionDetail for a specific session. + */ +export async function buildSessionDetail(c: any, sessionId: string, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + throw new Error(`Unknown workspace session: ${sessionId}`); + } + const userTaskState = await getUserTaskState(c, authSessionId); + const userSessionState = userTaskState.bySessionId.get(sessionId); + + // Skip live transcript fetch if the sandbox session doesn't exist yet or + // the session is still provisioning — the sandbox API will block/timeout. + const isPending = meta.status === "pending_provision" || meta.status === "pending_session_create"; + if (!meta.sandboxSessionId || isPending) { + return buildSessionDetailFromMeta(meta, userSessionState); + } + + try { + const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); + if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { + await writeSessionTranscript(c, meta.sessionId, transcript); + return buildSessionDetailFromMeta( + { + ...meta, + transcript, + transcriptUpdatedAt: Date.now(), + }, + userSessionState, + ); + } + } catch (error) { + // Session detail reads degrade to cached transcript when sandbox is unavailable. 
+ logActorWarning("task", "readSessionTranscript failed, using cached transcript", { + taskId: c.state.taskId, + sessionId, + error: resolveErrorMessage(error), + }); + } + + return buildSessionDetailFromMeta(meta, userSessionState); +} + +export async function getTaskSummary(c: any): Promise { + return await buildTaskSummary(c); +} + +export async function getTaskDetail(c: any, authSessionId?: string): Promise { + return await buildTaskDetail(c, authSessionId); +} + +export async function getSessionDetail(c: any, sessionId: string, authSessionId?: string): Promise { + return await buildSessionDetail(c, sessionId, authSessionId); +} + +/** + * Replaces the old notifyWorkspaceUpdated pattern. + * + * The task actor emits two kinds of updates: + * - Push summary state up to the parent organization actor so the sidebar + * materialized projection stays current. + * - Broadcast full detail/session payloads down to direct task subscribers. + */ +export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise { + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.send( + organizationWorkflowQueueName("organization.command.applyTaskSummaryUpdate"), + { taskSummary: await buildTaskSummary(c) }, + { wait: false }, + ); + c.broadcast("taskUpdated", { + type: "taskUpdated", + detail: await buildTaskDetail(c), + }); + + if (options?.sessionId) { + c.broadcast("sessionUpdated", { + type: "sessionUpdated", + session: await buildSessionDetail(c, options.sessionId), + }); + } +} + +export async function refreshWorkspaceDerivedState(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); + const gitState = await collectWorkspaceGitState(c, record); + await writeCachedGitState(c, gitState); + await broadcastTaskUpdate(c); +} + +export async function refreshWorkspaceSessionTranscript(c: any, sessionId: string): Promise { + const record = await ensureWorkspaceSeeded(c); + const meta = (await 
readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await readSessionMeta(c, sessionId)); + if (!meta?.sandboxSessionId) { + return; + } + + const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); + await writeSessionTranscript(c, meta.sessionId, transcript); + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); +} + +export async function renameWorkspaceTask(c: any, value: string): Promise { + const nextTitle = value.trim(); + if (!nextTitle) { + throw new Error("task title is required"); + } + + await c.db + .update(taskTable) + .set({ + title: nextTitle, + updatedAt: Date.now(), + }) + .where(eq(taskTable.id, 1)) + .run(); + await broadcastTaskUpdate(c); +} + +export async function syncTaskPullRequest(c: any, pullRequest: any): Promise { + const now = pullRequest?.updatedAtMs ?? Date.now(); + await c.db + .update(taskTable) + .set({ + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAt: now, + }) + .where(eq(taskTable.id, 1)) + .run(); + await broadcastTaskUpdate(c); +} + +export async function createWorkspaceSession(c: any, model?: string, authSessionId?: string): Promise<{ sessionId: string }> { + const sessionId = `session-${randomUUID()}`; + const record = await ensureWorkspaceSeeded(c); + await ensureSessionMeta(c, { + sessionId, + model: model ?? 
(await resolveDefaultModel(c, authSessionId)), + authSessionId, + sandboxSessionId: null, + status: pendingWorkspaceSessionStatus(record), + created: false, + }); + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, + unread: false, + }); + await broadcastTaskUpdate(c, { sessionId: sessionId }); + await enqueueWorkspaceEnsureSession(c, sessionId); + return { sessionId }; +} + +export async function ensureWorkspaceSession(c: any, sessionId: string, model?: string, authSessionId?: string): Promise { + const ensureStart = performance.now(); + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + + const record = await ensureWorkspaceSeeded(c); + if (meta.sandboxSessionId && meta.status === "ready") { + fireRefreshSessionTranscript(c, meta.sandboxSessionId); + await broadcastTaskUpdate(c, { sessionId: sessionId }); + return; + } + + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, + status: "pending_session_create", + errorMessage: null, + }); + + try { + const t0 = performance.now(); + const runtime = await getTaskSandboxRuntime(c, record); + logActorInfo("task.session", "getTaskSandboxRuntime", { sessionId, durationMs: Math.round(performance.now() - t0) }); + + const t1 = performance.now(); + await ensureSandboxRepo(c, runtime.sandbox, record); + logActorInfo("task.session", "ensureSandboxRepo", { sessionId, durationMs: Math.round(performance.now() - t1) }); + + const resolvedModel = model ?? meta.model ?? (await resolveDefaultModel(c, authSessionId)); + const resolvedAgent = await resolveSandboxAgentForModel(c, resolvedModel); + + const t2 = performance.now(); + await runtime.sandbox.createSession({ + id: meta.sandboxSessionId ?? 
sessionId, + agent: resolvedAgent, + model: resolvedModel, + sessionInit: { + cwd: runtime.cwd, + }, + }); + logActorInfo("task.session", "createSession", { sessionId, agent: resolvedAgent, model: resolvedModel, durationMs: Math.round(performance.now() - t2) }); + + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, + status: "ready", + errorMessage: null, + }); + logActorInfo("task.session", "ensureWorkspaceSession complete", { sessionId, totalDurationMs: Math.round(performance.now() - ensureStart) }); + fireRefreshSessionTranscript(c, meta.sandboxSessionId ?? sessionId); + } catch (error) { + await updateSessionMeta(c, sessionId, { + status: "error", + errorMessage: error instanceof Error ? error.message : String(error), + }); + } + + await broadcastTaskUpdate(c, { sessionId: sessionId }); +} + +export async function enqueuePendingWorkspaceSessions(c: any): Promise { + const pending = (await listSessionMetaRows(c, { includeClosed: true })).filter( + (row) => row.closed !== true && row.status !== "ready" && row.status !== "error", + ); + + const self = selfTask(c); + for (const row of pending) { + await self.send(taskWorkflowQueueName("task.command.workspace.ensure_session" as any), { sessionId: row.sessionId, model: row.model }, { wait: false }); + } +} + +export async function renameWorkspaceSession(c: any, sessionId: string, title: string): Promise { + const trimmed = title.trim(); + if (!trimmed) { + throw new Error("session title is required"); + } + await updateSessionMeta(c, sessionId, { + sessionName: trimmed, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function selectWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + 
+export async function setWorkspaceSessionUnread(c: any, sessionId: string, unread: boolean, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { + unread, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function updateWorkspaceDraft(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { + draftText: text, + draftAttachmentsJson: JSON.stringify(attachments), + draftUpdatedAt: Date.now(), + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function changeWorkspaceModel(c: any, sessionId: string, model: string, _authSessionId?: string): Promise { + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + + if (meta.model === model) { + return; + } + + const record = await ensureWorkspaceSeeded(c); + let nextMeta = await updateSessionMeta(c, sessionId, { + model, + }); + let shouldEnsure = nextMeta.status === "pending_provision" || nextMeta.status === "pending_session_create" || nextMeta.status === "error"; + + if (shouldRecreateSessionForModelChange(nextMeta)) { + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + await sandbox.destroySession(nextMeta.sandboxSessionId); + nextMeta = await updateSessionMeta(c, sessionId, { + sandboxSessionId: null, + status: pendingWorkspaceSessionStatus(record), + errorMessage: null, + transcriptJson: "[]", + transcriptUpdatedAt: null, + thinkingSinceMs: null, + }); + shouldEnsure = true; + } else if (nextMeta.status === "ready" && nextMeta.sandboxSessionId) { + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + if (typeof sandbox.rawSendSessionMethod === "function") { + try { + await sandbox.rawSendSessionMethod(nextMeta.sandboxSessionId, "session/set_config_option", { + configId: "model", + value: model, + }); + } catch { + // Some agents do not allow 
live model updates. Preserve the new preference in metadata. + } + } + } else if (nextMeta.status !== "ready") { + nextMeta = await updateSessionMeta(c, sessionId, { + status: pendingWorkspaceSessionStatus(record), + errorMessage: null, + }); + } + + if (shouldEnsure) { + await enqueueWorkspaceEnsureSession(c, sessionId); + } + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function sendWorkspaceMessage(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { + const sendStart = performance.now(); + const meta = requireSendableSessionMeta(await readSessionMeta(c, sessionId), sessionId); + const record = await ensureWorkspaceSeeded(c); + + const t0 = performance.now(); + const runtime = await getTaskSandboxRuntime(c, record); + logActorInfo("task.message", "getTaskSandboxRuntime", { sessionId, durationMs: Math.round(performance.now() - t0) }); + + const t1 = performance.now(); + // Skip git fetch on subsequent messages — the repo was already prepared during session + // creation. This avoids a 5-30s network round-trip to GitHub on every prompt. + await ensureSandboxRepo(c, runtime.sandbox, record, { skipFetchIfPrepared: true, authSessionId }); + logActorInfo("task.message", "ensureSandboxRepo", { sessionId, durationMs: Math.round(performance.now() - t1) }); + + // Check if the task owner needs to swap. If a different user is sending this message, + // update the owner record and inject their git credentials into the sandbox. 
+ const ownerSwapped = await maybeSwapTaskOwner(c, authSessionId, runtime.sandbox); + if (ownerSwapped) { + await broadcastTaskUpdate(c); + } + const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)].filter( + Boolean, + ); + if (prompt.length === 0) { + throw new Error("message text is required"); + } + + await updateSessionMeta(c, sessionId, { + created: 1, + thinkingSinceMs: Date.now(), + }); + await upsertUserTaskState(c, authSessionId, sessionId, { + unread: false, + draftText: "", + draftAttachmentsJson: "[]", + draftUpdatedAt: Date.now(), + activeSessionId: sessionId, + }); + + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); + + try { + const t2 = performance.now(); + await runtime.sandbox.sendPrompt({ + sessionId: meta.sandboxSessionId, + prompt: prompt.join("\n\n"), + }); + logActorInfo("task.message", "sendPrompt", { sessionId, durationMs: Math.round(performance.now() - t2) }); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); + } catch (error) { + await updateSessionMeta(c, sessionId, { + status: "error", + errorMessage: error instanceof Error ? 
error.message : String(error), + }); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); + throw error; + } + logActorInfo("task.message", "sendWorkspaceMessage complete", { sessionId, totalDurationMs: Math.round(performance.now() - sendStart) }); +} + +export async function stopWorkspaceSession(c: any, sessionId: string): Promise { + const meta = await requireReadySessionMeta(c, sessionId); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + await sandbox.destroySession(meta.sandboxSessionId); + await updateSessionMeta(c, sessionId, { + thinkingSinceMs: null, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function syncWorkspaceSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise { + const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId })); + let changed = false; + + if (status === "running") { + if (!meta.thinkingSinceMs) { + await updateSessionMeta(c, sessionId, { + thinkingSinceMs: at, + }); + changed = true; + } + } else { + if (meta.thinkingSinceMs) { + await updateSessionMeta(c, sessionId, { + thinkingSinceMs: null, + }); + changed = true; + } + } + + if (changed) { + const sessions = await listSessionMetaRows(c, { includeClosed: true }); + const nextStatus = computeWorkspaceTaskStatus(await ensureWorkspaceSeeded(c), sessions); + await c.db + .update(taskTable) + .set({ + status: nextStatus, + updatedAt: at, + }) + .where(eq(taskTable.id, 1)) + .run(); + fireRefreshSessionTranscript(c, sessionId); + if (status !== "running") { + fireRefreshDerived(c); + } + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); + } +} + +export async function closeWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { + const sessions = await listSessionMetaRows(c); + if (sessions.filter((candidate) => 
candidate.closed !== true).length <= 1) { + return; + } + + const meta = await readSessionMeta(c, sessionId); + if (!meta) { + return; + } + if (meta.sandboxSessionId) { + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + await sandbox.destroySession(meta.sandboxSessionId); + } + await updateSessionMeta(c, sessionId, { + closed: 1, + thinkingSinceMs: null, + }); + const remainingSessions = sessions.filter((candidate) => candidate.sessionId !== sessionId && candidate.closed !== true); + const userTaskState = await getUserTaskState(c, authSessionId); + if (userTaskState.activeSessionId === sessionId && remainingSessions[0]) { + await upsertUserTaskState(c, authSessionId, remainingSessions[0].sessionId, { + activeSessionId: remainingSessions[0].sessionId, + }); + } + await deleteUserTaskState(c, authSessionId, sessionId); + await broadcastTaskUpdate(c); +} + +export async function markWorkspaceUnread(c: any, authSessionId?: string): Promise { + const sessions = await listSessionMetaRows(c); + const latest = sessions[sessions.length - 1]; + if (!latest) { + return; + } + await upsertUserTaskState(c, authSessionId, latest.sessionId, { + unread: true, + }); + await broadcastTaskUpdate(c, { sessionId: latest.sessionId }); +} + +export async function publishWorkspacePr(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); + if (!record.branchName) { + throw new Error("cannot publish PR without a branch"); + } + const metadata = await getRepositoryMetadata(c); + const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(metadata.remoteUrl); + if (!repoFullName) { + throw new Error(`Unable to resolve GitHub repository for ${metadata.remoteUrl}`); + } + const { driver } = getActorRuntimeContext(); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + const created = await driver.github.createPr(repoFullName, record.branchName, record.title ?? 
record.task, undefined, { + githubToken: auth?.githubToken ?? null, + baseBranch: metadata.defaultBranch ?? undefined, + }); + await syncTaskPullRequest(c, { + number: created.number, + status: "ready", + title: record.title ?? record.task, + body: null, + state: "open", + url: created.url, + headRefName: record.branchName, + baseRefName: metadata.defaultBranch ?? "main", + authorLogin: null, + isDraft: false, + merged: false, + updatedAtMs: Date.now(), + }); +} + +export async function revertWorkspaceFile(c: any, path: string): Promise { + const record = await ensureWorkspaceSeeded(c); + if (!record.activeSandboxId) { + throw new Error("cannot revert file without an active sandbox"); + } + const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null; + if (!activeSandbox?.cwd) { + throw new Error("cannot revert file without a sandbox cwd"); + } + + const result = await executeInSandbox(c, { + sandboxId: record.activeSandboxId, + cwd: activeSandbox.cwd, + command: `if git ls-files --error-unmatch -- ${JSON.stringify(path)} >/dev/null 2>&1; then git restore --staged --worktree -- ${JSON.stringify(path)} || git checkout -- ${JSON.stringify(path)}; else rm -f ${JSON.stringify(path)}; fi`, + label: `git restore ${path}`, + }); + if (result.exitCode !== 0) { + throw new Error(`file revert failed (${result.exitCode}): ${result.result}`); + } + fireRefreshDerived(c); + await broadcastTaskUpdate(c); +} diff --git a/foundry/packages/backend/src/actors/user/actions/better-auth.ts b/foundry/packages/backend/src/actors/user/actions/better-auth.ts new file mode 100644 index 0000000..3ef8656 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/better-auth.ts @@ -0,0 +1,105 @@ +import { asc, count as sqlCount, desc } from "drizzle-orm"; +import { applyJoinToRow, applyJoinToRows, buildWhere, columnFor, materializeRow, persistInput, persistPatch, tableFor } from "../query-helpers.js"; + +// Exception 
to the CLAUDE.md queue-for-mutations rule: Better Auth adapter operations +// use direct actions even for mutations. Better Auth runs during OAuth callbacks on the +// HTTP request path, not through the normal organization lifecycle. Routing through the +// queue adds multiple sequential round-trips (each with actor wake-up + step overhead) +// that cause 30-second OAuth callbacks and proxy retry storms. These mutations are simple +// SQLite upserts/deletes with no cross-actor coordination or broadcast side effects. +export const betterAuthActions = { + // --- Mutation actions --- + async betterAuthCreateRecord(c, input: { model: string; data: Record }) { + const table = tableFor(input.model); + const persisted = persistInput(input.model, input.data); + await c.db + .insert(table) + .values(persisted as any) + .run(); + const row = await c.db + .select() + .from(table) + .where(buildWhere(table, [{ field: "id", value: input.data.id }])!) + .get(); + return materializeRow(input.model, row); + }, + + async betterAuthUpdateRecord(c, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("betterAuthUpdateRecord requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + return materializeRow(input.model, await c.db.select().from(table).where(predicate).get()); + }, + + async betterAuthUpdateManyRecords(c, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("betterAuthUpdateManyRecords requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get(); + return row?.value 
?? 0; + }, + + async betterAuthDeleteRecord(c, input: { model: string; where: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("betterAuthDeleteRecord requires a where clause"); + await c.db.delete(table).where(predicate).run(); + }, + + async betterAuthDeleteManyRecords(c, input: { model: string; where: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("betterAuthDeleteManyRecords requires a where clause"); + const rows = await c.db.select().from(table).where(predicate).all(); + await c.db.delete(table).where(predicate).run(); + return rows.length; + }, + + // --- Read actions --- + async betterAuthFindOneRecord(c, input: { model: string; where: any[]; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get(); + return await applyJoinToRow(c, input.model, row ?? null, input.join); + }, + + async betterAuthFindManyRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + let query: any = c.db.select().from(table); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = columnFor(input.model, table, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + const rows = await query.all(); + return await applyJoinToRows(c, input.model, rows, input.join); + }, + + async betterAuthCountRecords(c, input: { model: string; where?: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(table).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/user/actions/user.ts b/foundry/packages/backend/src/actors/user/actions/user.ts new file mode 100644 index 0000000..f251c95 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/user.ts @@ -0,0 +1,188 @@ +import { eq, and } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "../db/schema.js"; +import { materializeRow } from "../query-helpers.js"; + +export const userActions = { + // Custom Foundry action — not part of Better Auth. 
+ async getAppAuthState(c, input: { sessionId: string }) { + const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get(); + if (!session) { + return null; + } + const [user, profile, currentSessionState, accounts] = await Promise.all([ + c.db.select().from(authUsers).where(eq(authUsers.authUserId, session.userId)).get(), + c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(), + c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(), + c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(), + ]); + return { + session, + user: materializeRow("user", user), + profile: profile ?? null, + sessionState: currentSessionState ?? null, + accounts, + }; + }, + + // Custom Foundry action — not part of Better Auth. + async getTaskState(c, input: { taskId: string }) { + const rows = await c.db.select().from(userTaskState).where(eq(userTaskState.taskId, input.taskId)).all(); + const activeSessionId = rows.find((row) => typeof row.activeSessionId === "string" && row.activeSessionId.length > 0)?.activeSessionId ?? null; + return { + taskId: input.taskId, + activeSessionId, + sessions: rows.map((row) => ({ + sessionId: row.sessionId, + unread: row.unread === 1, + draftText: row.draftText, + draftAttachmentsJson: row.draftAttachmentsJson, + draftUpdatedAt: row.draftUpdatedAt ?? 
null, + updatedAt: row.updatedAt, + })), + }; + }, + + // --- Mutation actions (migrated from queue) --- + + async upsertProfile( + c, + input: { + userId: string; + patch: { + githubAccountId?: string | null; + githubLogin?: string | null; + roleLabel?: string; + defaultModel?: string; + eligibleOrganizationIdsJson?: string; + starterRepoStatus?: string; + starterRepoStarredAt?: number | null; + starterRepoSkippedAt?: number | null; + }; + }, + ) { + const now = Date.now(); + await c.db + .insert(userProfiles) + .values({ + id: 1, + userId: input.userId, + githubAccountId: input.patch.githubAccountId ?? null, + githubLogin: input.patch.githubLogin ?? null, + roleLabel: input.patch.roleLabel ?? "GitHub user", + defaultModel: input.patch.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]", + starterRepoStatus: input.patch.starterRepoStatus ?? "pending", + starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null, + starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: userProfiles.userId, + set: { + ...(input.patch.githubAccountId !== undefined ? { githubAccountId: input.patch.githubAccountId } : {}), + ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}), + ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}), + ...(input.patch.defaultModel !== undefined ? { defaultModel: input.patch.defaultModel } : {}), + ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}), + ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}), + ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}), + ...(input.patch.starterRepoSkippedAt !== undefined ? 
{ starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}), + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get(); + }, + + async upsertSessionState(c, input: { sessionId: string; activeOrganizationId: string | null }) { + const now = Date.now(); + await c.db + .insert(sessionState) + .values({ + sessionId: input.sessionId, + activeOrganizationId: input.activeOrganizationId, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: sessionState.sessionId, + set: { activeOrganizationId: input.activeOrganizationId, updatedAt: now }, + }) + .run(); + return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(); + }, + + async upsertTaskState( + c, + input: { + taskId: string; + sessionId: string; + patch: { + activeSessionId?: string | null; + unread?: boolean; + draftText?: string; + draftAttachmentsJson?: string; + draftUpdatedAt?: number | null; + }; + }, + ) { + const now = Date.now(); + const existing = await c.db + .select() + .from(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .get(); + + if (input.patch.activeSessionId !== undefined) { + await c.db + .update(userTaskState) + .set({ activeSessionId: input.patch.activeSessionId, updatedAt: now }) + .where(eq(userTaskState.taskId, input.taskId)) + .run(); + } + + await c.db + .insert(userTaskState) + .values({ + taskId: input.taskId, + sessionId: input.sessionId, + activeSessionId: input.patch.activeSessionId ?? existing?.activeSessionId ?? null, + unread: input.patch.unread !== undefined ? (input.patch.unread ? 1 : 0) : (existing?.unread ?? 0), + draftText: input.patch.draftText ?? existing?.draftText ?? "", + draftAttachmentsJson: input.patch.draftAttachmentsJson ?? existing?.draftAttachmentsJson ?? "[]", + draftUpdatedAt: input.patch.draftUpdatedAt === undefined ? (existing?.draftUpdatedAt ?? 
null) : input.patch.draftUpdatedAt, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: [userTaskState.taskId, userTaskState.sessionId], + set: { + ...(input.patch.activeSessionId !== undefined ? { activeSessionId: input.patch.activeSessionId } : {}), + ...(input.patch.unread !== undefined ? { unread: input.patch.unread ? 1 : 0 } : {}), + ...(input.patch.draftText !== undefined ? { draftText: input.patch.draftText } : {}), + ...(input.patch.draftAttachmentsJson !== undefined ? { draftAttachmentsJson: input.patch.draftAttachmentsJson } : {}), + ...(input.patch.draftUpdatedAt !== undefined ? { draftUpdatedAt: input.patch.draftUpdatedAt } : {}), + updatedAt: now, + }, + }) + .run(); + + return await c.db + .select() + .from(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .get(); + }, + + async deleteTaskState(c, input: { taskId: string; sessionId?: string }) { + if (input.sessionId) { + await c.db + .delete(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .run(); + return; + } + await c.db.delete(userTaskState).where(eq(userTaskState.taskId, input.taskId)).run(); + }, +}; diff --git a/foundry/packages/backend/src/actors/user/db/db.ts b/foundry/packages/backend/src/actors/user/db/db.ts new file mode 100644 index 0000000..a864893 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const userDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/user/db/migrations.ts b/foundry/packages/backend/src/actors/user/db/migrations.ts new file mode 100644 index 0000000..da92bdc --- /dev/null +++ b/foundry/packages/backend/src/actors/user/db/migrations.ts @@ -0,0 +1,106 @@ +// This file is generated by 
src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. + +const journal = { + entries: [ + { + idx: 0, + when: 1773446400000, + tag: "0000_auth_user", + breakpoints: true, + }, + { + idx: 1, + when: 1773532800000, + tag: "0001_user_task_state", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`user\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`auth_user_id\` text NOT NULL, + \`name\` text NOT NULL, + \`email\` text NOT NULL, + \`email_verified\` integer NOT NULL, + \`image\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_singleton_id_check\` CHECK(\`id\` = 1) +); +--> statement-breakpoint +CREATE UNIQUE INDEX \`user_auth_user_id_idx\` ON \`user\` (\`auth_user_id\`); +--> statement-breakpoint +CREATE TABLE \`session\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`token\` text NOT NULL, + \`user_id\` text NOT NULL, + \`expires_at\` integer NOT NULL, + \`ip_address\` text, + \`user_agent\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE UNIQUE INDEX \`session_token_idx\` ON \`session\` (\`token\`); +--> statement-breakpoint +CREATE TABLE \`account\` ( + \`id\` text PRIMARY KEY NOT NULL, + \`account_id\` text NOT NULL, + \`provider_id\` text NOT NULL, + \`user_id\` text NOT NULL, + \`access_token\` text, + \`refresh_token\` text, + \`id_token\` text, + \`access_token_expires_at\` integer, + \`refresh_token_expires_at\` integer, + \`scope\` text, + \`password\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE UNIQUE INDEX \`account_provider_account_idx\` ON \`account\` (\`provider_id\`, \`account_id\`); +--> statement-breakpoint +CREATE TABLE \`user_profiles\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`user_id\` text 
NOT NULL, + \`github_account_id\` text, + \`github_login\` text, + \`role_label\` text NOT NULL, + \`default_model\` text DEFAULT 'gpt-5.3-codex' NOT NULL, + \`eligible_organization_ids_json\` text NOT NULL, + \`starter_repo_status\` text NOT NULL, + \`starter_repo_starred_at\` integer, + \`starter_repo_skipped_at\` integer, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_profiles_singleton_id_check\` CHECK(\`id\` = 1) +); +--> statement-breakpoint +CREATE UNIQUE INDEX \`user_profiles_user_id_idx\` ON \`user_profiles\` (\`user_id\`); +--> statement-breakpoint +CREATE TABLE \`session_state\` ( + \`session_id\` text PRIMARY KEY NOT NULL, + \`active_organization_id\` text, + \`created_at\` integer NOT NULL, + \`updated_at\` integer NOT NULL +);`, + m0001: `CREATE TABLE \`user_task_state\` ( + \`task_id\` text NOT NULL, + \`session_id\` text NOT NULL, + \`active_session_id\` text, + \`unread\` integer DEFAULT 0 NOT NULL, + \`draft_text\` text DEFAULT '' NOT NULL, + \`draft_attachments_json\` text DEFAULT '[]' NOT NULL, + \`draft_updated_at\` integer, + \`updated_at\` integer NOT NULL, + PRIMARY KEY(\`task_id\`, \`session_id\`) +);`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/user/db/schema.ts b/foundry/packages/backend/src/actors/user/db/schema.ts new file mode 100644 index 0000000..6a87a11 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/db/schema.ts @@ -0,0 +1,112 @@ +import { check, integer, primaryKey, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authUsers = sqliteTable( + "user", + { + id: integer("id").primaryKey(), + authUserId: text("auth_user_id").notNull(), + name: text("name").notNull(), + email: text("email").notNull(), + emailVerified: 
integer("email_verified").notNull(), + image: text("image"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + authUserIdIdx: uniqueIndex("user_auth_user_id_idx").on(table.authUserId), + singletonCheck: check("user_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authSessions = sqliteTable( + "session", + { + id: text("id").notNull().primaryKey(), + token: text("token").notNull(), + userId: text("user_id").notNull(), + expiresAt: integer("expires_at").notNull(), + ipAddress: text("ip_address"), + userAgent: text("user_agent"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + tokenIdx: uniqueIndex("session_token_idx").on(table.token), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authAccounts = sqliteTable( + "account", + { + id: text("id").notNull().primaryKey(), + accountId: text("account_id").notNull(), + providerId: text("provider_id").notNull(), + userId: text("user_id").notNull(), + accessToken: text("access_token"), + refreshToken: text("refresh_token"), + idToken: text("id_token"), + accessTokenExpiresAt: integer("access_token_expires_at"), + refreshTokenExpiresAt: integer("refresh_token_expires_at"), + scope: text("scope"), + password: text("password"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId), + }), +); + +/** Custom Foundry table — not part of Better Auth. 
*/ +export const userProfiles = sqliteTable( + "user_profiles", + { + id: integer("id").primaryKey(), + userId: text("user_id").notNull(), + githubAccountId: text("github_account_id"), + githubLogin: text("github_login"), + roleLabel: text("role_label").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(), + starterRepoStatus: text("starter_repo_status").notNull(), + starterRepoStarredAt: integer("starter_repo_starred_at"), + starterRepoSkippedAt: integer("starter_repo_skipped_at"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + userIdIdx: uniqueIndex("user_profiles_user_id_idx").on(table.userId), + singletonCheck: check("user_profiles_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Custom Foundry table — not part of Better Auth. */ +export const sessionState = sqliteTable("session_state", { + sessionId: text("session_id").notNull().primaryKey(), + activeOrganizationId: text("active_organization_id"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** Custom Foundry table — not part of Better Auth. Stores per-user task/session UI state. 
*/ +export const userTaskState = sqliteTable( + "user_task_state", + { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + pk: primaryKey({ columns: [table.taskId, table.sessionId] }), + }), +); diff --git a/foundry/packages/backend/src/actors/user/index.ts b/foundry/packages/backend/src/actors/user/index.ts new file mode 100644 index 0000000..0deb1cb --- /dev/null +++ b/foundry/packages/backend/src/actors/user/index.ts @@ -0,0 +1,20 @@ +import { actor } from "rivetkit"; +import { userDb } from "./db/db.js"; +import { betterAuthActions } from "./actions/better-auth.js"; +import { userActions } from "./actions/user.js"; + +export const user = actor({ + db: userDb, + options: { + name: "User", + icon: "shield", + actionTimeout: 60_000, + }, + createState: (_c, input: { userId: string }) => ({ + userId: input.userId, + }), + actions: { + ...betterAuthActions, + ...userActions, + }, +}); diff --git a/foundry/packages/backend/src/actors/user/query-helpers.ts b/foundry/packages/backend/src/actors/user/query-helpers.ts new file mode 100644 index 0000000..5bdee10 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/query-helpers.ts @@ -0,0 +1,197 @@ +import { and, eq, inArray, isNotNull, isNull, like, lt, lte, gt, gte, ne, notInArray, or } from "drizzle-orm"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js"; + +export const userTables = { + user: authUsers, + session: authSessions, + account: authAccounts, + userProfiles, + sessionState, + userTaskState, +} as const; + +export function tableFor(model: string) { + const table = 
userTables[model as keyof typeof userTables]; + if (!table) { + throw new Error(`Unsupported user model: ${model}`); + } + return table as any; +} + +function dbFieldFor(model: string, field: string): string { + if (model === "user" && field === "id") { + return "authUserId"; + } + return field; +} + +export function materializeRow(model: string, row: any) { + if (!row || model !== "user") { + return row; + } + + const { id: _singletonId, authUserId, ...rest } = row; + return { + id: authUserId, + ...rest, + }; +} + +export function persistInput(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, ...rest } = data; + return { + id: 1, + authUserId: id, + ...rest, + }; +} + +export function persistPatch(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, ...rest } = data; + return { + ...(id !== undefined ? { authUserId: id } : {}), + ...rest, + }; +} + +export function columnFor(model: string, table: any, field: string) { + const column = table[dbFieldFor(model, field)]; + if (!column) { + throw new Error(`Unsupported user field: ${model}.${field}`); + } + return column; +} + +export function normalizeValue(value: unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeValue(entry)); + } + return value; +} + +export function clauseToExpr(table: any, clause: any) { + const model = table === authUsers ? "user" : table === authSessions ? "session" : table === authAccounts ? "account" : ""; + const column = columnFor(model, table, clause.field); + const value = normalizeValue(clause.value); + + switch (clause.operator) { + case "ne": + return value === null ? 
isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? isNull(column) : eq(column, value as any); + } +} + +export function buildWhere(table: any, where: any[] | undefined) { + if (!where || where.length === 0) { + return undefined; + } + + let expr = clauseToExpr(table, where[0]); + for (const clause of where.slice(1)) { + const next = clauseToExpr(table, clause); + expr = clause.connector === "OR" ? or(expr, next) : and(expr, next); + } + return expr; +} + +export function applyJoinToRow(c: any, model: string, row: any, join: any) { + const materialized = materializeRow(model, row); + if (!materialized || !join) { + return materialized; + } + + if (model === "session" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null })); + } + + if (model === "account" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? 
null })); + } + + if (model === "user" && join.account) { + return c.db + .select() + .from(authAccounts) + .where(eq(authAccounts.userId, materialized.id)) + .all() + .then((accounts: any[]) => ({ ...materialized, account: accounts })); + } + + return Promise.resolve(materialized); +} + +export async function applyJoinToRows(c: any, model: string, rows: any[], join: any) { + if (!join || rows.length === 0) { + return rows.map((row) => materializeRow(model, row)); + } + + if (model === "session" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "account" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "user" && join.account) { + const materializedRows = rows.map((row) => materializeRow("user", row)); + const userIds = materializedRows.map((row) => row.id); + const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : []; + const accountsByUserId = new Map(); + for (const account of accounts) { + const entries = accountsByUserId.get(account.userId) ?? []; + entries.push(account); + accountsByUserId.set(account.userId, entries); + } + return materializedRows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? 
[] })); + } + + return rows.map((row) => materializeRow(model, row)); +} diff --git a/foundry/packages/backend/src/config/backend.ts b/foundry/packages/backend/src/config/backend.ts new file mode 100644 index 0000000..82d9bbe --- /dev/null +++ b/foundry/packages/backend/src/config/backend.ts @@ -0,0 +1,22 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { dirname } from "node:path"; +import { homedir } from "node:os"; +import * as toml from "@iarna/toml"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; + +export const CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`; + +export function loadConfig(path = CONFIG_PATH): AppConfig { + if (!existsSync(path)) { + return ConfigSchema.parse({}); + } + + const raw = readFileSync(path, "utf8"); + const parsed = toml.parse(raw) as unknown; + return ConfigSchema.parse(parsed); +} + +export function saveConfig(config: AppConfig, path = CONFIG_PATH): void { + mkdirSync(dirname(path), { recursive: true }); + writeFileSync(path, toml.stringify(config), "utf8"); +} diff --git a/foundry/packages/backend/src/config/organization.ts b/foundry/packages/backend/src/config/organization.ts new file mode 100644 index 0000000..8b5c766 --- /dev/null +++ b/foundry/packages/backend/src/config/organization.ts @@ -0,0 +1,13 @@ +import type { AppConfig } from "@sandbox-agent/foundry-shared"; + +export function defaultOrganization(config: AppConfig): string { + const organizationId = config.organization.default.trim(); + return organizationId.length > 0 ? 
organizationId : "default"; +} + +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + if (flagOrganization && flagOrganization.trim().length > 0) { + return flagOrganization.trim(); + } + return defaultOrganization(config); +} diff --git a/foundry/packages/backend/src/config/runner-version.ts b/foundry/packages/backend/src/config/runner-version.ts new file mode 100644 index 0000000..5c33672 --- /dev/null +++ b/foundry/packages/backend/src/config/runner-version.ts @@ -0,0 +1,33 @@ +import { readFileSync } from "node:fs"; + +function parseRunnerVersion(rawValue: string | undefined): number | undefined { + const value = rawValue?.trim(); + if (!value) { + return undefined; + } + + const parsed = Number.parseInt(value, 10); + if (Number.isNaN(parsed)) { + return undefined; + } + + return parsed; +} + +export function resolveRunnerVersion(): number | undefined { + const envVersion = parseRunnerVersion(process.env.RIVET_RUNNER_VERSION); + if (envVersion !== undefined) { + return envVersion; + } + + const versionFilePath = process.env.RIVET_RUNNER_VERSION_FILE; + if (!versionFilePath) { + return undefined; + } + + try { + return parseRunnerVersion(readFileSync(versionFilePath, "utf8")); + } catch { + return undefined; + } +} diff --git a/foundry/packages/backend/src/driver.ts b/foundry/packages/backend/src/driver.ts new file mode 100644 index 0000000..5c01035 --- /dev/null +++ b/foundry/packages/backend/src/driver.ts @@ -0,0 +1,33 @@ +import { createPr, starRepository } from "./integrations/github/index.js"; + +export interface GithubDriver { + createPr( + repoFullName: string, + headBranch: string, + title: string, + body?: string, + options?: { githubToken?: string | null; baseBranch?: string | null }, + ): Promise<{ number: number; url: string }>; + starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise; +} + +export interface TmuxDriver { + setWindowStatus(branchName: string, 
status: string): number; +} + +export interface BackendDriver { + github: GithubDriver; + tmux: TmuxDriver; +} + +export function createDefaultDriver(): BackendDriver { + return { + github: { + createPr, + starRepository, + }, + tmux: { + setWindowStatus: () => 0, + }, + }; +} diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts new file mode 100644 index 0000000..617bacc --- /dev/null +++ b/foundry/packages/backend/src/index.ts @@ -0,0 +1,517 @@ +import { Hono } from "hono"; +import { cors } from "hono/cors"; +import { randomUUID } from "node:crypto"; +import { initActorRuntimeContext } from "./actors/context.js"; +import { registry } from "./actors/index.js"; +import { organizationKey } from "./actors/keys.js"; +import { loadConfig } from "./config/backend.js"; +import { createBackends, createNotificationService } from "./notifications/index.js"; +import { createDefaultDriver } from "./driver.js"; +import { createClient } from "rivetkit/client"; +import { initBetterAuthService } from "./services/better-auth.js"; +import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/constants.js"; +import { logger } from "./logging.js"; + +export interface BackendStartOptions { + host?: string; + port?: number; +} + +interface AppOrganizationLogContext { + action?: string; + cfConnectingIp?: string; + cfRay?: string; + forwardedFor?: string; + forwardedHost?: string; + forwardedProto?: string; + method?: string; + path?: string; + requestId?: string; + referer?: string; + secFetchDest?: string; + secFetchMode?: string; + secFetchSite?: string; + secFetchUser?: string; + sessionId?: string; + userAgent?: string; + xRealIp?: string; +} + +function stripTrailingSlash(value: string): string { + return value.replace(/\/$/, ""); +} + +function isRivetRequest(request: Request): boolean { + const { pathname } = new URL(request.url); + return pathname === 
"/v1/rivet" || pathname.startsWith("/v1/rivet/"); +} + +export async function startBackend(options: BackendStartOptions = {}): Promise { + // Prevent the sandbox-agent SDK's unhandled SQLite constraint errors from + // crashing the entire process. The SDK has a bug where duplicate event + // inserts (sandbox_agent_events UNIQUE constraint) throw from an internal + // async path with no catch. Log and continue. + process.on("uncaughtException", (error) => { + logger.error({ error: error?.message ?? String(error), stack: error?.stack }, "uncaughtException (kept alive)"); + }); + process.on("unhandledRejection", (reason) => { + const msg = reason instanceof Error ? reason.message : String(reason); + const stack = reason instanceof Error ? reason.stack : undefined; + logger.error({ error: msg, stack }, "unhandledRejection (kept alive)"); + }); + + // sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth. + // Normalize to keep local dev + docker-compose simple. + if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) { + process.env.CODEX_API_KEY = process.env.OPENAI_API_KEY; + } + + const config = loadConfig(); + config.backend.host = options.host ?? config.backend.host; + config.backend.port = options.port ?? config.backend.port; + + // Allow docker-compose/dev environments to supply provider config via env vars + // instead of writing into the container's config.toml. + const envFirst = (...keys: string[]): string | undefined => { + for (const key of keys) { + const raw = process.env[key]; + if (raw && raw.trim().length > 0) return raw.trim(); + } + return undefined; + }; + + config.sandboxProviders.e2b.apiKey = envFirst("E2B_API_KEY") ?? config.sandboxProviders.e2b.apiKey; + config.sandboxProviders.e2b.template = envFirst("HF_E2B_TEMPLATE", "E2B_TEMPLATE") ?? 
config.sandboxProviders.e2b.template; + + const driver = createDefaultDriver(); + const backends = await createBackends(config.notify); + const notifications = createNotificationService(backends); + const appShellServices = createDefaultAppShellServices(); + initActorRuntimeContext(config, notifications, driver, appShellServices); + + const actorClient = createClient({ + endpoint: `http://127.0.0.1:${config.backend.port}/v1/rivet`, + }) as any; + const betterAuth = initBetterAuthService(actorClient, { + apiUrl: appShellServices.apiUrl, + appUrl: appShellServices.appUrl, + }); + + const requestHeaderContext = (c: any): AppOrganizationLogContext => ({ + cfConnectingIp: c.req.header("cf-connecting-ip") ?? undefined, + cfRay: c.req.header("cf-ray") ?? undefined, + forwardedFor: c.req.header("x-forwarded-for") ?? undefined, + forwardedHost: c.req.header("x-forwarded-host") ?? undefined, + forwardedProto: c.req.header("x-forwarded-proto") ?? undefined, + referer: c.req.header("referer") ?? undefined, + secFetchDest: c.req.header("sec-fetch-dest") ?? undefined, + secFetchMode: c.req.header("sec-fetch-mode") ?? undefined, + secFetchSite: c.req.header("sec-fetch-site") ?? undefined, + secFetchUser: c.req.header("sec-fetch-user") ?? undefined, + userAgent: c.req.header("user-agent") ?? undefined, + xRealIp: c.req.header("x-real-ip") ?? undefined, + }); + + // Serve custom Foundry HTTP APIs alongside the RivetKit registry. 
+ const app = new Hono<{ Variables: { requestId: string } }>(); + const allowHeaders = [ + "Content-Type", + "Authorization", + "x-rivet-token", + "x-rivet-encoding", + "x-rivet-query", + "x-rivet-conn-params", + "x-rivet-actor", + "x-rivet-target", + "x-rivet-namespace", + "x-rivet-endpoint", + "x-rivet-total-slots", + "x-rivet-runner-name", + "x-rivet-namespace-name", + ]; + const exposeHeaders = ["Content-Type", "x-rivet-ray-id"]; + const allowedOrigins = new Set([stripTrailingSlash(appShellServices.appUrl), stripTrailingSlash(appShellServices.apiUrl)]); + const corsConfig = { + origin: (origin: string) => (allowedOrigins.has(origin) ? origin : null) as string | undefined | null, + credentials: true, + allowHeaders, + allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"], + exposeHeaders, + }; + app.use("/v1/*", cors(corsConfig)); + app.use("/v1", cors(corsConfig)); + + // On-demand memory snapshot endpoint for diagnosing spikes (dev only). + // Usage: curl http://127.0.0.1:7741/debug/memory + // Trigger GC first: curl http://127.0.0.1:7741/debug/memory?gc=1 + // Write JSC heap snapshot: curl http://127.0.0.1:7741/debug/memory?heap=1 + // (writes /tmp/foundry-heap-.json, inspect with chrome://tracing) + app.get("/debug/memory", async (c) => { + if (process.env.NODE_ENV !== "development") { + return c.json({ error: "debug endpoints disabled in production" }, 403); + } + const wantGc = c.req.query("gc") === "1"; + if (wantGc && typeof Bun !== "undefined") { + // Bun.gc(true) triggers a synchronous full GC sweep in JavaScriptCore. 
+ Bun.gc(true); + } + const mem = process.memoryUsage(); + const rssMb = Math.round(mem.rss / 1024 / 1024); + const heapUsedMb = Math.round(mem.heapUsed / 1024 / 1024); + const heapTotalMb = Math.round(mem.heapTotal / 1024 / 1024); + const externalMb = Math.round(mem.external / 1024 / 1024); + const nonHeapMb = rssMb - heapUsedMb - externalMb; + // Bun.heapStats() gives JSC-specific breakdown: object counts, typed array + // bytes, extra memory (native allocations tracked by JSC). Useful for + // distinguishing JS object bloat from native/WASM memory. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const BunAny = Bun as any; + const heapStats = typeof BunAny.heapStats === "function" ? BunAny.heapStats() : null; + const snapshot = { + rssMb, + heapUsedMb, + heapTotalMb, + externalMb, + nonHeapMb, + gcTriggered: wantGc, + rssBytes: mem.rss, + heapUsedBytes: mem.heapUsed, + heapTotalBytes: mem.heapTotal, + externalBytes: mem.external, + ...(heapStats ? { bunHeapStats: heapStats } : {}), + }; + // Optionally write a full JSC heap snapshot for offline analysis. + let heapSnapshotPath: string | null = null; + const wantHeap = c.req.query("heap") === "1"; + if (wantHeap && typeof Bun !== "undefined") { + heapSnapshotPath = `/tmp/foundry-heap-${Date.now()}.json`; + // Bun.generateHeapSnapshot("v8") returns a V8-compatible JSON string. + const heapJson = Bun.generateHeapSnapshot("v8"); + await Bun.write(heapSnapshotPath, heapJson); + } + logger.info(snapshot, "memory_usage_debug"); + return c.json({ ...snapshot, ...(heapSnapshotPath ? 
{ heapSnapshotPath } : {}) }); + }); + + app.use("*", async (c, next) => { + const requestId = c.req.header("x-request-id")?.trim() || randomUUID(); + const start = performance.now(); + c.set("requestId", requestId); + c.header("x-request-id", requestId); + + try { + await next(); + } catch (error) { + logger.error( + { + ...requestHeaderContext(c), + requestId, + method: c.req.method, + path: c.req.path, + errorMessage: error instanceof Error ? error.message : String(error), + errorStack: error instanceof Error ? error.stack : undefined, + }, + "http_request_failed", + ); + throw error; + } + + logger.info( + { + ...requestHeaderContext(c), + requestId, + method: c.req.method, + path: c.req.path, + status: c.res.status, + durationMs: Math.round((performance.now() - start) * 100) / 100, + }, + "http_request", + ); + }); + + // Cache the app organization actor handle for the lifetime of this backend process. + // The "app" organization is a singleton coordinator for auth indexes, org state, and + // billing. Caching avoids repeated getOrCreate round-trips on every HTTP request. + let cachedAppOrganization: any | null = null; + + const appOrganization = async (context: AppOrganizationLogContext = {}) => { + if (cachedAppOrganization) return cachedAppOrganization; + + const start = performance.now(); + try { + const handle = await actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, + }); + cachedAppOrganization = handle; + logger.info( + { + ...context, + cache: "miss", + durationMs: Math.round((performance.now() - start) * 100) / 100, + }, + "app_organization_resolve", + ); + return handle; + } catch (error) { + logger.error( + { + ...context, + cache: "miss", + durationMs: Math.round((performance.now() - start) * 100) / 100, + errorMessage: error instanceof Error ? error.message : String(error), + errorStack: error instanceof Error ? 
error.stack : undefined, + }, + "app_organization_resolve_failed", + ); + throw error; + } + }; + + const requestLogContext = (c: any, sessionId?: string): AppOrganizationLogContext => ({ + ...requestHeaderContext(c), + method: c.req.method, + path: c.req.path, + requestId: c.get("requestId"), + sessionId, + }); + + const resolveSessionId = async (c: any): Promise => { + const session = await betterAuth.resolveSession(c.req.raw.headers); + return session?.session?.id ?? null; + }; + + // Deduplicate OAuth callback requests. The production proxy chain + // (Cloudflare -> Fastly -> Railway) retries callback requests when they take + // >10s. The first request deletes the verification record on success, so the + // retry fails with "verification not found" -> ?error=please_restart_the_process. + // This map tracks in-flight callbacks by state param so retries wait for and + // reuse the first request's response. + const inflightCallbacks = new Map>(); + + app.all("/v1/auth/*", async (c) => { + const authPath = c.req.path; + const authMethod = c.req.method; + const isCallback = authPath.includes("/callback/"); + + // Deduplicate callback requests by OAuth state parameter + if (isCallback) { + const url = new URL(c.req.url); + const state = url.searchParams.get("state"); + if (state) { + const existing = inflightCallbacks.get(state); + if (existing) { + logger.info({ path: authPath, state: state.slice(0, 8) + "..." }, "auth_callback_dedup"); + const original = await existing; + return original.clone(); + } + + const promise = (async () => { + logger.info({ path: authPath, method: authMethod, state: state.slice(0, 8) + "..." }, "auth_callback_start"); + const start = performance.now(); + const response = await betterAuth.auth.handler(c.req.raw); + const durationMs = Math.round((performance.now() - start) * 100) / 100; + const location = response.headers.get("location"); + logger.info({ path: authPath, status: response.status, durationMs, location: location ?? 
undefined }, "auth_callback_complete"); + if (location && location.includes("error=")) { + logger.error({ path: authPath, status: response.status, durationMs, location }, "auth_callback_error_redirect"); + } + return response; + })(); + + inflightCallbacks.set(state, promise); + try { + const response = await promise; + return response.clone(); + } finally { + // Keep entry briefly so late retries still hit the cache + setTimeout(() => inflightCallbacks.delete(state), 30_000); + } + } + } + + return await betterAuth.auth.handler(c.req.raw); + }); + + app.post("/v1/app/sign-out", async (c) => { + const sessionId = await resolveSessionId(c); + if (sessionId) { + const signOutResponse = await betterAuth.signOut(c.req.raw.headers); + const setCookie = signOutResponse.headers.get("set-cookie"); + if (setCookie) { + c.header("set-cookie", setCookie); + } + } + return c.json({ + auth: { status: "signed_out", currentUserId: null }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + users: [], + organizations: [], + }); + }); + + app.get("/v1/billing/checkout/complete", async (c) => { + const organizationId = c.req.query("organizationId"); + const checkoutSessionId = c.req.query("session_id"); + if (!organizationId || !checkoutSessionId) { + return c.text("Missing Stripe checkout completion parameters", 400); + } + const sessionId = await resolveSessionId(c); + if (!sessionId) { + return c.text("Unauthorized", 401); + } + const result = await (await appOrganization(requestLogContext(c, sessionId))).finalizeAppCheckoutSession({ + organizationId, + sessionId, + checkoutSessionId, + }); + return Response.redirect(result.redirectTo, 302); + }); + + const handleStripeWebhook = async (c: any) => { + const payload = await c.req.text(); + await (await 
appOrganization(requestLogContext(c))).handleAppStripeWebhook({ + payload, + signatureHeader: c.req.header("stripe-signature") ?? null, + }); + return c.json({ ok: true }); + }; + + app.post("/v1/webhooks/stripe", handleStripeWebhook); + + app.post("/v1/webhooks/github", async (c) => { + const payload = await c.req.text(); + await (await appOrganization(requestLogContext(c))).handleAppGithubWebhook({ + payload, + signatureHeader: c.req.header("x-hub-signature-256") ?? null, + eventHeader: c.req.header("x-github-event") ?? null, + }); + return c.json({ ok: true }); + }); + + const server = Bun.serve({ + fetch: (request) => { + if (isRivetRequest(request)) { + return registry.handler(request); + } + return app.fetch(request); + }, + hostname: config.backend.host, + port: config.backend.port, + // Bun defaults to 10s idle timeout. Actor RPCs go through the gateway + // tunnel (not direct HTTP), and the SSE stream has a 1s ping interval + // (RUNNER_SSE_PING_INTERVAL in rivetkit), so the idle timeout likely + // never fires in practice. Set high as a safety net regardless. + idleTimeout: 255, + }); + + logger.info( + { + host: config.backend.host, + port: config.backend.port, + }, + "backend_started", + ); + + // Periodic memory usage reporting for diagnosing memory spikes (dev only). + // Logs JS heap, RSS, and external (native/WASM) separately so we can tell + // whether spikes come from JS objects, Bun/JSC internals, or native addons + // like SQLite/WASM. + if (process.env.NODE_ENV === "development") { + let prevRss = 0; + setInterval(() => { + const mem = process.memoryUsage(); + const rssMb = Math.round(mem.rss / 1024 / 1024); + const heapUsedMb = Math.round(mem.heapUsed / 1024 / 1024); + const heapTotalMb = Math.round(mem.heapTotal / 1024 / 1024); + const externalMb = Math.round(mem.external / 1024 / 1024); + // Non-heap RSS: memory not accounted for by JS heap or external buffers. 
+ // Large values here point to native allocations (WASM, mmap, child process + // bookkeeping, Bun's internal arena, etc.). + const nonHeapMb = rssMb - heapUsedMb - externalMb; + const deltaRss = rssMb - prevRss; + prevRss = rssMb; + logger.info( + { + rssMb, + heapUsedMb, + heapTotalMb, + externalMb, + nonHeapMb, + deltaRssMb: deltaRss, + rssBytes: mem.rss, + heapUsedBytes: mem.heapUsed, + heapTotalBytes: mem.heapTotal, + externalBytes: mem.external, + }, + "memory_usage", + ); + }, 60_000); + } + + process.on("SIGINT", async () => { + server.stop(); + process.exit(0); + }); + + process.on("SIGTERM", async () => { + server.stop(); + process.exit(0); + }); + + // Keep process alive. + await new Promise(() => undefined); +} + +function parseArg(flag: string): string | undefined { + const idx = process.argv.indexOf(flag); + if (idx < 0) return undefined; + return process.argv[idx + 1]; +} + +function parseEnvPort(value: string | undefined): number | undefined { + if (!value) { + return undefined; + } + const port = Number(value); + if (!Number.isInteger(port) || port <= 0 || port > 65535) { + return undefined; + } + return port; +} + +async function main(): Promise { + const cmd = process.argv[2] ?? "start"; + if (cmd !== "start") { + throw new Error(`Unsupported backend command: ${cmd}`); + } + + const host = parseArg("--host") ?? process.env.HOST ?? process.env.HF_BACKEND_HOST; + const port = parseArg("--port") ?? process.env.PORT ?? process.env.HF_BACKEND_PORT; + await startBackend({ + host, + port: parseEnvPort(port), + }); +} + +if (import.meta.url === `file://${process.argv[1]}`) { + main().catch((err: unknown) => { + logger.fatal( + { + errorMessage: err instanceof Error ? err.message : String(err), + errorStack: err instanceof Error ? 
err.stack : undefined, + }, + "backend_start_failed", + ); + process.exit(1); + }); +} diff --git a/foundry/packages/backend/src/integrations/github/index.ts b/foundry/packages/backend/src/integrations/github/index.ts new file mode 100644 index 0000000..87fc996 --- /dev/null +++ b/foundry/packages/backend/src/integrations/github/index.ts @@ -0,0 +1,80 @@ +interface GithubAuthOptions { + githubToken?: string | null; + baseBranch?: string | null; +} + +function authHeaders(options?: GithubAuthOptions): HeadersInit { + const token = options?.githubToken?.trim(); + if (!token) { + throw new Error("GitHub token is required for this operation"); + } + return { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${token}`, + "X-GitHub-Api-Version": "2022-11-28", + }; +} + +async function githubRequest(path: string, init: RequestInit, options?: GithubAuthOptions): Promise { + return await fetch(`https://api.github.com${path}`, { + ...init, + headers: { + ...authHeaders(options), + ...(init.headers ?? {}), + }, + }); +} + +export async function createPr( + repoFullName: string, + headBranch: string, + title: string, + body?: string, + options?: GithubAuthOptions, +): Promise<{ number: number; url: string }> { + const baseBranch = options?.baseBranch?.trim() || "main"; + const response = await githubRequest( + `/repos/${repoFullName}/pulls`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + title, + head: headBranch, + base: baseBranch, + body: body ?? "", + }), + }, + options, + ); + + const payload = (await response.json()) as { number?: number; html_url?: string; message?: string }; + if (!response.ok || !payload.number || !payload.html_url) { + throw new Error(payload.message ?? 
`Failed to create pull request for ${repoFullName}`); + } + + return { + number: payload.number, + url: payload.html_url, + }; +} + +export async function starRepository(repoFullName: string, options?: GithubAuthOptions): Promise { + const response = await githubRequest( + `/user/starred/${repoFullName}`, + { + method: "PUT", + headers: { + "Content-Length": "0", + }, + }, + options, + ); + + if (!response.ok) { + const payload = (await response.json().catch(() => null)) as { message?: string } | null; + throw new Error(payload?.message ?? `Failed to star GitHub repository ${repoFullName}`); + } +} diff --git a/foundry/packages/backend/src/integrations/sandbox-agent/client.ts b/foundry/packages/backend/src/integrations/sandbox-agent/client.ts new file mode 100644 index 0000000..f936db3 --- /dev/null +++ b/foundry/packages/backend/src/integrations/sandbox-agent/client.ts @@ -0,0 +1,418 @@ +import type { AgentType } from "@sandbox-agent/foundry-shared"; +import type { + ListEventsRequest, + ListPage, + ListPageRequest, + ProcessCreateRequest, + ProcessInfo, + ProcessLogFollowQuery, + ProcessLogsResponse, + ProcessSignalQuery, + SessionEvent, + SessionPersistDriver, + SessionRecord, +} from "sandbox-agent"; +import { SandboxAgent } from "sandbox-agent"; + +export type AgentId = AgentType | "opencode"; + +export interface SandboxSession { + id: string; + status: "running" | "idle" | "error"; +} + +export interface SandboxSessionCreateRequest { + prompt?: string; + cwd?: string; + agent?: AgentId; +} + +export interface SandboxSessionPromptRequest { + sessionId: string; + prompt: string; + notification?: boolean; +} + +export interface SandboxAgentClientOptions { + endpoint: string; + token?: string; + agent?: AgentId; + persist?: SessionPersistDriver; +} + +const DEFAULT_AGENT: AgentId = "codex"; + +function modeIdForAgent(agent: AgentId): string | null { + switch (agent) { + case "codex": + return "full-access"; + case "claude": + return "acceptEdits"; + default: + 
return null; + } +} + +function normalizeStatusFromMessage(payload: unknown): SandboxSession["status"] | null { + if (payload && typeof payload === "object") { + const envelope = payload as { + error?: unknown; + method?: unknown; + result?: unknown; + }; + + const maybeError = envelope.error; + if (maybeError) { + return "error"; + } + + if (envelope.result && typeof envelope.result === "object") { + const stopReason = (envelope.result as { stopReason?: unknown }).stopReason; + if (typeof stopReason === "string" && stopReason.length > 0) { + return "idle"; + } + } + + const method = envelope.method; + if (typeof method === "string") { + const lowered = method.toLowerCase(); + if (lowered.includes("error") || lowered.includes("failed")) { + return "error"; + } + if (lowered.includes("ended") || lowered.includes("complete") || lowered.includes("stopped")) { + return "idle"; + } + } + } + + return null; +} + +export class SandboxAgentClient { + readonly endpoint: string; + readonly token?: string; + readonly agent: AgentId; + readonly persist?: SessionPersistDriver; + private sdkPromise?: Promise; + private readonly statusBySessionId = new Map(); + + constructor(options: SandboxAgentClientOptions) { + this.endpoint = options.endpoint.replace(/\/$/, ""); + this.token = options.token; + this.agent = options.agent ?? DEFAULT_AGENT; + this.persist = options.persist; + } + + private async sdk(): Promise { + if (!this.sdkPromise) { + this.sdkPromise = SandboxAgent.connect({ + baseUrl: this.endpoint, + token: this.token, + persist: this.persist, + }); + } + + return this.sdkPromise; + } + + private setStatus(sessionId: string, status: SandboxSession["status"]): void { + this.statusBySessionId.set(sessionId, status); + } + + private isLikelyPromptTimeout(err: unknown): boolean { + const message = err instanceof Error ? err.message : String(err); + const lowered = message.toLowerCase(); + // sandbox-agent server times out long-running ACP prompts and returns a 504-like error. 
+ return lowered.includes("timeout waiting for agent response") || lowered.includes("timed out waiting for agent response") || lowered.includes("504"); + } + + async createSession(request: string | SandboxSessionCreateRequest): Promise { + const normalized: SandboxSessionCreateRequest = typeof request === "string" ? { prompt: request } : request; + const sdk = await this.sdk(); + // Do not wrap createSession in a local Promise.race timeout. The underlying SDK + // call is not abortable, so local timeout races create overlapping ACP requests and + // can produce duplicate/orphaned sessions while the original request is still running. + const session = await sdk.createSession({ + agent: normalized.agent ?? this.agent, + sessionInit: { + cwd: normalized.cwd ?? "/", + mcpServers: [], + }, + }); + const modeId = modeIdForAgent(normalized.agent ?? this.agent); + + // Codex defaults to a restrictive "read-only" preset in some environments. + // Foundry automation needs edits, command execution, and network access. + // access (git push / PR creation). Use full-access where supported. + // + // If the agent doesn't support session modes, ignore. + // + // Do this in the background: ACP mode updates can occasionally time out (504), + // and waiting here can stall session creation long enough to trip task init + // step timeouts even though the session itself was created. + if (modeId) { + void session.rawSend("session/set_mode", { modeId }).catch(() => { + // ignore + }); + } + + const prompt = normalized.prompt?.trim(); + if (!prompt) { + this.setStatus(session.id, "idle"); + return { + id: session.id, + status: "idle", + }; + } + + // Fire the first turn in the background. We intentionally do not await this: + // session creation must remain fast, and we observe completion via events/stopReason. + // + // Note: sandbox-agent's ACP adapter for Codex may take >2 minutes to respond. 
+ // sandbox-agent can return a timeout error (504) even though the agent continues + // running. Treat that timeout as non-fatal and keep polling events. + void session + .prompt([{ type: "text", text: prompt }]) + .then(() => { + this.setStatus(session.id, "idle"); + }) + .catch((err) => { + if (this.isLikelyPromptTimeout(err)) { + this.setStatus(session.id, "running"); + return; + } + this.setStatus(session.id, "error"); + }); + + this.setStatus(session.id, "running"); + return { + id: session.id, + status: "running", + }; + } + + async createSessionNoTask(dir: string): Promise { + return this.createSession({ + cwd: dir, + }); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + const sdk = await this.sdk(); + const page = await sdk.listSessions(request); + return { + items: page.items.map((session) => session.toRecord()), + nextCursor: page.nextCursor, + }; + } + + async listEvents(request: ListEventsRequest): Promise> { + const sdk = await this.sdk(); + return sdk.getEvents(request); + } + + async createProcess(request: ProcessCreateRequest): Promise { + const sdk = await this.sdk(); + return await sdk.createProcess(request); + } + + async listProcesses(): Promise<{ processes: ProcessInfo[] }> { + const sdk = await this.sdk(); + return await sdk.listProcesses(); + } + + async getProcessLogs(processId: string, query: ProcessLogFollowQuery = {}): Promise { + const sdk = await this.sdk(); + return await sdk.getProcessLogs(processId, query); + } + + async stopProcess(processId: string, query?: ProcessSignalQuery): Promise { + const sdk = await this.sdk(); + return await sdk.stopProcess(processId, query); + } + + async killProcess(processId: string, query?: ProcessSignalQuery): Promise { + const sdk = await this.sdk(); + return await sdk.killProcess(processId, query); + } + + async deleteProcess(processId: string): Promise { + const sdk = await this.sdk(); + await sdk.deleteProcess(processId); + } + + async sendPrompt(request: 
SandboxSessionPromptRequest): Promise { + const sdk = await this.sdk(); + const existing = await sdk.getSession(request.sessionId); + if (!existing) { + throw new Error(`session '${request.sessionId}' not found`); + } + + const session = await sdk.resumeSession(request.sessionId); + const modeId = modeIdForAgent(this.agent); + // Keep mode update best-effort and non-blocking for the same reason as createSession. + if (modeId) { + void session.rawSend("session/set_mode", { modeId }).catch(() => { + // ignore + }); + } + const text = request.prompt.trim(); + if (!text) return; + + // sandbox-agent's Session.send(notification=true) forwards an extNotification with + // method "session/prompt", which some agents (e.g. codex-acp) do not implement. + // Use Session.prompt and treat notification=true as "fire-and-forget". + const fireAndForget = request.notification ?? true; + if (fireAndForget) { + void session + .prompt([{ type: "text", text }]) + .then(() => { + this.setStatus(request.sessionId, "idle"); + }) + .catch((err) => { + if (this.isLikelyPromptTimeout(err)) { + this.setStatus(request.sessionId, "running"); + return; + } + this.setStatus(request.sessionId, "error"); + }); + } else { + try { + await session.prompt([{ type: "text", text }]); + this.setStatus(request.sessionId, "idle"); + } catch (err) { + if (this.isLikelyPromptTimeout(err)) { + this.setStatus(request.sessionId, "running"); + return; + } + throw err; + } + } + this.setStatus(request.sessionId, "running"); + } + + async cancelSession(sessionId: string): Promise { + const sdk = await this.sdk(); + const existing = await sdk.getSession(sessionId); + if (!existing) { + throw new Error(`session '${sessionId}' not found`); + } + + const session = await sdk.resumeSession(sessionId); + await session.rawSend("session/cancel", {}); + this.setStatus(sessionId, "idle"); + } + + async destroySession(sessionId: string): Promise { + const sdk = await this.sdk(); + await sdk.destroySession(sessionId); + 
this.setStatus(sessionId, "idle"); + } + + async sessionStatus(sessionId: string): Promise { + const cached = this.statusBySessionId.get(sessionId); + if (cached && cached !== "running") { + return { id: sessionId, status: cached }; + } + + const sdk = await this.sdk(); + const session = await sdk.getSession(sessionId); + + if (!session) { + this.setStatus(sessionId, "error"); + return { id: sessionId, status: "error" }; + } + + const record = session.toRecord(); + if (record.destroyedAt) { + this.setStatus(sessionId, "idle"); + return { id: sessionId, status: "idle" }; + } + + const events = await sdk.getEvents({ + sessionId, + limit: 25, + }); + + for (let i = events.items.length - 1; i >= 0; i--) { + const item = events.items[i]; + if (!item) continue; + const status = normalizeStatusFromMessage(item.payload); + if (status) { + this.setStatus(sessionId, status); + return { id: sessionId, status }; + } + } + + this.setStatus(sessionId, "running"); + return { id: sessionId, status: "running" }; + } + + async killSessionsInDirectory(dir: string): Promise { + const sdk = await this.sdk(); + let cursor: string | undefined; + + do { + const page = await sdk.listSessions({ + cursor, + limit: 100, + }); + + for (const session of page.items) { + const initCwd = session.toRecord().sessionInit?.cwd; + if (initCwd !== dir) { + continue; + } + await sdk.destroySession(session.id); + this.statusBySessionId.delete(session.id); + } + + cursor = page.nextCursor; + } while (cursor); + } + + async generateCommitMessage(dir: string, spec: string, task: string): Promise { + const prompt = [ + "Generate a conventional commit message for the following changes.", + "Return ONLY the commit message, no explanation or markdown formatting.", + "", + `Task: ${task}`, + "", + `Spec/diff:\n${spec}`, + ].join("\n"); + + const sdk = await this.sdk(); + const session = await sdk.createSession({ + agent: this.agent, + sessionInit: { + cwd: dir, + mcpServers: [], + }, + }); + + await 
session.prompt([{ type: "text", text: prompt }]); + this.setStatus(session.id, "idle"); + + const events = await sdk.getEvents({ + sessionId: session.id, + limit: 100, + }); + + for (let i = events.items.length - 1; i >= 0; i--) { + const event = events.items[i]; + if (!event) continue; + if (event.sender !== "agent") continue; + + const payload = event.payload as Record; + const params = payload.params; + if (!params || typeof params !== "object") continue; + + const text = (params as { text?: unknown }).text; + if (typeof text === "string" && text.trim().length > 0) { + return text.trim(); + } + } + + throw new Error("sandbox-agent commit message response was empty"); + } +} diff --git a/foundry/packages/backend/src/logging.ts b/foundry/packages/backend/src/logging.ts new file mode 100644 index 0000000..5b96d92 --- /dev/null +++ b/foundry/packages/backend/src/logging.ts @@ -0,0 +1,6 @@ +import { createFoundryLogger } from "@sandbox-agent/foundry-shared"; + +export const logger = createFoundryLogger({ + service: "foundry-backend", + format: "logfmt", +}); diff --git a/foundry/packages/backend/src/notifications/backends.ts b/foundry/packages/backend/src/notifications/backends.ts new file mode 100644 index 0000000..9429298 --- /dev/null +++ b/foundry/packages/backend/src/notifications/backends.ts @@ -0,0 +1,124 @@ +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; + +const execFileAsync = promisify(execFile); + +export type NotifyUrgency = "low" | "normal" | "high"; + +export interface NotifyBackend { + name: string; + available(): Promise; + send(title: string, body: string, urgency: NotifyUrgency): Promise; +} + +async function isOnPath(binary: string): Promise { + try { + await execFileAsync("which", [binary]); + return true; + } catch { + return false; + } +} + +export class OpenclawBackend implements NotifyBackend { + readonly name = "openclaw"; + + async available(): Promise { + return isOnPath("openclaw"); + } + + async 
send(title: string, body: string, _urgency: NotifyUrgency): Promise { + try { + await execFileAsync("openclaw", ["wake", "--title", title, "--body", body]); + return true; + } catch { + return false; + } + } +} + +export class MacOsNotifyBackend implements NotifyBackend { + readonly name = "macos-osascript"; + + async available(): Promise { + return process.platform === "darwin"; + } + + async send(title: string, body: string, _urgency: NotifyUrgency): Promise { + try { + const escaped_body = body.replace(/\\/g, "\\\\").replace(/"/g, '\\"'); + const escaped_title = title.replace(/\\/g, "\\\\").replace(/"/g, '\\"'); + const script = `display notification "${escaped_body}" with title "${escaped_title}"`; + await execFileAsync("osascript", ["-e", script]); + return true; + } catch { + return false; + } + } +} + +export class LinuxNotifySendBackend implements NotifyBackend { + readonly name = "linux-notify-send"; + + async available(): Promise { + return isOnPath("notify-send"); + } + + async send(title: string, body: string, urgency: NotifyUrgency): Promise { + const urgencyMap: Record = { + low: "low", + normal: "normal", + high: "critical", + }; + + try { + await execFileAsync("notify-send", ["-u", urgencyMap[urgency], title, body]); + return true; + } catch { + return false; + } + } +} + +export class TerminalBellBackend implements NotifyBackend { + readonly name = "terminal"; + + async available(): Promise { + return true; + } + + async send(title: string, body: string, _urgency: NotifyUrgency): Promise { + try { + process.stderr.write("\x07"); + process.stderr.write(`[${title}] ${body}\n`); + return true; + } catch { + return false; + } + } +} + +const backendFactories: Record NotifyBackend> = { + openclaw: () => new OpenclawBackend(), + "macos-osascript": () => new MacOsNotifyBackend(), + "linux-notify-send": () => new LinuxNotifySendBackend(), + terminal: () => new TerminalBellBackend(), +}; + +export async function createBackends(configOrder: string[]): 
Promise { + const backends: NotifyBackend[] = []; + + for (const name of configOrder) { + const backendBuilder = backendFactories[name]; + if (!backendBuilder) { + continue; + } + + const backend = backendBuilder(); + if (await backend.available()) { + backends.push(backend); + } + } + + return backends; +} diff --git a/foundry/packages/backend/src/notifications/index.ts b/foundry/packages/backend/src/notifications/index.ts new file mode 100644 index 0000000..32b89ba --- /dev/null +++ b/foundry/packages/backend/src/notifications/index.ts @@ -0,0 +1,63 @@ +import type { NotifyBackend, NotifyUrgency } from "./backends.js"; + +export type { NotifyUrgency } from "./backends.js"; +export { createBackends } from "./backends.js"; + +export interface NotificationService { + notify(title: string, body: string, urgency: NotifyUrgency): Promise; + agentIdle(branchName: string): Promise; + agentError(branchName: string, error: string): Promise; + ciPassed(branchName: string, prNumber: number): Promise; + ciFailed(branchName: string, prNumber: number): Promise; + prApproved(branchName: string, prNumber: number, reviewer: string): Promise; + changesRequested(branchName: string, prNumber: number, reviewer: string): Promise; + prMerged(branchName: string, prNumber: number): Promise; + taskCreated(branchName: string): Promise; +} + +export function createNotificationService(backends: NotifyBackend[]): NotificationService { + async function notify(title: string, body: string, urgency: NotifyUrgency): Promise { + for (const backend of backends) { + const sent = await backend.send(title, body, urgency); + if (sent) { + return; + } + } + } + + return { + notify, + + async agentIdle(branchName: string): Promise { + await notify("Agent Idle", `Agent finished on ${branchName}`, "normal"); + }, + + async agentError(branchName: string, error: string): Promise { + await notify("Agent Error", `Agent error on ${branchName}: ${error}`, "high"); + }, + + async ciPassed(branchName: string, 
prNumber: number): Promise { + await notify("CI Passed", `CI passed on ${branchName} (PR #${prNumber})`, "low"); + }, + + async ciFailed(branchName: string, prNumber: number): Promise { + await notify("CI Failed", `CI failed on ${branchName} (PR #${prNumber})`, "high"); + }, + + async prApproved(branchName: string, prNumber: number, reviewer: string): Promise { + await notify("PR Approved", `PR #${prNumber} on ${branchName} approved by ${reviewer}`, "normal"); + }, + + async changesRequested(branchName: string, prNumber: number, reviewer: string): Promise { + await notify("Changes Requested", `Changes requested on PR #${prNumber} (${branchName}) by ${reviewer}`, "high"); + }, + + async prMerged(branchName: string, prNumber: number): Promise { + await notify("PR Merged", `PR #${prNumber} on ${branchName} merged`, "normal"); + }, + + async taskCreated(branchName: string): Promise { + await notify("Task Created", `New task on ${branchName}`, "low"); + }, + }; +} diff --git a/foundry/packages/backend/src/notifications/state-tracker.ts b/foundry/packages/backend/src/notifications/state-tracker.ts new file mode 100644 index 0000000..24250cf --- /dev/null +++ b/foundry/packages/backend/src/notifications/state-tracker.ts @@ -0,0 +1,43 @@ +export type CiState = "running" | "pass" | "fail" | "unknown"; +export type ReviewState = "approved" | "changes_requested" | "pending" | "none" | "unknown"; + +export interface PrStateTransition { + type: "ci_passed" | "ci_failed" | "pr_approved" | "changes_requested"; + branchName: string; + prNumber: number; + reviewer?: string; +} + +export class PrStateTracker { + private states: Map; + + constructor() { + this.states = new Map(); + } + + update(repoId: string, branchName: string, prNumber: number, ci: CiState, review: ReviewState, reviewer?: string): PrStateTransition[] { + const key = `${repoId}:${branchName}`; + const prev = this.states.get(key); + const transitions: PrStateTransition[] = []; + + if (prev) { + // CI transitions: 
only fire when moving from "running" to a terminal state + if (prev.ci === "running" && ci === "pass") { + transitions.push({ type: "ci_passed", branchName, prNumber }); + } else if (prev.ci === "running" && ci === "fail") { + transitions.push({ type: "ci_failed", branchName, prNumber }); + } + + // Review transitions: only fire when moving from "pending" to a terminal state + if (prev.review === "pending" && review === "approved") { + transitions.push({ type: "pr_approved", branchName, prNumber, reviewer }); + } else if (prev.review === "pending" && review === "changes_requested") { + transitions.push({ type: "changes_requested", branchName, prNumber, reviewer }); + } + } + + this.states.set(key, { ci, review }); + + return transitions; + } +} diff --git a/foundry/packages/backend/src/sandbox-config.ts b/foundry/packages/backend/src/sandbox-config.ts new file mode 100644 index 0000000..9d85f51 --- /dev/null +++ b/foundry/packages/backend/src/sandbox-config.ts @@ -0,0 +1,39 @@ +import type { AppConfig, SandboxProviderId } from "@sandbox-agent/foundry-shared"; + +function hasE2BApiKey(config: AppConfig): boolean { + return Boolean(config.sandboxProviders.e2b.apiKey?.trim()); +} + +function forcedSandboxProviderId(): SandboxProviderId | null { + const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? null; + if (raw === "local" || raw === "e2b") { + return raw; + } + return null; +} + +export function defaultSandboxProviderId(config: AppConfig): SandboxProviderId { + const forced = forcedSandboxProviderId(); + if (forced === "local") { + return "local"; + } + if (forced === "e2b") { + if (!hasE2BApiKey(config)) { + throw new Error("FOUNDRY_SANDBOX_PROVIDER=e2b requires E2B_API_KEY to be configured."); + } + return "e2b"; + } + return hasE2BApiKey(config) ? "e2b" : "local"; +} + +export function availableSandboxProviderIds(config: AppConfig): SandboxProviderId[] { + return hasE2BApiKey(config) ? 
["e2b", "local"] : ["local"]; +} + +export function resolveSandboxProviderId(config: AppConfig, requested?: SandboxProviderId | null): SandboxProviderId { + if (requested === "e2b" && !hasE2BApiKey(config)) { + throw new Error("E2B provider is not configured. Set E2B_API_KEY before selecting the e2b provider."); + } + + return requested ?? defaultSandboxProviderId(config); +} diff --git a/foundry/packages/backend/src/services/app-github.ts b/foundry/packages/backend/src/services/app-github.ts new file mode 100644 index 0000000..52e5308 --- /dev/null +++ b/foundry/packages/backend/src/services/app-github.ts @@ -0,0 +1,718 @@ +import { createHmac, createPrivateKey, createSign, timingSafeEqual } from "node:crypto"; +import { logger } from "../logging.js"; + +export class GitHubAppError extends Error { + readonly status: number; + + constructor(message: string, status = 500) { + super(message); + this.name = "GitHubAppError"; + this.status = status; + } +} + +export interface GitHubOAuthSession { + accessToken: string; + scopes: string[]; +} + +export interface GitHubViewerIdentity { + id: string; + login: string; + name: string; + email: string | null; +} + +export interface GitHubOrgIdentity { + id: string; + login: string; + name: string | null; +} + +export interface GitHubInstallationRecord { + id: number; + accountLogin: string; +} + +export interface GitHubRepositoryRecord { + fullName: string; + cloneUrl: string; + private: boolean; + defaultBranch: string; +} + +export interface GitHubMemberRecord { + id: string; + login: string; + name: string; + email: string | null; + role: string | null; + state: string; +} + +export interface GitHubPullRequestRecord { + repoFullName: string; + cloneUrl: string; + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + merged: boolean; +} + +interface GitHubTokenResponse { + access_token?: 
string; + scope?: string; + error?: string; + error_description?: string; +} + +interface GitHubPageResponse { + items: T[]; + nextUrl: string | null; +} + +const githubOAuthLogger = logger.child({ + scope: "github-oauth", +}); + +export interface GitHubWebhookEvent { + action?: string; + organization?: { login?: string; id?: number }; + installation?: { id: number; account?: { login?: string; type?: string; id?: number } | null }; + repositories_added?: Array<{ id: number; full_name: string; private: boolean }>; + repositories_removed?: Array<{ id: number; full_name: string }>; + repository?: { id: number; full_name: string; clone_url?: string; private?: boolean; owner?: { login?: string } }; + pull_request?: { + number: number; + title?: string; + body?: string | null; + state?: string; + html_url?: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | null; + head?: { ref?: string }; + base?: { ref?: string }; + }; + sender?: { login?: string; id?: number }; + [key: string]: unknown; +} + +export interface GitHubAppClientOptions { + apiBaseUrl?: string; + authBaseUrl?: string; + clientId?: string; + clientSecret?: string; + redirectUri?: string; + appId?: string; + appPrivateKey?: string; + webhookSecret?: string; +} + +function normalizePem(value: string | undefined): string | undefined { + if (!value) { + return value; + } + + return value.includes("\\n") ? value.replace(/\\n/g, "\n") : value; +} + +export class GitHubAppClient { + private readonly apiBaseUrl: string; + private readonly authBaseUrl: string; + private readonly clientId?: string; + private readonly clientSecret?: string; + private readonly redirectUri?: string; + private readonly appId?: string; + private readonly appPrivateKey?: string; + private readonly webhookSecret?: string; + + constructor(options: GitHubAppClientOptions = {}) { + this.apiBaseUrl = (options.apiBaseUrl ?? "https://api.github.com").replace(/\/$/, ""); + this.authBaseUrl = (options.authBaseUrl ?? 
"https://github.com").replace(/\/$/, ""); + this.clientId = options.clientId ?? process.env.GITHUB_CLIENT_ID; + this.clientSecret = options.clientSecret ?? process.env.GITHUB_CLIENT_SECRET; + this.redirectUri = options.redirectUri ?? process.env.GITHUB_REDIRECT_URI; + this.appId = options.appId ?? process.env.GITHUB_APP_ID; + this.appPrivateKey = normalizePem(options.appPrivateKey ?? process.env.GITHUB_APP_PRIVATE_KEY); + this.webhookSecret = options.webhookSecret ?? process.env.GITHUB_WEBHOOK_SECRET; + } + + isOauthConfigured(): boolean { + return Boolean(this.clientId && this.clientSecret && this.redirectUri); + } + + isAppConfigured(): boolean { + return Boolean(this.appId && this.appPrivateKey); + } + + isWebhookConfigured(): boolean { + return Boolean(this.webhookSecret); + } + + verifyWebhookEvent(payload: string, signatureHeader: string | null, eventHeader: string | null): { event: string; body: GitHubWebhookEvent } { + if (!this.webhookSecret) { + throw new GitHubAppError("GitHub webhook secret is not configured", 500); + } + if (!signatureHeader) { + throw new GitHubAppError("Missing GitHub signature header", 400); + } + if (!eventHeader) { + throw new GitHubAppError("Missing GitHub event header", 400); + } + + const expectedSignature = signatureHeader.startsWith("sha256=") ? 
signatureHeader.slice(7) : null; + if (!expectedSignature) { + throw new GitHubAppError("Malformed GitHub signature header", 400); + } + + const computed = createHmac("sha256", this.webhookSecret).update(payload).digest("hex"); + const computedBuffer = Buffer.from(computed, "utf8"); + const expectedBuffer = Buffer.from(expectedSignature, "utf8"); + if (computedBuffer.length !== expectedBuffer.length || !timingSafeEqual(computedBuffer, expectedBuffer)) { + throw new GitHubAppError("GitHub webhook signature verification failed", 400); + } + + return { + event: eventHeader, + body: JSON.parse(payload) as GitHubWebhookEvent, + }; + } + + buildAuthorizeUrl(state: string): string { + if (!this.clientId || !this.redirectUri) { + throw new GitHubAppError("GitHub OAuth is not configured", 500); + } + + const url = new URL(`${this.authBaseUrl}/login/oauth/authorize`); + url.searchParams.set("client_id", this.clientId); + url.searchParams.set("redirect_uri", this.redirectUri); + url.searchParams.set("scope", "read:user user:email read:org repo"); + url.searchParams.set("state", state); + return url.toString(); + } + + async exchangeCode(code: string): Promise { + if (!this.clientId || !this.clientSecret || !this.redirectUri) { + throw new GitHubAppError("GitHub OAuth is not configured", 500); + } + + const exchangeBody = { + client_id: this.clientId, + client_secret: this.clientSecret, + code, + redirect_uri: this.redirectUri, + }; + githubOAuthLogger.debug( + { + url: `${this.authBaseUrl}/login/oauth/access_token`, + clientId: this.clientId, + redirectUri: this.redirectUri, + codeLength: code.length, + codePrefix: code.slice(0, 6), + }, + "exchange_code_request", + ); + + const response = await fetch(`${this.authBaseUrl}/login/oauth/access_token`, { + method: "POST", + headers: { + Accept: "application/json", + "Content-Type": "application/json", + }, + body: JSON.stringify(exchangeBody), + }); + + const responseText = await response.text(); + githubOAuthLogger.debug( + { + 
status: response.status, + bodyPreview: responseText.slice(0, 300), + }, + "exchange_code_response", + ); + let payload: GitHubTokenResponse; + try { + payload = JSON.parse(responseText) as GitHubTokenResponse; + } catch { + // GitHub may return URL-encoded responses despite Accept: application/json + const params = new URLSearchParams(responseText); + if (params.has("access_token")) { + payload = { + access_token: params.get("access_token")!, + scope: params.get("scope") ?? "", + }; + } else { + throw new GitHubAppError( + params.get("error_description") ?? params.get("error") ?? `GitHub token exchange failed: ${responseText.slice(0, 200)}`, + response.status || 502, + ); + } + } + if (!response.ok || !payload.access_token) { + throw new GitHubAppError(payload.error_description ?? payload.error ?? `GitHub token exchange failed with ${response.status}`, response.status); + } + + return { + accessToken: payload.access_token, + scopes: + payload.scope + ?.split(",") + .map((value) => value.trim()) + .filter((value) => value.length > 0) ?? [], + }; + } + + async getViewer(accessToken: string): Promise { + const user = await this.requestJson<{ + id: number; + login: string; + name?: string | null; + email?: string | null; + }>("/user", accessToken); + + let email = user.email ?? null; + if (!email) { + try { + const emails = await this.requestJson>("/user/emails", accessToken); + const primary = emails.find((candidate) => candidate.primary && candidate.verified) ?? emails[0] ?? null; + email = primary?.email ?? 
null; + } catch (error) { + if (!(error instanceof GitHubAppError) || error.status !== 404) { + throw error; + } + } + } + + return { + id: String(user.id), + login: user.login, + name: user.name?.trim() || user.login, + email, + }; + } + + async listOrganizations(accessToken: string): Promise { + const organizations = await this.paginate<{ id: number; login: string; name?: string | null }>("/user/orgs?per_page=100", accessToken); + return organizations.map((organization) => ({ + id: String(organization.id), + login: organization.login, + name: organization.name?.trim() || organization.login, + })); + } + + async listInstallations(accessToken: string): Promise { + if (!this.isAppConfigured()) { + return []; + } + try { + const payload = await this.requestJson<{ + installations?: Array<{ id: number; account?: { login?: string } | null }>; + }>("/user/installations", accessToken); + + return (payload.installations ?? []) + .map((installation) => ({ + id: installation.id, + accountLogin: installation.account?.login?.trim() ?? "", + })) + .filter((installation) => installation.accountLogin.length > 0); + } catch (error) { + if (!(error instanceof GitHubAppError) || (error.status !== 401 && error.status !== 403)) { + throw error; + } + } + + const installations = await this.paginateApp<{ id: number; account?: { login?: string } | null }>("/app/installations?per_page=100"); + return installations + .map((installation) => ({ + id: installation.id, + accountLogin: installation.account?.login?.trim() ?? 
"", + })) + .filter((installation) => installation.accountLogin.length > 0); + } + + async listUserRepositories(accessToken: string): Promise { + const repositories = await this.paginate<{ + full_name: string; + clone_url: string; + private: boolean; + default_branch: string; + }>("/user/repos?per_page=100&affiliation=owner,collaborator,organization_member&sort=updated", accessToken); + + return repositories.map((repository) => ({ + fullName: repository.full_name, + cloneUrl: repository.clone_url, + private: repository.private, + defaultBranch: repository.default_branch, + })); + } + + async listInstallationRepositories(installationId: number): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + const repositories = await this.paginate<{ + full_name: string; + clone_url: string; + private: boolean; + default_branch: string; + }>("/installation/repositories?per_page=100", accessToken); + + return repositories.map((repository) => ({ + fullName: repository.full_name, + cloneUrl: repository.clone_url, + private: repository.private, + defaultBranch: repository.default_branch, + })); + } + + async getUserRepository(accessToken: string, fullName: string): Promise { + try { + const repository = await this.requestJson<{ + full_name: string; + clone_url: string; + private: boolean; + default_branch: string; + }>(`/repos/${fullName}`, accessToken); + return { + fullName: repository.full_name, + cloneUrl: repository.clone_url, + private: repository.private, + defaultBranch: repository.default_branch, + }; + } catch (error) { + if (error instanceof GitHubAppError && error.status === 404) { + return null; + } + throw error; + } + } + + async getInstallationRepository(installationId: number, fullName: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.getUserRepository(accessToken, fullName); + } + + async listOrganizationMembers(accessToken: string, organizationLogin: 
string): Promise { + const members = await this.paginate<{ + id: number; + login: string; + role?: string | null; + }>(`/orgs/${organizationLogin}/members?per_page=100&role=all`, accessToken); + + const detailedMembers = await Promise.all( + members.map(async (member) => { + try { + const detail = await this.requestJson<{ + id: number; + login: string; + name?: string | null; + email?: string | null; + }>(`/users/${member.login}`, accessToken); + return { + id: String(detail.id), + login: detail.login, + name: detail.name?.trim() || detail.login, + email: detail.email ?? null, + role: member.role ?? null, + state: "active", + }; + } catch { + return { + id: String(member.id), + login: member.login, + name: member.login, + email: null, + role: member.role ?? null, + state: "active", + }; + } + }), + ); + + return detailedMembers; + } + + async listInstallationMembers(installationId: number, organizationLogin: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listOrganizationMembers(accessToken, organizationLogin); + } + + async listPullRequestsForUserRepositories(accessToken: string, repositories: GitHubRepositoryRecord[]): Promise { + return (await Promise.all(repositories.map((repository) => this.listRepositoryPullRequests(accessToken, repository.fullName, repository.cloneUrl)))).flat(); + } + + async listInstallationPullRequestsForRepositories(installationId: number, repositories: GitHubRepositoryRecord[]): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listPullRequestsForUserRepositories(accessToken, repositories); + } + + async getUserPullRequest(accessToken: string, fullName: string, prNumber: number): Promise { + try { + const pullRequest = await this.requestJson<{ + number: number; + title: string; + body?: string | null; + state: string; + html_url: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | 
null; + head?: { ref?: string } | null; + base?: { ref?: string } | null; + }>(`/repos/${fullName}/pulls/${prNumber}`, accessToken); + const repository = await this.getUserRepository(accessToken, fullName); + if (!repository) { + return null; + } + return { + repoFullName: fullName, + cloneUrl: repository.cloneUrl, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.html_url, + headRefName: pullRequest.head?.ref?.trim() ?? "", + baseRefName: pullRequest.base?.ref?.trim() ?? "", + authorLogin: pullRequest.user?.login?.trim() ?? null, + isDraft: Boolean(pullRequest.draft), + merged: Boolean(pullRequest.merged), + }; + } catch (error) { + if (error instanceof GitHubAppError && error.status === 404) { + return null; + } + throw error; + } + } + + async getInstallationPullRequest(installationId: number, fullName: string, prNumber: number): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.getUserPullRequest(accessToken, fullName, prNumber); + } + + async buildInstallationUrl(organizationLogin: string, state: string): Promise { + if (!this.isAppConfigured()) { + throw new GitHubAppError("GitHub App is not configured", 500); + } + const app = await this.requestAppJson<{ slug?: string }>("/app"); + if (!app.slug) { + throw new GitHubAppError("GitHub App slug is unavailable", 500); + } + const url = new URL(`${this.authBaseUrl}/apps/${app.slug}/installations/new`); + url.searchParams.set("state", state); + void organizationLogin; + return url.toString(); + } + + private async createInstallationAccessToken(installationId: number): Promise { + if (!this.appId || !this.appPrivateKey) { + throw new GitHubAppError("GitHub App is not configured", 500); + } + + const response = await fetch(`${this.apiBaseUrl}/app/installations/${installationId}/access_tokens`, { + method: "POST", + headers: { + Accept: "application/vnd.github+json", + 
Authorization: `Bearer ${this.createAppJwt()}`, + "X-GitHub-Api-Version": "2022-11-28", + }, + }); + + const payload = (await response.json()) as { token?: string; message?: string }; + if (!response.ok || !payload.token) { + throw new GitHubAppError(payload.message ?? "Unable to mint GitHub installation token", response.status); + } + return payload.token; + } + + private createAppJwt(): string { + if (!this.appId || !this.appPrivateKey) { + throw new GitHubAppError("GitHub App is not configured", 500); + } + + const header = base64UrlEncode(JSON.stringify({ alg: "RS256", typ: "JWT" })); + const now = Math.floor(Date.now() / 1000); + const payload = base64UrlEncode( + JSON.stringify({ + iat: now - 60, + exp: now + 540, + iss: this.appId, + }), + ); + const signer = createSign("RSA-SHA256"); + signer.update(`${header}.${payload}`); + signer.end(); + const key = createPrivateKey(this.appPrivateKey); + const signature = signer.sign(key); + return `${header}.${payload}.${base64UrlEncode(signature)}`; + } + + private async requestAppJson(path: string): Promise { + const response = await fetch(`${this.apiBaseUrl}${path}`, { + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${this.createAppJwt()}`, + "X-GitHub-Api-Version": "2022-11-28", + }, + }); + + const payload = (await response.json()) as T | { message?: string }; + if (!response.ok) { + throw new GitHubAppError( + typeof payload === "object" && payload && "message" in payload ? (payload.message ?? "GitHub request failed") : "GitHub request failed", + response.status, + ); + } + return payload as T; + } + + private async paginateApp(path: string): Promise { + let nextUrl = `${this.apiBaseUrl}${path.startsWith("/") ? path : `/${path}`}`; + const items: T[] = []; + + while (nextUrl) { + const page = await this.requestAppPage(nextUrl); + items.push(...page.items); + nextUrl = page.nextUrl ?? 
""; + } + + return items; + } + + private async requestJson(path: string, accessToken: string): Promise { + const response = await fetch(`${this.apiBaseUrl}${path}`, { + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${accessToken}`, + "X-GitHub-Api-Version": "2022-11-28", + }, + }); + + const payload = (await response.json()) as T | { message?: string }; + if (!response.ok) { + throw new GitHubAppError( + typeof payload === "object" && payload && "message" in payload ? (payload.message ?? "GitHub request failed") : "GitHub request failed", + response.status, + ); + } + return payload as T; + } + + private async listRepositoryPullRequests(accessToken: string, fullName: string, cloneUrl: string): Promise { + const pullRequests = await this.paginate<{ + number: number; + title: string; + body?: string | null; + state: string; + html_url: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | null; + head?: { ref?: string } | null; + base?: { ref?: string } | null; + }>(`/repos/${fullName}/pulls?state=open&per_page=100&sort=updated&direction=desc`, accessToken); + + return pullRequests.map((pullRequest) => ({ + repoFullName: fullName, + cloneUrl, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.html_url, + headRefName: pullRequest.head?.ref?.trim() ?? "", + baseRefName: pullRequest.base?.ref?.trim() ?? "", + authorLogin: pullRequest.user?.login?.trim() ?? null, + isDraft: Boolean(pullRequest.draft), + merged: Boolean(pullRequest.merged), + })); + } + + private async paginate(path: string, accessToken: string): Promise { + let nextUrl = `${this.apiBaseUrl}${path.startsWith("/") ? path : `/${path}`}`; + const items: T[] = []; + + while (nextUrl) { + const page = await this.requestPage(nextUrl, accessToken); + items.push(...page.items); + nextUrl = page.nextUrl ?? 
""; + } + + return items; + } + + private async requestPage(url: string, accessToken: string): Promise> { + const response = await fetch(url, { + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${accessToken}`, + "X-GitHub-Api-Version": "2022-11-28", + }, + }); + + const payload = (await response.json()) as T[] | { repositories?: T[]; message?: string }; + if (!response.ok) { + throw new GitHubAppError( + typeof payload === "object" && payload && "message" in payload ? (payload.message ?? "GitHub request failed") : "GitHub request failed", + response.status, + ); + } + + const items = Array.isArray(payload) ? payload : (payload.repositories ?? []); + return { + items, + nextUrl: parseNextLink(response.headers.get("link")), + }; + } + + private async requestAppPage(url: string): Promise> { + const response = await fetch(url, { + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${this.createAppJwt()}`, + "X-GitHub-Api-Version": "2022-11-28", + }, + }); + + const payload = (await response.json()) as T[] | { installations?: T[]; message?: string }; + if (!response.ok) { + throw new GitHubAppError( + typeof payload === "object" && payload && "message" in payload ? (payload.message ?? "GitHub request failed") : "GitHub request failed", + response.status, + ); + } + + const items = Array.isArray(payload) ? payload : (payload.installations ?? []); + return { + items, + nextUrl: parseNextLink(response.headers.get("link")), + }; + } +} + +function parseNextLink(linkHeader: string | null): string | null { + if (!linkHeader) { + return null; + } + + for (const part of linkHeader.split(",")) { + const [urlPart, relPart] = part.split(";").map((value) => value.trim()); + if (!urlPart || !relPart || !relPart.includes('rel="next"')) { + continue; + } + return urlPart.replace(/^<|>$/g, ""); + } + + return null; +} + +function base64UrlEncode(value: string | Buffer): string { + const source = typeof value === "string" ? 
Buffer.from(value, "utf8") : value; + return source.toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/g, ""); +} diff --git a/foundry/packages/backend/src/services/app-shell-runtime.ts b/foundry/packages/backend/src/services/app-shell-runtime.ts new file mode 100644 index 0000000..84cb326 --- /dev/null +++ b/foundry/packages/backend/src/services/app-shell-runtime.ts @@ -0,0 +1,84 @@ +import { + GitHubAppClient, + type GitHubInstallationRecord, + type GitHubOAuthSession, + type GitHubOrgIdentity, + type GitHubRepositoryRecord, + type GitHubViewerIdentity, + type GitHubWebhookEvent, +} from "./app-github.js"; +import { + StripeAppClient, + type StripeCheckoutCompletion, + type StripeCheckoutSession, + type StripePortalSession, + type StripeSubscriptionSnapshot, + type StripeWebhookEvent, +} from "./app-stripe.js"; +import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared"; + +export type AppShellGithubClient = Pick< + GitHubAppClient, + | "isAppConfigured" + | "isWebhookConfigured" + | "buildAuthorizeUrl" + | "exchangeCode" + | "getViewer" + | "listOrganizations" + | "listInstallations" + | "listUserRepositories" + | "listInstallationRepositories" + | "buildInstallationUrl" + | "verifyWebhookEvent" +>; + +export type AppShellStripeClient = Pick< + StripeAppClient, + | "isConfigured" + | "createCustomer" + | "createCheckoutSession" + | "retrieveCheckoutCompletion" + | "retrieveSubscription" + | "createPortalSession" + | "updateSubscriptionCancellation" + | "verifyWebhookEvent" + | "planIdForPriceId" +>; + +export interface AppShellServices { + appUrl: string; + apiUrl: string; + github: AppShellGithubClient; + stripe: AppShellStripeClient; +} + +export interface CreateAppShellServicesOptions { + appUrl?: string; + apiUrl?: string; + github?: AppShellGithubClient; + stripe?: AppShellStripeClient; +} + +export function createDefaultAppShellServices(options: CreateAppShellServicesOptions = {}): AppShellServices { + return { + appUrl: 
(options.appUrl ?? process.env.APP_URL ?? "http://localhost:4173").replace(/\/$/, ""), + apiUrl: (options.apiUrl ?? process.env.BETTER_AUTH_URL ?? process.env.APP_URL ?? "http://localhost:7741").replace(/\/$/, ""), + github: options.github ?? new GitHubAppClient(), + stripe: options.stripe ?? new StripeAppClient(), + }; +} + +export type { + GitHubInstallationRecord, + GitHubOAuthSession, + GitHubOrgIdentity, + GitHubRepositoryRecord, + GitHubViewerIdentity, + GitHubWebhookEvent, + StripeCheckoutCompletion, + StripeCheckoutSession, + StripePortalSession, + StripeSubscriptionSnapshot, + StripeWebhookEvent, + FoundryBillingPlanId, +}; diff --git a/foundry/packages/backend/src/services/app-stripe.ts b/foundry/packages/backend/src/services/app-stripe.ts new file mode 100644 index 0000000..9cb249e --- /dev/null +++ b/foundry/packages/backend/src/services/app-stripe.ts @@ -0,0 +1,284 @@ +import { createHmac, timingSafeEqual } from "node:crypto"; +import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared"; + +export class StripeAppError extends Error { + readonly status: number; + + constructor(message: string, status = 500) { + super(message); + this.name = "StripeAppError"; + this.status = status; + } +} + +export interface StripeCheckoutSession { + id: string; + url: string; +} + +export interface StripePortalSession { + url: string; +} + +export interface StripeSubscriptionSnapshot { + id: string; + customerId: string; + priceId: string | null; + status: string; + cancelAtPeriodEnd: boolean; + currentPeriodEnd: number | null; + trialEnd: number | null; + defaultPaymentMethodLabel: string; +} + +export interface StripeCheckoutCompletion { + customerId: string | null; + subscriptionId: string | null; + planId: FoundryBillingPlanId | null; + paymentMethodLabel: string; +} + +export interface StripeWebhookEvent { + id: string; + type: string; + data: { + object: T; + }; +} + +export interface StripeAppClientOptions { + apiBaseUrl?: string; + secretKey?: 
string; + webhookSecret?: string; + teamPriceId?: string; +} + +export class StripeAppClient { + private readonly apiBaseUrl: string; + private readonly secretKey?: string; + private readonly webhookSecret?: string; + private readonly teamPriceId?: string; + + constructor(options: StripeAppClientOptions = {}) { + this.apiBaseUrl = (options.apiBaseUrl ?? "https://api.stripe.com").replace(/\/$/, ""); + this.secretKey = options.secretKey ?? process.env.STRIPE_SECRET_KEY; + this.webhookSecret = options.webhookSecret ?? process.env.STRIPE_WEBHOOK_SECRET; + this.teamPriceId = options.teamPriceId ?? process.env.STRIPE_PRICE_TEAM; + } + + isConfigured(): boolean { + return Boolean(this.secretKey); + } + + createCheckoutSession(input: { + organizationId: string; + customerId: string; + customerEmail: string | null; + planId: Exclude; + successUrl: string; + cancelUrl: string; + }): Promise { + const priceId = this.priceIdForPlan(input.planId); + return this.formRequest("/v1/checkout/sessions", { + mode: "subscription", + success_url: input.successUrl, + cancel_url: input.cancelUrl, + customer: input.customerId, + "line_items[0][price]": priceId, + "line_items[0][quantity]": "1", + "metadata[organizationId]": input.organizationId, + "metadata[planId]": input.planId, + "subscription_data[metadata][organizationId]": input.organizationId, + "subscription_data[metadata][planId]": input.planId, + }); + } + + createPortalSession(input: { customerId: string; returnUrl: string }): Promise { + return this.formRequest("/v1/billing_portal/sessions", { + customer: input.customerId, + return_url: input.returnUrl, + }); + } + + createCustomer(input: { organizationId: string; displayName: string; email: string | null }): Promise<{ id: string }> { + return this.formRequest<{ id: string }>("/v1/customers", { + name: input.displayName, + ...(input.email ? 
{ email: input.email } : {}), + "metadata[organizationId]": input.organizationId, + }); + } + + async updateSubscriptionCancellation(subscriptionId: string, cancelAtPeriodEnd: boolean): Promise { + const payload = await this.formRequest>(`/v1/subscriptions/${subscriptionId}`, { + cancel_at_period_end: cancelAtPeriodEnd ? "true" : "false", + }); + return stripeSubscriptionSnapshot(payload); + } + + async retrieveCheckoutCompletion(sessionId: string): Promise { + const payload = await this.requestJson>(`/v1/checkout/sessions/${sessionId}?expand[]=subscription.default_payment_method`); + + const subscription = typeof payload.subscription === "object" && payload.subscription ? (payload.subscription as Record) : null; + const subscriptionId = + typeof payload.subscription === "string" ? payload.subscription : subscription && typeof subscription.id === "string" ? subscription.id : null; + const priceId = firstStripePriceId(subscription); + + return { + customerId: typeof payload.customer === "string" ? payload.customer : null, + subscriptionId, + planId: priceId ? this.planIdForPriceId(priceId) : planIdFromMetadata(payload.metadata), + paymentMethodLabel: subscription ? 
paymentMethodLabelFromObject(subscription.default_payment_method) : "Card on file", + }; + } + + async retrieveSubscription(subscriptionId: string): Promise { + const payload = await this.requestJson>(`/v1/subscriptions/${subscriptionId}?expand[]=default_payment_method`); + return stripeSubscriptionSnapshot(payload); + } + + verifyWebhookEvent(payload: string, signatureHeader: string | null): StripeWebhookEvent { + if (!this.webhookSecret) { + throw new StripeAppError("Stripe webhook secret is not configured", 500); + } + if (!signatureHeader) { + throw new StripeAppError("Missing Stripe signature header", 400); + } + + const parts = Object.fromEntries( + signatureHeader + .split(",") + .map((entry) => entry.split("=")) + .filter((entry): entry is [string, string] => entry.length === 2), + ); + const timestamp = parts.t; + const signature = parts.v1; + if (!timestamp || !signature) { + throw new StripeAppError("Malformed Stripe signature header", 400); + } + + const expected = createHmac("sha256", this.webhookSecret).update(`${timestamp}.${payload}`).digest("hex"); + + const expectedBuffer = Buffer.from(expected, "utf8"); + const actualBuffer = Buffer.from(signature, "utf8"); + if (expectedBuffer.length !== actualBuffer.length || !timingSafeEqual(expectedBuffer, actualBuffer)) { + throw new StripeAppError("Stripe signature verification failed", 400); + } + + return JSON.parse(payload) as StripeWebhookEvent; + } + + planIdForPriceId(priceId: string): FoundryBillingPlanId | null { + if (priceId === this.teamPriceId) { + return "team"; + } + return null; + } + + priceIdForPlan(planId: Exclude): string { + const priceId = this.teamPriceId; + if (!priceId) { + throw new StripeAppError(`Stripe price ID is not configured for ${planId}`, 500); + } + return priceId; + } + + private async requestJson(path: string): Promise { + if (!this.secretKey) { + throw new StripeAppError("Stripe is not configured", 500); + } + + const response = await fetch(`${this.apiBaseUrl}${path}`, 
{ + headers: { + Authorization: `Bearer ${this.secretKey}`, + }, + }); + + const payload = (await response.json()) as T | { error?: { message?: string } }; + if (!response.ok) { + throw new StripeAppError( + typeof payload === "object" && payload && "error" in payload ? (payload.error?.message ?? "Stripe request failed") : "Stripe request failed", + response.status, + ); + } + return payload as T; + } + + private async formRequest(path: string, body: Record): Promise { + if (!this.secretKey) { + throw new StripeAppError("Stripe is not configured", 500); + } + + const form = new URLSearchParams(); + for (const [key, value] of Object.entries(body)) { + form.set(key, value); + } + + const response = await fetch(`${this.apiBaseUrl}${path}`, { + method: "POST", + headers: { + Authorization: `Bearer ${this.secretKey}`, + "Content-Type": "application/x-www-form-urlencoded", + }, + body: form, + }); + + const payload = (await response.json()) as T | { error?: { message?: string } }; + if (!response.ok) { + throw new StripeAppError( + typeof payload === "object" && payload && "error" in payload ? (payload.error?.message ?? "Stripe request failed") : "Stripe request failed", + response.status, + ); + } + return payload as T; + } +} + +function planIdFromMetadata(metadata: unknown): FoundryBillingPlanId | null { + if (!metadata || typeof metadata !== "object") { + return null; + } + const planId = (metadata as Record).planId; + return planId === "team" || planId === "free" ? planId : null; +} + +function firstStripePriceId(subscription: Record | null): string | null { + if (!subscription || typeof subscription.items !== "object" || !subscription.items) { + return null; + } + const data = (subscription.items as { data?: Array> }).data; + const first = data?.[0]; + if (!first || typeof first.price !== "object" || !first.price) { + return null; + } + return typeof (first.price as Record).id === "string" ? 
((first.price as Record).id as string) : null; +} + +function paymentMethodLabelFromObject(paymentMethod: unknown): string { + if (!paymentMethod || typeof paymentMethod !== "object") { + return "Card on file"; + } + const card = (paymentMethod as Record).card; + if (card && typeof card === "object") { + const brand = typeof (card as Record).brand === "string" ? ((card as Record).brand as string) : "Card"; + const last4 = typeof (card as Record).last4 === "string" ? ((card as Record).last4 as string) : "file"; + return `${capitalize(brand)} ending in ${last4}`; + } + return "Payment method on file"; +} + +function stripeSubscriptionSnapshot(payload: Record): StripeSubscriptionSnapshot { + return { + id: typeof payload.id === "string" ? payload.id : "", + customerId: typeof payload.customer === "string" ? payload.customer : "", + priceId: firstStripePriceId(payload), + status: typeof payload.status === "string" ? payload.status : "active", + cancelAtPeriodEnd: payload.cancel_at_period_end === true, + currentPeriodEnd: typeof payload.current_period_end === "number" ? payload.current_period_end : null, + trialEnd: typeof payload.trial_end === "number" ? payload.trial_end : null, + defaultPaymentMethodLabel: paymentMethodLabelFromObject(payload.default_payment_method), + }; +} + +function capitalize(value: string): string { + return value.length > 0 ? 
`${value[0]!.toUpperCase()}${value.slice(1)}` : value; +} diff --git a/foundry/packages/backend/src/services/better-auth.ts b/foundry/packages/backend/src/services/better-auth.ts new file mode 100644 index 0000000..23d227f --- /dev/null +++ b/foundry/packages/backend/src/services/better-auth.ts @@ -0,0 +1,559 @@ +import { betterAuth } from "better-auth"; +import { createAdapterFactory } from "better-auth/adapters"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; +import { organizationKey, userKey } from "../actors/keys.js"; +import { logger } from "../logging.js"; + +const AUTH_BASE_PATH = "/v1/auth"; +const SESSION_COOKIE = "better-auth.session_token"; + +let betterAuthService: BetterAuthService | null = null; + +function requireEnv(name: string): string { + const value = process.env[name]?.trim(); + if (!value) { + throw new Error(`${name} is required`); + } + return value; +} + +function stripTrailingSlash(value: string): string { + return value.replace(/\/$/, ""); +} + +function buildCookieHeaders(sessionToken: string): Headers { + return new Headers({ + cookie: `${SESSION_COOKIE}=${encodeURIComponent(sessionToken)}`, + }); +} + +async function readJsonSafe(response: Response): Promise { + const text = await response.text(); + if (!text) { + return null; + } + try { + return JSON.parse(text); + } catch { + return text; + } +} + +async function callAuthEndpoint(auth: any, url: string, init?: RequestInit): Promise { + return await auth.handler(new Request(url, init)); +} + +function resolveRouteUserId(organization: any, resolved: any): string | null { + if (!resolved) { + return null; + } + if (typeof resolved === "string") { + return resolved; + } + if (typeof resolved.userId === "string" && resolved.userId.length > 0) { + return resolved.userId; + } + if (typeof resolved.id === "string" && resolved.id.length > 0) { + return resolved.id; + } + return null; +} + +export interface BetterAuthService { + auth: any; + 
resolveSession(headers: Headers): Promise<{ session: any; user: any } | null>; + signOut(headers: Headers): Promise; + getAuthState(sessionId: string): Promise; + upsertUserProfile(userId: string, patch: Record): Promise; + setActiveOrganization(sessionId: string, activeOrganizationId: string | null): Promise; + getAccessTokenForSession(sessionId: string): Promise<{ accessToken: string; scopes: string[] } | null>; +} + +export function initBetterAuthService(actorClient: any, options: { apiUrl: string; appUrl: string }): BetterAuthService { + if (betterAuthService) { + return betterAuthService; + } + + // getOrCreate is intentional here: the adapter runs during Better Auth callbacks + // which can fire before any explicit create path. The app organization and user + // actors must exist by the time the adapter needs them. + const appOrganization = () => + actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, + }); + + // getOrCreate is intentional: Better Auth creates user records during OAuth + // callbacks, so the user actor must be lazily provisioned on first access. + const getUser = async (userId: string) => + await actorClient.user.getOrCreate(userKey(userId), { + createWithInput: { userId }, + }); + + const adapter = createAdapterFactory({ + config: { + adapterId: "rivetkit-actor", + adapterName: "RivetKit Actor Adapter", + supportsBooleans: false, + supportsDates: false, + supportsJSON: false, + }, + adapter: ({ transformInput, transformOutput, transformWhereClause }) => { + const resolveUserIdForQuery = async (model: string, where?: any[], data?: Record): Promise => { + const clauses = where ?? []; + const direct = (field: string) => clauses.find((entry) => entry.field === field)?.value; + + if (model === "user") { + const fromId = direct("id") ?? 
data?.id; + if (typeof fromId === "string" && fromId.length > 0) { + return fromId; + } + const email = direct("email"); + if (typeof email === "string" && email.length > 0) { + const organization = await appOrganization(); + const resolved = await organization.betterAuthFindEmailIndex({ email: email.toLowerCase() }); + return resolveRouteUserId(organization, resolved); + } + return null; + } + + if (model === "session") { + const fromUserId = direct("userId") ?? data?.userId; + if (typeof fromUserId === "string" && fromUserId.length > 0) { + return fromUserId; + } + const sessionId = direct("id") ?? data?.id; + const sessionToken = direct("token") ?? data?.token; + if (typeof sessionId === "string" || typeof sessionToken === "string") { + const organization = await appOrganization(); + const resolved = await organization.betterAuthFindSessionIndex({ + ...(typeof sessionId === "string" ? { sessionId } : {}), + ...(typeof sessionToken === "string" ? { sessionToken } : {}), + }); + return resolveRouteUserId(organization, resolved); + } + return null; + } + + if (model === "account") { + const fromUserId = direct("userId") ?? data?.userId; + if (typeof fromUserId === "string" && fromUserId.length > 0) { + return fromUserId; + } + const accountRecordId = direct("id") ?? data?.id; + const providerId = direct("providerId") ?? data?.providerId; + const accountId = direct("accountId") ?? 
data?.accountId; + const organization = await appOrganization(); + if (typeof accountRecordId === "string" && accountRecordId.length > 0) { + const resolved = await organization.betterAuthFindAccountIndex({ id: accountRecordId }); + return resolveRouteUserId(organization, resolved); + } + if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) { + const resolved = await organization.betterAuthFindAccountIndex({ providerId, accountId }); + return resolveRouteUserId(organization, resolved); + } + return null; + } + + return null; + }; + + return { + options: { + useDatabaseGeneratedIds: false, + }, + + create: async ({ model, data }) => { + const transformed = await transformInput(data, model, "create", true); + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthCreateVerification({ data: transformed }); + } + + const userId = await resolveUserIdForQuery(model, undefined, transformed); + if (!userId) { + throw new Error(`Unable to resolve auth actor for create(${model})`); + } + + const userActor = await getUser(userId); + const created = await userActor.betterAuthCreateRecord({ model, data: transformed }); + const organization = await appOrganization(); + + if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) { + await organization.betterAuthUpsertEmailIndex({ + email: transformed.email.toLowerCase(), + userId, + }); + } + + if (model === "session") { + await organization.betterAuthUpsertSessionIndex({ + sessionId: String(created.id), + sessionToken: String(created.token), + userId, + }); + } + + if (model === "account") { + await organization.betterAuthUpsertAccountIndex({ + id: String(created.id), + providerId: String(created.providerId), + accountId: String(created.accountId), + userId, + }); + } + + return (await transformOutput(created, model)) as any; + }, + + findOne: async ({ model, where, 
join }) => { + const transformedWhere = transformWhereClause({ model, where, action: "findOne" }); + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthFindOneVerification({ where: transformedWhere, join }); + } + + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return null; + } + + const userActor = await getUser(userId); + const found = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere, join }); + return found ? ((await transformOutput(found, model, undefined, join)) as any) : null; + }, + + findMany: async ({ model, where, limit, sortBy, offset, join }) => { + const transformedWhere = transformWhereClause({ model, where, action: "findMany" }); + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthFindManyVerification({ + where: transformedWhere, + limit, + sortBy, + offset, + join, + }); + } + + if (model === "session") { + const tokenClause = transformedWhere?.find((entry: any) => entry.field === "token" && entry.operator === "in"); + if (tokenClause && Array.isArray(tokenClause.value)) { + const organization = await appOrganization(); + const resolved = await Promise.all( + (tokenClause.value as string[]).map(async (sessionToken: string) => ({ + sessionToken, + route: await organization.betterAuthFindSessionIndex({ sessionToken }), + })), + ); + const byUser = new Map(); + for (const item of resolved) { + if (!item.route?.userId) { + continue; + } + const tokens = byUser.get(item.route.userId) ?? []; + tokens.push(item.sessionToken); + byUser.set(item.route.userId, tokens); + } + + const rows = []; + for (const [userId, tokens] of byUser) { + const userActor = await getUser(userId); + const scopedWhere = transformedWhere.map((entry: any) => + entry.field === "token" && entry.operator === "in" ? 
{ ...entry, value: tokens } : entry, + ); + const found = await userActor.betterAuthFindManyRecords({ model, where: scopedWhere, limit, sortBy, offset, join }); + rows.push(...found); + } + return await Promise.all(rows.map(async (row: any) => await transformOutput(row, model, undefined, join))); + } + } + + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return []; + } + + const userActor = await getUser(userId); + const found = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit, sortBy, offset, join }); + return await Promise.all(found.map(async (row: any) => await transformOutput(row, model, undefined, join))); + }, + + update: async ({ model, where, update }) => { + const transformedWhere = transformWhereClause({ model, where, action: "update" }); + const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthUpdateVerification({ + where: transformedWhere, + update: transformedUpdate, + }); + } + + const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); + if (!userId) { + return null; + } + + const userActor = await getUser(userId); + const before = + model === "user" + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) + : model === "account" + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) + : model === "session" + ? 
await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) + : null; + const updated = await userActor.betterAuthUpdateRecord({ + model, + where: transformedWhere, + update: transformedUpdate, + }); + const organization = await appOrganization(); + + if (model === "user" && updated) { + if (before?.email && before.email !== updated.email) { + await organization.betterAuthDeleteEmailIndex({ + email: before.email.toLowerCase(), + }); + } + if (updated.email) { + await organization.betterAuthUpsertEmailIndex({ + email: updated.email.toLowerCase(), + userId, + }); + } + } + + if (model === "session" && updated) { + await organization.betterAuthUpsertSessionIndex({ + sessionId: String(updated.id), + sessionToken: String(updated.token), + userId, + }); + } + + if (model === "account" && updated) { + await organization.betterAuthUpsertAccountIndex({ + id: String(updated.id), + providerId: String(updated.providerId), + accountId: String(updated.accountId), + userId, + }); + } + + return updated ? 
((await transformOutput(updated, model)) as any) : null; + }, + + updateMany: async ({ model, where, update }) => { + const transformedWhere = transformWhereClause({ model, where, action: "updateMany" }); + const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthUpdateManyVerification({ + where: transformedWhere, + update: transformedUpdate, + }); + } + + const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); + if (!userId) { + return 0; + } + + const userActor = await getUser(userId); + return await userActor.betterAuthUpdateManyRecords({ + model, + where: transformedWhere, + update: transformedUpdate, + }); + }, + + delete: async ({ model, where }) => { + const transformedWhere = transformWhereClause({ model, where, action: "delete" }); + if (model === "verification") { + const organization = await appOrganization(); + await organization.betterAuthDeleteVerification({ where: transformedWhere }); + return; + } + + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return; + } + + const userActor = await getUser(userId); + const organization = await appOrganization(); + const before = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }); + await userActor.betterAuthDeleteRecord({ model, where: transformedWhere }); + + if (model === "session" && before) { + await organization.betterAuthDeleteSessionIndex({ + sessionId: before.id, + sessionToken: before.token, + }); + } + + if (model === "account" && before) { + await organization.betterAuthDeleteAccountIndex({ + id: before.id, + providerId: before.providerId, + accountId: before.accountId, + }); + } + + if (model === "user" && before?.email) { + await organization.betterAuthDeleteEmailIndex({ + email: before.email.toLowerCase(), + }); + } + }, + + deleteMany: 
async ({ model, where }) => { + const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" }); + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthDeleteManyVerification({ where: transformedWhere }); + } + + if (model === "session") { + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return 0; + } + const userActor = await getUser(userId); + const organization = await appOrganization(); + const sessions = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit: 5000 }); + const deleted = await userActor.betterAuthDeleteManyRecords({ model, where: transformedWhere }); + for (const session of sessions) { + await organization.betterAuthDeleteSessionIndex({ + sessionId: session.id, + sessionToken: session.token, + }); + } + return deleted; + } + + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return 0; + } + + const userActor = await getUser(userId); + return await userActor.betterAuthDeleteManyRecords({ model, where: transformedWhere }); + }, + + count: async ({ model, where }) => { + const transformedWhere = transformWhereClause({ model, where, action: "count" }); + if (model === "verification") { + const organization = await appOrganization(); + return await organization.betterAuthCountVerification({ where: transformedWhere }); + } + + const userId = await resolveUserIdForQuery(model, transformedWhere); + if (!userId) { + return 0; + } + + const userActor = await getUser(userId); + return await userActor.betterAuthCountRecords({ model, where: transformedWhere }); + }, + }; + }, + }); + + const auth = betterAuth({ + baseURL: stripTrailingSlash(process.env.BETTER_AUTH_URL ?? 
options.apiUrl), + basePath: AUTH_BASE_PATH, + secret: requireEnv("BETTER_AUTH_SECRET"), + database: adapter, + trustedOrigins: [stripTrailingSlash(options.appUrl), stripTrailingSlash(options.apiUrl)], + session: { + cookieCache: { + enabled: true, + maxAge: 5 * 60, + strategy: "compact", + }, + }, + onAPIError: { + errorURL: stripTrailingSlash(options.appUrl) + "/signin", + }, + socialProviders: { + github: { + clientId: requireEnv("GITHUB_CLIENT_ID"), + clientSecret: requireEnv("GITHUB_CLIENT_SECRET"), + scope: ["read:org", "repo"], + redirectURI: process.env.GITHUB_REDIRECT_URI || undefined, + }, + }, + }); + + betterAuthService = { + auth, + + async resolveSession(headers: Headers) { + return (await auth.api.getSession({ headers })) ?? null; + }, + + async signOut(headers: Headers) { + return await callAuthEndpoint(auth, `${stripTrailingSlash(process.env.BETTER_AUTH_URL ?? options.apiUrl)}${AUTH_BASE_PATH}/sign-out`, { + method: "POST", + headers, + }); + }, + + async getAuthState(sessionId: string) { + const organization = await appOrganization(); + const route = await organization.betterAuthFindSessionIndex({ sessionId }); + if (!route?.userId) { + return null; + } + const userActor = await getUser(route.userId); + return await userActor.getAppAuthState({ sessionId }); + }, + + async upsertUserProfile(userId: string, patch: Record) { + const userActor = await getUser(userId); + return await userActor.upsertProfile({ userId, patch }); + }, + + async setActiveOrganization(sessionId: string, activeOrganizationId: string | null) { + const authState = await this.getAuthState(sessionId); + if (!authState?.user?.id) { + throw new Error(`Unknown auth session ${sessionId}`); + } + const userActor = await getUser(authState.user.id); + return await userActor.upsertSessionState({ sessionId, activeOrganizationId }); + }, + + async getAccessTokenForSession(sessionId: string) { + // Read the GitHub access token directly from the account record stored in the + // auth user 
actor. Better Auth's internal /get-access-token endpoint requires + // session middleware resolution which fails for server-side internal calls (403), + // so we bypass it and read the stored token from our adapter layer directly. + const authState = await this.getAuthState(sessionId); + if (!authState?.user?.id || !authState?.accounts) { + return null; + } + + const githubAccount = authState.accounts.find((account: any) => account.providerId === "github"); + if (!githubAccount?.accessToken) { + logger.warn({ sessionId, userId: authState.user.id }, "get_access_token_no_github_account"); + return null; + } + + return { + accessToken: githubAccount.accessToken, + scopes: githubAccount.scope ? githubAccount.scope.split(/[, ]+/) : [], + }; + }, + }; + + return betterAuthService; +} + +export function getBetterAuthService(): BetterAuthService { + if (!betterAuthService) { + throw new Error("BetterAuth service is not initialized"); + } + return betterAuthService; +} diff --git a/foundry/packages/backend/src/services/branch-name-prefixes.ts b/foundry/packages/backend/src/services/branch-name-prefixes.ts new file mode 100644 index 0000000..aaccaee --- /dev/null +++ b/foundry/packages/backend/src/services/branch-name-prefixes.ts @@ -0,0 +1,584 @@ +// Auto-generated list of branch name prefixes. +// Source: McMaster-Carr product catalog. 
+export const BRANCH_NAME_PREFIXES: readonly string[] = [ + "abrasive-blasters", + "ac-motors", + "access-doors", + "adjustable-handles", + "aerosol-paint", + "air-cleaners", + "air-cylinders", + "air-filters", + "air-hose", + "air-knives", + "air-nozzles", + "air-regulators", + "air-ride-wheels", + "air-slides", + "alligator-clips", + "alloy-steel", + "aluminum-honeycomb", + "angle-indicators", + "antiseize-lubricants", + "antislip-fluid", + "backlight-panel-kits", + "ball-bearings", + "ball-end-mills", + "ball-joint-linkages", + "ball-transfers", + "band-clamps", + "band-saw-blades", + "bar-clamps", + "bar-grating", + "barbed-hose-fittings", + "barbed-tube-fittings", + "basket-strainers", + "batch-cans", + "battery-chargers", + "battery-holders", + "bead-chain", + "beam-clamps", + "belt-conveyors", + "bench-scales", + "bench-vises", + "bin-boxes", + "bin-storage", + "binding-posts", + "blank-tags", + "blasting-cabinets", + "blind-rivets", + "bluetooth-padlocks", + "boring-lathe-tools", + "box-reducers", + "box-wrenches", + "braided-hose", + "brass-pipe-fittings", + "breather-vents", + "butt-splices", + "c-clamps", + "cable-cutters", + "cable-holders", + "cable-tie-mounts", + "cable-ties", + "cam-handles", + "cam-latches", + "cam-locks", + "cap-nuts", + "captive-panel-screws", + "carbide-burs", + "carbide-inserts", + "carbon-fiber", + "carbon-steel", + "cardstock-tags", + "carriage-bolts", + "cast-acrylic", + "cast-iron", + "cast-nylon", + "casting-compounds", + "ceiling-lights", + "ceramic-adhesives", + "chain-slings", + "check-valves", + "chemical-hose", + "chemistry-meters", + "chemistry-testing", + "chip-clearing-tools", + "chucking-reamers", + "cinching-straps", + "circuit-breakers", + "circular-saw-blades", + "circular-saws", + "clamping-hangers", + "clevis-pins", + "clevis-rod-ends", + "clip-on-nuts", + "coaxial-connectors", + "coaxial-cords", + "coiled-spring-pins", + "compact-connectors", + "computer-adapters", + "concrete-adhesives", + "concrete-repair", 
+ "contour-transfers", + "conveyor-belt-lacing", + "conveyor-belting", + "conveyor-brushes", + "conveyor-rollers", + "coolant-hose", + "copper-tube-fittings", + "copper-tubing", + "cord-grips", + "cord-reels", + "cotter-pins", + "coupling-nuts", + "cpvc-pipe-fittings", + "cup-brushes", + "cutoff-wheels", + "cylinder-hones", + "cylinder-racks", + "cylinder-trucks", + "data-cable", + "data-connectors", + "dc-motors", + "dead-blow-hammers", + "delrin-acetal-resin", + "desiccant-air-dryers", + "desktop-cranes", + "dial-calipers", + "dial-indicators", + "die-springs", + "direct-heaters", + "disconnect-switches", + "dispensing-needles", + "dispensing-pumps", + "disposable-clothing", + "disposable-gloves", + "document-protectors", + "door-closers", + "door-handles", + "door-holders", + "dowel-pins", + "drafting-equipment", + "drain-cleaners", + "drainage-mats", + "draw-latches", + "drawer-cabinets", + "drawer-slides", + "drill-bit-sets", + "drill-bits", + "drill-bushings", + "drill-chucks", + "drill-presses", + "drilling-screws", + "drinking-fountains", + "drive-anchors", + "drive-rollers", + "drive-shafts", + "drum-faucets", + "drum-pumps", + "drum-top-vacuums", + "drum-trucks", + "dry-box-gloves", + "dry-erase-boards", + "dry-film-lubricants", + "duct-fans", + "duct-hose", + "duct-tape", + "dust-collectors", + "dustless-chalk", + "edge-trim", + "electric-actuators", + "electric-drills", + "electric-drum-pumps", + "electric-mixers", + "electrical-switches", + "electrical-tape", + "electronic-calipers", + "enclosure-heaters", + "enclosure-panels", + "ethernet-cords", + "exhaust-fans", + "exit-lights", + "expansion-joints", + "expansion-plugs", + "extension-cords", + "extension-springs", + "fabric-snaps", + "fan-blades", + "fep-tubing", + "fiberglass-grating", + "file-holders", + "filter-bag-housings", + "filter-bags", + "filter-cartridges", + "fire-fighting-hose", + "first-aid-supplies", + "fixture-clamps", + "flange-locknuts", + "flange-mount-seals", + 
"flap-sanding-discs", + "flap-sanding-wheels", + "flared-tube-fittings", + "flashing-lights", + "flat-washers", + "flexible-shafts", + "flexible-shank-burs", + "flexible-trays", + "float-valves", + "floor-locks", + "floor-marking-tape", + "floor-scales", + "floor-squeegees", + "flow-sights", + "flow-switches", + "flowmeter-totalizers", + "foot-switches", + "force-gauges", + "fume-exhausters", + "garbage-bags", + "garden-hose", + "gas-hose", + "gas-regulators", + "gas-springs", + "gauge-blocks", + "glass-sights", + "gold-wire", + "grab-latches", + "grease-fittings", + "grinding-bits", + "grinding-wheels", + "hand-brushes", + "hand-chain-hoists", + "hand-reamers", + "hand-trucks", + "hand-wheels", + "hand-winches", + "hanging-scales", + "hard-hats", + "hardened-shafts", + "hardness-testers", + "heat-exchangers", + "heat-guns", + "heat-lamps", + "heat-sealable-bags", + "heat-set-inserts", + "heat-shrink-tubing", + "heat-sinks", + "heated-scrapers", + "helical-inserts", + "hex-bit-sockets", + "hex-head-screws", + "hex-nuts", + "high-accuracy-rulers", + "high-amp-relays", + "high-vacuum-filters", + "high-vacuum-sights", + "hinge-adjusters", + "hoist-rings", + "hole-saws", + "hose-couplings", + "hose-reels", + "hot-melt-glue", + "hydraulic-cylinders", + "hydraulic-hose", + "hydraulic-jacks", + "iec-connectors", + "immersion-heaters", + "impression-foam", + "indicating-lights", + "inflatable-wedges", + "ink-markers", + "insertion-heaters", + "inspection-mirrors", + "instrument-carts", + "insulation-jacketing", + "jam-removers", + "jigsaw-blades", + "key-cabinets", + "key-locking-inserts", + "key-stock", + "keyed-drive-shafts", + "keyseat-end-mills", + "l-key-sets", + "l-keys", + "label-holders", + "latching-connectors", + "lathe-tools", + "lavatory-partitions", + "lead-screws", + "leveling-lasers", + "leveling-mounts", + "lid-supports", + "lift-off-hinges", + "lift-trucks", + "light-bulbs", + "limit-switches", + "linear-ball-bearings", + "liquid-level-gauges", + 
"lock-washers", + "lockout-devices", + "loop-clamps", + "loop-hangers", + "machine-brackets", + "machine-handles", + "machine-keys", + "magnetic-base-drills", + "magnetic-bumpers", + "masking-tape", + "masonry-drill-bits", + "medium-amp-relays", + "metal-cable-ties", + "metal-panels", + "metal-plates", + "metal-tags", + "metering-pumps", + "metric-o-rings", + "mil-spec-connectors", + "mobile-lift-tables", + "motor-controls", + "motor-starters", + "mountable-cable-ties", + "mounting-tape", + "neoprene-foam", + "nickel-titanium", + "nonmarring-hammers", + "nonslip-bumpers", + "nylon-rivets", + "nylon-tubing", + "o-rings", + "oil-level-indicators", + "oil-reservoirs", + "oil-skimmers", + "on-off-valves", + "open-end-wrenches", + "outlet-boxes", + "outlet-strips", + "packaging-tape", + "paint-brushes", + "paint-markers", + "paint-sprayers", + "pallet-racks", + "pallet-trucks", + "panel-air-filters", + "parts-baskets", + "pendant-switches", + "perforated-sheets", + "pest-control", + "petroleum-hose", + "piano-hinges", + "pipe-couplings", + "pipe-gaskets", + "pipe-markers", + "pipe-wrenches", + "plank-grating", + "plastic-clamps", + "plastic-mesh", + "plate-lifting-clamps", + "platinum-wire", + "plier-clamps", + "plug-gauges", + "portable-lights", + "power-cords", + "power-supplied", + "power-supplies", + "precision-knives", + "press-fit-nuts", + "press-in-nuts", + "protecting-tape", + "protective-coatings", + "protective-curtains", + "protective-panels", + "protective-wrap", + "proximity-switches", + "pull-handles", + "push-brooms", + "push-nuts", + "push-on-seals", + "pvc-pipe-fittings", + "pvc-tubing", + "quick-release-pins", + "ratchet-pullers", + "recycled-plastics", + "repair-adhesives", + "repair-clamps", + "reusable-cable-ties", + "ring-terminals", + "rivet-nuts", + "robot-base-mounts", + "robot-bases", + "rocker-switches", + "rod-wipers", + "roller-bearings", + "roller-chain", + "roller-conveyors", + "roof-exhaust-fans", + "roof-repair", + "rotary-broaches", + 
"rotary-hammers", + "rotary-shaft-seals", + "rotating-cranes", + "rotating-joints", + "router-bits", + "rtd-probes", + "rubber-edge-seals", + "rubber-tread-wheels", + "rubber-tubing", + "safety-cabinets", + "safety-glasses", + "safety-mirrors", + "sanding-belts", + "sanding-discs", + "sanding-guides", + "sanding-rolls", + "sanding-sheets", + "screw-extractors", + "screw-jacks", + "scrub-brushes", + "sealing-washers", + "security-lights", + "sensor-connectors", + "set-screws", + "setup-clamps", + "shaft-collars", + "shaft-couplings", + "shaft-repair-sleeves", + "shaft-supports", + "sharpening-stones", + "sheet-metal-cutters", + "shelf-cabinets", + "shim-stock", + "shim-tape", + "shipping-pails", + "shock-absorbers", + "shoulder-screws", + "shower-stations", + "silicone-foam", + "sleeve-bearings", + "slide-bolts", + "slitting-saws", + "slotted-spring-pins", + "sludge-samplers", + "small-parts-storage", + "snap-acting-switches", + "soap-dispensers", + "socket-head-screws", + "socket-organizers", + "socket-wrenches", + "soldering-irons", + "solid-rivets", + "solid-rod-ends", + "sound-insulation", + "space-heaters", + "spacing-beads", + "spanner-wrenches", + "specialty-pliers", + "specialty-vises", + "specialty-washers", + "speed-reducers", + "splicing-connectors", + "spray-bottles", + "spray-nozzles", + "spring-clamps", + "spring-plungers", + "spring-steel", + "square-drive-sockets", + "square-end-mills", + "square-nuts", + "squeeze-bottles", + "stack-lights", + "stainless-steel", + "stair-treads", + "static-control-mats", + "steel-carts", + "steel-pipe-fittings", + "steel-pipe-flanges", + "steel-stamps", + "steel-tubing", + "step-ladders", + "stepper-motors", + "storage-bags", + "storage-boxes", + "storage-chests", + "straight-ladders", + "strap-hinges", + "stretch-wrap", + "strip-doors", + "strip-springs", + "strobe-lights", + "structural-adhesives", + "strut-channel", + "strut-channel-nuts", + "strut-mount-clamps", + "suction-cup-lifters", + "suction-strainers", + 
"super-absorbent-foam", + "super-flexible-glass", + "surface-fillers", + "surface-mount-hinges", + "t-handle-keys", + "t-slotted-framing", + "tamper-seals", + "tank-level-measurers", + "tape-dispensers", + "tape-measures", + "taper-pins", + "tapping-screws", + "teflon-ptfe", + "terminal-blocks", + "test-indicators", + "test-leads", + "test-weights", + "tethered-knobs", + "thermal-insulation", + "thread-adapters", + "thread-sealant-tape", + "thread-sealants", + "threaded-inserts", + "threaded-standoffs", + "threaded-studs", + "thrust-ball-bearings", + "thrust-bearings", + "thumb-nuts", + "thumb-screws", + "tie-down-rings", + "time-clocks", + "timer-relays", + "timer-switches", + "toggle-clamps", + "toggle-switches", + "tool-holders", + "tool-sets", + "tool-steel", + "torque-wrenches", + "torsion-springs", + "tote-boxes", + "touch-bars", + "track-casters", + "track-rollers", + "track-wheels", + "traction-mats", + "trolley-systems", + "tube-brushes", + "tube-fittings", + "tubular-light-bulbs", + "turn-lock-connectors", + "twist-ties", + "u-bolts", + "u-joints", + "ul-class-fuses", + "unthreaded-spacers", + "usb-adapters", + "usb-cords", + "utility-knives", + "v-belts", + "vacuum-cups", + "vacuum-pumps", + "wall-louvers", + "wash-fountains", + "wash-guns", + "waste-containers", + "water-deionizers", + "water-filters", + "water-hose", + "water-removal-pumps", + "weather-stations", + "web-slings", + "weld-nuts", + "welding-clothing", + "welding-helmets", + "wet-dry-vacuums", + "wet-mops", + "wheel-brushes", + "wing-nuts", + "wire-cloth", + "wire-connectors", + "wire-cutting-pliers", + "wire-partitions", + "wire-rope", + "wire-rope-clamps", + "wire-wrap", + "wool-felt", + "work-platforms", + "workbench-legs", + "woven-wire-cloth", +] as const; diff --git a/foundry/packages/backend/src/services/create-flow.ts b/foundry/packages/backend/src/services/create-flow.ts new file mode 100644 index 0000000..eb9e53f --- /dev/null +++ 
b/foundry/packages/backend/src/services/create-flow.ts @@ -0,0 +1,136 @@ +import { BRANCH_NAME_PREFIXES } from "./branch-name-prefixes.js"; + +export interface ResolveCreateFlowDecisionInput { + task: string; + explicitTitle?: string; + explicitBranchName?: string; + localBranches: string[]; + taskBranches: string[]; +} + +export interface ResolveCreateFlowDecisionResult { + title: string; + branchName: string; +} + +function firstNonEmptyLine(input: string): string { + const lines = input + .split(/\r?\n/) + .map((line) => line.trim()) + .filter((line) => line.length > 0); + return lines[0] ?? ""; +} + +export function deriveFallbackTitle(task: string, explicitTitle?: string): string { + const source = (explicitTitle && explicitTitle.trim()) || firstNonEmptyLine(task) || "update task"; + const explicitPrefixMatch = source.match(/^\s*(feat|fix|docs|refactor):\s+(.+)$/i); + if (explicitPrefixMatch) { + const explicitTypePrefix = explicitPrefixMatch[1]!.toLowerCase(); + const explicitSummary = explicitPrefixMatch[2]! + .split("") + .map((char) => (/^[a-zA-Z0-9 -]$/.test(char) ? char : " ")) + .join("") + .split(/\s+/) + .filter((token) => token.length > 0) + .join(" ") + .slice(0, 62) + .trim(); + + return `${explicitTypePrefix}: ${explicitSummary || "update task"}`; + } + + const lowered = source.toLowerCase(); + + const typePrefix = + lowered.includes("fix") || lowered.includes("bug") + ? "fix" + : lowered.includes("doc") || lowered.includes("readme") + ? "docs" + : lowered.includes("refactor") + ? "refactor" + : "feat"; + + const cleaned = source + .split("") + .map((char) => (/^[a-zA-Z0-9 -]$/.test(char) ? 
char : " ")) + .join("") + .split(/\s+/) + .filter((token) => token.length > 0) + .join(" "); + + const summary = (cleaned || "update task").slice(0, 62).trim(); + return `${typePrefix}: ${summary}`.trim(); +} + +export function sanitizeBranchName(input: string): string { + const normalized = input + .toLowerCase() + .split("") + .map((char) => (/^[a-z0-9]$/.test(char) ? char : "-")) + .join(""); + + let result = ""; + let previousDash = false; + for (const char of normalized) { + if (char === "-") { + if (!previousDash && result.length > 0) { + result += char; + } + previousDash = true; + continue; + } + + result += char; + previousDash = false; + } + + const trimmed = result.replace(/-+$/g, ""); + if (trimmed.length <= 50) { + return trimmed; + } + return trimmed.slice(0, 50).replace(/-+$/g, ""); +} + +function generateRandomSuffix(length: number): string { + const chars = "abcdefghijklmnopqrstuvwxyz0123456789"; + let result = ""; + for (let i = 0; i < length; i++) { + result += chars[Math.floor(Math.random() * chars.length)]; + } + return result; +} + +function generateBranchName(): string { + const prefix = BRANCH_NAME_PREFIXES[Math.floor(Math.random() * BRANCH_NAME_PREFIXES.length)]!; + const suffix = generateRandomSuffix(4); + return `${prefix}-${suffix}`; +} + +export function resolveCreateFlowDecision(input: ResolveCreateFlowDecisionInput): ResolveCreateFlowDecisionResult { + const explicitBranch = input.explicitBranchName?.trim(); + const title = deriveFallbackTitle(input.task, input.explicitTitle); + + const existingBranches = new Set(input.localBranches.map((value) => value.trim()).filter((value) => value.length > 0)); + const existingTaskBranches = new Set(input.taskBranches.map((value) => value.trim()).filter((value) => value.length > 0)); + const conflicts = (name: string): boolean => existingBranches.has(name) || existingTaskBranches.has(name); + + if (explicitBranch && explicitBranch.length > 0) { + if (conflicts(explicitBranch)) { + throw new 
Error(`Branch '${explicitBranch}' already exists. Choose a different --name/--branch value.`); + } + return { title, branchName: explicitBranch }; + } + + // Generate a random McMaster-Carr-style branch name, retrying on conflicts + let candidate = generateBranchName(); + let attempts = 0; + while (conflicts(candidate) && attempts < 100) { + candidate = generateBranchName(); + attempts += 1; + } + + return { + title, + branchName: candidate, + }; +} diff --git a/foundry/packages/backend/src/services/github-auth.ts b/foundry/packages/backend/src/services/github-auth.ts new file mode 100644 index 0000000..aa475b0 --- /dev/null +++ b/foundry/packages/backend/src/services/github-auth.ts @@ -0,0 +1,30 @@ +import { getOrCreateOrganization } from "../actors/handles.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; + +export interface ResolvedGithubAuth { + githubToken: string; + scopes: string[]; +} + +export async function resolveOrganizationGithubAuth(c: any, organizationId: string): Promise { + if (!organizationId || organizationId === APP_SHELL_ORGANIZATION_ID) { + return null; + } + + try { + const appOrganization = await getOrCreateOrganization(c, APP_SHELL_ORGANIZATION_ID); + const resolved = await appOrganization.resolveAppGithubToken({ + organizationId: organizationId, + requireRepoScope: true, + }); + if (!resolved?.accessToken) { + return null; + } + return { + githubToken: resolved.accessToken, + scopes: Array.isArray(resolved.scopes) ? 
resolved.scopes : [], + }; + } catch { + return null; + } +} diff --git a/foundry/packages/backend/src/services/queue.ts b/foundry/packages/backend/src/services/queue.ts new file mode 100644 index 0000000..34e697c --- /dev/null +++ b/foundry/packages/backend/src/services/queue.ts @@ -0,0 +1,24 @@ +interface QueueSendResult { + status: "completed" | "timedOut"; + response?: unknown; +} + +export function expectQueueResponse(result: QueueSendResult | void): T { + if (!result || result.status === "timedOut") { + throw new Error("Queue command timed out"); + } + if ( + result.response && + typeof result.response === "object" && + "error" in result.response && + typeof (result.response as { error?: unknown }).error === "string" + ) { + throw new Error((result.response as { error: string }).error); + } + return result.response as T; +} + +export function normalizeMessages(input: T | T[] | null | undefined): T[] { + if (!input) return []; + return Array.isArray(input) ? input : [input]; +} diff --git a/foundry/packages/backend/src/services/repo.ts b/foundry/packages/backend/src/services/repo.ts new file mode 100644 index 0000000..fb673cc --- /dev/null +++ b/foundry/packages/backend/src/services/repo.ts @@ -0,0 +1,111 @@ +import { createHash } from "node:crypto"; +import { basename, sep } from "node:path"; + +export function normalizeRemoteUrl(remoteUrl: string): string { + let value = remoteUrl.trim(); + if (!value) return ""; + + // Strip trailing slashes to make hashing stable. + value = value.replace(/\/+$/, ""); + + // GitHub shorthand: owner/repo -> https://github.com/owner/repo.git + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(value)) { + return `https://github.com/${value}.git`; + } + + // If a user pastes "github.com/owner/repo", treat it as HTTPS. + if (/^(?:www\.)?github\.com\/.+/i.test(value)) { + value = `https://${value.replace(/^www\./i, "")}`; + } + + // Canonicalize GitHub URLs to the repo clone URL (drop /tree/*, issues, etc). 
+ // This makes "https://github.com/owner/repo" and ".../tree/main" map to the same repoId. + try { + if (/^https?:\/\//i.test(value)) { + const url = new URL(value); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + const owner = parts[0]!; + const repo = parts[1]!; + const base = `${url.protocol}//${hostname}/${owner}/${repo.replace(/\.git$/i, "")}.git`; + return base; + } + } + // Drop query/fragment for stability. + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // ignore parse failures; fall through to raw value + } + + return value; +} + +export function repoIdFromRemote(remoteUrl: string): string { + const normalized = normalizeRemoteUrl(remoteUrl); + return createHash("sha1").update(normalized).digest("hex").slice(0, 16); +} + +export function repoLabelFromRemote(remoteUrl: string): string { + const trimmed = remoteUrl.trim(); + if (!trimmed) { + return ""; + } + + try { + if (/^[a-z][a-z0-9+.-]*:\/\//i.test(trimmed) || trimmed.startsWith("file:")) { + const url = new URL(trimmed); + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length >= 2) { + return `${parts[parts.length - 2]}/${(parts[parts.length - 1] ?? "").replace(/\.git$/i, "")}`; + } + } else { + const url = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`); + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length >= 2) { + return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}`; + } + } + } catch { + // Fall through to path-based parsing. 
+ } + + const normalizedPath = trimmed.replace(/\\/g, sep); + const segments = normalizedPath.split(sep).filter(Boolean); + if (segments.length >= 2) { + return `${segments[segments.length - 2]}/${segments[segments.length - 1]!.replace(/\.git$/i, "")}`; + } + + return basename(trimmed.replace(/\.git$/i, "")); +} + +export function githubRepoFullNameFromRemote(remoteUrl: string): string | null { + const normalized = normalizeRemoteUrl(remoteUrl); + if (!normalized) { + return null; + } + + try { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, "").toLowerCase(); + if (hostname !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + const owner = parts[0]?.trim(); + const repo = (parts[1] ?? "").replace(/\.git$/i, "").trim(); + if (!owner || !repo) { + return null; + } + return `${owner}/${repo}`; + } catch { + return null; + } +} diff --git a/foundry/packages/backend/src/services/tmux.ts b/foundry/packages/backend/src/services/tmux.ts new file mode 100644 index 0000000..646b1d9 --- /dev/null +++ b/foundry/packages/backend/src/services/tmux.ts @@ -0,0 +1,59 @@ +import { execFileSync, spawnSync } from "node:child_process"; + +const SYMBOL_RUNNING = "▶"; +const SYMBOL_IDLE = "✓"; + +function stripStatusPrefix(windowName: string): string { + return windowName + .trimStart() + .replace(new RegExp(`^${SYMBOL_RUNNING}\\s+`), "") + .replace(new RegExp(`^${SYMBOL_IDLE}\\s+`), "") + .trim(); +} + +export function setWindowStatus(branchName: string, status: string): number { + let symbol: string; + if (status === "running") { + symbol = SYMBOL_RUNNING; + } else if (status === "idle") { + symbol = SYMBOL_IDLE; + } else { + return 0; + } + + let stdout: string; + try { + stdout = execFileSync("tmux", ["list-windows", "-a", "-F", "#{session_name}:#{window_id}:#{window_name}"], { + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + 
}); + } catch { + return 0; + } + + const lines = stdout.split(/\r?\n/).filter((line) => line.trim().length > 0); + let count = 0; + + for (const line of lines) { + const parts = line.split(":", 3); + if (parts.length !== 3) { + continue; + } + + const sessionName = parts[0] ?? ""; + const windowId = parts[1] ?? ""; + const windowName = parts[2] ?? ""; + const clean = stripStatusPrefix(windowName); + if (clean !== branchName) { + continue; + } + + const newName = `${symbol} ${branchName}`; + spawnSync("tmux", ["rename-window", "-t", `${sessionName}:${windowId}`, newName], { + stdio: "ignore", + }); + count += 1; + } + + return count; +} diff --git a/foundry/packages/backend/test/create-flow.test.ts b/foundry/packages/backend/test/create-flow.test.ts new file mode 100644 index 0000000..8c66cb4 --- /dev/null +++ b/foundry/packages/backend/test/create-flow.test.ts @@ -0,0 +1,76 @@ +import { describe, expect, it } from "vitest"; +import { deriveFallbackTitle, resolveCreateFlowDecision, sanitizeBranchName } from "../src/services/create-flow.js"; +import { BRANCH_NAME_PREFIXES } from "../src/services/branch-name-prefixes.js"; + +describe("create flow decision", () => { + it("derives a conventional-style fallback title from task text", () => { + const title = deriveFallbackTitle("Fix OAuth callback bug in handler"); + expect(title).toBe("fix: Fix OAuth callback bug in handler"); + }); + + it("preserves an explicit conventional prefix without duplicating it", () => { + const title = deriveFallbackTitle("Reply with exactly: READY", "feat: Browser UI Flow"); + expect(title).toBe("feat: Browser UI Flow"); + }); + + it("sanitizes generated branch names", () => { + expect(sanitizeBranchName("feat: Add @mentions & #hashtags")).toBe("feat-add-mentions-hashtags"); + expect(sanitizeBranchName(" spaces everywhere ")).toBe("spaces-everywhere"); + }); + + it("generates a McMaster-Carr-style branch name with random suffix", () => { + const resolved = resolveCreateFlowDecision({ + task: 
"Add auth", + localBranches: [], + taskBranches: [], + }); + + expect(resolved.title).toBe("feat: Add auth"); + // Branch name should be "-<4-char-suffix>" where prefix is from BRANCH_NAME_PREFIXES + const lastDash = resolved.branchName.lastIndexOf("-"); + const prefix = resolved.branchName.slice(0, lastDash); + const suffix = resolved.branchName.slice(lastDash + 1); + expect(BRANCH_NAME_PREFIXES).toContain(prefix); + expect(suffix).toMatch(/^[a-z0-9]{4}$/); + }); + + it("avoids conflicts by generating a different random name", () => { + // Even with a conflicting branch, it should produce something different + const resolved = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: [], + taskBranches: [], + }); + + // Running again with the first result as a conflict should produce a different name + const resolved2 = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: [resolved.branchName], + taskBranches: [], + }); + + expect(resolved2.branchName).not.toBe(resolved.branchName); + }); + + it("uses explicit branch name when provided", () => { + const resolved = resolveCreateFlowDecision({ + task: "new task", + explicitBranchName: "my-branch", + localBranches: [], + taskBranches: [], + }); + + expect(resolved.branchName).toBe("my-branch"); + }); + + it("fails when explicit branch already exists", () => { + expect(() => + resolveCreateFlowDecision({ + task: "new task", + explicitBranchName: "existing-branch", + localBranches: ["existing-branch"], + taskBranches: [], + }), + ).toThrow("already exists"); + }); +}); diff --git a/foundry/packages/backend/test/helpers/test-context.ts b/foundry/packages/backend/test/helpers/test-context.ts new file mode 100644 index 0000000..be169a8 --- /dev/null +++ b/foundry/packages/backend/test/helpers/test-context.ts @@ -0,0 +1,34 @@ +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; +import type { BackendDriver } 
from "../../src/driver.js"; +import { initActorRuntimeContext } from "../../src/actors/context.js"; +import { createDefaultAppShellServices } from "../../src/services/app-shell-runtime.js"; + +export function createTestConfig(overrides?: Partial): AppConfig { + return ConfigSchema.parse({ + auto_submit: true, + notify: ["terminal" as const], + organization: { default: "default" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: join(tmpdir(), `hf-test-${Date.now()}-${Math.random().toString(16).slice(2)}.db`), + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + backup_retention_days: 7, + }, + sandboxProviders: { + local: {}, + e2b: {}, + }, + ...overrides, + }); +} + +export function createTestRuntimeContext(driver: BackendDriver, configOverrides?: Partial): { config: AppConfig } { + const config = createTestConfig(configOverrides); + initActorRuntimeContext(config, undefined, driver, createDefaultAppShellServices()); + return { config }; +} diff --git a/foundry/packages/backend/test/helpers/test-driver.ts b/foundry/packages/backend/test/helpers/test-driver.ts new file mode 100644 index 0000000..39975e5 --- /dev/null +++ b/foundry/packages/backend/test/helpers/test-driver.ts @@ -0,0 +1,26 @@ +import type { BackendDriver, GithubDriver, TmuxDriver } from "../../src/driver.js"; + +export function createTestDriver(overrides?: Partial): BackendDriver { + return { + github: overrides?.github ?? createTestGithubDriver(), + tmux: overrides?.tmux ?? 
createTestTmuxDriver(), + }; +} + +export function createTestGithubDriver(overrides?: Partial<GithubDriver>): GithubDriver { + return { + createPr: async (_repoFullName, _headBranch, _title) => ({ + number: 1, + url: `https://github.com/test/repo/pull/1`, + }), + starRepository: async () => {}, + ...overrides, + }; +} + +export function createTestTmuxDriver(overrides?: Partial<TmuxDriver>): TmuxDriver { + return { + setWindowStatus: () => 0, + ...overrides, + }; +} diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts new file mode 100644 index 0000000..c3b2a10 --- /dev/null +++ b/foundry/packages/backend/test/keys.test.ts @@ -0,0 +1,19 @@ +import { describe, expect, it } from "vitest"; +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; + +describe("actor keys", () => { + it("prefixes every key with organization namespace", () => { + const keys = [ + organizationKey("default"), + taskKey("default", "repo", "task"), + taskSandboxKey("default", "sbx"), + auditLogKey("default"), + githubDataKey("default"), + ]; + + for (const key of keys) { + expect(key[0]).toBe("org"); + expect(key[1]).toBe("default"); + } + }); +}); diff --git a/foundry/packages/backend/test/malformed-uri.test.ts b/foundry/packages/backend/test/malformed-uri.test.ts new file mode 100644 index 0000000..2aed74b --- /dev/null +++ b/foundry/packages/backend/test/malformed-uri.test.ts @@ -0,0 +1,83 @@ +import { describe, expect, it } from "vitest"; + +describe("malformed URI handling", () => { + it("safeFetch wrapper returns 400 on URIError", async () => { + // Simulate the pattern used in backend/src/index.ts + const mockApp = { + fetch: async (_req: Request): Promise<Response> => { + // Simulate what happens when rivetkit's router encounters a malformed URI + throw new URIError("URI malformed"); + }, + }; + + const safeFetch = async (req: Request): Promise<Response> => { + try { + return await mockApp.fetch(req); + } catch (err) { + if (err 
instanceof URIError) { + return new Response("Bad Request: Malformed URI", { status: 400 }); + } + throw err; + } + }; + + const response = await safeFetch(new Request("http://localhost/%ZZ")); + expect(response.status).toBe(400); + expect(await response.text()).toBe("Bad Request: Malformed URI"); + }); + + it("safeFetch wrapper re-throws non-URI errors", async () => { + const mockApp = { + fetch: async (_req: Request): Promise<Response> => { + throw new TypeError("some other error"); + }, + }; + + const safeFetch = async (req: Request): Promise<Response> => { + try { + return await mockApp.fetch(req); + } catch (err) { + if (err instanceof URIError) { + return new Response("Bad Request: Malformed URI", { status: 400 }); + } + throw err; + } + }; + + await expect(safeFetch(new Request("http://localhost/test"))).rejects.toThrow(TypeError); + }); + + it("safeFetch wrapper passes through valid requests", async () => { + const mockApp = { + fetch: async (_req: Request): Promise<Response> => { + return new Response("OK", { status: 200 }); + }, + }; + + const safeFetch = async (req: Request): Promise<Response> => { + try { + return await mockApp.fetch(req); + } catch (err) { + if (err instanceof URIError) { + return new Response("Bad Request: Malformed URI", { status: 400 }); + } + throw err; + } + }; + + const response = await safeFetch(new Request("http://localhost/valid/path")); + expect(response.status).toBe(200); + expect(await response.text()).toBe("OK"); + }); + + it("decodeURIComponent throws on malformed percent-encoding", () => { + // Validates the core issue: decodeURIComponent throws URIError on malformed input + expect(() => decodeURIComponent("%ZZ")).toThrow(URIError); + expect(() => decodeURIComponent("%")).toThrow(URIError); + expect(() => decodeURIComponent("%E0%A4%A")).toThrow(URIError); + + // Valid encoding should not throw + expect(decodeURIComponent("%20")).toBe(" "); + expect(decodeURIComponent("hello%20world")).toBe("hello world"); + }); +}); diff --git 
a/foundry/packages/backend/test/organization-isolation.test.ts b/foundry/packages/backend/test/organization-isolation.test.ts new file mode 100644 index 0000000..f5d58f2 --- /dev/null +++ b/foundry/packages/backend/test/organization-isolation.test.ts @@ -0,0 +1,85 @@ +// @ts-nocheck +import { mkdtempSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { execFileSync } from "node:child_process"; +import { setTimeout as delay } from "node:timers/promises"; +import { describe, expect, it } from "vitest"; +import { setupTest } from "rivetkit/test"; +import { organizationKey } from "../src/actors/keys.js"; +import { registry } from "../src/actors/index.js"; +import { organizationWorkflowQueueName } from "../src/actors/organization/queues.js"; +import { repoIdFromRemote } from "../src/services/repo.js"; +import { createTestDriver } from "./helpers/test-driver.js"; +import { createTestRuntimeContext } from "./helpers/test-context.js"; + +const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1"; + +function createRepo(): { repoPath: string } { + const repoPath = mkdtempSync(join(tmpdir(), "hf-isolation-repo-")); + execFileSync("git", ["init"], { cwd: repoPath }); + execFileSync("git", ["config", "user.email", "test@example.com"], { cwd: repoPath }); + execFileSync("git", ["config", "user.name", "Foundry Test"], { cwd: repoPath }); + writeFileSync(join(repoPath, "README.md"), "hello\n", "utf8"); + execFileSync("git", ["add", "README.md"], { cwd: repoPath }); + execFileSync("git", ["commit", "-m", "init"], { cwd: repoPath }); + return { repoPath }; +} + +async function waitForOrganizationRows(ws: any, organizationId: string, expectedCount: number) { + for (let attempt = 0; attempt < 40; attempt += 1) { + const rows = await ws.listTasks({ organizationId }); + if (rows.length >= expectedCount) { + return rows; + } + await delay(50); + } + return ws.listTasks({ organizationId }); +} + 
+describe("organization isolation", () => { + it.skipIf(!runActorIntegration)("keeps task lists isolated by organization", async (t) => { + const testDriver = createTestDriver(); + createTestRuntimeContext(testDriver); + + const { client } = await setupTest(t, registry); + const wsA = await client.organization.getOrCreate(organizationKey("alpha"), { + createWithInput: "alpha", + }); + const wsB = await client.organization.getOrCreate(organizationKey("beta"), { + createWithInput: "beta", + }); + + const { repoPath } = createRepo(); + const repoId = repoIdFromRemote(repoPath); + await wsA.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); + await wsB.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); + + await wsA.createTask({ + organizationId: "alpha", + repoId, + task: "task A", + sandboxProviderId: "local", + explicitBranchName: "feature/a", + explicitTitle: "A", + }); + + await wsB.createTask({ + organizationId: "beta", + repoId, + task: "task B", + sandboxProviderId: "local", + explicitBranchName: "feature/b", + explicitTitle: "B", + }); + + const aRows = await waitForOrganizationRows(wsA, "alpha", 1); + const bRows = await waitForOrganizationRows(wsB, "beta", 1); + + expect(aRows.length).toBe(1); + expect(bRows.length).toBe(1); + expect(aRows[0]?.organizationId).toBe("alpha"); + expect(bRows[0]?.organizationId).toBe("beta"); + expect(aRows[0]?.taskId).not.toBe(bRows[0]?.taskId); + }); +}); diff --git a/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts new file mode 100644 index 0000000..b3a2410 --- /dev/null +++ b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts @@ -0,0 +1,39 @@ +// @ts-nocheck +import { describe, expect, it } from "vitest"; +import { 
setupTest } from "rivetkit/test"; +import { organizationKey } from "../src/actors/keys.js"; +import { registry } from "../src/actors/index.js"; +import { createTestDriver } from "./helpers/test-driver.js"; +import { createTestRuntimeContext } from "./helpers/test-context.js"; + +const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1"; + +describe("organization star sandbox agent repo", () => { + it.skipIf(!runActorIntegration)("stars the sandbox agent repo through the github driver", async (t) => { + const calls: string[] = []; + const testDriver = createTestDriver({ + github: { + listPullRequests: async () => [], + createPr: async () => ({ + number: 1, + url: "https://github.com/test/repo/pull/1", + }), + starRepository: async (repoFullName) => { + calls.push(repoFullName); + }, + }, + }); + createTestRuntimeContext(testDriver); + + const { client } = await setupTest(t, registry); + const ws = await client.organization.getOrCreate(organizationKey("alpha"), { + createWithInput: "alpha", + }); + + const result = await ws.starSandboxAgentRepo({ organizationId: "alpha" }); + + expect(calls).toEqual(["rivet-dev/sandbox-agent"]); + expect(result.repo).toBe("rivet-dev/sandbox-agent"); + expect(typeof result.starredAt).toBe("number"); + }); +}); diff --git a/foundry/packages/backend/test/repo-normalize.test.ts b/foundry/packages/backend/test/repo-normalize.test.ts new file mode 100644 index 0000000..e391952 --- /dev/null +++ b/foundry/packages/backend/test/repo-normalize.test.ts @@ -0,0 +1,34 @@ +import { describe, expect, test } from "vitest"; +import { normalizeRemoteUrl, repoIdFromRemote } from "../src/services/repo.js"; + +describe("normalizeRemoteUrl", () => { + test("accepts GitHub shorthand owner/repo", () => { + expect(normalizeRemoteUrl("rivet-dev/sandbox-agent")).toBe("https://github.com/rivet-dev/sandbox-agent.git"); + }); + + test("accepts github.com/owner/repo without scheme", () => { + 
expect(normalizeRemoteUrl("github.com/rivet-dev/sandbox-agent")).toBe("https://github.com/rivet-dev/sandbox-agent.git"); + }); + + test("canonicalizes GitHub repo URLs without .git", () => { + expect(normalizeRemoteUrl("https://github.com/rivet-dev/sandbox-agent")).toBe("https://github.com/rivet-dev/sandbox-agent.git"); + }); + + test("canonicalizes GitHub non-clone URLs (e.g. /tree/main)", () => { + expect(normalizeRemoteUrl("https://github.com/rivet-dev/sandbox-agent/tree/main")).toBe("https://github.com/rivet-dev/sandbox-agent.git"); + }); + + test("does not rewrite scp-style ssh remotes", () => { + expect(normalizeRemoteUrl("git@github.com:rivet-dev/sandbox-agent.git")).toBe("git@github.com:rivet-dev/sandbox-agent.git"); + }); +}); + +describe("repoIdFromRemote", () => { + test("repoId is stable across equivalent GitHub inputs", () => { + const a = repoIdFromRemote("rivet-dev/sandbox-agent"); + const b = repoIdFromRemote("https://github.com/rivet-dev/sandbox-agent.git"); + const c = repoIdFromRemote("https://github.com/rivet-dev/sandbox-agent/tree/main"); + expect(a).toBe(b); + expect(b).toBe(c); + }); +}); diff --git a/foundry/packages/backend/test/sandbox-config.test.ts b/foundry/packages/backend/test/sandbox-config.test.ts new file mode 100644 index 0000000..354f794 --- /dev/null +++ b/foundry/packages/backend/test/sandbox-config.test.ts @@ -0,0 +1,50 @@ +import { describe, expect, it } from "vitest"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; +import { availableSandboxProviderIds, defaultSandboxProviderId, resolveSandboxProviderId } from "../src/sandbox-config.js"; + +function makeConfig(overrides?: Partial): AppConfig { + return ConfigSchema.parse({ + auto_submit: true, + notify: ["terminal"], + organization: { default: "default" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: "~/.local/share/foundry/task.db", + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + 
backup_retention_days: 7, + }, + sandboxProviders: { + local: {}, + e2b: {}, + }, + ...overrides, + }); +} + +describe("sandbox config", () => { + it("defaults to local when e2b is not configured", () => { + const config = makeConfig(); + expect(defaultSandboxProviderId(config)).toBe("local"); + expect(availableSandboxProviderIds(config)).toEqual(["local"]); + }); + + it("prefers e2b when an api key is configured", () => { + const config = makeConfig({ + sandboxProviders: { + local: {}, + e2b: { apiKey: "test-token" }, + }, + }); + expect(defaultSandboxProviderId(config)).toBe("e2b"); + expect(availableSandboxProviderIds(config)).toEqual(["e2b", "local"]); + expect(resolveSandboxProviderId(config, "e2b")).toBe("e2b"); + }); + + it("rejects selecting e2b without an api key", () => { + const config = makeConfig(); + expect(() => resolveSandboxProviderId(config, "e2b")).toThrow("E2B provider is not configured"); + }); +}); diff --git a/foundry/packages/backend/test/setup.ts b/foundry/packages/backend/test/setup.ts new file mode 100644 index 0000000..cfb7223 --- /dev/null +++ b/foundry/packages/backend/test/setup.ts @@ -0,0 +1,9 @@ +// Suppress RivetKit traces driver flush errors that occur during test cleanup. +// These happen when the traces driver tries to write after actor state is unloaded. 
+process.on("unhandledRejection", (reason) => { + if (reason instanceof Error && reason.message.includes("state not loaded")) { + return; + } + // Re-throw non-suppressed rejections + throw reason; +}); diff --git a/foundry/packages/backend/test/workspace-unread.test.ts b/foundry/packages/backend/test/workspace-unread.test.ts new file mode 100644 index 0000000..5f7221a --- /dev/null +++ b/foundry/packages/backend/test/workspace-unread.test.ts @@ -0,0 +1,86 @@ +import { describe, expect, it } from "vitest"; +import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workspace.js"; + +describe("workspace unread status transitions", () => { + it("marks unread when a running session first becomes idle", () => { + expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "idle")).toBe(true); + }); + + it("does not re-mark unread on repeated idle polls after thinking has cleared", () => { + expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: null }, "idle")).toBe(false); + }); + + it("does not mark unread while the session is still running", () => { + expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "running")).toBe(false); + }); +}); + +describe("workspace model changes", () => { + it("recreates an unused ready session so the selected model takes effect", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: "session-1", + created: false, + transcript: [], + }), + ).toBe(true); + }); + + it("does not recreate a session once the conversation has started", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: "session-1", + created: true, + transcript: [], + }), + ).toBe(false); + }); + + it("does not recreate pending or anonymous sessions", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "pending_session_create", + sandboxSessionId: 
"session-1", + created: false, + transcript: [], + }), + ).toBe(false); + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: null, + created: false, + transcript: [], + }), + ).toBe(false); + }); +}); + +describe("workspace send readiness", () => { + it("rejects unknown sessions", () => { + expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workspace session: session-1"); + }); + + it("rejects pending sessions", () => { + expect(() => + requireSendableSessionMeta( + { + status: "pending_session_create", + sandboxSessionId: null, + }, + "session-2", + ), + ).toThrow("Session is not ready (status: pending_session_create). Wait for session provisioning to complete."); + }); + + it("accepts ready sessions with a sandbox session id", () => { + const meta = { + status: "ready", + sandboxSessionId: "session-1", + }; + + expect(requireSendableSessionMeta(meta, "session-3")).toBe(meta); + }); +}); diff --git a/foundry/packages/backend/tsconfig.json b/foundry/packages/backend/tsconfig.json new file mode 100644 index 0000000..6a579df --- /dev/null +++ b/foundry/packages/backend/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist", + "strict": false, + "declaration": false + }, + "include": ["src", "test"] +} diff --git a/foundry/packages/backend/vitest.config.ts b/foundry/packages/backend/vitest.config.ts new file mode 100644 index 0000000..2cc7491 --- /dev/null +++ b/foundry/packages/backend/vitest.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + fileParallelism: false, + testTimeout: 15_000, + hookTimeout: 20_000, + setupFiles: ["./test/setup.ts"], + }, +}); diff --git a/foundry/packages/cli/package.json b/foundry/packages/cli/package.json new file mode 100644 index 0000000..de75c1f --- /dev/null +++ b/foundry/packages/cli/package.json @@ -0,0 +1,26 @@ +{ + "name": 
"@sandbox-agent/foundry-cli", + "version": "0.1.0", + "private": true, + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "bin": { + "hf": "dist/index.js" + }, + "scripts": { + "build": "tsup --config tsup.config.ts", + "typecheck": "tsc --noEmit", + "test": "vitest run" + }, + "dependencies": { + "@iarna/toml": "^2.2.5", + "@opentui/core": "^0.1.77", + "@sandbox-agent/foundry-client": "workspace:*", + "@sandbox-agent/foundry-shared": "workspace:*", + "zod": "^4.1.5" + }, + "devDependencies": { + "tsup": "^8.5.0" + } +} diff --git a/foundry/packages/cli/src/backend/manager.ts b/foundry/packages/cli/src/backend/manager.ts new file mode 100644 index 0000000..c120d47 --- /dev/null +++ b/foundry/packages/cli/src/backend/manager.ts @@ -0,0 +1,438 @@ +import * as childProcess from "node:child_process"; +import { closeSync, existsSync, mkdirSync, openSync, readFileSync, rmSync, writeFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { dirname, join, resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { checkBackendHealth } from "@sandbox-agent/foundry-client"; +import type { AppConfig } from "@sandbox-agent/foundry-shared"; +import { CLI_BUILD_ID } from "../build-id.js"; +import { logger } from "../logging.js"; + +const HEALTH_TIMEOUT_MS = 1_500; +const START_TIMEOUT_MS = 30_000; +const STOP_TIMEOUT_MS = 5_000; +const POLL_INTERVAL_MS = 150; + +function sleep(ms: number): Promise { + return new Promise((resolveSleep) => setTimeout(resolveSleep, ms)); +} + +function sanitizeHost(host: string): string { + return host + .split("") + .map((ch) => (/[a-zA-Z0-9]/.test(ch) ? 
ch : "-")) + .join(""); +} + +function backendStateDir(): string { + const override = process.env.HF_BACKEND_STATE_DIR?.trim(); + if (override) { + return override; + } + + const xdgDataHome = process.env.XDG_DATA_HOME?.trim(); + if (xdgDataHome) { + return join(xdgDataHome, "foundry", "backend"); + } + + return join(homedir(), ".local", "share", "foundry", "backend"); +} + +function backendPidPath(host: string, port: number): string { + return join(backendStateDir(), `backend-${sanitizeHost(host)}-${port}.pid`); +} + +function backendVersionPath(host: string, port: number): string { + return join(backendStateDir(), `backend-${sanitizeHost(host)}-${port}.version`); +} + +function backendLogPath(host: string, port: number): string { + return join(backendStateDir(), `backend-${sanitizeHost(host)}-${port}.log`); +} + +function readText(path: string): string | null { + try { + return readFileSync(path, "utf8").trim(); + } catch { + return null; + } +} + +function readPid(host: string, port: number): number | null { + const raw = readText(backendPidPath(host, port)); + if (!raw) { + return null; + } + + const pid = Number.parseInt(raw, 10); + if (!Number.isInteger(pid) || pid <= 0) { + return null; + } + return pid; +} + +function writePid(host: string, port: number, pid: number): void { + const path = backendPidPath(host, port); + mkdirSync(dirname(path), { recursive: true }); + writeFileSync(path, String(pid), "utf8"); +} + +function removePid(host: string, port: number): void { + const path = backendPidPath(host, port); + if (existsSync(path)) { + rmSync(path); + } +} + +function readBackendVersion(host: string, port: number): string | null { + return readText(backendVersionPath(host, port)); +} + +function writeBackendVersion(host: string, port: number, buildId: string): void { + const path = backendVersionPath(host, port); + mkdirSync(dirname(path), { recursive: true }); + writeFileSync(path, buildId, "utf8"); +} + +function removeBackendVersion(host: string, port: 
number): void { + const path = backendVersionPath(host, port); + if (existsSync(path)) { + rmSync(path); + } +} + +function readCliBuildId(): string { + const override = process.env.HF_BUILD_ID?.trim(); + if (override) { + return override; + } + + return CLI_BUILD_ID; +} + +function isVersionCurrent(host: string, port: number): boolean { + return readBackendVersion(host, port) === readCliBuildId(); +} + +function isProcessRunning(pid: number): boolean { + try { + process.kill(pid, 0); + return true; + } catch (error) { + if ((error as NodeJS.ErrnoException | undefined)?.code === "EPERM") { + return true; + } + return false; + } +} + +function removeStateFiles(host: string, port: number): void { + removePid(host, port); + removeBackendVersion(host, port); +} + +async function checkHealth(host: string, port: number): Promise { + return await checkBackendHealth({ + endpoint: `http://${host}:${port}/v1/rivet`, + timeoutMs: HEALTH_TIMEOUT_MS, + }); +} + +async function waitForHealth(host: string, port: number, timeoutMs: number, pid?: number): Promise { + const deadline = Date.now() + timeoutMs; + + while (Date.now() < deadline) { + if (pid && !isProcessRunning(pid)) { + throw new Error(`backend process ${pid} exited before becoming healthy`); + } + + if (await checkHealth(host, port)) { + return; + } + + await sleep(POLL_INTERVAL_MS); + } + + throw new Error(`backend did not become healthy within ${timeoutMs}ms`); +} + +async function waitForChildPid(child: childProcess.ChildProcess): Promise { + if (child.pid && child.pid > 0) { + return child.pid; + } + + for (let i = 0; i < 20; i += 1) { + await sleep(50); + if (child.pid && child.pid > 0) { + return child.pid; + } + } + + return null; +} + +interface LaunchSpec { + command: string; + args: string[]; + cwd: string; +} + +function resolveBunCommand(): string { + const override = process.env.HF_BUN?.trim(); + if (override && (override === "bun" || existsSync(override))) { + return override; + } + + const homeBun = 
join(homedir(), ".bun", "bin", "bun"); + if (existsSync(homeBun)) { + return homeBun; + } + + return "bun"; +} + +function resolveLaunchSpec(host: string, port: number): LaunchSpec { + const repoRoot = resolve(fileURLToPath(new URL("../../..", import.meta.url))); + const backendEntry = resolve(fileURLToPath(new URL("../../backend/dist/index.js", import.meta.url))); + + if (existsSync(backendEntry)) { + return { + command: resolveBunCommand(), + args: [backendEntry, "start", "--host", host, "--port", String(port)], + cwd: repoRoot, + }; + } + + return { + command: "pnpm", + args: ["--filter", "@sandbox-agent/foundry-backend", "exec", "bun", "src/index.ts", "start", "--host", host, "--port", String(port)], + cwd: repoRoot, + }; +} + +async function startBackend(host: string, port: number): Promise { + if (await checkHealth(host, port)) { + return; + } + + const existingPid = readPid(host, port); + if (existingPid && isProcessRunning(existingPid)) { + await waitForHealth(host, port, START_TIMEOUT_MS, existingPid); + return; + } + + if (existingPid) { + removeStateFiles(host, port); + } + + const logPath = backendLogPath(host, port); + mkdirSync(dirname(logPath), { recursive: true }); + const fd = openSync(logPath, "a"); + + const launch = resolveLaunchSpec(host, port); + const child = childProcess.spawn(launch.command, launch.args, { + cwd: launch.cwd, + detached: true, + stdio: ["ignore", fd, fd], + env: process.env, + }); + + child.on("error", (error) => { + logger.error( + { + host, + port, + command: launch.command, + args: launch.args, + errorMessage: error instanceof Error ? error.message : String(error), + errorStack: error instanceof Error ? error.stack : undefined, + }, + "failed_to_launch_backend", + ); + }); + + child.unref(); + closeSync(fd); + + const pid = await waitForChildPid(child); + + writeBackendVersion(host, port, readCliBuildId()); + if (pid) { + writePid(host, port, pid); + } + + try { + await waitForHealth(host, port, START_TIMEOUT_MS, pid ?? 
undefined); + } catch (error) { + if (pid) { + removeStateFiles(host, port); + } else { + removeBackendVersion(host, port); + } + throw error; + } +} + +function trySignal(pid: number, signal: NodeJS.Signals): boolean { + try { + process.kill(pid, signal); + return true; + } catch (error) { + if ((error as NodeJS.ErrnoException | undefined)?.code === "ESRCH") { + return false; + } + throw error; + } +} + +function findProcessOnPort(port: number): number | null { + try { + const out = childProcess + .execFileSync("lsof", ["-i", `:${port}`, "-t", "-sTCP:LISTEN"], { + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + }) + .trim(); + + const pidRaw = out.split("\n")[0]?.trim(); + if (!pidRaw) { + return null; + } + + const pid = Number.parseInt(pidRaw, 10); + if (!Number.isInteger(pid) || pid <= 0) { + return null; + } + + return pid; + } catch { + return null; + } +} + +export async function stopBackend(host: string, port: number): Promise { + let pid = readPid(host, port); + + if (!pid) { + if (!(await checkHealth(host, port))) { + removeStateFiles(host, port); + return; + } + + pid = findProcessOnPort(port); + if (!pid) { + throw new Error(`backend is healthy at ${host}:${port} but no PID could be resolved`); + } + } + + if (!isProcessRunning(pid)) { + removeStateFiles(host, port); + return; + } + + trySignal(pid, "SIGTERM"); + + const deadline = Date.now() + STOP_TIMEOUT_MS; + while (Date.now() < deadline) { + if (!isProcessRunning(pid)) { + removeStateFiles(host, port); + return; + } + await sleep(100); + } + + trySignal(pid, "SIGKILL"); + removeStateFiles(host, port); +} + +export interface BackendStatus { + running: boolean; + pid: number | null; + version: string | null; + versionCurrent: boolean; + logPath: string; +} + +export async function getBackendStatus(host: string, port: number): Promise { + const logPath = backendLogPath(host, port); + const pid = readPid(host, port); + + if (pid) { + if (isProcessRunning(pid)) { + return { + running: true, + 
pid, + version: readBackendVersion(host, port), + versionCurrent: isVersionCurrent(host, port), + logPath, + }; + } + removeStateFiles(host, port); + } + + if (await checkHealth(host, port)) { + return { + running: true, + pid: null, + version: readBackendVersion(host, port), + versionCurrent: isVersionCurrent(host, port), + logPath, + }; + } + + return { + running: false, + pid: null, + version: readBackendVersion(host, port), + versionCurrent: false, + logPath, + }; +} + +export async function ensureBackendRunning(config: AppConfig): Promise { + const host = config.backend.host; + const port = config.backend.port; + + if (await checkHealth(host, port)) { + if (!isVersionCurrent(host, port)) { + await stopBackend(host, port); + await startBackend(host, port); + } + return; + } + + const pid = readPid(host, port); + if (pid && isProcessRunning(pid)) { + try { + await waitForHealth(host, port, START_TIMEOUT_MS, pid); + if (!isVersionCurrent(host, port)) { + await stopBackend(host, port); + await startBackend(host, port); + } + return; + } catch { + await stopBackend(host, port); + await startBackend(host, port); + return; + } + } + + if (pid) { + removeStateFiles(host, port); + } + + await startBackend(host, port); +} + +export function parseBackendPort(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const port = Number(value); + if (!Number.isInteger(port) || port <= 0 || port > 65535) { + throw new Error(`Invalid backend port: ${value}`); + } + + return port; +} diff --git a/foundry/packages/cli/src/build-id.ts b/foundry/packages/cli/src/build-id.ts new file mode 100644 index 0000000..c8f02a2 --- /dev/null +++ b/foundry/packages/cli/src/build-id.ts @@ -0,0 +1,3 @@ +declare const __HF_BUILD_ID__: string | undefined; + +export const CLI_BUILD_ID = typeof __HF_BUILD_ID__ === "string" && __HF_BUILD_ID__.trim().length > 0 ? 
__HF_BUILD_ID__.trim() : "dev"; diff --git a/foundry/packages/cli/src/index.ts b/foundry/packages/cli/src/index.ts new file mode 100644 index 0000000..fdf5a19 --- /dev/null +++ b/foundry/packages/cli/src/index.ts @@ -0,0 +1,823 @@ +#!/usr/bin/env bun +import { spawnSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { homedir } from "node:os"; +import { AgentTypeSchema, CreateTaskInputSchema, type TaskRecord } from "@sandbox-agent/foundry-shared"; +import { readBackendMetadata, createBackendClientFromConfig, formatRelativeAge, groupTaskStatus, summarizeTasks } from "@sandbox-agent/foundry-client"; +import { ensureBackendRunning, getBackendStatus, parseBackendPort, stopBackend } from "./backend/manager.js"; +import { writeStderr, writeStdout } from "./io.js"; +import { openEditorForTask } from "./task-editor.js"; +import { spawnCreateTmuxWindow } from "./tmux.js"; +import { loadConfig, resolveOrganization, saveConfig } from "./organization/config.js"; + +async function ensureBunRuntime(): Promise { + if (typeof (globalThis as { Bun?: unknown }).Bun !== "undefined") { + return; + } + + const preferred = process.env.HF_BUN?.trim(); + const candidates = [preferred, `${homedir()}/.bun/bin/bun`, "bun"].filter((item): item is string => Boolean(item && item.length > 0)); + + for (const candidate of candidates) { + const command = candidate; + const canExec = command === "bun" || existsSync(command); + if (!canExec) { + continue; + } + + const child = spawnSync(command, [process.argv[1] ?? "", ...process.argv.slice(2)], { + stdio: "inherit", + env: process.env, + }); + + if (child.error) { + continue; + } + + const code = child.status ?? 1; + process.exit(code); + } + + throw new Error("hf requires Bun runtime. 
Set HF_BUN or install Bun at ~/.bun/bin/bun."); +} + +async function runTuiCommand(config: ReturnType, organizationId: string): Promise { + const mod = await import("./tui.js"); + await mod.runTui(config, organizationId); +} + +function readOption(args: string[], flag: string): string | undefined { + const idx = args.indexOf(flag); + if (idx < 0) return undefined; + return args[idx + 1]; +} + +function hasFlag(args: string[], flag: string): boolean { + return args.includes(flag); +} + +function parseIntOption(value: string | undefined, fallback: number, label: string): number { + if (!value) { + return fallback; + } + const parsed = Number.parseInt(value, 10); + if (!Number.isInteger(parsed) || parsed <= 0) { + throw new Error(`Invalid ${label}: ${value}`); + } + return parsed; +} + +function positionals(args: string[]): string[] { + const out: string[] = []; + for (let i = 0; i < args.length; i += 1) { + const item = args[i]; + if (!item) { + continue; + } + + if (item.startsWith("--")) { + const next = args[i + 1]; + if (next && !next.startsWith("--")) { + i += 1; + } + continue; + } + out.push(item); + } + return out; +} + +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. + } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +async function resolveImportedRepo( + client: ReturnType, + organizationId: string, + repoSelector: string, +): Promise>[number]> { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing required --repo "); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. 
Create it in GitHub first, then sync repos in Foundry before running hf create.`, + ); + } + + return match; +} + +function printUsage(): void { + writeStdout(` +Usage: + hf backend start [--host HOST] [--port PORT] + hf backend stop [--host HOST] [--port PORT] + hf backend status + hf backend inspect + hf status [--organization ORG] [--json] + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] + hf organization use + hf tui [--organization ORG] + + hf create [task] [--organization ORG] --repo [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH] + hf list [--organization ORG] [--format table|json] [--full] + hf switch [task-id | -] [--organization ORG] + hf attach [--organization ORG] + hf merge [--organization ORG] + hf archive [--organization ORG] + hf push [--organization ORG] + hf sync [--organization ORG] + hf kill [--organization ORG] [--delete-branch] [--abandon] + hf prune [--organization ORG] [--dry-run] [--yes] + hf statusline [--organization ORG] [--format table|claude-code] + hf db path + hf db nuke + +Tips: + hf status --help Show status output format and examples + hf history --help Show history output format and examples + hf switch - Switch to most recently updated task +`); +} + +function printStatusUsage(): void { + writeStdout(` +Usage: + hf status [--organization ORG] [--json] + +Text Output: + organization= + backend running= pid= version= + tasks total= + status queued= running= idle= archived= killed= error= + sandboxProviders = ... + sandboxProviders - + +JSON Output: + { + "organizationId": "default", + "backend": { ...backend status object... 
}, + "tasks": { + "total": 4, + "byStatus": { "queued": 0, "running": 1, "idle": 2, "archived": 1, "killed": 0, "error": 0 }, + "byProvider": { "local": 4 } + } + } +`); +} + +function printHistoryUsage(): void { + writeStdout(` +Usage: + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] + +Text Output: + \t\t\t + \t\t\t + no events + +Notes: + - payload is truncated to 120 characters in text mode. + - --limit defaults to 20. + +JSON Output: + [ + { + "id": "...", + "organizationId": "default", + "kind": "task.created", + "taskId": "...", + "repoId": "...", + "branchName": "feature/foo", + "payloadJson": "{\\"sandboxProviderId\\":\\"local\\"}", + "createdAt": 1770607522229 + } + ] +`); +} + +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); +} + +async function handleBackend(args: string[]): Promise { + const sub = args[0] ?? "start"; + const config = loadConfig(); + const host = readOption(args, "--host") ?? config.backend.host; + const port = parseBackendPort(readOption(args, "--port"), config.backend.port); + const backendConfig = { + ...config, + backend: { + ...config.backend, + host, + port, + }, + }; + + if (sub === "start") { + await ensureBackendRunning(backendConfig); + const status = await getBackendStatus(host, port); + const pid = status.pid ?? "unknown"; + const version = status.version ?? "unknown"; + const stale = status.running && !status.versionCurrent ? " [outdated]" : ""; + writeStdout(`running=true pid=${pid} version=${version}${stale} log=${status.logPath}`); + return; + } + + if (sub === "stop") { + await stopBackend(host, port); + writeStdout(`running=false host=${host} port=${port}`); + return; + } + + if (sub === "status") { + const status = await getBackendStatus(host, port); + const pid = status.pid ?? 
"unknown"; + const version = status.version ?? "unknown"; + const stale = status.running && !status.versionCurrent ? " [outdated]" : ""; + writeStdout(`running=${status.running} pid=${pid} version=${version}${stale} host=${host} port=${port} log=${status.logPath}`); + return; + } + + if (sub === "inspect") { + await ensureBackendRunning(backendConfig); + const metadata = await readBackendMetadata({ + endpoint: `http://${host}:${port}/v1/rivet`, + timeoutMs: 4_000, + }); + const managerEndpoint = metadata.clientEndpoint ?? `http://${host}:${port}`; + const inspectorUrl = `https://inspect.rivet.dev?u=${encodeURIComponent(managerEndpoint)}`; + const openCmd = process.platform === "darwin" ? "open" : "xdg-open"; + spawnSync(openCmd, [inspectorUrl], { stdio: "ignore" }); + writeStdout(inspectorUrl); + return; + } + + throw new Error(`Unknown backend subcommand: ${sub}`); +} + +async function handleOrganization(args: string[]): Promise { + const sub = args[0]; + if (sub !== "use") { + throw new Error("Usage: hf organization use "); + } + + const name = args[1]; + if (!name) { + throw new Error("Missing organization name"); + } + + const config = loadConfig(); + config.organization.default = name; + saveConfig(config); + + const client = createBackendClientFromConfig(config); + try { + await client.useOrganization(name); + } catch { + // Backend may not be running yet. Config is already updated. + } + + writeStdout(`organization=${name}`); +} + +async function handleList(args: string[]): Promise { + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const format = readOption(args, "--format") ?? 
"table"; + const full = hasFlag(args, "--full"); + const client = createBackendClientFromConfig(config); + const rows = await listDetailedTasks(client, organizationId); + + if (format === "json") { + writeStdout(JSON.stringify(rows, null, 2)); + return; + } + + if (rows.length === 0) { + writeStdout("no tasks"); + return; + } + + for (const row of rows) { + const age = formatRelativeAge(row.updatedAt); + let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.sandboxProviderId}\t${age}`; + if (full) { + const preview = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task; + line += `\t${row.title}\t${preview}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? "-"}`; + } + writeStdout(line); + } +} + +async function handlePush(args: string[]): Promise { + const taskId = positionals(args)[0]; + if (!taskId) { + throw new Error("Missing task id for push"); + } + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const client = createBackendClientFromConfig(config); + await client.runAction(organizationId, taskId, "push"); + writeStdout("ok"); +} + +async function handleSync(args: string[]): Promise { + const taskId = positionals(args)[0]; + if (!taskId) { + throw new Error("Missing task id for sync"); + } + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const client = createBackendClientFromConfig(config); + await client.runAction(organizationId, taskId, "sync"); + writeStdout("ok"); +} + +async function handleKill(args: string[]): Promise { + const taskId = positionals(args)[0]; + if (!taskId) { + throw new Error("Missing task id for kill"); + } + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const deleteBranch = hasFlag(args, "--delete-branch"); + const abandon = hasFlag(args, "--abandon"); + + if (deleteBranch) { + 
writeStdout("info: --delete-branch flag set, branch will be deleted after kill"); + } + if (abandon) { + writeStdout("info: --abandon flag set, Graphite abandon will be attempted"); + } + + const client = createBackendClientFromConfig(config); + await client.runAction(organizationId, taskId, "kill"); + writeStdout("ok"); +} + +async function handlePrune(args: string[]): Promise { + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const dryRun = hasFlag(args, "--dry-run"); + const yes = hasFlag(args, "--yes"); + const client = createBackendClientFromConfig(config); + const rows = await listDetailedTasks(client, organizationId); + const prunable = rows.filter((r) => r.status === "archived" || r.status === "killed"); + + if (prunable.length === 0) { + writeStdout("nothing to prune"); + return; + } + + for (const row of prunable) { + const age = formatRelativeAge(row.updatedAt); + writeStdout(`${dryRun ? "[dry-run] " : ""}${row.taskId}\t${row.branchName}\t${row.status}\t${age}`); + } + + if (dryRun) { + writeStdout(`\n${prunable.length} task(s) would be pruned`); + return; + } + + if (!yes) { + writeStdout("\nnot yet implemented: auto-pruning requires confirmation"); + return; + } + + writeStdout(`\n${prunable.length} task(s) would be pruned (pruning not yet implemented)`); +} + +async function handleStatusline(args: string[]): Promise { + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const format = readOption(args, "--format") ?? 
"table"; + const client = createBackendClientFromConfig(config); + const rows = await listDetailedTasks(client, organizationId); + const summary = summarizeTasks(rows); + const running = summary.byStatus.running; + const idle = summary.byStatus.idle; + const errorCount = summary.byStatus.error; + + if (format === "claude-code") { + writeStdout(`hf:${running}R/${idle}I/${errorCount}E`); + return; + } + + writeStdout(`running=${running} idle=${idle} error=${errorCount}`); +} + +async function handleDb(args: string[]): Promise { + const sub = args[0]; + if (sub === "path") { + const config = loadConfig(); + const dbPath = config.backend.dbPath.replace(/^~/, homedir()); + writeStdout(dbPath); + return; + } + + if (sub === "nuke") { + writeStdout("WARNING: hf db nuke would delete the entire database. This is a placeholder and does not delete anything."); + return; + } + + throw new Error("Usage: hf db path | hf db nuke"); +} + +async function waitForTaskReady( + client: ReturnType, + organizationId: string, + taskId: string, + timeoutMs: number, +): Promise { + const start = Date.now(); + let delayMs = 250; + + for (;;) { + const record = await client.getTask(organizationId, taskId); + const hasName = Boolean(record.branchName && record.title); + const hasSandbox = Boolean(record.activeSandboxId); + + if (record.status === "error") { + throw new Error(`task entered error state while provisioning: ${taskId}`); + } + if (hasName && hasSandbox) { + return record; + } + + if (Date.now() - start > timeoutMs) { + throw new Error(`timed out waiting for task provisioning: ${taskId}`); + } + + await new Promise((r) => setTimeout(r, delayMs)); + delayMs = Math.min(Math.round(delayMs * 1.5), 2_000); + } +} + +async function handleCreate(args: string[]): Promise { + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + + const repoSelector = readOption(args, "--repo"); + if (!repoSelector) { + throw new 
Error("Missing required --repo "); + } + const explicitBranchName = readOption(args, "--name") ?? readOption(args, "--branch"); + const explicitTitle = readOption(args, "--title"); + + const agentRaw = readOption(args, "--agent"); + const agentType = agentRaw ? AgentTypeSchema.parse(agentRaw) : undefined; + const onBranch = readOption(args, "--on"); + + const taskFromArgs = positionals(args).join(" ").trim(); + const taskPrompt = taskFromArgs || openEditorForTask(); + + const client = createBackendClientFromConfig(config); + const repo = await resolveImportedRepo(client, organizationId, repoSelector); + + const payload = CreateTaskInputSchema.parse({ + organizationId, + repoId: repo.repoId, + task: taskPrompt, + explicitTitle: explicitTitle || undefined, + explicitBranchName: explicitBranchName || undefined, + agentType, + onBranch, + }); + + const created = await client.createTask(payload); + const createdTask = await waitForTaskReady(client, organizationId, created.taskId, 180_000); + const switched = await client.switchTask(organizationId, createdTask.taskId); + const attached = await client.attachTask(organizationId, createdTask.taskId); + + writeStdout(`Branch: ${createdTask.branchName ?? "-"}`); + writeStdout(`Task: ${createdTask.taskId}`); + writeStdout(`Provider: ${createdTask.sandboxProviderId}`); + writeStdout(`Session: ${attached.sessionId ?? "none"}`); + writeStdout(`Target: ${switched.switchTarget || attached.target}`); + writeStdout(`Title: ${createdTask.title ?? "-"}`); + + const tmuxResult = spawnCreateTmuxWindow({ + branchName: createdTask.branchName ?? 
createdTask.taskId, + targetPath: switched.switchTarget || attached.target, + sessionId: attached.sessionId, + }); + + if (tmuxResult.created) { + writeStdout(`Window: created (${createdTask.branchName})`); + return; + } + + writeStdout(""); + writeStdout(`Run: hf switch ${createdTask.taskId}`); + if ((switched.switchTarget || attached.target).startsWith("/")) { + writeStdout(`cd ${switched.switchTarget || attached.target}`); + } +} + +async function handleTui(args: string[]): Promise { + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + await runTuiCommand(config, organizationId); +} + +async function handleStatus(args: string[]): Promise { + if (hasFlag(args, "--help") || hasFlag(args, "-h")) { + printStatusUsage(); + return; + } + + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const client = createBackendClientFromConfig(config); + const backendStatus = await getBackendStatus(config.backend.host, config.backend.port); + const rows = await listDetailedTasks(client, organizationId); + const summary = summarizeTasks(rows); + + if (hasFlag(args, "--json")) { + writeStdout( + JSON.stringify( + { + organizationId, + backend: backendStatus, + tasks: { + total: summary.total, + byStatus: summary.byStatus, + byProvider: summary.byProvider, + }, + }, + null, + 2, + ), + ); + return; + } + + writeStdout(`organization=${organizationId}`); + writeStdout(`backend running=${backendStatus.running} pid=${backendStatus.pid ?? "unknown"} version=${backendStatus.version ?? 
"unknown"}`); + writeStdout(`tasks total=${summary.total}`); + writeStdout( + `status queued=${summary.byStatus.queued} running=${summary.byStatus.running} idle=${summary.byStatus.idle} archived=${summary.byStatus.archived} killed=${summary.byStatus.killed} error=${summary.byStatus.error}`, + ); + const providerSummary = Object.entries(summary.byProvider) + .map(([provider, count]) => `${provider}=${count}`) + .join(" "); + writeStdout(`sandboxProviders ${providerSummary || "-"}`); +} + +async function handleHistory(args: string[]): Promise { + if (hasFlag(args, "--help") || hasFlag(args, "-h")) { + printHistoryUsage(); + return; + } + + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + const limit = parseIntOption(readOption(args, "--limit"), 20, "limit"); + const branch = readOption(args, "--branch"); + const taskId = readOption(args, "--task"); + const client = createBackendClientFromConfig(config); + const rows = await client.listHistory({ + organizationId, + limit, + branch: branch || undefined, + taskId: taskId || undefined, + }); + + if (hasFlag(args, "--json")) { + writeStdout(JSON.stringify(rows, null, 2)); + return; + } + + if (rows.length === 0) { + writeStdout("no events"); + return; + } + + for (const row of rows) { + const ts = new Date(row.createdAt).toISOString(); + const target = row.branchName || row.taskId || row.repoId || "-"; + let payload = row.payloadJson; + if (payload.length > 120) { + payload = `${payload.slice(0, 117)}...`; + } + writeStdout(`${ts}\t${row.kind}\t${target}\t${payload}`); + } +} + +async function handleSwitchLike(cmd: string, args: string[]): Promise { + let taskId = positionals(args)[0]; + if (!taskId && cmd === "switch") { + await handleTui(args); + return; + } + + if (!taskId) { + throw new Error(`Missing task id for ${cmd}`); + } + + const config = loadConfig(); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); 
+ const client = createBackendClientFromConfig(config); + + if (cmd === "switch" && taskId === "-") { + const rows = await listDetailedTasks(client, organizationId); + const active = rows.filter((r) => { + const group = groupTaskStatus(r.status); + return group === "running" || group === "idle" || group === "queued"; + }); + const sorted = active.sort((a, b) => b.updatedAt - a.updatedAt); + const target = sorted[0]; + if (!target) { + throw new Error("No active tasks to switch to"); + } + taskId = target.taskId; + } + + if (cmd === "switch") { + const result = await client.switchTask(organizationId, taskId); + writeStdout(`cd ${result.switchTarget}`); + return; + } + + if (cmd === "attach") { + const result = await client.attachTask(organizationId, taskId); + writeStdout(`target=${result.target} session=${result.sessionId ?? "none"}`); + return; + } + + if (cmd === "merge" || cmd === "archive") { + await client.runAction(organizationId, taskId, cmd); + writeStdout("ok"); + return; + } + + throw new Error(`Unsupported action: ${cmd}`); +} + +async function main(): Promise { + await ensureBunRuntime(); + + const args = process.argv.slice(2); + const cmd = args[0]; + const rest = args.slice(1); + + if (cmd === "help" || cmd === "--help" || cmd === "-h") { + printUsage(); + return; + } + + if (cmd === "backend") { + await handleBackend(rest); + return; + } + + const config = loadConfig(); + await ensureBackendRunning(config); + + if (!cmd || cmd.startsWith("--")) { + await handleTui(args); + return; + } + + if (cmd === "organization") { + await handleOrganization(rest); + return; + } + + if (cmd === "create") { + await handleCreate(rest); + return; + } + + if (cmd === "list") { + await handleList(rest); + return; + } + + if (cmd === "tui") { + await handleTui(rest); + return; + } + + if (cmd === "status") { + await handleStatus(rest); + return; + } + + if (cmd === "history") { + await handleHistory(rest); + return; + } + + if (cmd === "push") { + await 
handlePush(rest); + return; + } + + if (cmd === "sync") { + await handleSync(rest); + return; + } + + if (cmd === "kill") { + await handleKill(rest); + return; + } + + if (cmd === "prune") { + await handlePrune(rest); + return; + } + + if (cmd === "statusline") { + await handleStatusline(rest); + return; + } + + if (cmd === "db") { + await handleDb(rest); + return; + } + + if (["switch", "attach", "merge", "archive"].includes(cmd)) { + await handleSwitchLike(cmd, rest); + return; + } + + printUsage(); + throw new Error(`Unknown command: ${cmd}`); +} + +main().catch((err: unknown) => { + const msg = err instanceof Error ? (err.stack ?? err.message) : String(err); + writeStderr(msg); + process.exit(1); +}); diff --git a/foundry/packages/cli/src/io.ts b/foundry/packages/cli/src/io.ts new file mode 100644 index 0000000..b188206 --- /dev/null +++ b/foundry/packages/cli/src/io.ts @@ -0,0 +1,7 @@ +export function writeStdout(message = ""): void { + process.stdout.write(`${message}\n`); +} + +export function writeStderr(message = ""): void { + process.stderr.write(`${message}\n`); +} diff --git a/foundry/packages/cli/src/logging.ts b/foundry/packages/cli/src/logging.ts new file mode 100644 index 0000000..a7c5892 --- /dev/null +++ b/foundry/packages/cli/src/logging.ts @@ -0,0 +1,5 @@ +import { createFoundryLogger } from "@sandbox-agent/foundry-shared"; + +export const logger = createFoundryLogger({ + service: "foundry-cli", +}); diff --git a/foundry/packages/cli/src/organization/config.ts b/foundry/packages/cli/src/organization/config.ts new file mode 100644 index 0000000..cfaebfe --- /dev/null +++ b/foundry/packages/cli/src/organization/config.ts @@ -0,0 +1,25 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { dirname } from "node:path"; +import { homedir } from "node:os"; +import * as toml from "@iarna/toml"; +import { ConfigSchema, resolveOrganizationId, type AppConfig } from "@sandbox-agent/foundry-shared"; + +export const 
CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`; + +export function loadConfig(path = CONFIG_PATH): AppConfig { + if (!existsSync(path)) { + return ConfigSchema.parse({}); + } + + const raw = readFileSync(path, "utf8"); + return ConfigSchema.parse(toml.parse(raw)); +} + +export function saveConfig(config: AppConfig, path = CONFIG_PATH): void { + mkdirSync(dirname(path), { recursive: true }); + writeFileSync(path, toml.stringify(config), "utf8"); +} + +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + return resolveOrganizationId(flagOrganization, config); +} diff --git a/foundry/packages/cli/src/task-editor.ts b/foundry/packages/cli/src/task-editor.ts new file mode 100644 index 0000000..502aa13 --- /dev/null +++ b/foundry/packages/cli/src/task-editor.ts @@ -0,0 +1,41 @@ +import { mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { spawnSync } from "node:child_process"; + +const DEFAULT_EDITOR_TEMPLATE = ["# Enter task task details below.", "# Lines starting with # are ignored.", ""].join("\n"); + +export function sanitizeEditorTask(input: string): string { + return input + .split(/\r?\n/) + .filter((line) => !line.trim().startsWith("#")) + .join("\n") + .trim(); +} + +export function openEditorForTask(): string { + const editor = process.env.VISUAL?.trim() || process.env.EDITOR?.trim() || "vi"; + const tempDir = mkdtempSync(join(tmpdir(), "hf-task-")); + const taskPath = join(tempDir, "task.md"); + + try { + writeFileSync(taskPath, DEFAULT_EDITOR_TEMPLATE, "utf8"); + const result = spawnSync(editor, [taskPath], { stdio: "inherit" }); + + if (result.error) { + throw result.error; + } + if ((result.status ?? 1) !== 0) { + throw new Error(`Editor exited with status ${result.status ?? 
"unknown"}`); + } + + const raw = readFileSync(taskPath, "utf8"); + const task = sanitizeEditorTask(raw); + if (!task) { + throw new Error("Missing task task text"); + } + return task; + } finally { + rmSync(tempDir, { recursive: true, force: true }); + } +} diff --git a/foundry/packages/cli/src/theme.ts b/foundry/packages/cli/src/theme.ts new file mode 100644 index 0000000..633c079 --- /dev/null +++ b/foundry/packages/cli/src/theme.ts @@ -0,0 +1,792 @@ +import { existsSync, readFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { dirname, isAbsolute, join, resolve } from "node:path"; +import { cwd } from "node:process"; +import * as toml from "@iarna/toml"; +import type { AppConfig } from "@sandbox-agent/foundry-shared"; +import opencodeThemePackJson from "./themes/opencode-pack.json" with { type: "json" }; + +export type ThemeMode = "dark" | "light"; + +export interface TuiTheme { + background: string; + text: string; + muted: string; + header: string; + status: string; + highlightBg: string; + highlightFg: string; + selectionBorder: string; + success: string; + warning: string; + error: string; + info: string; + diffAdd: string; + diffDel: string; + diffSep: string; + agentRunning: string; + agentIdle: string; + agentNone: string; + agentError: string; + prUnpushed: string; + author: string; + ciRunning: string; + ciPass: string; + ciFail: string; + ciNone: string; + reviewApproved: string; + reviewChanges: string; + reviewPending: string; + reviewNone: string; +} + +export interface TuiThemeResolution { + theme: TuiTheme; + name: string; + source: string; + mode: ThemeMode; +} + +interface ThemeCandidate { + theme: TuiTheme; + name: string; +} + +type JsonObject = Record; + +type ConfigLike = AppConfig & { theme?: string }; + +const DEFAULT_THEME: TuiTheme = { + background: "#282828", + text: "#ffffff", + muted: "#6b7280", + header: "#6b7280", + status: "#6b7280", + highlightBg: "#282828", + highlightFg: "#ffffff", + selectionBorder: 
"#d946ef", + success: "#22c55e", + warning: "#eab308", + error: "#ef4444", + info: "#22d3ee", + diffAdd: "#22c55e", + diffDel: "#ef4444", + diffSep: "#6b7280", + agentRunning: "#22c55e", + agentIdle: "#eab308", + agentNone: "#6b7280", + agentError: "#ef4444", + prUnpushed: "#eab308", + author: "#22d3ee", + ciRunning: "#eab308", + ciPass: "#22c55e", + ciFail: "#ef4444", + ciNone: "#6b7280", + reviewApproved: "#22c55e", + reviewChanges: "#ef4444", + reviewPending: "#eab308", + reviewNone: "#6b7280", +}; + +const OPENCODE_THEME_PACK = opencodeThemePackJson as Record; + +export function resolveTuiTheme(config: AppConfig, baseDir = cwd()): TuiThemeResolution { + const mode = opencodeStateThemeMode() ?? "dark"; + const configWithTheme = config as ConfigLike; + const override = typeof configWithTheme.theme === "string" ? configWithTheme.theme.trim() : ""; + + if (override) { + const candidate = loadFromSpec(override, [], mode, baseDir); + if (candidate) { + return { + theme: candidate.theme, + name: candidate.name, + source: "foundry config", + mode, + }; + } + } + + const fromConfig = loadOpencodeThemeFromConfig(mode, baseDir); + if (fromConfig) { + return fromConfig; + } + + const fromState = loadOpencodeThemeFromState(mode, baseDir); + if (fromState) { + return fromState; + } + + return { + theme: DEFAULT_THEME, + name: "opencode-default", + source: "default", + mode, + }; +} + +function loadOpencodeThemeFromConfig(mode: ThemeMode, baseDir: string): TuiThemeResolution | null { + for (const path of opencodeConfigPaths(baseDir)) { + if (!existsSync(path)) { + continue; + } + + const value = readJsonWithComments(path); + if (!value) { + continue; + } + + const themeValue = findOpencodeThemeValue(value); + if (themeValue === undefined) { + continue; + } + + const candidate = themeFromOpencodeValue(themeValue, opencodeThemeDirs(dirname(path), baseDir), mode, baseDir); + if (!candidate) { + continue; + } + + return { + theme: candidate.theme, + name: candidate.name, + 
source: `opencode config (${path})`, + mode, + }; + } + + return null; +} + +function loadOpencodeThemeFromState(mode: ThemeMode, baseDir: string): TuiThemeResolution | null { + const path = opencodeStatePath(); + if (!path || !existsSync(path)) { + return null; + } + + const value = readJsonWithComments(path); + if (!isObject(value)) { + return null; + } + + const spec = value.theme; + if (typeof spec !== "string" || !spec.trim()) { + return null; + } + + const candidate = loadFromSpec(spec.trim(), opencodeThemeDirs(undefined, baseDir), mode, baseDir); + if (!candidate) { + return null; + } + + return { + theme: candidate.theme, + name: candidate.name, + source: `opencode state (${path})`, + mode, + }; +} + +function loadFromSpec(spec: string, searchDirs: string[], mode: ThemeMode, baseDir: string): ThemeCandidate | null { + if (isDefaultThemeName(spec)) { + return { + theme: DEFAULT_THEME, + name: "opencode-default", + }; + } + + if (isPathLike(spec)) { + const resolved = resolvePath(spec, baseDir); + if (existsSync(resolved)) { + const candidate = loadThemeFromPath(resolved, mode); + if (candidate) { + return candidate; + } + } + } + + for (const dir of searchDirs) { + for (const ext of ["json", "toml"]) { + const path = join(dir, `${spec}.${ext}`); + if (!existsSync(path)) { + continue; + } + + const candidate = loadThemeFromPath(path, mode); + if (candidate) { + return candidate; + } + } + } + + const builtIn = OPENCODE_THEME_PACK[spec]; + if (builtIn !== undefined) { + const theme = themeFromOpencodeJson(builtIn, mode); + if (theme) { + return { + theme, + name: spec, + }; + } + } + + return null; +} + +function loadThemeFromPath(path: string, mode: ThemeMode): ThemeCandidate | null { + const content = safeReadText(path); + if (!content) { + return null; + } + + const lower = path.toLowerCase(); + if (lower.endsWith(".toml")) { + try { + const parsed = toml.parse(content); + const theme = themeFromAny(parsed); + if (!theme) { + return null; + } + return { + 
theme, + name: themeNameFromPath(path), + }; + } catch { + return null; + } + } + + const value = parseJsonWithComments(content); + if (!value) { + return null; + } + + const opencodeTheme = themeFromOpencodeJson(value, mode); + if (opencodeTheme) { + return { + theme: opencodeTheme, + name: themeNameFromPath(path), + }; + } + + const paletteTheme = themeFromAny(value); + if (!paletteTheme) { + return null; + } + + return { + theme: paletteTheme, + name: themeNameFromPath(path), + }; +} + +function themeNameFromPath(path: string): string { + const base = path.split(/[\\/]/).pop() ?? path; + if (base.endsWith(".json") || base.endsWith(".toml")) { + return base.replace(/\.(json|toml)$/i, ""); + } + return base; +} + +function themeFromOpencodeValue(value: unknown, searchDirs: string[], mode: ThemeMode, baseDir: string): ThemeCandidate | null { + if (typeof value === "string") { + return loadFromSpec(value, searchDirs, mode, baseDir); + } + + if (!isObject(value)) { + return null; + } + + if (value.theme !== undefined) { + const theme = themeFromOpencodeJson(value, mode); + if (theme) { + return { + theme, + name: typeof value.name === "string" ? value.name : "inline", + }; + } + } + + const paletteTheme = themeFromAny(value.colors ?? value.palette ?? value); + if (paletteTheme) { + return { + theme: paletteTheme, + name: typeof value.name === "string" ? value.name : "inline", + }; + } + + if (typeof value.name === "string") { + const named = loadFromSpec(value.name, searchDirs, mode, baseDir); + if (named) { + return named; + } + } + + const pathLike = value.path ?? 
value.file; + if (typeof pathLike === "string") { + const resolved = resolvePath(pathLike, baseDir); + const candidate = loadThemeFromPath(resolved, mode); + if (candidate) { + return candidate; + } + } + + return null; +} + +function themeFromOpencodeJson(value: unknown, mode: ThemeMode): TuiTheme | null { + if (!isObject(value)) { + return null; + } + + const themeMap = value.theme; + if (!isObject(themeMap)) { + return null; + } + + const defs = isObject(value.defs) ? value.defs : {}; + + const background = + opencodeColor(themeMap, defs, mode, "background") ?? + opencodeColor(themeMap, defs, mode, "backgroundPanel") ?? + opencodeColor(themeMap, defs, mode, "backgroundElement") ?? + DEFAULT_THEME.background; + + const text = opencodeColor(themeMap, defs, mode, "text") ?? DEFAULT_THEME.text; + const muted = opencodeColor(themeMap, defs, mode, "textMuted") ?? DEFAULT_THEME.muted; + const highlightBg = opencodeColor(themeMap, defs, mode, "text") ?? text; + const highlightFg = + opencodeColor(themeMap, defs, mode, "backgroundElement") ?? + opencodeColor(themeMap, defs, mode, "backgroundPanel") ?? + opencodeColor(themeMap, defs, mode, "background") ?? + DEFAULT_THEME.highlightFg; + + const selectionBorder = + opencodeColor(themeMap, defs, mode, "secondary") ?? + opencodeColor(themeMap, defs, mode, "accent") ?? + opencodeColor(themeMap, defs, mode, "primary") ?? + DEFAULT_THEME.selectionBorder; + + const success = opencodeColor(themeMap, defs, mode, "success") ?? DEFAULT_THEME.success; + const warning = opencodeColor(themeMap, defs, mode, "warning") ?? DEFAULT_THEME.warning; + const error = opencodeColor(themeMap, defs, mode, "error") ?? DEFAULT_THEME.error; + const info = opencodeColor(themeMap, defs, mode, "info") ?? DEFAULT_THEME.info; + const diffAdd = opencodeColor(themeMap, defs, mode, "diffAdded") ?? success; + const diffDel = opencodeColor(themeMap, defs, mode, "diffRemoved") ?? error; + const diffSep = opencodeColor(themeMap, defs, mode, "diffContext") ?? 
opencodeColor(themeMap, defs, mode, "diffHunkHeader") ?? muted; + + return { + background, + text, + muted, + header: muted, + status: muted, + highlightBg, + highlightFg, + selectionBorder, + success, + warning, + error, + info, + diffAdd, + diffDel, + diffSep, + agentRunning: success, + agentIdle: warning, + agentNone: muted, + agentError: error, + prUnpushed: warning, + author: info, + ciRunning: warning, + ciPass: success, + ciFail: error, + ciNone: muted, + reviewApproved: success, + reviewChanges: error, + reviewPending: warning, + reviewNone: muted, + }; +} + +function opencodeColor(themeMap: JsonObject, defs: JsonObject, mode: ThemeMode, key: string): string | null { + const raw = themeMap[key]; + if (raw === undefined) { + return null; + } + return resolveOpencodeColor(raw, themeMap, defs, mode, 0); +} + +function resolveOpencodeColor(value: unknown, themeMap: JsonObject, defs: JsonObject, mode: ThemeMode, depth: number): string | null { + if (depth > 12) { + return null; + } + + if (typeof value === "string") { + const trimmed = value.trim(); + if (!trimmed || trimmed.toLowerCase() === "transparent" || trimmed.toLowerCase() === "none") { + return null; + } + + const fromDefs = defs[trimmed]; + if (fromDefs !== undefined) { + return resolveOpencodeColor(fromDefs, themeMap, defs, mode, depth + 1); + } + + const fromTheme = themeMap[trimmed]; + if (fromTheme !== undefined) { + return resolveOpencodeColor(fromTheme, themeMap, defs, mode, depth + 1); + } + + if (isColorLike(trimmed)) { + return trimmed; + } + + return null; + } + + if (isObject(value)) { + const nested = value[mode]; + if (nested !== undefined) { + return resolveOpencodeColor(nested, themeMap, defs, mode, depth + 1); + } + } + + return null; +} + +function themeFromAny(value: unknown): TuiTheme | null { + const palette = extractPalette(value); + if (!palette) { + return null; + } + + const pick = (keys: string[], fallback: string): string => { + for (const key of keys) { + const v = 
palette[normalizeKey(key)]; + if (v && isColorLike(v)) { + return v; + } + } + return fallback; + }; + + const background = pick(["background", "bg", "base", "background_color"], DEFAULT_THEME.background); + const text = pick(["text", "foreground", "fg", "primary"], DEFAULT_THEME.text); + const muted = pick(["muted", "subtle", "secondary", "dim"], DEFAULT_THEME.muted); + const header = pick(["header", "header_text"], muted); + const status = pick(["status", "status_text"], muted); + const highlightBg = pick(["highlight_bg", "selection", "highlight", "accent_bg"], DEFAULT_THEME.highlightBg); + const highlightFg = pick(["highlight_fg", "selection_fg", "accent_fg"], text); + const selectionBorder = pick(["selection_border", "highlight_border", "accent", "secondary"], DEFAULT_THEME.selectionBorder); + const success = pick(["success", "green"], DEFAULT_THEME.success); + const warning = pick(["warning", "yellow"], DEFAULT_THEME.warning); + const error = pick(["error", "red"], DEFAULT_THEME.error); + const info = pick(["info", "cyan", "blue"], DEFAULT_THEME.info); + const diffAdd = pick(["diff_add", "diff_addition", "add"], success); + const diffDel = pick(["diff_del", "diff_deletion", "delete"], error); + const diffSep = pick(["diff_sep", "diff_separator", "separator"], muted); + + return { + background, + text, + muted, + header, + status, + highlightBg, + highlightFg, + selectionBorder, + success, + warning, + error, + info, + diffAdd, + diffDel, + diffSep, + agentRunning: pick(["agent_running", "running"], success), + agentIdle: pick(["agent_idle", "idle"], warning), + agentNone: pick(["agent_none", "none"], muted), + agentError: pick(["agent_error", "agent_failed"], error), + prUnpushed: pick(["pr_unpushed", "unpushed"], warning), + author: pick(["author"], info), + ciRunning: pick(["ci_running"], warning), + ciPass: pick(["ci_pass", "ci_success"], success), + ciFail: pick(["ci_fail", "ci_error"], error), + ciNone: pick(["ci_none", "ci_unknown"], muted), + 
reviewApproved: pick(["review_approved", "approved"], success), + reviewChanges: pick(["review_changes", "changes"], error), + reviewPending: pick(["review_pending", "pending"], warning), + reviewNone: pick(["review_none", "review_unknown"], muted), + }; +} + +function extractPalette(value: unknown): Record | null { + if (!isObject(value)) { + return null; + } + + const colors = isObject(value.colors) ? value.colors : undefined; + const palette = isObject(value.palette) ? value.palette : undefined; + const source = colors ?? palette ?? value; + if (!isObject(source)) { + return null; + } + + const out: Record = {}; + for (const [key, raw] of Object.entries(source)) { + if (typeof raw !== "string") { + continue; + } + out[normalizeKey(key)] = raw; + } + + return Object.keys(out).length > 0 ? out : null; +} + +function normalizeKey(key: string): string { + return key.toLowerCase().replace(/[\-\s.]/g, "_"); +} + +function isColorLike(value: string): boolean { + const lower = value.trim().toLowerCase(); + if (!lower) { + return false; + } + + if (/^#[0-9a-f]{3}$/.test(lower) || /^#[0-9a-f]{6}$/.test(lower) || /^#[0-9a-f]{8}$/.test(lower)) { + return true; + } + + if (/^rgba?\(\s*\d+\s*,\s*\d+\s*,\s*\d+(\s*,\s*[\d.]+)?\s*\)$/.test(lower)) { + return true; + } + + return /^[a-z_\-]+$/.test(lower); +} + +function findOpencodeThemeValue(value: unknown): unknown { + if (!isObject(value)) { + return undefined; + } + + if (value.theme !== undefined) { + return value.theme; + } + + return pointer(value, ["ui", "theme"]) ?? pointer(value, ["tui", "theme"]) ?? 
pointer(value, ["options", "theme"]); +} + +function pointer(obj: JsonObject, parts: string[]): unknown { + let current: unknown = obj; + for (const part of parts) { + if (!isObject(current)) { + return undefined; + } + current = current[part]; + } + return current; +} + +function opencodeConfigPaths(baseDir: string): string[] { + const paths: string[] = []; + + const rootish = opencodeRepositoryConfigPaths(baseDir); + paths.push(...rootish); + + const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), ".config"); + const opencodeDir = join(configDir, "opencode"); + paths.push(join(opencodeDir, "opencode.json")); + paths.push(join(opencodeDir, "opencode.jsonc")); + paths.push(join(opencodeDir, "config.json")); + + return paths; +} + +function opencodeThemeDirs(configDir: string | undefined, baseDir: string): string[] { + const dirs: string[] = []; + + if (configDir) { + dirs.push(join(configDir, "themes")); + } + + const xdgConfig = process.env.XDG_CONFIG_HOME || join(homedir(), ".config"); + dirs.push(join(xdgConfig, "opencode", "themes")); + dirs.push(join(homedir(), ".opencode", "themes")); + + dirs.push(...opencodeRepositoryThemeDirs(baseDir)); + + return dirs; +} + +function opencodeRepositoryConfigPaths(baseDir: string): string[] { + const dirs = ancestorDirs(baseDir); + const out: string[] = []; + for (const dir of dirs) { + out.push(join(dir, "opencode.json")); + out.push(join(dir, "opencode.jsonc")); + out.push(join(dir, ".opencode", "opencode.json")); + out.push(join(dir, ".opencode", "opencode.jsonc")); + } + return out; +} + +function opencodeRepositoryThemeDirs(baseDir: string): string[] { + const dirs = ancestorDirs(baseDir); + const out: string[] = []; + for (const dir of dirs) { + out.push(join(dir, ".opencode", "themes")); + } + return out; +} + +function ancestorDirs(start: string): string[] { + const out: string[] = []; + let current = resolve(start); + + while (true) { + out.push(current); + const parent = dirname(current); + if (parent 
=== current) { + break; + } + current = parent; + } + + return out; +} + +function opencodeStatePath(): string | null { + const stateHome = process.env.XDG_STATE_HOME || join(homedir(), ".local", "state"); + return join(stateHome, "opencode", "kv.json"); +} + +function opencodeStateThemeMode(): ThemeMode | null { + const path = opencodeStatePath(); + if (!path || !existsSync(path)) { + return null; + } + + const value = readJsonWithComments(path); + if (!isObject(value)) { + return null; + } + + const mode = value.theme_mode; + if (typeof mode !== "string") { + return null; + } + + const lower = mode.toLowerCase(); + if (lower === "dark" || lower === "light") { + return lower; + } + + return null; +} + +function parseJsonWithComments(content: string): unknown { + try { + return JSON.parse(content); + } catch { + // Fall through. + } + + try { + return JSON.parse(stripJsoncComments(content)); + } catch { + return null; + } +} + +function readJsonWithComments(path: string): unknown { + const content = safeReadText(path); + if (!content) { + return null; + } + return parseJsonWithComments(content); +} + +function stripJsoncComments(input: string): string { + let output = ""; + let i = 0; + let inString = false; + let escaped = false; + + while (i < input.length) { + const ch = input[i]; + + if (inString) { + output += ch; + if (escaped) { + escaped = false; + } else if (ch === "\\") { + escaped = true; + } else if (ch === '"') { + inString = false; + } + i += 1; + continue; + } + + if (ch === '"') { + inString = true; + output += ch; + i += 1; + continue; + } + + if (ch === "/" && input[i + 1] === "/") { + i += 2; + while (i < input.length && input[i] !== "\n") { + i += 1; + } + continue; + } + + if (ch === "/" && input[i + 1] === "*") { + i += 2; + while (i < input.length) { + if (input[i] === "*" && input[i + 1] === "/") { + i += 2; + break; + } + i += 1; + } + continue; + } + + output += ch; + i += 1; + } + + return output; +} + +function safeReadText(path: 
string): string | null { + try { + return readFileSync(path, "utf8"); + } catch { + return null; + } +} + +function resolvePath(path: string, baseDir: string): string { + if (path.startsWith("~/")) { + return join(homedir(), path.slice(2)); + } + if (isAbsolute(path)) { + return path; + } + return resolve(baseDir, path); +} + +function isPathLike(spec: string): boolean { + return spec.includes("/") || spec.includes("\\") || spec.endsWith(".json") || spec.endsWith(".toml"); +} + +function isDefaultThemeName(spec: string): boolean { + const lower = spec.toLowerCase(); + return lower === "default" || lower === "opencode" || lower === "opencode-default" || lower === "system"; +} + +function isObject(value: unknown): value is JsonObject { + return typeof value === "object" && value !== null && !Array.isArray(value); +} diff --git a/foundry/packages/cli/src/themes/opencode-pack.json b/foundry/packages/cli/src/themes/opencode-pack.json new file mode 100644 index 0000000..391bca1 --- /dev/null +++ b/foundry/packages/cli/src/themes/opencode-pack.json @@ -0,0 +1,7408 @@ +{ + "aura": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#0f0f0f", + "darkBgPanel": "#15141b", + "darkBorder": "#2d2d2d", + "darkFgMuted": "#6d6d6d", + "darkFg": "#edecee", + "purple": "#a277ff", + "pink": "#f694ff", + "blue": "#82e2ff", + "red": "#ff6767", + "orange": "#ffca85", + "cyan": "#61ffca", + "green": "#9dff65" + }, + "theme": { + "primary": "purple", + "secondary": "pink", + "accent": "purple", + "error": "red", + "warning": "orange", + "success": "cyan", + "info": "purple", + "text": "darkFg", + "textMuted": "darkFgMuted", + "background": "darkBg", + "backgroundPanel": "darkBgPanel", + "backgroundElement": "darkBgPanel", + "border": "darkBorder", + "borderActive": "darkFgMuted", + "borderSubtle": "darkBorder", + "diffAdded": "cyan", + "diffRemoved": "red", + "diffContext": "darkFgMuted", + "diffHunkHeader": "darkFgMuted", + "diffHighlightAdded": "cyan", + 
"diffHighlightRemoved": "red", + "diffAddedBg": "#354933", + "diffRemovedBg": "#3f191a", + "diffContextBg": "darkBgPanel", + "diffLineNumber": "darkBorder", + "diffAddedLineNumberBg": "#162620", + "diffRemovedLineNumberBg": "#26161a", + "markdownText": "darkFg", + "markdownHeading": "purple", + "markdownLink": "pink", + "markdownLinkText": "purple", + "markdownCode": "cyan", + "markdownBlockQuote": "darkFgMuted", + "markdownEmph": "orange", + "markdownStrong": "purple", + "markdownHorizontalRule": "darkFgMuted", + "markdownListItem": "purple", + "markdownListEnumeration": "purple", + "markdownImage": "pink", + "markdownImageText": "purple", + "markdownCodeBlock": "darkFg", + "syntaxComment": "darkFgMuted", + "syntaxKeyword": "pink", + "syntaxFunction": "purple", + "syntaxVariable": "purple", + "syntaxString": "cyan", + "syntaxNumber": "green", + "syntaxType": "purple", + "syntaxOperator": "pink", + "syntaxPunctuation": "darkFg" + } + }, + "ayu": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#0B0E14", + "darkBgAlt": "#0D1017", + "darkLine": "#11151C", + "darkPanel": "#0F131A", + "darkFg": "#BFBDB6", + "darkFgMuted": "#565B66", + "darkGutter": "#6C7380", + "darkTag": "#39BAE6", + "darkFunc": "#FFB454", + "darkEntity": "#59C2FF", + "darkString": "#AAD94C", + "darkRegexp": "#95E6CB", + "darkMarkup": "#F07178", + "darkKeyword": "#FF8F40", + "darkSpecial": "#E6B673", + "darkComment": "#ACB6BF", + "darkConstant": "#D2A6FF", + "darkOperator": "#F29668", + "darkAdded": "#7FD962", + "darkRemoved": "#F26D78", + "darkAccent": "#E6B450", + "darkError": "#D95757", + "darkIndentActive": "#6C7380" + }, + "theme": { + "primary": "darkEntity", + "secondary": "darkConstant", + "accent": "darkAccent", + "error": "darkError", + "warning": "darkSpecial", + "success": "darkAdded", + "info": "darkTag", + "text": "darkFg", + "textMuted": "darkFgMuted", + "background": "darkBg", + "backgroundPanel": "darkPanel", + "backgroundElement": "darkBgAlt", + "border": 
"darkGutter", + "borderActive": "darkIndentActive", + "borderSubtle": "darkLine", + "diffAdded": "darkAdded", + "diffRemoved": "darkRemoved", + "diffContext": "darkComment", + "diffHunkHeader": "darkComment", + "diffHighlightAdded": "darkString", + "diffHighlightRemoved": "darkMarkup", + "diffAddedBg": "#20303b", + "diffRemovedBg": "#37222c", + "diffContextBg": "darkPanel", + "diffLineNumber": "darkGutter", + "diffAddedLineNumberBg": "#1b2b34", + "diffRemovedLineNumberBg": "#2d1f26", + "markdownText": "darkFg", + "markdownHeading": "darkConstant", + "markdownLink": "darkEntity", + "markdownLinkText": "darkTag", + "markdownCode": "darkString", + "markdownBlockQuote": "darkSpecial", + "markdownEmph": "darkSpecial", + "markdownStrong": "darkFunc", + "markdownHorizontalRule": "darkFgMuted", + "markdownListItem": "darkEntity", + "markdownListEnumeration": "darkTag", + "markdownImage": "darkEntity", + "markdownImageText": "darkTag", + "markdownCodeBlock": "darkFg", + "syntaxComment": "darkComment", + "syntaxKeyword": "darkKeyword", + "syntaxFunction": "darkFunc", + "syntaxVariable": "darkEntity", + "syntaxString": "darkString", + "syntaxNumber": "darkConstant", + "syntaxType": "darkSpecial", + "syntaxOperator": "darkOperator", + "syntaxPunctuation": "darkFg" + } + }, + "carbonfox": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "bg0": "#0d0d0d", + "bg1": "#161616", + "bg1a": "#1a1a1a", + "bg2": "#1e1e1e", + "bg3": "#262626", + "bg4": "#303030", + "fg0": "#ffffff", + "fg1": "#f2f4f8", + "fg2": "#a9afbc", + "fg3": "#7d848f", + "lbg0": "#ffffff", + "lbg1": "#f4f4f4", + "lbg2": "#e8e8e8", + "lbg3": "#dcdcdc", + "lfg0": "#000000", + "lfg1": "#161616", + "lfg2": "#525252", + "lfg3": "#6f6f6f", + "red": "#ee5396", + "green": "#25be6a", + "yellow": "#08bdba", + "blue": "#78a9ff", + "magenta": "#be95ff", + "cyan": "#33b1ff", + "white": "#dfdfe0", + "orange": "#3ddbd9", + "pink": "#ff7eb6", + "blueBright": "#8cb6ff", + "cyanBright": "#52c7ff", + "greenBright": 
"#46c880", + "redLight": "#9f1853", + "greenLight": "#198038", + "yellowLight": "#007d79", + "blueLight": "#0043ce", + "magentaLight": "#6929c4", + "cyanLight": "#0072c3", + "warning": "#f1c21b", + "diffGreen": "#50fa7b", + "diffRed": "#ff6b6b", + "diffGreenBg": "#0f2418", + "diffRedBg": "#2a1216" + }, + "theme": { + "primary": { + "dark": "cyan", + "light": "blueLight" + }, + "secondary": { + "dark": "blue", + "light": "blueLight" + }, + "accent": { + "dark": "pink", + "light": "redLight" + }, + "error": { + "dark": "red", + "light": "redLight" + }, + "warning": { + "dark": "warning", + "light": "yellowLight" + }, + "success": { + "dark": "green", + "light": "greenLight" + }, + "info": { + "dark": "blue", + "light": "blueLight" + }, + "text": { + "dark": "fg1", + "light": "lfg1" + }, + "textMuted": { + "dark": "fg3", + "light": "lfg3" + }, + "background": { + "dark": "bg1", + "light": "lbg0" + }, + "backgroundPanel": { + "dark": "bg1a", + "light": "lbg1" + }, + "backgroundElement": { + "dark": "bg2", + "light": "lbg1" + }, + "border": { + "dark": "bg4", + "light": "lbg3" + }, + "borderActive": { + "dark": "cyan", + "light": "blueLight" + }, + "borderSubtle": { + "dark": "bg3", + "light": "lbg2" + }, + "diffAdded": { + "dark": "diffGreen", + "light": "greenLight" + }, + "diffRemoved": { + "dark": "diffRed", + "light": "redLight" + }, + "diffContext": { + "dark": "fg3", + "light": "lfg3" + }, + "diffHunkHeader": { + "dark": "blue", + "light": "blueLight" + }, + "diffHighlightAdded": { + "dark": "#7dffaa", + "light": "greenLight" + }, + "diffHighlightRemoved": { + "dark": "#ff9999", + "light": "redLight" + }, + "diffAddedBg": { + "dark": "diffGreenBg", + "light": "#defbe6" + }, + "diffRemovedBg": { + "dark": "diffRedBg", + "light": "#fff1f1" + }, + "diffContextBg": { + "dark": "bg1", + "light": "lbg1" + }, + "diffLineNumber": { + "dark": "fg3", + "light": "lfg3" + }, + "diffAddedLineNumberBg": { + "dark": "diffGreenBg", + "light": "#defbe6" + }, + 
"diffRemovedLineNumberBg": { + "dark": "diffRedBg", + "light": "#fff1f1" + }, + "markdownText": { + "dark": "fg1", + "light": "lfg1" + }, + "markdownHeading": { + "dark": "blueBright", + "light": "blueLight" + }, + "markdownLink": { + "dark": "blue", + "light": "blueLight" + }, + "markdownLinkText": { + "dark": "cyan", + "light": "cyanLight" + }, + "markdownCode": { + "dark": "green", + "light": "greenLight" + }, + "markdownBlockQuote": { + "dark": "fg3", + "light": "lfg3" + }, + "markdownEmph": { + "dark": "magenta", + "light": "magentaLight" + }, + "markdownStrong": { + "dark": "fg0", + "light": "lfg0" + }, + "markdownHorizontalRule": { + "dark": "bg4", + "light": "lbg3" + }, + "markdownListItem": { + "dark": "cyan", + "light": "cyanLight" + }, + "markdownListEnumeration": { + "dark": "cyan", + "light": "cyanLight" + }, + "markdownImage": { + "dark": "blue", + "light": "blueLight" + }, + "markdownImageText": { + "dark": "cyan", + "light": "cyanLight" + }, + "markdownCodeBlock": { + "dark": "fg2", + "light": "lfg2" + }, + "syntaxComment": { + "dark": "fg3", + "light": "lfg3" + }, + "syntaxKeyword": { + "dark": "magenta", + "light": "magentaLight" + }, + "syntaxFunction": { + "dark": "blueBright", + "light": "blueLight" + }, + "syntaxVariable": { + "dark": "white", + "light": "lfg1" + }, + "syntaxString": { + "dark": "green", + "light": "greenLight" + }, + "syntaxNumber": { + "dark": "orange", + "light": "yellowLight" + }, + "syntaxType": { + "dark": "yellow", + "light": "yellowLight" + }, + "syntaxOperator": { + "dark": "fg2", + "light": "lfg2" + }, + "syntaxPunctuation": { + "dark": "fg2", + "light": "lfg1" + } + } + }, + "catppuccin-frappe": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "frappeRosewater": "#f2d5cf", + "frappeFlamingo": "#eebebe", + "frappePink": "#f4b8e4", + "frappeMauve": "#ca9ee6", + "frappeRed": "#e78284", + "frappeMaroon": "#ea999c", + "frappePeach": "#ef9f76", + "frappeYellow": "#e5c890", + "frappeGreen": "#a6d189", + 
"frappeTeal": "#81c8be", + "frappeSky": "#99d1db", + "frappeSapphire": "#85c1dc", + "frappeBlue": "#8da4e2", + "frappeLavender": "#babbf1", + "frappeText": "#c6d0f5", + "frappeSubtext1": "#b5bfe2", + "frappeSubtext0": "#a5adce", + "frappeOverlay2": "#949cb8", + "frappeOverlay1": "#838ba7", + "frappeOverlay0": "#737994", + "frappeSurface2": "#626880", + "frappeSurface1": "#51576d", + "frappeSurface0": "#414559", + "frappeBase": "#303446", + "frappeMantle": "#292c3c", + "frappeCrust": "#232634" + }, + "theme": { + "primary": { + "dark": "frappeBlue", + "light": "frappeBlue" + }, + "secondary": { + "dark": "frappeMauve", + "light": "frappeMauve" + }, + "accent": { + "dark": "frappePink", + "light": "frappePink" + }, + "error": { + "dark": "frappeRed", + "light": "frappeRed" + }, + "warning": { + "dark": "frappeYellow", + "light": "frappeYellow" + }, + "success": { + "dark": "frappeGreen", + "light": "frappeGreen" + }, + "info": { + "dark": "frappeTeal", + "light": "frappeTeal" + }, + "text": { + "dark": "frappeText", + "light": "frappeText" + }, + "textMuted": { + "dark": "frappeSubtext1", + "light": "frappeSubtext1" + }, + "background": { + "dark": "frappeBase", + "light": "frappeBase" + }, + "backgroundPanel": { + "dark": "frappeMantle", + "light": "frappeMantle" + }, + "backgroundElement": { + "dark": "frappeCrust", + "light": "frappeCrust" + }, + "border": { + "dark": "frappeSurface0", + "light": "frappeSurface0" + }, + "borderActive": { + "dark": "frappeSurface1", + "light": "frappeSurface1" + }, + "borderSubtle": { + "dark": "frappeSurface2", + "light": "frappeSurface2" + }, + "diffAdded": { + "dark": "frappeGreen", + "light": "frappeGreen" + }, + "diffRemoved": { + "dark": "frappeRed", + "light": "frappeRed" + }, + "diffContext": { + "dark": "frappeOverlay2", + "light": "frappeOverlay2" + }, + "diffHunkHeader": { + "dark": "frappePeach", + "light": "frappePeach" + }, + "diffHighlightAdded": { + "dark": "frappeGreen", + "light": "frappeGreen" + }, + 
"diffHighlightRemoved": { + "dark": "frappeRed", + "light": "frappeRed" + }, + "diffAddedBg": { + "dark": "#29342b", + "light": "#29342b" + }, + "diffRemovedBg": { + "dark": "#3a2a31", + "light": "#3a2a31" + }, + "diffContextBg": { + "dark": "frappeMantle", + "light": "frappeMantle" + }, + "diffLineNumber": { + "dark": "frappeSurface1", + "light": "frappeSurface1" + }, + "diffAddedLineNumberBg": { + "dark": "#223025", + "light": "#223025" + }, + "diffRemovedLineNumberBg": { + "dark": "#2f242b", + "light": "#2f242b" + }, + "markdownText": { + "dark": "frappeText", + "light": "frappeText" + }, + "markdownHeading": { + "dark": "frappeMauve", + "light": "frappeMauve" + }, + "markdownLink": { + "dark": "frappeBlue", + "light": "frappeBlue" + }, + "markdownLinkText": { + "dark": "frappeSky", + "light": "frappeSky" + }, + "markdownCode": { + "dark": "frappeGreen", + "light": "frappeGreen" + }, + "markdownBlockQuote": { + "dark": "frappeYellow", + "light": "frappeYellow" + }, + "markdownEmph": { + "dark": "frappeYellow", + "light": "frappeYellow" + }, + "markdownStrong": { + "dark": "frappePeach", + "light": "frappePeach" + }, + "markdownHorizontalRule": { + "dark": "frappeSubtext0", + "light": "frappeSubtext0" + }, + "markdownListItem": { + "dark": "frappeBlue", + "light": "frappeBlue" + }, + "markdownListEnumeration": { + "dark": "frappeSky", + "light": "frappeSky" + }, + "markdownImage": { + "dark": "frappeBlue", + "light": "frappeBlue" + }, + "markdownImageText": { + "dark": "frappeSky", + "light": "frappeSky" + }, + "markdownCodeBlock": { + "dark": "frappeText", + "light": "frappeText" + }, + "syntaxComment": { + "dark": "frappeOverlay2", + "light": "frappeOverlay2" + }, + "syntaxKeyword": { + "dark": "frappeMauve", + "light": "frappeMauve" + }, + "syntaxFunction": { + "dark": "frappeBlue", + "light": "frappeBlue" + }, + "syntaxVariable": { + "dark": "frappeRed", + "light": "frappeRed" + }, + "syntaxString": { + "dark": "frappeGreen", + "light": "frappeGreen" + }, + 
"syntaxNumber": { + "dark": "frappePeach", + "light": "frappePeach" + }, + "syntaxType": { + "dark": "frappeYellow", + "light": "frappeYellow" + }, + "syntaxOperator": { + "dark": "frappeSky", + "light": "frappeSky" + }, + "syntaxPunctuation": { + "dark": "frappeText", + "light": "frappeText" + } + } + }, + "catppuccin-macchiato": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "macRosewater": "#f4dbd6", + "macFlamingo": "#f0c6c6", + "macPink": "#f5bde6", + "macMauve": "#c6a0f6", + "macRed": "#ed8796", + "macMaroon": "#ee99a0", + "macPeach": "#f5a97f", + "macYellow": "#eed49f", + "macGreen": "#a6da95", + "macTeal": "#8bd5ca", + "macSky": "#91d7e3", + "macSapphire": "#7dc4e4", + "macBlue": "#8aadf4", + "macLavender": "#b7bdf8", + "macText": "#cad3f5", + "macSubtext1": "#b8c0e0", + "macSubtext0": "#a5adcb", + "macOverlay2": "#939ab7", + "macOverlay1": "#8087a2", + "macOverlay0": "#6e738d", + "macSurface2": "#5b6078", + "macSurface1": "#494d64", + "macSurface0": "#363a4f", + "macBase": "#24273a", + "macMantle": "#1e2030", + "macCrust": "#181926" + }, + "theme": { + "primary": { + "dark": "macBlue", + "light": "macBlue" + }, + "secondary": { + "dark": "macMauve", + "light": "macMauve" + }, + "accent": { + "dark": "macPink", + "light": "macPink" + }, + "error": { + "dark": "macRed", + "light": "macRed" + }, + "warning": { + "dark": "macYellow", + "light": "macYellow" + }, + "success": { + "dark": "macGreen", + "light": "macGreen" + }, + "info": { + "dark": "macTeal", + "light": "macTeal" + }, + "text": { + "dark": "macText", + "light": "macText" + }, + "textMuted": { + "dark": "macSubtext1", + "light": "macSubtext1" + }, + "background": { + "dark": "macBase", + "light": "macBase" + }, + "backgroundPanel": { + "dark": "macMantle", + "light": "macMantle" + }, + "backgroundElement": { + "dark": "macCrust", + "light": "macCrust" + }, + "border": { + "dark": "macSurface0", + "light": "macSurface0" + }, + "borderActive": { + "dark": "macSurface1", + "light": 
"macSurface1" + }, + "borderSubtle": { + "dark": "macSurface2", + "light": "macSurface2" + }, + "diffAdded": { + "dark": "macGreen", + "light": "macGreen" + }, + "diffRemoved": { + "dark": "macRed", + "light": "macRed" + }, + "diffContext": { + "dark": "macOverlay2", + "light": "macOverlay2" + }, + "diffHunkHeader": { + "dark": "macPeach", + "light": "macPeach" + }, + "diffHighlightAdded": { + "dark": "macGreen", + "light": "macGreen" + }, + "diffHighlightRemoved": { + "dark": "macRed", + "light": "macRed" + }, + "diffAddedBg": { + "dark": "#29342b", + "light": "#29342b" + }, + "diffRemovedBg": { + "dark": "#3a2a31", + "light": "#3a2a31" + }, + "diffContextBg": { + "dark": "macMantle", + "light": "macMantle" + }, + "diffLineNumber": { + "dark": "macSurface1", + "light": "macSurface1" + }, + "diffAddedLineNumberBg": { + "dark": "#223025", + "light": "#223025" + }, + "diffRemovedLineNumberBg": { + "dark": "#2f242b", + "light": "#2f242b" + }, + "markdownText": { + "dark": "macText", + "light": "macText" + }, + "markdownHeading": { + "dark": "macMauve", + "light": "macMauve" + }, + "markdownLink": { + "dark": "macBlue", + "light": "macBlue" + }, + "markdownLinkText": { + "dark": "macSky", + "light": "macSky" + }, + "markdownCode": { + "dark": "macGreen", + "light": "macGreen" + }, + "markdownBlockQuote": { + "dark": "macYellow", + "light": "macYellow" + }, + "markdownEmph": { + "dark": "macYellow", + "light": "macYellow" + }, + "markdownStrong": { + "dark": "macPeach", + "light": "macPeach" + }, + "markdownHorizontalRule": { + "dark": "macSubtext0", + "light": "macSubtext0" + }, + "markdownListItem": { + "dark": "macBlue", + "light": "macBlue" + }, + "markdownListEnumeration": { + "dark": "macSky", + "light": "macSky" + }, + "markdownImage": { + "dark": "macBlue", + "light": "macBlue" + }, + "markdownImageText": { + "dark": "macSky", + "light": "macSky" + }, + "markdownCodeBlock": { + "dark": "macText", + "light": "macText" + }, + "syntaxComment": { + "dark": 
"macOverlay2", + "light": "macOverlay2" + }, + "syntaxKeyword": { + "dark": "macMauve", + "light": "macMauve" + }, + "syntaxFunction": { + "dark": "macBlue", + "light": "macBlue" + }, + "syntaxVariable": { + "dark": "macRed", + "light": "macRed" + }, + "syntaxString": { + "dark": "macGreen", + "light": "macGreen" + }, + "syntaxNumber": { + "dark": "macPeach", + "light": "macPeach" + }, + "syntaxType": { + "dark": "macYellow", + "light": "macYellow" + }, + "syntaxOperator": { + "dark": "macSky", + "light": "macSky" + }, + "syntaxPunctuation": { + "dark": "macText", + "light": "macText" + } + } + }, + "catppuccin": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "lightRosewater": "#dc8a78", + "lightFlamingo": "#dd7878", + "lightPink": "#ea76cb", + "lightMauve": "#8839ef", + "lightRed": "#d20f39", + "lightMaroon": "#e64553", + "lightPeach": "#fe640b", + "lightYellow": "#df8e1d", + "lightGreen": "#40a02b", + "lightTeal": "#179299", + "lightSky": "#04a5e5", + "lightSapphire": "#209fb5", + "lightBlue": "#1e66f5", + "lightLavender": "#7287fd", + "lightText": "#4c4f69", + "lightSubtext1": "#5c5f77", + "lightSubtext0": "#6c6f85", + "lightOverlay2": "#7c7f93", + "lightOverlay1": "#8c8fa1", + "lightOverlay0": "#9ca0b0", + "lightSurface2": "#acb0be", + "lightSurface1": "#bcc0cc", + "lightSurface0": "#ccd0da", + "lightBase": "#eff1f5", + "lightMantle": "#e6e9ef", + "lightCrust": "#dce0e8", + "darkRosewater": "#f5e0dc", + "darkFlamingo": "#f2cdcd", + "darkPink": "#f5c2e7", + "darkMauve": "#cba6f7", + "darkRed": "#f38ba8", + "darkMaroon": "#eba0ac", + "darkPeach": "#fab387", + "darkYellow": "#f9e2af", + "darkGreen": "#a6e3a1", + "darkTeal": "#94e2d5", + "darkSky": "#89dceb", + "darkSapphire": "#74c7ec", + "darkBlue": "#89b4fa", + "darkLavender": "#b4befe", + "darkText": "#cdd6f4", + "darkSubtext1": "#bac2de", + "darkSubtext0": "#a6adc8", + "darkOverlay2": "#9399b2", + "darkOverlay1": "#7f849c", + "darkOverlay0": "#6c7086", + "darkSurface2": "#585b70", + 
"darkSurface1": "#45475a", + "darkSurface0": "#313244", + "darkBase": "#1e1e2e", + "darkMantle": "#181825", + "darkCrust": "#11111b" + }, + "theme": { + "primary": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "secondary": { + "dark": "darkMauve", + "light": "lightMauve" + }, + "accent": { + "dark": "darkPink", + "light": "lightPink" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkTeal", + "light": "lightTeal" + }, + "text": { + "dark": "darkText", + "light": "lightText" + }, + "textMuted": { + "dark": "darkSubtext1", + "light": "lightSubtext1" + }, + "background": { + "dark": "darkBase", + "light": "lightBase" + }, + "backgroundPanel": { + "dark": "darkMantle", + "light": "lightMantle" + }, + "backgroundElement": { + "dark": "darkCrust", + "light": "lightCrust" + }, + "border": { + "dark": "darkSurface0", + "light": "lightSurface0" + }, + "borderActive": { + "dark": "darkSurface1", + "light": "lightSurface1" + }, + "borderSubtle": { + "dark": "darkSurface2", + "light": "lightSurface2" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkOverlay2", + "light": "lightOverlay2" + }, + "diffHunkHeader": { + "dark": "darkPeach", + "light": "lightPeach" + }, + "diffHighlightAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffHighlightRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffAddedBg": { + "dark": "#24312b", + "light": "#d6f0d9" + }, + "diffRemovedBg": { + "dark": "#3c2a32", + "light": "#f6dfe2" + }, + "diffContextBg": { + "dark": "darkMantle", + "light": "lightMantle" + }, + "diffLineNumber": { + "dark": "darkSurface1", + "light": "lightSurface1" + }, + "diffAddedLineNumberBg": { + "dark": "#1e2a25", + "light": "#c9e3cb" + 
}, + "diffRemovedLineNumberBg": { + "dark": "#32232a", + "light": "#e9d3d6" + }, + "markdownText": { + "dark": "darkText", + "light": "lightText" + }, + "markdownHeading": { + "dark": "darkMauve", + "light": "lightMauve" + }, + "markdownLink": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownLinkText": { + "dark": "darkSky", + "light": "lightSky" + }, + "markdownCode": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkPeach", + "light": "lightPeach" + }, + "markdownHorizontalRule": { + "dark": "darkSubtext0", + "light": "lightSubtext0" + }, + "markdownListItem": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownListEnumeration": { + "dark": "darkSky", + "light": "lightSky" + }, + "markdownImage": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownImageText": { + "dark": "darkSky", + "light": "lightSky" + }, + "markdownCodeBlock": { + "dark": "darkText", + "light": "lightText" + }, + "syntaxComment": { + "dark": "darkOverlay2", + "light": "lightOverlay2" + }, + "syntaxKeyword": { + "dark": "darkMauve", + "light": "lightMauve" + }, + "syntaxFunction": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkPeach", + "light": "lightPeach" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkSky", + "light": "lightSky" + }, + "syntaxPunctuation": { + "dark": "darkText", + "light": "lightText" + } + } + }, + "cobalt2": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background": "#193549", + "backgroundAlt": "#122738", + "backgroundPanel": "#1f4662", + "foreground": "#ffffff", + 
"foregroundMuted": "#adb7c9", + "yellow": "#ffc600", + "yellowBright": "#ffe14c", + "orange": "#ff9d00", + "orangeBright": "#ffb454", + "mint": "#2affdf", + "mintBright": "#7efff5", + "blue": "#0088ff", + "blueBright": "#5cb7ff", + "pink": "#ff628c", + "pinkBright": "#ff86a5", + "green": "#9eff80", + "greenBright": "#b9ff9f", + "purple": "#9a5feb", + "purpleBright": "#b88cfd", + "red": "#ff0088", + "redBright": "#ff5fb3" + }, + "theme": { + "primary": { + "dark": "blue", + "light": "#0066cc" + }, + "secondary": { + "dark": "purple", + "light": "#7c4dff" + }, + "accent": { + "dark": "mint", + "light": "#00acc1" + }, + "error": { + "dark": "red", + "light": "#e91e63" + }, + "warning": { + "dark": "yellow", + "light": "#ff9800" + }, + "success": { + "dark": "green", + "light": "#4caf50" + }, + "info": { + "dark": "orange", + "light": "#ff5722" + }, + "text": { + "dark": "foreground", + "light": "#193549" + }, + "textMuted": { + "dark": "foregroundMuted", + "light": "#5c6b7d" + }, + "background": { + "dark": "#193549", + "light": "#ffffff" + }, + "backgroundPanel": { + "dark": "#122738", + "light": "#f5f7fa" + }, + "backgroundElement": { + "dark": "#1f4662", + "light": "#e8ecf1" + }, + "border": { + "dark": "#1f4662", + "light": "#d3dae3" + }, + "borderActive": { + "dark": "blue", + "light": "#0066cc" + }, + "borderSubtle": { + "dark": "#0e1e2e", + "light": "#e8ecf1" + }, + "diffAdded": { + "dark": "green", + "light": "#4caf50" + }, + "diffRemoved": { + "dark": "red", + "light": "#e91e63" + }, + "diffContext": { + "dark": "foregroundMuted", + "light": "#5c6b7d" + }, + "diffHunkHeader": { + "dark": "mint", + "light": "#00acc1" + }, + "diffHighlightAdded": { + "dark": "greenBright", + "light": "#4caf50" + }, + "diffHighlightRemoved": { + "dark": "redBright", + "light": "#e91e63" + }, + "diffAddedBg": { + "dark": "#1a3a2a", + "light": "#e8f5e9" + }, + "diffRemovedBg": { + "dark": "#3a1a2a", + "light": "#ffebee" + }, + "diffContextBg": { + "dark": "#122738", + "light": 
"#f5f7fa" + }, + "diffLineNumber": { + "dark": "#2d5a7b", + "light": "#b0bec5" + }, + "diffAddedLineNumberBg": { + "dark": "#1a3a2a", + "light": "#e8f5e9" + }, + "diffRemovedLineNumberBg": { + "dark": "#3a1a2a", + "light": "#ffebee" + }, + "markdownText": { + "dark": "foreground", + "light": "#193549" + }, + "markdownHeading": { + "dark": "yellow", + "light": "#ff9800" + }, + "markdownLink": { + "dark": "blue", + "light": "#0066cc" + }, + "markdownLinkText": { + "dark": "mint", + "light": "#00acc1" + }, + "markdownCode": { + "dark": "green", + "light": "#4caf50" + }, + "markdownBlockQuote": { + "dark": "foregroundMuted", + "light": "#5c6b7d" + }, + "markdownEmph": { + "dark": "orange", + "light": "#ff5722" + }, + "markdownStrong": { + "dark": "pink", + "light": "#e91e63" + }, + "markdownHorizontalRule": { + "dark": "#2d5a7b", + "light": "#d3dae3" + }, + "markdownListItem": { + "dark": "blue", + "light": "#0066cc" + }, + "markdownListEnumeration": { + "dark": "mint", + "light": "#00acc1" + }, + "markdownImage": { + "dark": "blue", + "light": "#0066cc" + }, + "markdownImageText": { + "dark": "mint", + "light": "#00acc1" + }, + "markdownCodeBlock": { + "dark": "foreground", + "light": "#193549" + }, + "syntaxComment": { + "dark": "#0088ff", + "light": "#5c6b7d" + }, + "syntaxKeyword": { + "dark": "orange", + "light": "#ff5722" + }, + "syntaxFunction": { + "dark": "yellow", + "light": "#ff9800" + }, + "syntaxVariable": { + "dark": "foreground", + "light": "#193549" + }, + "syntaxString": { + "dark": "green", + "light": "#4caf50" + }, + "syntaxNumber": { + "dark": "pink", + "light": "#e91e63" + }, + "syntaxType": { + "dark": "mint", + "light": "#00acc1" + }, + "syntaxOperator": { + "dark": "orange", + "light": "#ff5722" + }, + "syntaxPunctuation": { + "dark": "foreground", + "light": "#193549" + } + } + }, + "cursor": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#181818", + "darkPanel": "#141414", + "darkElement": "#262626", + "darkFg": 
"#e4e4e4", + "darkMuted": "#e4e4e45e", + "darkBorder": "#e4e4e413", + "darkBorderActive": "#e4e4e426", + "darkCyan": "#88c0d0", + "darkBlue": "#81a1c1", + "darkGreen": "#3fa266", + "darkGreenBright": "#70b489", + "darkRed": "#e34671", + "darkRedBright": "#fc6b83", + "darkYellow": "#f1b467", + "darkOrange": "#d2943e", + "darkPink": "#E394DC", + "darkPurple": "#AAA0FA", + "darkTeal": "#82D2CE", + "darkSyntaxYellow": "#F8C762", + "darkSyntaxOrange": "#EFB080", + "darkSyntaxGreen": "#A8CC7C", + "darkSyntaxBlue": "#87C3FF", + "lightBg": "#fcfcfc", + "lightPanel": "#f3f3f3", + "lightElement": "#ededed", + "lightFg": "#141414", + "lightMuted": "#141414ad", + "lightBorder": "#14141413", + "lightBorderActive": "#14141426", + "lightTeal": "#6f9ba6", + "lightBlue": "#3c7cab", + "lightBlueDark": "#206595", + "lightGreen": "#1f8a65", + "lightGreenBright": "#55a583", + "lightRed": "#cf2d56", + "lightRedBright": "#e75e78", + "lightOrange": "#db704b", + "lightYellow": "#c08532", + "lightPurple": "#9e94d5", + "lightPurpleDark": "#6049b3", + "lightPink": "#b8448b", + "lightMagenta": "#b3003f" + }, + "theme": { + "primary": { + "dark": "darkCyan", + "light": "lightTeal" + }, + "secondary": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "accent": { + "dark": "darkCyan", + "light": "lightTeal" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellow", + "light": "lightOrange" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "text": { + "dark": "darkFg", + "light": "lightFg" + }, + "textMuted": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "background": { + "dark": "darkBg", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "darkPanel", + "light": "lightPanel" + }, + "backgroundElement": { + "dark": "darkElement", + "light": "lightElement" + }, + "border": { + "dark": "darkBorder", + "light": "lightBorder" + }, + "borderActive": { 
+ "dark": "darkCyan", + "light": "lightTeal" + }, + "borderSubtle": { + "dark": "#0f0f0f", + "light": "#e0e0e0" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "diffHunkHeader": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "diffHighlightAdded": { + "dark": "darkGreenBright", + "light": "lightGreenBright" + }, + "diffHighlightRemoved": { + "dark": "darkRedBright", + "light": "lightRedBright" + }, + "diffAddedBg": { + "dark": "#3fa26633", + "light": "#1f8a651f" + }, + "diffRemovedBg": { + "dark": "#b8004933", + "light": "#cf2d5614" + }, + "diffContextBg": { + "dark": "darkPanel", + "light": "lightPanel" + }, + "diffLineNumber": { + "dark": "#e4e4e442", + "light": "#1414147a" + }, + "diffAddedLineNumberBg": { + "dark": "#3fa26633", + "light": "#1f8a651f" + }, + "diffRemovedLineNumberBg": { + "dark": "#b8004933", + "light": "#cf2d5614" + }, + "markdownText": { + "dark": "darkFg", + "light": "lightFg" + }, + "markdownHeading": { + "dark": "darkPurple", + "light": "lightBlueDark" + }, + "markdownLink": { + "dark": "darkTeal", + "light": "lightBlueDark" + }, + "markdownLinkText": { + "dark": "darkBlue", + "light": "lightMuted" + }, + "markdownCode": { + "dark": "darkPink", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "markdownEmph": { + "dark": "darkTeal", + "light": "lightFg" + }, + "markdownStrong": { + "dark": "darkSyntaxYellow", + "light": "lightFg" + }, + "markdownHorizontalRule": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "markdownListItem": { + "dark": "darkFg", + "light": "lightFg" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightMuted" + }, + "markdownImage": { + "dark": "darkCyan", + "light": "lightBlueDark" + }, + "markdownImageText": { + "dark": "darkBlue", + "light": "lightMuted" + }, 
+ "markdownCodeBlock": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxComment": { + "dark": "darkMuted", + "light": "lightMuted" + }, + "syntaxKeyword": { + "dark": "darkTeal", + "light": "lightMagenta" + }, + "syntaxFunction": { + "dark": "darkSyntaxOrange", + "light": "lightOrange" + }, + "syntaxVariable": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxString": { + "dark": "darkPink", + "light": "lightPurple" + }, + "syntaxNumber": { + "dark": "darkSyntaxYellow", + "light": "lightPink" + }, + "syntaxType": { + "dark": "darkSyntaxOrange", + "light": "lightBlueDark" + }, + "syntaxOperator": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxPunctuation": { + "dark": "darkFg", + "light": "lightFg" + } + } + }, + "dracula": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background": "#282a36", + "currentLine": "#44475a", + "selection": "#44475a", + "foreground": "#f8f8f2", + "comment": "#6272a4", + "cyan": "#8be9fd", + "green": "#50fa7b", + "orange": "#ffb86c", + "pink": "#ff79c6", + "purple": "#bd93f9", + "red": "#ff5555", + "yellow": "#f1fa8c" + }, + "theme": { + "primary": { + "dark": "purple", + "light": "purple" + }, + "secondary": { + "dark": "pink", + "light": "pink" + }, + "accent": { + "dark": "cyan", + "light": "cyan" + }, + "error": { + "dark": "red", + "light": "red" + }, + "warning": { + "dark": "yellow", + "light": "yellow" + }, + "success": { + "dark": "green", + "light": "green" + }, + "info": { + "dark": "orange", + "light": "orange" + }, + "text": { + "dark": "foreground", + "light": "#282a36" + }, + "textMuted": { + "dark": "comment", + "light": "#6272a4" + }, + "background": { + "dark": "#282a36", + "light": "#f8f8f2" + }, + "backgroundPanel": { + "dark": "#21222c", + "light": "#e8e8e2" + }, + "backgroundElement": { + "dark": "currentLine", + "light": "#d8d8d2" + }, + "border": { + "dark": "currentLine", + "light": "#c8c8c2" + }, + "borderActive": { + "dark": "purple", + "light": "purple" + }, + 
"borderSubtle": { + "dark": "#191a21", + "light": "#e0e0e0" + }, + "diffAdded": { + "dark": "green", + "light": "green" + }, + "diffRemoved": { + "dark": "red", + "light": "red" + }, + "diffContext": { + "dark": "comment", + "light": "#6272a4" + }, + "diffHunkHeader": { + "dark": "comment", + "light": "#6272a4" + }, + "diffHighlightAdded": { + "dark": "green", + "light": "green" + }, + "diffHighlightRemoved": { + "dark": "red", + "light": "red" + }, + "diffAddedBg": { + "dark": "#1a3a1a", + "light": "#e0ffe0" + }, + "diffRemovedBg": { + "dark": "#3a1a1a", + "light": "#ffe0e0" + }, + "diffContextBg": { + "dark": "#21222c", + "light": "#e8e8e2" + }, + "diffLineNumber": { + "dark": "currentLine", + "light": "#c8c8c2" + }, + "diffAddedLineNumberBg": { + "dark": "#1a3a1a", + "light": "#e0ffe0" + }, + "diffRemovedLineNumberBg": { + "dark": "#3a1a1a", + "light": "#ffe0e0" + }, + "markdownText": { + "dark": "foreground", + "light": "#282a36" + }, + "markdownHeading": { + "dark": "purple", + "light": "purple" + }, + "markdownLink": { + "dark": "cyan", + "light": "cyan" + }, + "markdownLinkText": { + "dark": "pink", + "light": "pink" + }, + "markdownCode": { + "dark": "green", + "light": "green" + }, + "markdownBlockQuote": { + "dark": "comment", + "light": "#6272a4" + }, + "markdownEmph": { + "dark": "yellow", + "light": "yellow" + }, + "markdownStrong": { + "dark": "orange", + "light": "orange" + }, + "markdownHorizontalRule": { + "dark": "comment", + "light": "#6272a4" + }, + "markdownListItem": { + "dark": "purple", + "light": "purple" + }, + "markdownListEnumeration": { + "dark": "cyan", + "light": "cyan" + }, + "markdownImage": { + "dark": "cyan", + "light": "cyan" + }, + "markdownImageText": { + "dark": "pink", + "light": "pink" + }, + "markdownCodeBlock": { + "dark": "foreground", + "light": "#282a36" + }, + "syntaxComment": { + "dark": "comment", + "light": "#6272a4" + }, + "syntaxKeyword": { + "dark": "pink", + "light": "pink" + }, + "syntaxFunction": { + "dark": 
"green", + "light": "green" + }, + "syntaxVariable": { + "dark": "foreground", + "light": "#282a36" + }, + "syntaxString": { + "dark": "yellow", + "light": "yellow" + }, + "syntaxNumber": { + "dark": "purple", + "light": "purple" + }, + "syntaxType": { + "dark": "cyan", + "light": "cyan" + }, + "syntaxOperator": { + "dark": "pink", + "light": "pink" + }, + "syntaxPunctuation": { + "dark": "foreground", + "light": "#282a36" + } + } + }, + "everforest": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkStep1": "#2d353b", + "darkStep2": "#333c43", + "darkStep3": "#343f44", + "darkStep4": "#3d484d", + "darkStep5": "#475258", + "darkStep6": "#7a8478", + "darkStep7": "#859289", + "darkStep8": "#9da9a0", + "darkStep9": "#a7c080", + "darkStep10": "#83c092", + "darkStep11": "#7a8478", + "darkStep12": "#d3c6aa", + "darkRed": "#e67e80", + "darkOrange": "#e69875", + "darkGreen": "#a7c080", + "darkCyan": "#83c092", + "darkYellow": "#dbbc7f", + "lightStep1": "#fdf6e3", + "lightStep2": "#efebd4", + "lightStep3": "#f4f0d9", + "lightStep4": "#efebd4", + "lightStep5": "#e6e2cc", + "lightStep6": "#a6b0a0", + "lightStep7": "#939f91", + "lightStep8": "#829181", + "lightStep9": "#8da101", + "lightStep10": "#35a77c", + "lightStep11": "#a6b0a0", + "lightStep12": "#5c6a72", + "lightRed": "#f85552", + "lightOrange": "#f57d26", + "lightGreen": "#8da101", + "lightCyan": "#35a77c", + "lightYellow": "#dfa000" + }, + "theme": { + "primary": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "secondary": { + "dark": "#7fbbb3", + "light": "#3a94c5" + }, + "accent": { + "dark": "#d699b6", + "light": "#df69ba" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "text": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "textMuted": { + "dark": "darkStep11", + 
"light": "lightStep11" + }, + "background": { + "dark": "darkStep1", + "light": "lightStep1" + }, + "backgroundPanel": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "backgroundElement": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "border": { + "dark": "darkStep7", + "light": "lightStep7" + }, + "borderActive": { + "dark": "darkStep8", + "light": "lightStep8" + }, + "borderSubtle": { + "dark": "darkStep6", + "light": "lightStep6" + }, + "diffAdded": { + "dark": "#4fd6be", + "light": "#1e725c" + }, + "diffRemoved": { + "dark": "#c53b53", + "light": "#c53b53" + }, + "diffContext": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHunkHeader": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHighlightAdded": { + "dark": "#b8db87", + "light": "#4db380" + }, + "diffHighlightRemoved": { + "dark": "#e26a75", + "light": "#f52a65" + }, + "diffAddedBg": { + "dark": "#20303b", + "light": "#d5e5d5" + }, + "diffRemovedBg": { + "dark": "#37222c", + "light": "#f7d8db" + }, + "diffContextBg": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "diffLineNumber": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "diffAddedLineNumberBg": { + "dark": "#1b2b34", + "light": "#c5d5c5" + }, + "diffRemovedLineNumberBg": { + "dark": "#2d1f26", + "light": "#e7c8cb" + }, + "markdownText": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "markdownHeading": { + "dark": "#d699b6", + "light": "#df69ba" + }, + "markdownLink": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkStep11", + "light": "lightStep11" + }, + 
"markdownListItem": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "syntaxComment": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "syntaxKeyword": { + "dark": "#d699b6", + "light": "#df69ba" + }, + "syntaxFunction": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkStep12", + "light": "lightStep12" + } + } + }, + "flexoki": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "black": "#100F0F", + "base950": "#1C1B1A", + "base900": "#282726", + "base850": "#343331", + "base800": "#403E3C", + "base700": "#575653", + "base600": "#6F6E69", + "base500": "#878580", + "base300": "#B7B5AC", + "base200": "#CECDC3", + "base150": "#DAD8CE", + "base100": "#E6E4D9", + "base50": "#F2F0E5", + "paper": "#FFFCF0", + "red400": "#D14D41", + "red600": "#AF3029", + "orange400": "#DA702C", + "orange600": "#BC5215", + "yellow400": "#D0A215", + "yellow600": "#AD8301", + "green400": "#879A39", + "green600": "#66800B", + "cyan400": "#3AA99F", + "cyan600": "#24837B", + "blue400": "#4385BE", + "blue600": "#205EA6", + "purple400": "#8B7EC8", + "purple600": "#5E409D", + "magenta400": "#CE5D97", + "magenta600": "#A02F6F" + }, + "theme": { + "primary": { + "dark": "orange400", + "light": "blue600" + }, + "secondary": { + "dark": "blue400", + "light": "purple600" + }, 
+ "accent": { + "dark": "purple400", + "light": "orange600" + }, + "error": { + "dark": "red400", + "light": "red600" + }, + "warning": { + "dark": "orange400", + "light": "orange600" + }, + "success": { + "dark": "green400", + "light": "green600" + }, + "info": { + "dark": "cyan400", + "light": "cyan600" + }, + "text": { + "dark": "base200", + "light": "black" + }, + "textMuted": { + "dark": "base600", + "light": "base600" + }, + "background": { + "dark": "black", + "light": "paper" + }, + "backgroundPanel": { + "dark": "base950", + "light": "base50" + }, + "backgroundElement": { + "dark": "base900", + "light": "base100" + }, + "border": { + "dark": "base700", + "light": "base300" + }, + "borderActive": { + "dark": "base600", + "light": "base500" + }, + "borderSubtle": { + "dark": "base800", + "light": "base200" + }, + "diffAdded": { + "dark": "green400", + "light": "green600" + }, + "diffRemoved": { + "dark": "red400", + "light": "red600" + }, + "diffContext": { + "dark": "base600", + "light": "base600" + }, + "diffHunkHeader": { + "dark": "blue400", + "light": "blue600" + }, + "diffHighlightAdded": { + "dark": "green400", + "light": "green600" + }, + "diffHighlightRemoved": { + "dark": "red400", + "light": "red600" + }, + "diffAddedBg": { + "dark": "#1A2D1A", + "light": "#D5E5D5" + }, + "diffRemovedBg": { + "dark": "#2D1A1A", + "light": "#F7D8DB" + }, + "diffContextBg": { + "dark": "base950", + "light": "base50" + }, + "diffLineNumber": { + "dark": "base600", + "light": "base600" + }, + "diffAddedLineNumberBg": { + "dark": "#152515", + "light": "#C5D5C5" + }, + "diffRemovedLineNumberBg": { + "dark": "#251515", + "light": "#E7C8CB" + }, + "markdownText": { + "dark": "base200", + "light": "black" + }, + "markdownHeading": { + "dark": "purple400", + "light": "purple600" + }, + "markdownLink": { + "dark": "blue400", + "light": "blue600" + }, + "markdownLinkText": { + "dark": "cyan400", + "light": "cyan600" + }, + "markdownCode": { + "dark": "cyan400", + "light": 
"cyan600" + }, + "markdownBlockQuote": { + "dark": "yellow400", + "light": "yellow600" + }, + "markdownEmph": { + "dark": "yellow400", + "light": "yellow600" + }, + "markdownStrong": { + "dark": "orange400", + "light": "orange600" + }, + "markdownHorizontalRule": { + "dark": "base600", + "light": "base600" + }, + "markdownListItem": { + "dark": "orange400", + "light": "orange600" + }, + "markdownListEnumeration": { + "dark": "cyan400", + "light": "cyan600" + }, + "markdownImage": { + "dark": "magenta400", + "light": "magenta600" + }, + "markdownImageText": { + "dark": "cyan400", + "light": "cyan600" + }, + "markdownCodeBlock": { + "dark": "base200", + "light": "black" + }, + "syntaxComment": { + "dark": "base600", + "light": "base600" + }, + "syntaxKeyword": { + "dark": "green400", + "light": "green600" + }, + "syntaxFunction": { + "dark": "orange400", + "light": "orange600" + }, + "syntaxVariable": { + "dark": "blue400", + "light": "blue600" + }, + "syntaxString": { + "dark": "cyan400", + "light": "cyan600" + }, + "syntaxNumber": { + "dark": "purple400", + "light": "purple600" + }, + "syntaxType": { + "dark": "yellow400", + "light": "yellow600" + }, + "syntaxOperator": { + "dark": "base300", + "light": "base600" + }, + "syntaxPunctuation": { + "dark": "base300", + "light": "base600" + } + } + }, + "github": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#0d1117", + "darkBgAlt": "#010409", + "darkBgPanel": "#161b22", + "darkFg": "#c9d1d9", + "darkFgMuted": "#8b949e", + "darkBlue": "#58a6ff", + "darkGreen": "#3fb950", + "darkRed": "#f85149", + "darkOrange": "#d29922", + "darkPurple": "#bc8cff", + "darkPink": "#ff7b72", + "darkYellow": "#e3b341", + "darkCyan": "#39c5cf", + "lightBg": "#ffffff", + "lightBgAlt": "#f6f8fa", + "lightBgPanel": "#f0f3f6", + "lightFg": "#24292f", + "lightFgMuted": "#57606a", + "lightBlue": "#0969da", + "lightGreen": "#1a7f37", + "lightRed": "#cf222e", + "lightOrange": "#bc4c00", + "lightPurple": "#8250df", + 
"lightPink": "#bf3989", + "lightYellow": "#9a6700", + "lightCyan": "#1b7c83" + }, + "theme": { + "primary": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "secondary": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "accent": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "text": { + "dark": "darkFg", + "light": "lightFg" + }, + "textMuted": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "background": { + "dark": "darkBg", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "backgroundElement": { + "dark": "darkBgPanel", + "light": "lightBgPanel" + }, + "border": { + "dark": "#30363d", + "light": "#d0d7de" + }, + "borderActive": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "borderSubtle": { + "dark": "#21262d", + "light": "#d8dee4" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "diffHunkHeader": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "diffHighlightAdded": { + "dark": "#3fb950", + "light": "#1a7f37" + }, + "diffHighlightRemoved": { + "dark": "#f85149", + "light": "#cf222e" + }, + "diffAddedBg": { + "dark": "#033a16", + "light": "#dafbe1" + }, + "diffRemovedBg": { + "dark": "#67060c", + "light": "#ffebe9" + }, + "diffContextBg": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "diffLineNumber": { + "dark": "#484f58", + "light": "#afb8c1" + }, + "diffAddedLineNumberBg": { + "dark": "#033a16", + "light": "#dafbe1" + }, + "diffRemovedLineNumberBg": { + "dark": "#67060c", + "light": "#ffebe9" + }, + "markdownText": { + "dark": 
"darkFg", + "light": "lightFg" + }, + "markdownHeading": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownLink": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkPink", + "light": "lightPink" + }, + "markdownBlockQuote": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "#30363d", + "light": "#d0d7de" + }, + "markdownListItem": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxComment": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "syntaxKeyword": { + "dark": "darkPink", + "light": "lightRed" + }, + "syntaxFunction": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "syntaxVariable": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxString": { + "dark": "darkCyan", + "light": "lightBlue" + }, + "syntaxNumber": { + "dark": "darkBlue", + "light": "lightCyan" + }, + "syntaxType": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxOperator": { + "dark": "darkPink", + "light": "lightRed" + }, + "syntaxPunctuation": { + "dark": "darkFg", + "light": "lightFg" + } + } + }, + "gruvbox": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg0": "#282828", + "darkBg1": "#3c3836", + "darkBg2": "#504945", + "darkBg3": "#665c54", + "darkFg0": "#fbf1c7", + "darkFg1": "#ebdbb2", + "darkGray": "#928374", + "darkRed": "#cc241d", + "darkGreen": "#98971a", + "darkYellow": "#d79921", + "darkBlue": 
"#458588", + "darkPurple": "#b16286", + "darkAqua": "#689d6a", + "darkOrange": "#d65d0e", + "darkRedBright": "#fb4934", + "darkGreenBright": "#b8bb26", + "darkYellowBright": "#fabd2f", + "darkBlueBright": "#83a598", + "darkPurpleBright": "#d3869b", + "darkAquaBright": "#8ec07c", + "darkOrangeBright": "#fe8019", + "lightBg0": "#fbf1c7", + "lightBg1": "#ebdbb2", + "lightBg2": "#d5c4a1", + "lightBg3": "#bdae93", + "lightFg0": "#282828", + "lightFg1": "#3c3836", + "lightGray": "#7c6f64", + "lightRed": "#9d0006", + "lightGreen": "#79740e", + "lightYellow": "#b57614", + "lightBlue": "#076678", + "lightPurple": "#8f3f71", + "lightAqua": "#427b58", + "lightOrange": "#af3a03" + }, + "theme": { + "primary": { + "dark": "darkBlueBright", + "light": "lightBlue" + }, + "secondary": { + "dark": "darkPurpleBright", + "light": "lightPurple" + }, + "accent": { + "dark": "darkAquaBright", + "light": "lightAqua" + }, + "error": { + "dark": "darkRedBright", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrangeBright", + "light": "lightOrange" + }, + "success": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "info": { + "dark": "darkYellowBright", + "light": "lightYellow" + }, + "text": { + "dark": "darkFg1", + "light": "lightFg1" + }, + "textMuted": { + "dark": "darkGray", + "light": "lightGray" + }, + "background": { + "dark": "darkBg0", + "light": "lightBg0" + }, + "backgroundPanel": { + "dark": "darkBg1", + "light": "lightBg1" + }, + "backgroundElement": { + "dark": "darkBg2", + "light": "lightBg2" + }, + "border": { + "dark": "darkBg3", + "light": "lightBg3" + }, + "borderActive": { + "dark": "darkFg1", + "light": "lightFg1" + }, + "borderSubtle": { + "dark": "darkBg2", + "light": "lightBg2" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkGray", + "light": "lightGray" + }, + "diffHunkHeader": { + "dark": "darkAqua", + "light": 
"lightAqua" + }, + "diffHighlightAdded": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "diffHighlightRemoved": { + "dark": "darkRedBright", + "light": "lightRed" + }, + "diffAddedBg": { + "dark": "#32302f", + "light": "#dcd8a4" + }, + "diffRemovedBg": { + "dark": "#322929", + "light": "#e2c7c3" + }, + "diffContextBg": { + "dark": "darkBg1", + "light": "lightBg1" + }, + "diffLineNumber": { + "dark": "darkBg3", + "light": "lightBg3" + }, + "diffAddedLineNumberBg": { + "dark": "#2a2827", + "light": "#cec99e" + }, + "diffRemovedLineNumberBg": { + "dark": "#2a2222", + "light": "#d3bdb9" + }, + "markdownText": { + "dark": "darkFg1", + "light": "lightFg1" + }, + "markdownHeading": { + "dark": "darkBlueBright", + "light": "lightBlue" + }, + "markdownLink": { + "dark": "darkAquaBright", + "light": "lightAqua" + }, + "markdownLinkText": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "markdownCode": { + "dark": "darkYellowBright", + "light": "lightYellow" + }, + "markdownBlockQuote": { + "dark": "darkGray", + "light": "lightGray" + }, + "markdownEmph": { + "dark": "darkPurpleBright", + "light": "lightPurple" + }, + "markdownStrong": { + "dark": "darkOrangeBright", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkGray", + "light": "lightGray" + }, + "markdownListItem": { + "dark": "darkBlueBright", + "light": "lightBlue" + }, + "markdownListEnumeration": { + "dark": "darkAquaBright", + "light": "lightAqua" + }, + "markdownImage": { + "dark": "darkAquaBright", + "light": "lightAqua" + }, + "markdownImageText": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "markdownCodeBlock": { + "dark": "darkFg1", + "light": "lightFg1" + }, + "syntaxComment": { + "dark": "darkGray", + "light": "lightGray" + }, + "syntaxKeyword": { + "dark": "darkRedBright", + "light": "lightRed" + }, + "syntaxFunction": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "syntaxVariable": { + "dark": "darkBlueBright", + "light": 
"lightBlue" + }, + "syntaxString": { + "dark": "darkYellowBright", + "light": "lightYellow" + }, + "syntaxNumber": { + "dark": "darkPurpleBright", + "light": "lightPurple" + }, + "syntaxType": { + "dark": "darkAquaBright", + "light": "lightAqua" + }, + "syntaxOperator": { + "dark": "darkOrangeBright", + "light": "lightOrange" + }, + "syntaxPunctuation": { + "dark": "darkFg1", + "light": "lightFg1" + } + } + }, + "kanagawa": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "sumiInk0": "#1F1F28", + "sumiInk1": "#2A2A37", + "sumiInk2": "#363646", + "sumiInk3": "#54546D", + "fujiWhite": "#DCD7BA", + "oldWhite": "#C8C093", + "fujiGray": "#727169", + "oniViolet": "#957FB8", + "crystalBlue": "#7E9CD8", + "carpYellow": "#C38D9D", + "sakuraPink": "#D27E99", + "waveAqua": "#76946A", + "roninYellow": "#D7A657", + "dragonRed": "#E82424", + "lotusGreen": "#98BB6C", + "waveBlue": "#2D4F67", + "lightBg": "#F2E9DE", + "lightPaper": "#EAE4D7", + "lightText": "#54433A", + "lightGray": "#9E9389" + }, + "theme": { + "primary": { + "dark": "crystalBlue", + "light": "waveBlue" + }, + "secondary": { + "dark": "oniViolet", + "light": "oniViolet" + }, + "accent": { + "dark": "sakuraPink", + "light": "sakuraPink" + }, + "error": { + "dark": "dragonRed", + "light": "dragonRed" + }, + "warning": { + "dark": "roninYellow", + "light": "roninYellow" + }, + "success": { + "dark": "lotusGreen", + "light": "lotusGreen" + }, + "info": { + "dark": "waveAqua", + "light": "waveAqua" + }, + "text": { + "dark": "fujiWhite", + "light": "lightText" + }, + "textMuted": { + "dark": "fujiGray", + "light": "lightGray" + }, + "background": { + "dark": "sumiInk0", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "sumiInk1", + "light": "lightPaper" + }, + "backgroundElement": { + "dark": "sumiInk2", + "light": "#E3DCD2" + }, + "border": { + "dark": "sumiInk3", + "light": "#D4CBBF" + }, + "borderActive": { + "dark": "carpYellow", + "light": "carpYellow" + }, + "borderSubtle": { + "dark": 
"sumiInk2", + "light": "#DCD4C9" + }, + "diffAdded": { + "dark": "lotusGreen", + "light": "lotusGreen" + }, + "diffRemoved": { + "dark": "dragonRed", + "light": "dragonRed" + }, + "diffContext": { + "dark": "fujiGray", + "light": "lightGray" + }, + "diffHunkHeader": { + "dark": "waveBlue", + "light": "waveBlue" + }, + "diffHighlightAdded": { + "dark": "#A9D977", + "light": "#89AF5B" + }, + "diffHighlightRemoved": { + "dark": "#F24A4A", + "light": "#D61F1F" + }, + "diffAddedBg": { + "dark": "#252E25", + "light": "#EAF3E4" + }, + "diffRemovedBg": { + "dark": "#362020", + "light": "#FBE6E6" + }, + "diffContextBg": { + "dark": "sumiInk1", + "light": "lightPaper" + }, + "diffLineNumber": { + "dark": "sumiInk3", + "light": "#C7BEB4" + }, + "diffAddedLineNumberBg": { + "dark": "#202820", + "light": "#DDE8D6" + }, + "diffRemovedLineNumberBg": { + "dark": "#2D1C1C", + "light": "#F2DADA" + }, + "markdownText": { + "dark": "fujiWhite", + "light": "lightText" + }, + "markdownHeading": { + "dark": "oniViolet", + "light": "oniViolet" + }, + "markdownLink": { + "dark": "crystalBlue", + "light": "waveBlue" + }, + "markdownLinkText": { + "dark": "waveAqua", + "light": "waveAqua" + }, + "markdownCode": { + "dark": "lotusGreen", + "light": "lotusGreen" + }, + "markdownBlockQuote": { + "dark": "fujiGray", + "light": "lightGray" + }, + "markdownEmph": { + "dark": "carpYellow", + "light": "carpYellow" + }, + "markdownStrong": { + "dark": "roninYellow", + "light": "roninYellow" + }, + "markdownHorizontalRule": { + "dark": "fujiGray", + "light": "lightGray" + }, + "markdownListItem": { + "dark": "crystalBlue", + "light": "waveBlue" + }, + "markdownListEnumeration": { + "dark": "waveAqua", + "light": "waveAqua" + }, + "markdownImage": { + "dark": "crystalBlue", + "light": "waveBlue" + }, + "markdownImageText": { + "dark": "waveAqua", + "light": "waveAqua" + }, + "markdownCodeBlock": { + "dark": "fujiWhite", + "light": "lightText" + }, + "syntaxComment": { + "dark": "fujiGray", + "light": 
"lightGray" + }, + "syntaxKeyword": { + "dark": "oniViolet", + "light": "oniViolet" + }, + "syntaxFunction": { + "dark": "crystalBlue", + "light": "waveBlue" + }, + "syntaxVariable": { + "dark": "fujiWhite", + "light": "lightText" + }, + "syntaxString": { + "dark": "lotusGreen", + "light": "lotusGreen" + }, + "syntaxNumber": { + "dark": "roninYellow", + "light": "roninYellow" + }, + "syntaxType": { + "dark": "carpYellow", + "light": "carpYellow" + }, + "syntaxOperator": { + "dark": "sakuraPink", + "light": "sakuraPink" + }, + "syntaxPunctuation": { + "dark": "fujiWhite", + "light": "lightText" + } + } + }, + "lucent-orng": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkStep6": "#3c3c3c", + "darkStep11": "#808080", + "darkStep12": "#eeeeee", + "darkSecondary": "#EE7948", + "darkAccent": "#FFF7F1", + "darkRed": "#e06c75", + "darkOrange": "#EC5B2B", + "darkBlue": "#6ba1e6", + "darkCyan": "#56b6c2", + "darkYellow": "#e5c07b", + "darkPanelBg": "#2a1a1599", + "lightStep6": "#d4d4d4", + "lightStep11": "#8a8a8a", + "lightStep12": "#1a1a1a", + "lightSecondary": "#EE7948", + "lightAccent": "#c94d24", + "lightRed": "#d1383d", + "lightOrange": "#EC5B2B", + "lightBlue": "#0062d1", + "lightCyan": "#318795", + "lightYellow": "#b0851f", + "lightPanelBg": "#fff5f099" + }, + "theme": { + "primary": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "secondary": { + "dark": "darkSecondary", + "light": "lightSecondary" + }, + "accent": { + "dark": "darkAccent", + "light": "lightAccent" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "success": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "info": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "text": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "textMuted": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "selectedListItemText": { + "dark": "#0a0a0a", + "light": "#ffffff" + }, + 
"background": { + "dark": "transparent", + "light": "transparent" + }, + "backgroundPanel": { + "dark": "transparent", + "light": "transparent" + }, + "backgroundElement": { + "dark": "transparent", + "light": "transparent" + }, + "backgroundMenu": { + "dark": "darkPanelBg", + "light": "lightPanelBg" + }, + "border": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "borderActive": { + "dark": "darkSecondary", + "light": "lightAccent" + }, + "borderSubtle": { + "dark": "darkStep6", + "light": "lightStep6" + }, + "diffAdded": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "diffRemoved": { + "dark": "#c53b53", + "light": "#c53b53" + }, + "diffContext": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHunkHeader": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHighlightAdded": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "diffHighlightRemoved": { + "dark": "#e26a75", + "light": "#f52a65" + }, + "diffAddedBg": { + "dark": "transparent", + "light": "transparent" + }, + "diffRemovedBg": { + "dark": "transparent", + "light": "transparent" + }, + "diffContextBg": { + "dark": "transparent", + "light": "transparent" + }, + "diffLineNumber": { + "dark": "#666666", + "light": "#999999" + }, + "diffAddedLineNumberBg": { + "dark": "transparent", + "light": "transparent" + }, + "diffRemovedLineNumberBg": { + "dark": "transparent", + "light": "transparent" + }, + "markdownText": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "markdownHeading": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownLink": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownBlockQuote": { + "dark": "darkAccent", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkSecondary", + "light": 
"lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "markdownListItem": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "syntaxComment": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "syntaxKeyword": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxFunction": { + "dark": "darkSecondary", + "light": "lightAccent" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxNumber": { + "dark": "darkAccent", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkStep12", + "light": "lightStep12" + } + } + }, + "material": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#263238", + "darkBgAlt": "#1e272c", + "darkBgPanel": "#37474f", + "darkFg": "#eeffff", + "darkFgMuted": "#546e7a", + "darkRed": "#f07178", + "darkPink": "#f78c6c", + "darkOrange": "#ffcb6b", + "darkYellow": "#ffcb6b", + "darkGreen": "#c3e88d", + "darkCyan": "#89ddff", + "darkBlue": "#82aaff", + "darkPurple": "#c792ea", + "darkViolet": "#bb80b3", + "lightBg": "#fafafa", + "lightBgAlt": "#f5f5f5", + "lightBgPanel": "#e7e7e8", + "lightFg": "#263238", + "lightFgMuted": "#90a4ae", + "lightRed": "#e53935", + "lightPink": "#ec407a", + "lightOrange": "#f4511e", + "lightYellow": "#ffb300", + "lightGreen": "#91b859", + "lightCyan": "#39adb5", + "lightBlue": "#6182b8", + "lightPurple": "#7c4dff", + "lightViolet": "#945eb8" + }, + "theme": { + 
"primary": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "secondary": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "accent": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "text": { + "dark": "darkFg", + "light": "lightFg" + }, + "textMuted": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "background": { + "dark": "darkBg", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "backgroundElement": { + "dark": "darkBgPanel", + "light": "lightBgPanel" + }, + "border": { + "dark": "#37474f", + "light": "#e0e0e0" + }, + "borderActive": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "borderSubtle": { + "dark": "#1e272c", + "light": "#eeeeee" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "diffHunkHeader": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "diffHighlightAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffHighlightRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffAddedBg": { + "dark": "#2e3c2b", + "light": "#e8f5e9" + }, + "diffRemovedBg": { + "dark": "#3c2b2b", + "light": "#ffebee" + }, + "diffContextBg": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "diffLineNumber": { + "dark": "#37474f", + "light": "#cfd8dc" + }, + "diffAddedLineNumberBg": { + "dark": "#2e3c2b", + "light": "#e8f5e9" + }, + "diffRemovedLineNumberBg": { + "dark": "#3c2b2b", + "light": "#ffebee" + }, + "markdownText": { + "dark": "darkFg", + "light": "lightFg" + }, + "markdownHeading": { + "dark": "darkBlue", + "light": 
"lightBlue" + }, + "markdownLink": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownLinkText": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "markdownCode": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "#37474f", + "light": "#e0e0e0" + }, + "markdownListItem": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImageText": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "markdownCodeBlock": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxComment": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "syntaxKeyword": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "syntaxFunction": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxVariable": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkFg", + "light": "lightFg" + } + } + }, + "matrix": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "matrixInk0": "#0a0e0a", + "matrixInk1": "#0e130d", + "matrixInk2": "#141c12", + "matrixInk3": "#1e2a1b", + "rainGreen": "#2eff6a", + "rainGreenDim": "#1cc24b", + "rainGreenHi": "#62ff94", + "rainCyan": "#00efff", + "rainTeal": "#24f6d9", + "rainPurple": "#c770ff", + "rainOrange": "#ffa83d", + "alertRed": "#ff4b4b", + "alertYellow": 
"#e6ff57", + "alertBlue": "#30b3ff", + "rainGray": "#8ca391", + "lightBg": "#eef3ea", + "lightPaper": "#e4ebe1", + "lightInk1": "#dae1d7", + "lightText": "#203022", + "lightGray": "#748476" + }, + "theme": { + "primary": { + "dark": "rainGreen", + "light": "rainGreenDim" + }, + "secondary": { + "dark": "rainCyan", + "light": "rainTeal" + }, + "accent": { + "dark": "rainPurple", + "light": "rainPurple" + }, + "error": { + "dark": "alertRed", + "light": "alertRed" + }, + "warning": { + "dark": "alertYellow", + "light": "alertYellow" + }, + "success": { + "dark": "rainGreenHi", + "light": "rainGreenDim" + }, + "info": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "text": { + "dark": "rainGreenHi", + "light": "lightText" + }, + "textMuted": { + "dark": "rainGray", + "light": "lightGray" + }, + "background": { + "dark": "matrixInk0", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "matrixInk1", + "light": "lightPaper" + }, + "backgroundElement": { + "dark": "matrixInk2", + "light": "lightInk1" + }, + "border": { + "dark": "matrixInk3", + "light": "lightGray" + }, + "borderActive": { + "dark": "rainGreen", + "light": "rainGreenDim" + }, + "borderSubtle": { + "dark": "matrixInk2", + "light": "lightInk1" + }, + "diffAdded": { + "dark": "rainGreenDim", + "light": "rainGreenDim" + }, + "diffRemoved": { + "dark": "alertRed", + "light": "alertRed" + }, + "diffContext": { + "dark": "rainGray", + "light": "lightGray" + }, + "diffHunkHeader": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "diffHighlightAdded": { + "dark": "#77ffaf", + "light": "#5dac7e" + }, + "diffHighlightRemoved": { + "dark": "#ff7171", + "light": "#d53a3a" + }, + "diffAddedBg": { + "dark": "#132616", + "light": "#e0efde" + }, + "diffRemovedBg": { + "dark": "#261212", + "light": "#f9e5e5" + }, + "diffContextBg": { + "dark": "matrixInk1", + "light": "lightPaper" + }, + "diffLineNumber": { + "dark": "matrixInk3", + "light": "lightGray" + }, + "diffAddedLineNumberBg": { + "dark": 
"#0f1b11", + "light": "#d6e7d2" + }, + "diffRemovedLineNumberBg": { + "dark": "#1b1414", + "light": "#f2d2d2" + }, + "markdownText": { + "dark": "rainGreenHi", + "light": "lightText" + }, + "markdownHeading": { + "dark": "rainCyan", + "light": "rainTeal" + }, + "markdownLink": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "markdownLinkText": { + "dark": "rainTeal", + "light": "rainTeal" + }, + "markdownCode": { + "dark": "rainGreenDim", + "light": "rainGreenDim" + }, + "markdownBlockQuote": { + "dark": "rainGray", + "light": "lightGray" + }, + "markdownEmph": { + "dark": "rainOrange", + "light": "rainOrange" + }, + "markdownStrong": { + "dark": "alertYellow", + "light": "alertYellow" + }, + "markdownHorizontalRule": { + "dark": "rainGray", + "light": "lightGray" + }, + "markdownListItem": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "markdownListEnumeration": { + "dark": "rainTeal", + "light": "rainTeal" + }, + "markdownImage": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "markdownImageText": { + "dark": "rainTeal", + "light": "rainTeal" + }, + "markdownCodeBlock": { + "dark": "rainGreenHi", + "light": "lightText" + }, + "syntaxComment": { + "dark": "rainGray", + "light": "lightGray" + }, + "syntaxKeyword": { + "dark": "rainPurple", + "light": "rainPurple" + }, + "syntaxFunction": { + "dark": "alertBlue", + "light": "alertBlue" + }, + "syntaxVariable": { + "dark": "rainGreenHi", + "light": "lightText" + }, + "syntaxString": { + "dark": "rainGreenDim", + "light": "rainGreenDim" + }, + "syntaxNumber": { + "dark": "rainOrange", + "light": "rainOrange" + }, + "syntaxType": { + "dark": "alertYellow", + "light": "alertYellow" + }, + "syntaxOperator": { + "dark": "rainTeal", + "light": "rainTeal" + }, + "syntaxPunctuation": { + "dark": "rainGreenHi", + "light": "lightText" + } + } + }, + "mercury": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "purple-800": "#3442a6", + "purple-700": "#465bd1", + "purple-600": "#5266eb", + 
"purple-400": "#8da4f5", + "purple-300": "#a7b6f8", + "red-700": "#b0175f", + "red-600": "#d03275", + "red-400": "#fc92b4", + "green-700": "#036e43", + "green-600": "#188554", + "green-400": "#77c599", + "orange-700": "#a44200", + "orange-600": "#c45000", + "orange-400": "#fc9b6f", + "blue-600": "#007f95", + "blue-400": "#77becf", + "neutral-1000": "#10101a", + "neutral-950": "#171721", + "neutral-900": "#1e1e2a", + "neutral-800": "#272735", + "neutral-700": "#363644", + "neutral-600": "#535461", + "neutral-500": "#70707d", + "neutral-400": "#9d9da8", + "neutral-300": "#c3c3cc", + "neutral-200": "#dddde5", + "neutral-100": "#f4f5f9", + "neutral-050": "#fbfcfd", + "neutral-000": "#ffffff", + "neutral-150": "#ededf3", + "border-light": "#7073931a", + "border-light-subtle": "#7073930f", + "border-dark": "#b4b7c81f", + "border-dark-subtle": "#b4b7c814", + "diff-added-light": "#1885541a", + "diff-removed-light": "#d032751a", + "diff-added-dark": "#77c59933", + "diff-removed-dark": "#fc92b433" + }, + "theme": { + "primary": { + "light": "purple-600", + "dark": "purple-400" + }, + "secondary": { + "light": "purple-700", + "dark": "purple-300" + }, + "accent": { + "light": "purple-400", + "dark": "purple-400" + }, + "error": { + "light": "red-700", + "dark": "red-400" + }, + "warning": { + "light": "orange-700", + "dark": "orange-400" + }, + "success": { + "light": "green-700", + "dark": "green-400" + }, + "info": { + "light": "blue-600", + "dark": "blue-400" + }, + "text": { + "light": "neutral-700", + "dark": "neutral-200" + }, + "textMuted": { + "light": "neutral-500", + "dark": "neutral-400" + }, + "background": { + "light": "neutral-000", + "dark": "neutral-950" + }, + "backgroundPanel": { + "light": "neutral-050", + "dark": "neutral-1000" + }, + "backgroundElement": { + "light": "neutral-100", + "dark": "neutral-800" + }, + "border": { + "light": "border-light", + "dark": "border-dark" + }, + "borderActive": { + "light": "purple-600", + "dark": "purple-400" + }, + 
"borderSubtle": { + "light": "border-light-subtle", + "dark": "border-dark-subtle" + }, + "diffAdded": { + "light": "green-700", + "dark": "green-400" + }, + "diffRemoved": { + "light": "red-700", + "dark": "red-400" + }, + "diffContext": { + "light": "neutral-500", + "dark": "neutral-400" + }, + "diffHunkHeader": { + "light": "neutral-500", + "dark": "neutral-400" + }, + "diffHighlightAdded": { + "light": "green-700", + "dark": "green-400" + }, + "diffHighlightRemoved": { + "light": "red-700", + "dark": "red-400" + }, + "diffAddedBg": { + "light": "diff-added-light", + "dark": "diff-added-dark" + }, + "diffRemovedBg": { + "light": "diff-removed-light", + "dark": "diff-removed-dark" + }, + "diffContextBg": { + "light": "neutral-050", + "dark": "neutral-900" + }, + "diffLineNumber": { + "light": "neutral-600", + "dark": "neutral-300" + }, + "diffAddedLineNumberBg": { + "light": "diff-added-light", + "dark": "diff-added-dark" + }, + "diffRemovedLineNumberBg": { + "light": "diff-removed-light", + "dark": "diff-removed-dark" + }, + "markdownText": { + "light": "neutral-700", + "dark": "neutral-200" + }, + "markdownHeading": { + "light": "neutral-900", + "dark": "neutral-000" + }, + "markdownLink": { + "light": "purple-700", + "dark": "purple-400" + }, + "markdownLinkText": { + "light": "purple-600", + "dark": "purple-300" + }, + "markdownCode": { + "light": "green-700", + "dark": "green-400" + }, + "markdownBlockQuote": { + "light": "neutral-500", + "dark": "neutral-400" + }, + "markdownEmph": { + "light": "orange-700", + "dark": "orange-400" + }, + "markdownStrong": { + "light": "neutral-900", + "dark": "neutral-100" + }, + "markdownHorizontalRule": { + "light": "border-light", + "dark": "border-dark" + }, + "markdownListItem": { + "light": "neutral-900", + "dark": "neutral-000" + }, + "markdownListEnumeration": { + "light": "purple-600", + "dark": "purple-400" + }, + "markdownImage": { + "light": "purple-700", + "dark": "purple-400" + }, + "markdownImageText": { + 
"light": "purple-600", + "dark": "purple-300" + }, + "markdownCodeBlock": { + "light": "neutral-700", + "dark": "neutral-200" + }, + "syntaxComment": { + "light": "neutral-500", + "dark": "neutral-400" + }, + "syntaxKeyword": { + "light": "purple-700", + "dark": "purple-400" + }, + "syntaxFunction": { + "light": "purple-600", + "dark": "purple-400" + }, + "syntaxVariable": { + "light": "blue-600", + "dark": "blue-400" + }, + "syntaxString": { + "light": "green-700", + "dark": "green-400" + }, + "syntaxNumber": { + "light": "orange-700", + "dark": "orange-400" + }, + "syntaxType": { + "light": "blue-600", + "dark": "blue-400" + }, + "syntaxOperator": { + "light": "purple-700", + "dark": "purple-400" + }, + "syntaxPunctuation": { + "light": "neutral-700", + "dark": "neutral-200" + } + } + }, + "monokai": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background": "#272822", + "backgroundAlt": "#1e1f1c", + "backgroundPanel": "#3e3d32", + "foreground": "#f8f8f2", + "comment": "#75715e", + "red": "#f92672", + "orange": "#fd971f", + "lightOrange": "#e69f66", + "yellow": "#e6db74", + "green": "#a6e22e", + "cyan": "#66d9ef", + "blue": "#66d9ef", + "purple": "#ae81ff", + "pink": "#f92672" + }, + "theme": { + "primary": { + "dark": "cyan", + "light": "blue" + }, + "secondary": { + "dark": "purple", + "light": "purple" + }, + "accent": { + "dark": "green", + "light": "green" + }, + "error": { + "dark": "red", + "light": "red" + }, + "warning": { + "dark": "yellow", + "light": "orange" + }, + "success": { + "dark": "green", + "light": "green" + }, + "info": { + "dark": "orange", + "light": "orange" + }, + "text": { + "dark": "foreground", + "light": "#272822" + }, + "textMuted": { + "dark": "comment", + "light": "#75715e" + }, + "background": { + "dark": "#272822", + "light": "#fafafa" + }, + "backgroundPanel": { + "dark": "#1e1f1c", + "light": "#f0f0f0" + }, + "backgroundElement": { + "dark": "#3e3d32", + "light": "#e0e0e0" + }, + "border": { + "dark": 
"#3e3d32", + "light": "#d0d0d0" + }, + "borderActive": { + "dark": "cyan", + "light": "blue" + }, + "borderSubtle": { + "dark": "#1e1f1c", + "light": "#e8e8e8" + }, + "diffAdded": { + "dark": "green", + "light": "green" + }, + "diffRemoved": { + "dark": "red", + "light": "red" + }, + "diffContext": { + "dark": "comment", + "light": "#75715e" + }, + "diffHunkHeader": { + "dark": "comment", + "light": "#75715e" + }, + "diffHighlightAdded": { + "dark": "green", + "light": "green" + }, + "diffHighlightRemoved": { + "dark": "red", + "light": "red" + }, + "diffAddedBg": { + "dark": "#1a3a1a", + "light": "#e0ffe0" + }, + "diffRemovedBg": { + "dark": "#3a1a1a", + "light": "#ffe0e0" + }, + "diffContextBg": { + "dark": "#1e1f1c", + "light": "#f0f0f0" + }, + "diffLineNumber": { + "dark": "#3e3d32", + "light": "#d0d0d0" + }, + "diffAddedLineNumberBg": { + "dark": "#1a3a1a", + "light": "#e0ffe0" + }, + "diffRemovedLineNumberBg": { + "dark": "#3a1a1a", + "light": "#ffe0e0" + }, + "markdownText": { + "dark": "foreground", + "light": "#272822" + }, + "markdownHeading": { + "dark": "pink", + "light": "pink" + }, + "markdownLink": { + "dark": "cyan", + "light": "blue" + }, + "markdownLinkText": { + "dark": "purple", + "light": "purple" + }, + "markdownCode": { + "dark": "green", + "light": "green" + }, + "markdownBlockQuote": { + "dark": "comment", + "light": "#75715e" + }, + "markdownEmph": { + "dark": "yellow", + "light": "orange" + }, + "markdownStrong": { + "dark": "orange", + "light": "orange" + }, + "markdownHorizontalRule": { + "dark": "comment", + "light": "#75715e" + }, + "markdownListItem": { + "dark": "cyan", + "light": "blue" + }, + "markdownListEnumeration": { + "dark": "purple", + "light": "purple" + }, + "markdownImage": { + "dark": "cyan", + "light": "blue" + }, + "markdownImageText": { + "dark": "purple", + "light": "purple" + }, + "markdownCodeBlock": { + "dark": "foreground", + "light": "#272822" + }, + "syntaxComment": { + "dark": "comment", + "light": "#75715e" 
+ }, + "syntaxKeyword": { + "dark": "pink", + "light": "pink" + }, + "syntaxFunction": { + "dark": "green", + "light": "green" + }, + "syntaxVariable": { + "dark": "foreground", + "light": "#272822" + }, + "syntaxString": { + "dark": "yellow", + "light": "orange" + }, + "syntaxNumber": { + "dark": "purple", + "light": "purple" + }, + "syntaxType": { + "dark": "cyan", + "light": "blue" + }, + "syntaxOperator": { + "dark": "pink", + "light": "pink" + }, + "syntaxPunctuation": { + "dark": "foreground", + "light": "#272822" + } + } + }, + "nightowl": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "nightOwlBg": "#011627", + "nightOwlFg": "#d6deeb", + "nightOwlBlue": "#82AAFF", + "nightOwlCyan": "#7fdbca", + "nightOwlGreen": "#c5e478", + "nightOwlYellow": "#ecc48d", + "nightOwlOrange": "#F78C6C", + "nightOwlRed": "#EF5350", + "nightOwlPink": "#ff5874", + "nightOwlPurple": "#c792ea", + "nightOwlMuted": "#5f7e97", + "nightOwlGray": "#637777", + "nightOwlLightGray": "#89a4bb", + "nightOwlPanel": "#0b253a" + }, + "theme": { + "primary": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "secondary": { + "dark": "nightOwlCyan", + "light": "nightOwlCyan" + }, + "accent": { + "dark": "nightOwlPurple", + "light": "nightOwlPurple" + }, + "error": { + "dark": "nightOwlRed", + "light": "nightOwlRed" + }, + "warning": { + "dark": "nightOwlYellow", + "light": "nightOwlYellow" + }, + "success": { + "dark": "nightOwlGreen", + "light": "nightOwlGreen" + }, + "info": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "text": { + "dark": "nightOwlFg", + "light": "nightOwlFg" + }, + "textMuted": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "background": { + "dark": "nightOwlBg", + "light": "nightOwlBg" + }, + "backgroundPanel": { + "dark": "nightOwlPanel", + "light": "nightOwlPanel" + }, + "backgroundElement": { + "dark": "nightOwlPanel", + "light": "nightOwlPanel" + }, + "border": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" 
+ }, + "borderActive": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "borderSubtle": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "diffAdded": { + "dark": "nightOwlGreen", + "light": "nightOwlGreen" + }, + "diffRemoved": { + "dark": "nightOwlRed", + "light": "nightOwlRed" + }, + "diffContext": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "diffHunkHeader": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "diffHighlightAdded": { + "dark": "nightOwlGreen", + "light": "nightOwlGreen" + }, + "diffHighlightRemoved": { + "dark": "nightOwlRed", + "light": "nightOwlRed" + }, + "diffAddedBg": { + "dark": "#0a2e1a", + "light": "#0a2e1a" + }, + "diffRemovedBg": { + "dark": "#2d1b1b", + "light": "#2d1b1b" + }, + "diffContextBg": { + "dark": "nightOwlPanel", + "light": "nightOwlPanel" + }, + "diffLineNumber": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "diffAddedLineNumberBg": { + "dark": "#0a2e1a", + "light": "#0a2e1a" + }, + "diffRemovedLineNumberBg": { + "dark": "#2d1b1b", + "light": "#2d1b1b" + }, + "markdownText": { + "dark": "nightOwlFg", + "light": "nightOwlFg" + }, + "markdownHeading": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "markdownLink": { + "dark": "nightOwlCyan", + "light": "nightOwlCyan" + }, + "markdownLinkText": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "markdownCode": { + "dark": "nightOwlGreen", + "light": "nightOwlGreen" + }, + "markdownBlockQuote": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "markdownEmph": { + "dark": "nightOwlPurple", + "light": "nightOwlPurple" + }, + "markdownStrong": { + "dark": "nightOwlYellow", + "light": "nightOwlYellow" + }, + "markdownHorizontalRule": { + "dark": "nightOwlMuted", + "light": "nightOwlMuted" + }, + "markdownListItem": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "markdownListEnumeration": { + "dark": "nightOwlCyan", + "light": "nightOwlCyan" + }, + 
"markdownImage": { + "dark": "nightOwlCyan", + "light": "nightOwlCyan" + }, + "markdownImageText": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "markdownCodeBlock": { + "dark": "nightOwlFg", + "light": "nightOwlFg" + }, + "syntaxComment": { + "dark": "nightOwlGray", + "light": "nightOwlGray" + }, + "syntaxKeyword": { + "dark": "nightOwlPurple", + "light": "nightOwlPurple" + }, + "syntaxFunction": { + "dark": "nightOwlBlue", + "light": "nightOwlBlue" + }, + "syntaxVariable": { + "dark": "nightOwlFg", + "light": "nightOwlFg" + }, + "syntaxString": { + "dark": "nightOwlYellow", + "light": "nightOwlYellow" + }, + "syntaxNumber": { + "dark": "nightOwlOrange", + "light": "nightOwlOrange" + }, + "syntaxType": { + "dark": "nightOwlGreen", + "light": "nightOwlGreen" + }, + "syntaxOperator": { + "dark": "nightOwlCyan", + "light": "nightOwlCyan" + }, + "syntaxPunctuation": { + "dark": "nightOwlFg", + "light": "nightOwlFg" + } + } + }, + "nord": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "nord0": "#2E3440", + "nord1": "#3B4252", + "nord2": "#434C5E", + "nord3": "#4C566A", + "nord4": "#D8DEE9", + "nord5": "#E5E9F0", + "nord6": "#ECEFF4", + "nord7": "#8FBCBB", + "nord8": "#88C0D0", + "nord9": "#81A1C1", + "nord10": "#5E81AC", + "nord11": "#BF616A", + "nord12": "#D08770", + "nord13": "#EBCB8B", + "nord14": "#A3BE8C", + "nord15": "#B48EAD" + }, + "theme": { + "primary": { + "dark": "nord8", + "light": "nord10" + }, + "secondary": { + "dark": "nord9", + "light": "nord9" + }, + "accent": { + "dark": "nord7", + "light": "nord7" + }, + "error": { + "dark": "nord11", + "light": "nord11" + }, + "warning": { + "dark": "nord12", + "light": "nord12" + }, + "success": { + "dark": "nord14", + "light": "nord14" + }, + "info": { + "dark": "nord8", + "light": "nord10" + }, + "text": { + "dark": "nord6", + "light": "nord0" + }, + "textMuted": { + "dark": "#8B95A7", + "light": "nord1" + }, + "background": { + "dark": "nord0", + "light": "nord6" + }, + 
"backgroundPanel": { + "dark": "nord1", + "light": "nord5" + }, + "backgroundElement": { + "dark": "nord2", + "light": "nord4" + }, + "border": { + "dark": "nord2", + "light": "nord3" + }, + "borderActive": { + "dark": "nord3", + "light": "nord2" + }, + "borderSubtle": { + "dark": "nord2", + "light": "nord3" + }, + "diffAdded": { + "dark": "nord14", + "light": "nord14" + }, + "diffRemoved": { + "dark": "nord11", + "light": "nord11" + }, + "diffContext": { + "dark": "#8B95A7", + "light": "nord3" + }, + "diffHunkHeader": { + "dark": "#8B95A7", + "light": "nord3" + }, + "diffHighlightAdded": { + "dark": "nord14", + "light": "nord14" + }, + "diffHighlightRemoved": { + "dark": "nord11", + "light": "nord11" + }, + "diffAddedBg": { + "dark": "#3B4252", + "light": "#E5E9F0" + }, + "diffRemovedBg": { + "dark": "#3B4252", + "light": "#E5E9F0" + }, + "diffContextBg": { + "dark": "nord1", + "light": "nord5" + }, + "diffLineNumber": { + "dark": "nord2", + "light": "nord4" + }, + "diffAddedLineNumberBg": { + "dark": "#3B4252", + "light": "#E5E9F0" + }, + "diffRemovedLineNumberBg": { + "dark": "#3B4252", + "light": "#E5E9F0" + }, + "markdownText": { + "dark": "nord4", + "light": "nord0" + }, + "markdownHeading": { + "dark": "nord8", + "light": "nord10" + }, + "markdownLink": { + "dark": "nord9", + "light": "nord9" + }, + "markdownLinkText": { + "dark": "nord7", + "light": "nord7" + }, + "markdownCode": { + "dark": "nord14", + "light": "nord14" + }, + "markdownBlockQuote": { + "dark": "#8B95A7", + "light": "nord3" + }, + "markdownEmph": { + "dark": "nord12", + "light": "nord12" + }, + "markdownStrong": { + "dark": "nord13", + "light": "nord13" + }, + "markdownHorizontalRule": { + "dark": "#8B95A7", + "light": "nord3" + }, + "markdownListItem": { + "dark": "nord8", + "light": "nord10" + }, + "markdownListEnumeration": { + "dark": "nord7", + "light": "nord7" + }, + "markdownImage": { + "dark": "nord9", + "light": "nord9" + }, + "markdownImageText": { + "dark": "nord7", + "light": 
"nord7" + }, + "markdownCodeBlock": { + "dark": "nord4", + "light": "nord0" + }, + "syntaxComment": { + "dark": "#8B95A7", + "light": "nord3" + }, + "syntaxKeyword": { + "dark": "nord9", + "light": "nord9" + }, + "syntaxFunction": { + "dark": "nord8", + "light": "nord8" + }, + "syntaxVariable": { + "dark": "nord7", + "light": "nord7" + }, + "syntaxString": { + "dark": "nord14", + "light": "nord14" + }, + "syntaxNumber": { + "dark": "nord15", + "light": "nord15" + }, + "syntaxType": { + "dark": "nord7", + "light": "nord7" + }, + "syntaxOperator": { + "dark": "nord9", + "light": "nord9" + }, + "syntaxPunctuation": { + "dark": "nord4", + "light": "nord0" + } + } + }, + "one-dark": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg": "#282c34", + "darkBgAlt": "#21252b", + "darkBgPanel": "#353b45", + "darkFg": "#abb2bf", + "darkFgMuted": "#5c6370", + "darkPurple": "#c678dd", + "darkBlue": "#61afef", + "darkRed": "#e06c75", + "darkGreen": "#98c379", + "darkYellow": "#e5c07b", + "darkOrange": "#d19a66", + "darkCyan": "#56b6c2", + "lightBg": "#fafafa", + "lightBgAlt": "#f0f0f1", + "lightBgPanel": "#eaeaeb", + "lightFg": "#383a42", + "lightFgMuted": "#a0a1a7", + "lightPurple": "#a626a4", + "lightBlue": "#4078f2", + "lightRed": "#e45649", + "lightGreen": "#50a14f", + "lightYellow": "#c18401", + "lightOrange": "#986801", + "lightCyan": "#0184bc" + }, + "theme": { + "primary": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "secondary": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "accent": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "text": { + "dark": "darkFg", + "light": "lightFg" + }, + "textMuted": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + 
"background": { + "dark": "darkBg", + "light": "lightBg" + }, + "backgroundPanel": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "backgroundElement": { + "dark": "darkBgPanel", + "light": "lightBgPanel" + }, + "border": { + "dark": "#393f4a", + "light": "#d1d1d2" + }, + "borderActive": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "borderSubtle": { + "dark": "#2c313a", + "light": "#e0e0e1" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "diffHunkHeader": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "diffHighlightAdded": { + "dark": "#aad482", + "light": "#489447" + }, + "diffHighlightRemoved": { + "dark": "#e8828b", + "light": "#d65145" + }, + "diffAddedBg": { + "dark": "#2c382b", + "light": "#eafbe9" + }, + "diffRemovedBg": { + "dark": "#3a2d2f", + "light": "#fce9e8" + }, + "diffContextBg": { + "dark": "darkBgAlt", + "light": "lightBgAlt" + }, + "diffLineNumber": { + "dark": "#495162", + "light": "#c9c9ca" + }, + "diffAddedLineNumberBg": { + "dark": "#283427", + "light": "#e1f3df" + }, + "diffRemovedLineNumberBg": { + "dark": "#36292b", + "light": "#f5e2e1" + }, + "markdownText": { + "dark": "darkFg", + "light": "lightFg" + }, + "markdownHeading": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "markdownLink": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "markdownListItem": { + "dark": "darkBlue", 
+ "light": "lightBlue" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkFg", + "light": "lightFg" + }, + "syntaxComment": { + "dark": "darkFgMuted", + "light": "lightFgMuted" + }, + "syntaxKeyword": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "syntaxFunction": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkFg", + "light": "lightFg" + } + } + }, + "opencode": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkStep1": "#0a0a0a", + "darkStep2": "#141414", + "darkStep3": "#1e1e1e", + "darkStep4": "#282828", + "darkStep5": "#323232", + "darkStep6": "#3c3c3c", + "darkStep7": "#484848", + "darkStep8": "#606060", + "darkStep9": "#fab283", + "darkStep10": "#ffc09f", + "darkStep11": "#808080", + "darkStep12": "#eeeeee", + "darkSecondary": "#5c9cf5", + "darkAccent": "#9d7cd8", + "darkRed": "#e06c75", + "darkOrange": "#f5a742", + "darkGreen": "#7fd88f", + "darkCyan": "#56b6c2", + "darkYellow": "#e5c07b", + "lightStep1": "#ffffff", + "lightStep2": "#fafafa", + "lightStep3": "#f5f5f5", + "lightStep4": "#ebebeb", + "lightStep5": "#e1e1e1", + "lightStep6": "#d4d4d4", + "lightStep7": "#b8b8b8", + "lightStep8": "#a0a0a0", + "lightStep9": "#3b7dd8", + "lightStep10": "#2968c3", + "lightStep11": "#8a8a8a", + "lightStep12": "#1a1a1a", + "lightSecondary": "#7b5bb6", + "lightAccent": "#d68c27", + "lightRed": "#d1383d", + "lightOrange": 
"#d68c27", + "lightGreen": "#3d9a57", + "lightCyan": "#318795", + "lightYellow": "#b0851f" + }, + "theme": { + "primary": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "secondary": { + "dark": "darkSecondary", + "light": "lightSecondary" + }, + "accent": { + "dark": "darkAccent", + "light": "lightAccent" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "text": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "textMuted": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "background": { + "dark": "darkStep1", + "light": "lightStep1" + }, + "backgroundPanel": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "backgroundElement": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "border": { + "dark": "darkStep7", + "light": "lightStep7" + }, + "borderActive": { + "dark": "darkStep8", + "light": "lightStep8" + }, + "borderSubtle": { + "dark": "darkStep6", + "light": "lightStep6" + }, + "diffAdded": { + "dark": "#4fd6be", + "light": "#1e725c" + }, + "diffRemoved": { + "dark": "#c53b53", + "light": "#c53b53" + }, + "diffContext": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHunkHeader": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHighlightAdded": { + "dark": "#b8db87", + "light": "#4db380" + }, + "diffHighlightRemoved": { + "dark": "#e26a75", + "light": "#f52a65" + }, + "diffAddedBg": { + "dark": "#20303b", + "light": "#d5e5d5" + }, + "diffRemovedBg": { + "dark": "#37222c", + "light": "#f7d8db" + }, + "diffContextBg": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "diffLineNumber": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "diffAddedLineNumberBg": { + "dark": "#1b2b34", + "light": "#c5d5c5" + }, + "diffRemovedLineNumberBg": { + "dark": "#2d1f26", + "light": "#e7c8cb" + }, + 
"markdownText": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "markdownHeading": { + "dark": "darkAccent", + "light": "lightAccent" + }, + "markdownLink": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "markdownListItem": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "syntaxComment": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "syntaxKeyword": { + "dark": "darkAccent", + "light": "lightAccent" + }, + "syntaxFunction": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkStep12", + "light": "lightStep12" + } + } + }, + "orng": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkStep1": "#0a0a0a", + "darkStep2": "#141414", + "darkStep3": "#1e1e1e", + "darkStep4": "#282828", + "darkStep5": "#323232", + "darkStep6": "#3c3c3c", + "darkStep7": "#484848", + 
"darkStep8": "#606060", + "darkStep9": "#EC5B2B", + "darkStep10": "#EE7948", + "darkStep11": "#808080", + "darkStep12": "#eeeeee", + "darkSecondary": "#EE7948", + "darkAccent": "#FFF7F1", + "darkRed": "#e06c75", + "darkOrange": "#EC5B2B", + "darkBlue": "#6ba1e6", + "darkCyan": "#56b6c2", + "darkYellow": "#e5c07b", + "lightStep1": "#ffffff", + "lightStep2": "#FFF7F1", + "lightStep3": "#f5f0eb", + "lightStep4": "#ebebeb", + "lightStep5": "#e1e1e1", + "lightStep6": "#d4d4d4", + "lightStep7": "#b8b8b8", + "lightStep8": "#a0a0a0", + "lightStep9": "#EC5B2B", + "lightStep10": "#c94d24", + "lightStep11": "#8a8a8a", + "lightStep12": "#1a1a1a", + "lightSecondary": "#EE7948", + "lightAccent": "#c94d24", + "lightRed": "#d1383d", + "lightOrange": "#EC5B2B", + "lightBlue": "#0062d1", + "lightCyan": "#318795", + "lightYellow": "#b0851f" + }, + "theme": { + "primary": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "secondary": { + "dark": "darkSecondary", + "light": "lightSecondary" + }, + "accent": { + "dark": "darkAccent", + "light": "lightAccent" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "success": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "info": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "text": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "textMuted": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "selectedListItemText": { + "dark": "#0a0a0a", + "light": "#ffffff" + }, + "background": { + "dark": "darkStep1", + "light": "lightStep1" + }, + "backgroundPanel": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "backgroundElement": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "border": { + "dark": "#EC5B2B", + "light": "#EC5B2B" + }, + "borderActive": { + "dark": "#EE7948", + "light": "#c94d24" + }, + "borderSubtle": { + "dark": "darkStep6", + "light": "lightStep6" + }, + "diffAdded": { + "dark": "#6ba1e6", + 
"light": "#0062d1" + }, + "diffRemoved": { + "dark": "#c53b53", + "light": "#c53b53" + }, + "diffContext": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHunkHeader": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHighlightAdded": { + "dark": "#6ba1e6", + "light": "#0062d1" + }, + "diffHighlightRemoved": { + "dark": "#e26a75", + "light": "#f52a65" + }, + "diffAddedBg": { + "dark": "#1a2a3d", + "light": "#e0edfa" + }, + "diffRemovedBg": { + "dark": "#37222c", + "light": "#f7d8db" + }, + "diffContextBg": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "diffLineNumber": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "diffAddedLineNumberBg": { + "dark": "#162535", + "light": "#d0e5f5" + }, + "diffRemovedLineNumberBg": { + "dark": "#2d1f26", + "light": "#e7c8cb" + }, + "markdownText": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "markdownHeading": { + "dark": "#EC5B2B", + "light": "#EC5B2B" + }, + "markdownLink": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "markdownBlockQuote": { + "dark": "#FFF7F1", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "#EE7948", + "light": "#EC5B2B" + }, + "markdownHorizontalRule": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "markdownListItem": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "syntaxComment": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "syntaxKeyword": { + "dark": "#EC5B2B", + "light": "#EC5B2B" + }, 
+ "syntaxFunction": { + "dark": "#EE7948", + "light": "#c94d24" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxNumber": { + "dark": "#FFF7F1", + "light": "#EC5B2B" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkStep12", + "light": "lightStep12" + } + } + }, + "osaka-jade": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkBg0": "#111c18", + "darkBg1": "#1a2520", + "darkBg2": "#23372B", + "darkBg3": "#3d4a44", + "darkFg0": "#C1C497", + "darkFg1": "#9aa88a", + "darkGray": "#53685B", + "darkRed": "#FF5345", + "darkGreen": "#549e6a", + "darkYellow": "#459451", + "darkBlue": "#509475", + "darkMagenta": "#D2689C", + "darkCyan": "#2DD5B7", + "darkWhite": "#F6F5DD", + "darkRedBright": "#db9f9c", + "darkGreenBright": "#63b07a", + "darkYellowBright": "#E5C736", + "darkBlueBright": "#ACD4CF", + "darkMagentaBright": "#75bbb3", + "darkCyanBright": "#8CD3CB", + "lightBg0": "#F6F5DD", + "lightBg1": "#E8E7CC", + "lightBg2": "#D5D4B8", + "lightBg3": "#A8A78C", + "lightFg0": "#111c18", + "lightFg1": "#1a2520", + "lightGray": "#53685B", + "lightRed": "#c7392d", + "lightGreen": "#3d7a52", + "lightYellow": "#b5a020", + "lightBlue": "#3d7560", + "lightMagenta": "#a8527a", + "lightCyan": "#1faa90" + }, + "theme": { + "primary": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "secondary": { + "dark": "darkMagenta", + "light": "lightMagenta" + }, + "accent": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "error": { + "dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkYellowBright", + "light": "lightYellow" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "text": { + "dark": "darkFg0", + "light": 
"lightFg0" + }, + "textMuted": { + "dark": "darkGray", + "light": "lightGray" + }, + "background": { + "dark": "darkBg0", + "light": "lightBg0" + }, + "backgroundPanel": { + "dark": "darkBg1", + "light": "lightBg1" + }, + "backgroundElement": { + "dark": "darkBg2", + "light": "lightBg2" + }, + "border": { + "dark": "darkBg3", + "light": "lightBg3" + }, + "borderActive": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "borderSubtle": { + "dark": "darkBg2", + "light": "lightBg2" + }, + "diffAdded": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "diffRemoved": { + "dark": "darkRed", + "light": "lightRed" + }, + "diffContext": { + "dark": "darkGray", + "light": "lightGray" + }, + "diffHunkHeader": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "diffHighlightAdded": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "diffHighlightRemoved": { + "dark": "darkRedBright", + "light": "lightRed" + }, + "diffAddedBg": { + "dark": "#15241c", + "light": "#e0eee5" + }, + "diffRemovedBg": { + "dark": "#241515", + "light": "#eee0e0" + }, + "diffContextBg": { + "dark": "darkBg1", + "light": "lightBg1" + }, + "diffLineNumber": { + "dark": "darkBg3", + "light": "lightBg3" + }, + "diffAddedLineNumberBg": { + "dark": "#121f18", + "light": "#d5e5da" + }, + "diffRemovedLineNumberBg": { + "dark": "#1f1212", + "light": "#e5d5d5" + }, + "markdownText": { + "dark": "darkFg0", + "light": "lightFg0" + }, + "markdownHeading": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownLink": { + "dark": "darkCyanBright", + "light": "lightCyan" + }, + "markdownLinkText": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownCode": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkGray", + "light": "lightGray" + }, + "markdownEmph": { + "dark": "darkMagenta", + "light": "lightMagenta" + }, + "markdownStrong": { + "dark": "darkFg0", + "light": "lightFg0" + }, + "markdownHorizontalRule": { + "dark": 
"darkGray", + "light": "lightGray" + }, + "markdownListItem": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownListEnumeration": { + "dark": "darkCyanBright", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkCyanBright", + "light": "lightCyan" + }, + "markdownImageText": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "markdownCodeBlock": { + "dark": "darkFg0", + "light": "lightFg0" + }, + "syntaxComment": { + "dark": "darkGray", + "light": "lightGray" + }, + "syntaxKeyword": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxFunction": { + "dark": "darkBlue", + "light": "lightBlue" + }, + "syntaxVariable": { + "dark": "darkFg0", + "light": "lightFg0" + }, + "syntaxString": { + "dark": "darkGreenBright", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkMagenta", + "light": "lightMagenta" + }, + "syntaxType": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxOperator": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxPunctuation": { + "dark": "darkFg0", + "light": "lightFg0" + } + } + }, + "palenight": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background": "#292d3e", + "backgroundAlt": "#1e2132", + "backgroundPanel": "#32364a", + "foreground": "#a6accd", + "foregroundBright": "#bfc7d5", + "comment": "#676e95", + "red": "#f07178", + "orange": "#f78c6c", + "yellow": "#ffcb6b", + "green": "#c3e88d", + "cyan": "#89ddff", + "blue": "#82aaff", + "purple": "#c792ea", + "magenta": "#ff5370", + "pink": "#f07178" + }, + "theme": { + "primary": { + "dark": "blue", + "light": "#4976eb" + }, + "secondary": { + "dark": "purple", + "light": "#a854f2" + }, + "accent": { + "dark": "cyan", + "light": "#00acc1" + }, + "error": { + "dark": "red", + "light": "#e53935" + }, + "warning": { + "dark": "yellow", + "light": "#ffb300" + }, + "success": { + "dark": "green", + "light": "#91b859" + }, + "info": { + "dark": "orange", + "light": "#f4511e" + }, + "text": { + "dark": 
"foreground", + "light": "#292d3e" + }, + "textMuted": { + "dark": "comment", + "light": "#8796b0" + }, + "background": { + "dark": "#292d3e", + "light": "#fafafa" + }, + "backgroundPanel": { + "dark": "#1e2132", + "light": "#f5f5f5" + }, + "backgroundElement": { + "dark": "#32364a", + "light": "#e7e7e8" + }, + "border": { + "dark": "#32364a", + "light": "#e0e0e0" + }, + "borderActive": { + "dark": "blue", + "light": "#4976eb" + }, + "borderSubtle": { + "dark": "#1e2132", + "light": "#eeeeee" + }, + "diffAdded": { + "dark": "green", + "light": "#91b859" + }, + "diffRemoved": { + "dark": "red", + "light": "#e53935" + }, + "diffContext": { + "dark": "comment", + "light": "#8796b0" + }, + "diffHunkHeader": { + "dark": "cyan", + "light": "#00acc1" + }, + "diffHighlightAdded": { + "dark": "green", + "light": "#91b859" + }, + "diffHighlightRemoved": { + "dark": "red", + "light": "#e53935" + }, + "diffAddedBg": { + "dark": "#2e3c2b", + "light": "#e8f5e9" + }, + "diffRemovedBg": { + "dark": "#3c2b2b", + "light": "#ffebee" + }, + "diffContextBg": { + "dark": "#1e2132", + "light": "#f5f5f5" + }, + "diffLineNumber": { + "dark": "#444760", + "light": "#cfd8dc" + }, + "diffAddedLineNumberBg": { + "dark": "#2e3c2b", + "light": "#e8f5e9" + }, + "diffRemovedLineNumberBg": { + "dark": "#3c2b2b", + "light": "#ffebee" + }, + "markdownText": { + "dark": "foreground", + "light": "#292d3e" + }, + "markdownHeading": { + "dark": "purple", + "light": "#a854f2" + }, + "markdownLink": { + "dark": "blue", + "light": "#4976eb" + }, + "markdownLinkText": { + "dark": "cyan", + "light": "#00acc1" + }, + "markdownCode": { + "dark": "green", + "light": "#91b859" + }, + "markdownBlockQuote": { + "dark": "comment", + "light": "#8796b0" + }, + "markdownEmph": { + "dark": "yellow", + "light": "#ffb300" + }, + "markdownStrong": { + "dark": "orange", + "light": "#f4511e" + }, + "markdownHorizontalRule": { + "dark": "comment", + "light": "#8796b0" + }, + "markdownListItem": { + "dark": "blue", + "light": 
"#4976eb" + }, + "markdownListEnumeration": { + "dark": "cyan", + "light": "#00acc1" + }, + "markdownImage": { + "dark": "blue", + "light": "#4976eb" + }, + "markdownImageText": { + "dark": "cyan", + "light": "#00acc1" + }, + "markdownCodeBlock": { + "dark": "foreground", + "light": "#292d3e" + }, + "syntaxComment": { + "dark": "comment", + "light": "#8796b0" + }, + "syntaxKeyword": { + "dark": "purple", + "light": "#a854f2" + }, + "syntaxFunction": { + "dark": "blue", + "light": "#4976eb" + }, + "syntaxVariable": { + "dark": "foreground", + "light": "#292d3e" + }, + "syntaxString": { + "dark": "green", + "light": "#91b859" + }, + "syntaxNumber": { + "dark": "orange", + "light": "#f4511e" + }, + "syntaxType": { + "dark": "yellow", + "light": "#ffb300" + }, + "syntaxOperator": { + "dark": "cyan", + "light": "#00acc1" + }, + "syntaxPunctuation": { + "dark": "foreground", + "light": "#292d3e" + } + } + }, + "rosepine": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "base": "#191724", + "surface": "#1f1d2e", + "overlay": "#26233a", + "muted": "#6e6a86", + "subtle": "#908caa", + "text": "#e0def4", + "love": "#eb6f92", + "gold": "#f6c177", + "rose": "#ebbcba", + "pine": "#31748f", + "foam": "#9ccfd8", + "iris": "#c4a7e7", + "highlightLow": "#21202e", + "highlightMed": "#403d52", + "highlightHigh": "#524f67", + "moonBase": "#232136", + "moonSurface": "#2a273f", + "moonOverlay": "#393552", + "moonMuted": "#6e6a86", + "moonSubtle": "#908caa", + "moonText": "#e0def4", + "dawnBase": "#faf4ed", + "dawnSurface": "#fffaf3", + "dawnOverlay": "#f2e9e1", + "dawnMuted": "#9893a5", + "dawnSubtle": "#797593", + "dawnText": "#575279" + }, + "theme": { + "primary": { + "dark": "foam", + "light": "pine" + }, + "secondary": { + "dark": "iris", + "light": "#907aa9" + }, + "accent": { + "dark": "rose", + "light": "#d7827e" + }, + "error": { + "dark": "love", + "light": "#b4637a" + }, + "warning": { + "dark": "gold", + "light": "#ea9d34" + }, + "success": { + "dark": "pine", 
+ "light": "#286983" + }, + "info": { + "dark": "foam", + "light": "#56949f" + }, + "text": { + "dark": "#e0def4", + "light": "#575279" + }, + "textMuted": { + "dark": "muted", + "light": "dawnMuted" + }, + "background": { + "dark": "base", + "light": "dawnBase" + }, + "backgroundPanel": { + "dark": "surface", + "light": "dawnSurface" + }, + "backgroundElement": { + "dark": "overlay", + "light": "dawnOverlay" + }, + "border": { + "dark": "highlightMed", + "light": "#dfdad9" + }, + "borderActive": { + "dark": "foam", + "light": "pine" + }, + "borderSubtle": { + "dark": "highlightLow", + "light": "#f4ede8" + }, + "diffAdded": { + "dark": "pine", + "light": "#286983" + }, + "diffRemoved": { + "dark": "love", + "light": "#b4637a" + }, + "diffContext": { + "dark": "muted", + "light": "dawnMuted" + }, + "diffHunkHeader": { + "dark": "iris", + "light": "#907aa9" + }, + "diffHighlightAdded": { + "dark": "pine", + "light": "#286983" + }, + "diffHighlightRemoved": { + "dark": "love", + "light": "#b4637a" + }, + "diffAddedBg": { + "dark": "#1f2d3a", + "light": "#e5f2f3" + }, + "diffRemovedBg": { + "dark": "#3a1f2d", + "light": "#fce5e8" + }, + "diffContextBg": { + "dark": "surface", + "light": "dawnSurface" + }, + "diffLineNumber": { + "dark": "muted", + "light": "dawnMuted" + }, + "diffAddedLineNumberBg": { + "dark": "#1f2d3a", + "light": "#e5f2f3" + }, + "diffRemovedLineNumberBg": { + "dark": "#3a1f2d", + "light": "#fce5e8" + }, + "markdownText": { + "dark": "#e0def4", + "light": "#575279" + }, + "markdownHeading": { + "dark": "iris", + "light": "#907aa9" + }, + "markdownLink": { + "dark": "foam", + "light": "pine" + }, + "markdownLinkText": { + "dark": "rose", + "light": "#d7827e" + }, + "markdownCode": { + "dark": "pine", + "light": "#286983" + }, + "markdownBlockQuote": { + "dark": "muted", + "light": "dawnMuted" + }, + "markdownEmph": { + "dark": "gold", + "light": "#ea9d34" + }, + "markdownStrong": { + "dark": "love", + "light": "#b4637a" + }, + 
"markdownHorizontalRule": { + "dark": "highlightMed", + "light": "#dfdad9" + }, + "markdownListItem": { + "dark": "foam", + "light": "pine" + }, + "markdownListEnumeration": { + "dark": "rose", + "light": "#d7827e" + }, + "markdownImage": { + "dark": "foam", + "light": "pine" + }, + "markdownImageText": { + "dark": "rose", + "light": "#d7827e" + }, + "markdownCodeBlock": { + "dark": "#e0def4", + "light": "#575279" + }, + "syntaxComment": { + "dark": "muted", + "light": "dawnMuted" + }, + "syntaxKeyword": { + "dark": "pine", + "light": "#286983" + }, + "syntaxFunction": { + "dark": "rose", + "light": "#d7827e" + }, + "syntaxVariable": { + "dark": "#e0def4", + "light": "#575279" + }, + "syntaxString": { + "dark": "gold", + "light": "#ea9d34" + }, + "syntaxNumber": { + "dark": "iris", + "light": "#907aa9" + }, + "syntaxType": { + "dark": "foam", + "light": "#56949f" + }, + "syntaxOperator": { + "dark": "subtle", + "light": "dawnSubtle" + }, + "syntaxPunctuation": { + "dark": "subtle", + "light": "dawnSubtle" + } + } + }, + "solarized": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "base03": "#002b36", + "base02": "#073642", + "base01": "#586e75", + "base00": "#657b83", + "base0": "#839496", + "base1": "#93a1a1", + "base2": "#eee8d5", + "base3": "#fdf6e3", + "yellow": "#b58900", + "orange": "#cb4b16", + "red": "#dc322f", + "magenta": "#d33682", + "violet": "#6c71c4", + "blue": "#268bd2", + "cyan": "#2aa198", + "green": "#859900" + }, + "theme": { + "primary": { + "dark": "blue", + "light": "blue" + }, + "secondary": { + "dark": "violet", + "light": "violet" + }, + "accent": { + "dark": "cyan", + "light": "cyan" + }, + "error": { + "dark": "red", + "light": "red" + }, + "warning": { + "dark": "yellow", + "light": "yellow" + }, + "success": { + "dark": "green", + "light": "green" + }, + "info": { + "dark": "orange", + "light": "orange" + }, + "text": { + "dark": "base0", + "light": "base00" + }, + "textMuted": { + "dark": "base01", + "light": "base1" + 
}, + "background": { + "dark": "base03", + "light": "base3" + }, + "backgroundPanel": { + "dark": "base02", + "light": "base2" + }, + "backgroundElement": { + "dark": "#073642", + "light": "#eee8d5" + }, + "border": { + "dark": "base02", + "light": "base2" + }, + "borderActive": { + "dark": "base01", + "light": "base1" + }, + "borderSubtle": { + "dark": "#073642", + "light": "#eee8d5" + }, + "diffAdded": { + "dark": "green", + "light": "green" + }, + "diffRemoved": { + "dark": "red", + "light": "red" + }, + "diffContext": { + "dark": "base01", + "light": "base1" + }, + "diffHunkHeader": { + "dark": "base01", + "light": "base1" + }, + "diffHighlightAdded": { + "dark": "green", + "light": "green" + }, + "diffHighlightRemoved": { + "dark": "red", + "light": "red" + }, + "diffAddedBg": { + "dark": "#073642", + "light": "#eee8d5" + }, + "diffRemovedBg": { + "dark": "#073642", + "light": "#eee8d5" + }, + "diffContextBg": { + "dark": "base02", + "light": "base2" + }, + "diffLineNumber": { + "dark": "base01", + "light": "base1" + }, + "diffAddedLineNumberBg": { + "dark": "#073642", + "light": "#eee8d5" + }, + "diffRemovedLineNumberBg": { + "dark": "#073642", + "light": "#eee8d5" + }, + "markdownText": { + "dark": "base0", + "light": "base00" + }, + "markdownHeading": { + "dark": "blue", + "light": "blue" + }, + "markdownLink": { + "dark": "cyan", + "light": "cyan" + }, + "markdownLinkText": { + "dark": "violet", + "light": "violet" + }, + "markdownCode": { + "dark": "green", + "light": "green" + }, + "markdownBlockQuote": { + "dark": "base01", + "light": "base1" + }, + "markdownEmph": { + "dark": "yellow", + "light": "yellow" + }, + "markdownStrong": { + "dark": "orange", + "light": "orange" + }, + "markdownHorizontalRule": { + "dark": "base01", + "light": "base1" + }, + "markdownListItem": { + "dark": "blue", + "light": "blue" + }, + "markdownListEnumeration": { + "dark": "cyan", + "light": "cyan" + }, + "markdownImage": { + "dark": "cyan", + "light": "cyan" + }, + 
"markdownImageText": { + "dark": "violet", + "light": "violet" + }, + "markdownCodeBlock": { + "dark": "base0", + "light": "base00" + }, + "syntaxComment": { + "dark": "base01", + "light": "base1" + }, + "syntaxKeyword": { + "dark": "green", + "light": "green" + }, + "syntaxFunction": { + "dark": "blue", + "light": "blue" + }, + "syntaxVariable": { + "dark": "cyan", + "light": "cyan" + }, + "syntaxString": { + "dark": "cyan", + "light": "cyan" + }, + "syntaxNumber": { + "dark": "magenta", + "light": "magenta" + }, + "syntaxType": { + "dark": "yellow", + "light": "yellow" + }, + "syntaxOperator": { + "dark": "green", + "light": "green" + }, + "syntaxPunctuation": { + "dark": "base0", + "light": "base00" + } + } + }, + "synthwave84": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background": "#262335", + "backgroundAlt": "#1e1a29", + "backgroundPanel": "#2a2139", + "foreground": "#ffffff", + "foregroundMuted": "#848bbd", + "pink": "#ff7edb", + "pinkBright": "#ff92df", + "cyan": "#36f9f6", + "cyanBright": "#72f1f8", + "yellow": "#fede5d", + "yellowBright": "#fff95d", + "orange": "#ff8b39", + "orangeBright": "#ff9f43", + "purple": "#b084eb", + "purpleBright": "#c792ea", + "red": "#fe4450", + "redBright": "#ff5e5b", + "green": "#72f1b8", + "greenBright": "#97f1d8" + }, + "theme": { + "primary": { + "dark": "cyan", + "light": "#00bcd4" + }, + "secondary": { + "dark": "pink", + "light": "#e91e63" + }, + "accent": { + "dark": "purple", + "light": "#9c27b0" + }, + "error": { + "dark": "red", + "light": "#f44336" + }, + "warning": { + "dark": "yellow", + "light": "#ff9800" + }, + "success": { + "dark": "green", + "light": "#4caf50" + }, + "info": { + "dark": "orange", + "light": "#ff5722" + }, + "text": { + "dark": "foreground", + "light": "#262335" + }, + "textMuted": { + "dark": "foregroundMuted", + "light": "#5c5c8a" + }, + "background": { + "dark": "#262335", + "light": "#fafafa" + }, + "backgroundPanel": { + "dark": "#1e1a29", + "light": "#f5f5f5" + }, 
+ "backgroundElement": { + "dark": "#2a2139", + "light": "#eeeeee" + }, + "border": { + "dark": "#495495", + "light": "#e0e0e0" + }, + "borderActive": { + "dark": "cyan", + "light": "#00bcd4" + }, + "borderSubtle": { + "dark": "#241b2f", + "light": "#f0f0f0" + }, + "diffAdded": { + "dark": "green", + "light": "#4caf50" + }, + "diffRemoved": { + "dark": "red", + "light": "#f44336" + }, + "diffContext": { + "dark": "foregroundMuted", + "light": "#5c5c8a" + }, + "diffHunkHeader": { + "dark": "purple", + "light": "#9c27b0" + }, + "diffHighlightAdded": { + "dark": "greenBright", + "light": "#4caf50" + }, + "diffHighlightRemoved": { + "dark": "redBright", + "light": "#f44336" + }, + "diffAddedBg": { + "dark": "#1a3a2a", + "light": "#e8f5e9" + }, + "diffRemovedBg": { + "dark": "#3a1a2a", + "light": "#ffebee" + }, + "diffContextBg": { + "dark": "#1e1a29", + "light": "#f5f5f5" + }, + "diffLineNumber": { + "dark": "#495495", + "light": "#b0b0b0" + }, + "diffAddedLineNumberBg": { + "dark": "#1a3a2a", + "light": "#e8f5e9" + }, + "diffRemovedLineNumberBg": { + "dark": "#3a1a2a", + "light": "#ffebee" + }, + "markdownText": { + "dark": "foreground", + "light": "#262335" + }, + "markdownHeading": { + "dark": "pink", + "light": "#e91e63" + }, + "markdownLink": { + "dark": "cyan", + "light": "#00bcd4" + }, + "markdownLinkText": { + "dark": "purple", + "light": "#9c27b0" + }, + "markdownCode": { + "dark": "green", + "light": "#4caf50" + }, + "markdownBlockQuote": { + "dark": "foregroundMuted", + "light": "#5c5c8a" + }, + "markdownEmph": { + "dark": "yellow", + "light": "#ff9800" + }, + "markdownStrong": { + "dark": "orange", + "light": "#ff5722" + }, + "markdownHorizontalRule": { + "dark": "#495495", + "light": "#e0e0e0" + }, + "markdownListItem": { + "dark": "cyan", + "light": "#00bcd4" + }, + "markdownListEnumeration": { + "dark": "purple", + "light": "#9c27b0" + }, + "markdownImage": { + "dark": "cyan", + "light": "#00bcd4" + }, + "markdownImageText": { + "dark": "purple", + 
"light": "#9c27b0" + }, + "markdownCodeBlock": { + "dark": "foreground", + "light": "#262335" + }, + "syntaxComment": { + "dark": "foregroundMuted", + "light": "#5c5c8a" + }, + "syntaxKeyword": { + "dark": "pink", + "light": "#e91e63" + }, + "syntaxFunction": { + "dark": "orange", + "light": "#ff5722" + }, + "syntaxVariable": { + "dark": "foreground", + "light": "#262335" + }, + "syntaxString": { + "dark": "yellow", + "light": "#ff9800" + }, + "syntaxNumber": { + "dark": "purple", + "light": "#9c27b0" + }, + "syntaxType": { + "dark": "cyan", + "light": "#00bcd4" + }, + "syntaxOperator": { + "dark": "pink", + "light": "#e91e63" + }, + "syntaxPunctuation": { + "dark": "foreground", + "light": "#262335" + } + } + }, + "tokyonight": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "darkStep1": "#1a1b26", + "darkStep2": "#1e2030", + "darkStep3": "#222436", + "darkStep4": "#292e42", + "darkStep5": "#3b4261", + "darkStep6": "#545c7e", + "darkStep7": "#737aa2", + "darkStep8": "#9099b2", + "darkStep9": "#82aaff", + "darkStep10": "#89b4fa", + "darkStep11": "#828bb8", + "darkStep12": "#c8d3f5", + "darkRed": "#ff757f", + "darkOrange": "#ff966c", + "darkYellow": "#ffc777", + "darkGreen": "#c3e88d", + "darkCyan": "#86e1fc", + "darkPurple": "#c099ff", + "lightStep1": "#e1e2e7", + "lightStep2": "#d5d6db", + "lightStep3": "#c8c9ce", + "lightStep4": "#b9bac1", + "lightStep5": "#a8aecb", + "lightStep6": "#9699a8", + "lightStep7": "#737a8c", + "lightStep8": "#5a607d", + "lightStep9": "#2e7de9", + "lightStep10": "#1a6ce7", + "lightStep11": "#8990a3", + "lightStep12": "#3760bf", + "lightRed": "#f52a65", + "lightOrange": "#b15c00", + "lightYellow": "#8c6c3e", + "lightGreen": "#587539", + "lightCyan": "#007197", + "lightPurple": "#9854f1" + }, + "theme": { + "primary": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "secondary": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "accent": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "error": { + 
"dark": "darkRed", + "light": "lightRed" + }, + "warning": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "success": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "info": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "text": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "textMuted": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "background": { + "dark": "darkStep1", + "light": "lightStep1" + }, + "backgroundPanel": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "backgroundElement": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "border": { + "dark": "darkStep7", + "light": "lightStep7" + }, + "borderActive": { + "dark": "darkStep8", + "light": "lightStep8" + }, + "borderSubtle": { + "dark": "darkStep6", + "light": "lightStep6" + }, + "diffAdded": { + "dark": "#4fd6be", + "light": "#1e725c" + }, + "diffRemoved": { + "dark": "#c53b53", + "light": "#c53b53" + }, + "diffContext": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHunkHeader": { + "dark": "#828bb8", + "light": "#7086b5" + }, + "diffHighlightAdded": { + "dark": "#b8db87", + "light": "#4db380" + }, + "diffHighlightRemoved": { + "dark": "#e26a75", + "light": "#f52a65" + }, + "diffAddedBg": { + "dark": "#20303b", + "light": "#d5e5d5" + }, + "diffRemovedBg": { + "dark": "#37222c", + "light": "#f7d8db" + }, + "diffContextBg": { + "dark": "darkStep2", + "light": "lightStep2" + }, + "diffLineNumber": { + "dark": "darkStep3", + "light": "lightStep3" + }, + "diffAddedLineNumberBg": { + "dark": "#1b2b34", + "light": "#c5d5c5" + }, + "diffRemovedLineNumberBg": { + "dark": "#2d1f26", + "light": "#e7c8cb" + }, + "markdownText": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "markdownHeading": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "markdownLink": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownLinkText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCode": { + "dark": 
"darkGreen", + "light": "lightGreen" + }, + "markdownBlockQuote": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownEmph": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "markdownStrong": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "markdownHorizontalRule": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "markdownListItem": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownListEnumeration": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownImage": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "markdownImageText": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "markdownCodeBlock": { + "dark": "darkStep12", + "light": "lightStep12" + }, + "syntaxComment": { + "dark": "darkStep11", + "light": "lightStep11" + }, + "syntaxKeyword": { + "dark": "darkPurple", + "light": "lightPurple" + }, + "syntaxFunction": { + "dark": "darkStep9", + "light": "lightStep9" + }, + "syntaxVariable": { + "dark": "darkRed", + "light": "lightRed" + }, + "syntaxString": { + "dark": "darkGreen", + "light": "lightGreen" + }, + "syntaxNumber": { + "dark": "darkOrange", + "light": "lightOrange" + }, + "syntaxType": { + "dark": "darkYellow", + "light": "lightYellow" + }, + "syntaxOperator": { + "dark": "darkCyan", + "light": "lightCyan" + }, + "syntaxPunctuation": { + "dark": "darkStep12", + "light": "lightStep12" + } + } + }, + "vercel": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "background100": "#0A0A0A", + "background200": "#000000", + "gray100": "#1A1A1A", + "gray200": "#1F1F1F", + "gray300": "#292929", + "gray400": "#2E2E2E", + "gray500": "#454545", + "gray600": "#878787", + "gray700": "#8F8F8F", + "gray900": "#A1A1A1", + "gray1000": "#EDEDED", + "blue600": "#0099FF", + "blue700": "#0070F3", + "blue900": "#52A8FF", + "blue1000": "#EBF8FF", + "red700": "#E5484D", + "red900": "#FF6166", + "red1000": "#FDECED", + "amber700": "#FFB224", + "amber900": "#F2A700", + "amber1000": 
"#FDF4DC", + "green700": "#46A758", + "green900": "#63C46D", + "green1000": "#E6F9E9", + "teal700": "#12A594", + "teal900": "#0AC7AC", + "purple700": "#8E4EC6", + "purple900": "#BF7AF0", + "pink700": "#E93D82", + "pink900": "#F75590", + "highlightPink": "#FF0080", + "highlightPurple": "#F81CE5", + "cyan": "#50E3C2", + "lightBackground": "#FFFFFF", + "lightGray100": "#FAFAFA", + "lightGray200": "#EAEAEA", + "lightGray600": "#666666", + "lightGray1000": "#171717" + }, + "theme": { + "primary": { + "dark": "blue700", + "light": "blue700" + }, + "secondary": { + "dark": "blue900", + "light": "#0062D1" + }, + "accent": { + "dark": "purple700", + "light": "purple700" + }, + "error": { + "dark": "red700", + "light": "#DC3545" + }, + "warning": { + "dark": "amber700", + "light": "#FF9500" + }, + "success": { + "dark": "green700", + "light": "#388E3C" + }, + "info": { + "dark": "blue900", + "light": "blue700" + }, + "text": { + "dark": "gray1000", + "light": "lightGray1000" + }, + "textMuted": { + "dark": "gray600", + "light": "lightGray600" + }, + "background": { + "dark": "background200", + "light": "lightBackground" + }, + "backgroundPanel": { + "dark": "gray100", + "light": "lightGray100" + }, + "backgroundElement": { + "dark": "gray300", + "light": "lightGray200" + }, + "border": { + "dark": "gray200", + "light": "lightGray200" + }, + "borderActive": { + "dark": "gray500", + "light": "#999999" + }, + "borderSubtle": { + "dark": "gray100", + "light": "#EAEAEA" + }, + "diffAdded": { + "dark": "green900", + "light": "green700" + }, + "diffRemoved": { + "dark": "red900", + "light": "red700" + }, + "diffContext": { + "dark": "gray600", + "light": "lightGray600" + }, + "diffHunkHeader": { + "dark": "gray600", + "light": "lightGray600" + }, + "diffHighlightAdded": { + "dark": "green900", + "light": "green700" + }, + "diffHighlightRemoved": { + "dark": "red900", + "light": "red700" + }, + "diffAddedBg": { + "dark": "#0B1D0F", + "light": "#E6F9E9" + }, + "diffRemovedBg": { + 
"dark": "#2A1314", + "light": "#FDECED" + }, + "diffContextBg": { + "dark": "background200", + "light": "lightBackground" + }, + "diffLineNumber": { + "dark": "gray600", + "light": "lightGray600" + }, + "diffAddedLineNumberBg": { + "dark": "#0F2613", + "light": "#D6F5D6" + }, + "diffRemovedLineNumberBg": { + "dark": "#3C1618", + "light": "#FFE5E5" + }, + "markdownText": { + "dark": "gray1000", + "light": "lightGray1000" + }, + "markdownHeading": { + "dark": "purple900", + "light": "purple700" + }, + "markdownLink": { + "dark": "blue900", + "light": "blue700" + }, + "markdownLinkText": { + "dark": "teal900", + "light": "teal700" + }, + "markdownCode": { + "dark": "green900", + "light": "green700" + }, + "markdownBlockQuote": { + "dark": "gray600", + "light": "lightGray600" + }, + "markdownEmph": { + "dark": "amber900", + "light": "amber700" + }, + "markdownStrong": { + "dark": "pink900", + "light": "pink700" + }, + "markdownHorizontalRule": { + "dark": "gray500", + "light": "#999999" + }, + "markdownListItem": { + "dark": "gray1000", + "light": "lightGray1000" + }, + "markdownListEnumeration": { + "dark": "blue900", + "light": "blue700" + }, + "markdownImage": { + "dark": "teal900", + "light": "teal700" + }, + "markdownImageText": { + "dark": "cyan", + "light": "teal700" + }, + "markdownCodeBlock": { + "dark": "gray1000", + "light": "lightGray1000" + }, + "syntaxComment": { + "dark": "gray600", + "light": "#888888" + }, + "syntaxKeyword": { + "dark": "pink900", + "light": "pink700" + }, + "syntaxFunction": { + "dark": "purple900", + "light": "purple700" + }, + "syntaxVariable": { + "dark": "blue900", + "light": "blue700" + }, + "syntaxString": { + "dark": "green900", + "light": "green700" + }, + "syntaxNumber": { + "dark": "amber900", + "light": "amber700" + }, + "syntaxType": { + "dark": "teal900", + "light": "teal700" + }, + "syntaxOperator": { + "dark": "pink900", + "light": "pink700" + }, + "syntaxPunctuation": { + "dark": "gray1000", + "light": "lightGray1000" 
+ } + } + }, + "vesper": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "vesperBg": "#101010", + "vesperFg": "#FFF", + "vesperComment": "#8b8b8b", + "vesperKeyword": "#A0A0A0", + "vesperFunction": "#FFC799", + "vesperString": "#99FFE4", + "vesperNumber": "#FFC799", + "vesperError": "#FF8080", + "vesperWarning": "#FFC799", + "vesperSuccess": "#99FFE4", + "vesperMuted": "#A0A0A0" + }, + "theme": { + "primary": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "secondary": { + "dark": "#99FFE4", + "light": "#99FFE4" + }, + "accent": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "error": { + "dark": "vesperError", + "light": "vesperError" + }, + "warning": { + "dark": "vesperWarning", + "light": "vesperWarning" + }, + "success": { + "dark": "vesperSuccess", + "light": "vesperSuccess" + }, + "info": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "text": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "textMuted": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "background": { + "dark": "vesperBg", + "light": "#FFF" + }, + "backgroundPanel": { + "dark": "vesperBg", + "light": "#F0F0F0" + }, + "backgroundElement": { + "dark": "vesperBg", + "light": "#E0E0E0" + }, + "border": { + "dark": "#282828", + "light": "#D0D0D0" + }, + "borderActive": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "borderSubtle": { + "dark": "#1C1C1C", + "light": "#E8E8E8" + }, + "diffAdded": { + "dark": "vesperSuccess", + "light": "vesperSuccess" + }, + "diffRemoved": { + "dark": "vesperError", + "light": "vesperError" + }, + "diffContext": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "diffHunkHeader": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "diffHighlightAdded": { + "dark": "vesperSuccess", + "light": "vesperSuccess" + }, + "diffHighlightRemoved": { + "dark": "vesperError", + "light": "vesperError" + }, + "diffAddedBg": { + "dark": "#0d2818", + "light": "#e8f5e8" + }, + "diffRemovedBg": { + "dark": "#281a1a", + "light": 
"#f5e8e8" + }, + "diffContextBg": { + "dark": "vesperBg", + "light": "#F8F8F8" + }, + "diffLineNumber": { + "dark": "#505050", + "light": "#808080" + }, + "diffAddedLineNumberBg": { + "dark": "#0d2818", + "light": "#e8f5e8" + }, + "diffRemovedLineNumberBg": { + "dark": "#281a1a", + "light": "#f5e8e8" + }, + "markdownText": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownHeading": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "markdownLink": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "markdownLinkText": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "markdownCode": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "markdownBlockQuote": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownEmph": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownStrong": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownHorizontalRule": { + "dark": "#65737E", + "light": "#65737E" + }, + "markdownListItem": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownListEnumeration": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "markdownImage": { + "dark": "#FFC799", + "light": "#FFC799" + }, + "markdownImageText": { + "dark": "vesperMuted", + "light": "vesperMuted" + }, + "markdownCodeBlock": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "syntaxComment": { + "dark": "vesperComment", + "light": "vesperComment" + }, + "syntaxKeyword": { + "dark": "vesperKeyword", + "light": "vesperKeyword" + }, + "syntaxFunction": { + "dark": "vesperFunction", + "light": "vesperFunction" + }, + "syntaxVariable": { + "dark": "vesperFg", + "light": "vesperBg" + }, + "syntaxString": { + "dark": "vesperString", + "light": "vesperString" + }, + "syntaxNumber": { + "dark": "vesperNumber", + "light": "vesperNumber" + }, + "syntaxType": { + "dark": "vesperFunction", + "light": "vesperFunction" + }, + "syntaxOperator": { + "dark": "vesperKeyword", + "light": "vesperKeyword" + }, + "syntaxPunctuation": { + "dark": 
"vesperFg", + "light": "vesperBg" + } + } + }, + "zenburn": { + "$schema": "https://opencode.ai/theme.json", + "defs": { + "bg": "#3f3f3f", + "bgAlt": "#4f4f4f", + "bgPanel": "#5f5f5f", + "fg": "#dcdccc", + "fgMuted": "#9f9f9f", + "red": "#cc9393", + "redBright": "#dca3a3", + "green": "#7f9f7f", + "greenBright": "#8fb28f", + "yellow": "#f0dfaf", + "yellowDim": "#e0cf9f", + "blue": "#8cd0d3", + "blueDim": "#7cb8bb", + "magenta": "#dc8cc3", + "cyan": "#93e0e3", + "orange": "#dfaf8f" + }, + "theme": { + "primary": { + "dark": "blue", + "light": "#5f7f8f" + }, + "secondary": { + "dark": "magenta", + "light": "#8f5f8f" + }, + "accent": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "error": { + "dark": "red", + "light": "#8f5f5f" + }, + "warning": { + "dark": "yellow", + "light": "#8f8f5f" + }, + "success": { + "dark": "green", + "light": "#5f8f5f" + }, + "info": { + "dark": "orange", + "light": "#8f7f5f" + }, + "text": { + "dark": "fg", + "light": "#3f3f3f" + }, + "textMuted": { + "dark": "fgMuted", + "light": "#6f6f6f" + }, + "background": { + "dark": "bg", + "light": "#ffffef" + }, + "backgroundPanel": { + "dark": "bgAlt", + "light": "#f5f5e5" + }, + "backgroundElement": { + "dark": "bgPanel", + "light": "#ebebdb" + }, + "border": { + "dark": "#5f5f5f", + "light": "#d0d0c0" + }, + "borderActive": { + "dark": "blue", + "light": "#5f7f8f" + }, + "borderSubtle": { + "dark": "#4f4f4f", + "light": "#e0e0d0" + }, + "diffAdded": { + "dark": "green", + "light": "#5f8f5f" + }, + "diffRemoved": { + "dark": "red", + "light": "#8f5f5f" + }, + "diffContext": { + "dark": "fgMuted", + "light": "#6f6f6f" + }, + "diffHunkHeader": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "diffHighlightAdded": { + "dark": "greenBright", + "light": "#5f8f5f" + }, + "diffHighlightRemoved": { + "dark": "redBright", + "light": "#8f5f5f" + }, + "diffAddedBg": { + "dark": "#4f5f4f", + "light": "#efffef" + }, + "diffRemovedBg": { + "dark": "#5f4f4f", + "light": "#ffefef" + }, + "diffContextBg": { + 
"dark": "bgAlt", + "light": "#f5f5e5" + }, + "diffLineNumber": { + "dark": "#6f6f6f", + "light": "#b0b0a0" + }, + "diffAddedLineNumberBg": { + "dark": "#4f5f4f", + "light": "#efffef" + }, + "diffRemovedLineNumberBg": { + "dark": "#5f4f4f", + "light": "#ffefef" + }, + "markdownText": { + "dark": "fg", + "light": "#3f3f3f" + }, + "markdownHeading": { + "dark": "yellow", + "light": "#8f8f5f" + }, + "markdownLink": { + "dark": "blue", + "light": "#5f7f8f" + }, + "markdownLinkText": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "markdownCode": { + "dark": "green", + "light": "#5f8f5f" + }, + "markdownBlockQuote": { + "dark": "fgMuted", + "light": "#6f6f6f" + }, + "markdownEmph": { + "dark": "yellowDim", + "light": "#8f8f5f" + }, + "markdownStrong": { + "dark": "orange", + "light": "#8f7f5f" + }, + "markdownHorizontalRule": { + "dark": "fgMuted", + "light": "#6f6f6f" + }, + "markdownListItem": { + "dark": "blue", + "light": "#5f7f8f" + }, + "markdownListEnumeration": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "markdownImage": { + "dark": "blue", + "light": "#5f7f8f" + }, + "markdownImageText": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "markdownCodeBlock": { + "dark": "fg", + "light": "#3f3f3f" + }, + "syntaxComment": { + "dark": "#7f9f7f", + "light": "#5f7f5f" + }, + "syntaxKeyword": { + "dark": "yellow", + "light": "#8f8f5f" + }, + "syntaxFunction": { + "dark": "blue", + "light": "#5f7f8f" + }, + "syntaxVariable": { + "dark": "fg", + "light": "#3f3f3f" + }, + "syntaxString": { + "dark": "red", + "light": "#8f5f5f" + }, + "syntaxNumber": { + "dark": "greenBright", + "light": "#5f8f5f" + }, + "syntaxType": { + "dark": "cyan", + "light": "#5f8f8f" + }, + "syntaxOperator": { + "dark": "yellow", + "light": "#8f8f5f" + }, + "syntaxPunctuation": { + "dark": "fg", + "light": "#3f3f3f" + } + } + } +} diff --git a/foundry/packages/cli/src/tmux.ts b/foundry/packages/cli/src/tmux.ts new file mode 100644 index 0000000..297932e --- /dev/null +++ 
b/foundry/packages/cli/src/tmux.ts @@ -0,0 +1,180 @@ +import { execFileSync, spawnSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { homedir } from "node:os"; + +const SYMBOL_RUNNING = "▶"; +const SYMBOL_IDLE = "✓"; +const DEFAULT_OPENCODE_ENDPOINT = "http://127.0.0.1:4097/opencode"; + +export interface TmuxWindowMatch { + target: string; + windowName: string; +} + +export interface SpawnCreateTmuxWindowInput { + branchName: string; + targetPath: string; + sessionId?: string | null; + opencodeEndpoint?: string; +} + +export interface SpawnCreateTmuxWindowResult { + created: boolean; + reason: "created" | "not-in-tmux" | "not-local-path" | "window-exists" | "tmux-new-window-failed"; +} + +function isTmuxSession(): boolean { + return Boolean(process.env.TMUX); +} + +function isAbsoluteLocalPath(path: string): boolean { + return path.startsWith("/"); +} + +function runTmux(args: string[]): boolean { + const result = spawnSync("tmux", args, { stdio: "ignore" }); + return !result.error && result.status === 0; +} + +function shellEscape(value: string): string { + if (value.length === 0) { + return "''"; + } + return `'${value.replace(/'/g, `'\\''`)}'`; +} + +function opencodeExistsOnPath(): boolean { + const probe = spawnSync("which", ["opencode"], { stdio: "ignore" }); + return !probe.error && probe.status === 0; +} + +function resolveOpencodeBinary(): string { + const envOverride = process.env.HF_OPENCODE_BIN?.trim(); + if (envOverride) { + return envOverride; + } + + if (opencodeExistsOnPath()) { + return "opencode"; + } + + const bundledCandidates = [`${homedir()}/.local/share/sandbox-agent/bin/opencode`, `${homedir()}/.opencode/bin/opencode`]; + + for (const candidate of bundledCandidates) { + if (existsSync(candidate)) { + return candidate; + } + } + + return "opencode"; +} + +function attachCommand(sessionId: string, targetPath: string, endpoint: string): string { + const opencode = resolveOpencodeBinary(); + return 
[shellEscape(opencode), "attach", shellEscape(endpoint), "--session", shellEscape(sessionId), "--dir", shellEscape(targetPath)].join(" "); +} + +export function stripStatusPrefix(windowName: string): string { + return windowName + .trimStart() + .replace(new RegExp(`^${SYMBOL_RUNNING}\\s+`), "") + .replace(new RegExp(`^${SYMBOL_IDLE}\\s+`), "") + .trim(); +} + +export function findTmuxWindowsByBranch(branchName: string): TmuxWindowMatch[] { + const output = spawnSync("tmux", ["list-windows", "-a", "-F", "#{session_name}:#{window_id}:#{window_name}"], { encoding: "utf8" }); + + if (output.error || output.status !== 0 || !output.stdout) { + return []; + } + + const lines = output.stdout.split(/\r?\n/).filter((line) => line.trim().length > 0); + const matches: TmuxWindowMatch[] = []; + + for (const line of lines) { + const parts = line.split(":", 3); + if (parts.length !== 3) { + continue; + } + + const sessionName = parts[0] ?? ""; + const windowId = parts[1] ?? ""; + const windowName = parts[2] ?? ""; + const clean = stripStatusPrefix(windowName); + if (clean !== branchName) { + continue; + } + + matches.push({ + target: `${sessionName}:${windowId}`, + windowName, + }); + } + + return matches; +} + +export function spawnCreateTmuxWindow(input: SpawnCreateTmuxWindowInput): SpawnCreateTmuxWindowResult { + if (!isTmuxSession()) { + return { created: false, reason: "not-in-tmux" }; + } + + if (!isAbsoluteLocalPath(input.targetPath)) { + return { created: false, reason: "not-local-path" }; + } + + if (findTmuxWindowsByBranch(input.branchName).length > 0) { + return { created: false, reason: "window-exists" }; + } + + const windowName = input.sessionId ? `${SYMBOL_RUNNING} ${input.branchName}` : input.branchName; + const endpoint = input.opencodeEndpoint ?? 
DEFAULT_OPENCODE_ENDPOINT; + let output = ""; + try { + output = execFileSync("tmux", ["new-window", "-d", "-P", "-F", "#{window_id}", "-n", windowName, "-c", input.targetPath], { + encoding: "utf8", + stdio: ["ignore", "pipe", "pipe"], + }); + } catch { + return { created: false, reason: "tmux-new-window-failed" }; + } + + const windowId = output.trim(); + if (!windowId) { + return { created: false, reason: "tmux-new-window-failed" }; + } + + if (input.sessionId) { + const leftPane = `${windowId}.0`; + + // Split left pane horizontally → creates right pane; capture its pane ID + let rightPane: string; + try { + rightPane = execFileSync("tmux", ["split-window", "-h", "-P", "-F", "#{pane_id}", "-t", leftPane, "-c", input.targetPath], { + encoding: "utf8", + stdio: ["ignore", "pipe", "pipe"], + }).trim(); + } catch { + return { created: true, reason: "created" }; + } + + if (!rightPane) { + return { created: true, reason: "created" }; + } + + // Split right pane vertically → top-right (rightPane) + bottom-right (new) + runTmux(["split-window", "-v", "-t", rightPane, "-c", input.targetPath]); + + // Left pane 60% width, top-right pane 70% height + runTmux(["resize-pane", "-t", leftPane, "-x", "60%"]); + runTmux(["resize-pane", "-t", rightPane, "-y", "70%"]); + + // Editor in left pane, agent attach in top-right pane + runTmux(["send-keys", "-t", leftPane, "nvim .", "Enter"]); + runTmux(["send-keys", "-t", rightPane, attachCommand(input.sessionId, input.targetPath, endpoint), "Enter"]); + runTmux(["select-pane", "-t", rightPane]); + } + + return { created: true, reason: "created" }; +} diff --git a/foundry/packages/cli/src/tui.ts b/foundry/packages/cli/src/tui.ts new file mode 100644 index 0000000..062bb95 --- /dev/null +++ b/foundry/packages/cli/src/tui.ts @@ -0,0 +1,625 @@ +import type { AppConfig, TaskRecord, WorkspaceTaskDetail } from "@sandbox-agent/foundry-shared"; +import { spawnSync } from "node:child_process"; +import { createBackendClientFromConfig, 
filterTasks, formatRelativeAge, groupTaskStatus } from "@sandbox-agent/foundry-client"; +import { CLI_BUILD_ID } from "./build-id.js"; +import { writeStdout } from "./io.js"; +import { resolveTuiTheme, type TuiTheme } from "./theme.js"; + +interface KeyEventLike { + name?: string; + ctrl?: boolean; + meta?: boolean; +} + +const HELP_LINES = [ + "Shortcuts", + "Ctrl-H toggle cheatsheet", + "Enter switch to branch", + "Ctrl-A attach to session", + "Ctrl-O open PR in browser", + "Ctrl-X archive branch / close PR", + "Ctrl-Y merge highlighted PR", + "Ctrl-S sync task with remote", + "Ctrl-N / Down next row", + "Ctrl-P / Up previous row", + "Backspace delete filter", + "Type filter by branch/PR/author", + "Esc / Ctrl-C cancel", + "", + "Legend", + "Agent: \u{1F916} running \u{1F4AC} idle \u25CC queued", +]; + +const COLUMN_WIDTHS = { + diff: 10, + agent: 5, + pr: 6, + author: 10, + ci: 7, + review: 8, + age: 5, +} as const; + +interface DisplayRow { + name: string; + diff: string; + agent: string; + pr: string; + author: string; + ci: string; + review: string; + age: string; +} + +type TuiTaskRow = TaskRecord & Pick & { activeSessionId?: string | null }; + +interface RenderOptions { + width?: number; + height?: number; +} + +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all( + rows.map(async (row) => { + const [task, detail] = await Promise.all([ + client.getTask(organizationId, row.repoId, row.taskId), + client.getTaskDetail(organizationId, row.repoId, row.taskId).catch(() => null), + ]); + return { + ...task, + pullRequest: detail?.pullRequest ?? null, + activeSessionId: detail?.activeSessionId ?? null, + }; + }), + ); +} + +function pad(input: string, width: number): string { + if (width <= 0) { + return ""; + } + const chars = Array.from(input); + const text = chars.length > width ? 
`${chars.slice(0, Math.max(1, width - 1)).join("")}…` : input; + return text.padEnd(width, " "); +} + +function truncateToLen(input: string, maxLen: number): string { + if (maxLen <= 0) { + return ""; + } + return Array.from(input).slice(0, maxLen).join(""); +} + +function fitLine(input: string, width: number): string { + if (width <= 0) { + return ""; + } + const clipped = truncateToLen(input, width); + const len = Array.from(clipped).length; + if (len >= width) { + return clipped; + } + return `${clipped}${" ".repeat(width - len)}`; +} + +function overlayLine(base: string, overlay: string, startCol: number, width: number): string { + const out = Array.from(fitLine(base, width)); + const src = Array.from(truncateToLen(overlay, Math.max(0, width - startCol))); + for (let i = 0; i < src.length; i += 1) { + const col = startCol + i; + if (col >= 0 && col < out.length) { + out[col] = src[i] ?? " "; + } + } + return out.join(""); +} + +function buildFooterLine(width: number, segments: string[], right: string): string { + if (width <= 0) { + return ""; + } + + const rightLen = Array.from(right).length; + if (width <= rightLen + 1) { + return truncateToLen(right, width); + } + + const leftMax = width - rightLen - 1; + let used = 0; + let left = ""; + let first = true; + + for (const segment of segments) { + const chunk = first ? 
segment : ` | ${segment}`; + const clipped = truncateToLen(chunk, leftMax - used); + if (!clipped) { + break; + } + left += clipped; + used += Array.from(clipped).length; + first = false; + if (used >= leftMax) { + break; + } + } + + const padding = " ".repeat(Math.max(0, leftMax - used) + 1); + return `${left}${padding}${right}`; +} + +function agentSymbol(status: TaskRecord["status"]): string { + const group = groupTaskStatus(status); + if (group === "running") return "🤖"; + if (group === "idle") return "💬"; + if (group === "error") return "⚠"; + if (group === "queued") return "◌"; + return "-"; +} + +function toDisplayRow(row: TuiTaskRow): DisplayRow { + const prLabel = row.pullRequest ? `#${row.pullRequest.number}` : "-"; + const reviewLabel = row.pullRequest ? (row.pullRequest.isDraft ? "draft" : row.pullRequest.state.toLowerCase()) : "-"; + + return { + name: row.title || row.branchName || row.taskId, + diff: "-", + agent: agentSymbol(row.status), + pr: prLabel, + author: row.pullRequest?.authorLogin ?? "-", + ci: "-", + review: reviewLabel, + age: formatRelativeAge(row.updatedAt), + }; +} + +function helpLines(width: number): string[] { + const popupWidth = Math.max(40, Math.min(width - 2, 100)); + const innerWidth = Math.max(2, popupWidth - 2); + const borderTop = `┌${"─".repeat(innerWidth)}┐`; + const borderBottom = `└${"─".repeat(innerWidth)}┘`; + + const lines = [borderTop]; + for (const line of HELP_LINES) { + lines.push(`│${pad(line, innerWidth)}│`); + } + lines.push(borderBottom); + return lines; +} + +export function formatRows( + rows: TuiTaskRow[], + selected: number, + organizationId: string, + status: string, + searchQuery = "", + showHelp = false, + options: RenderOptions = {}, +): string { + const totalWidth = options.width ?? process.stdout.columns ?? 120; + const totalHeight = Math.max(6, options.height ?? process.stdout.rows ?? 
24); + const fixedWidth = + COLUMN_WIDTHS.diff + COLUMN_WIDTHS.agent + COLUMN_WIDTHS.pr + COLUMN_WIDTHS.author + COLUMN_WIDTHS.ci + COLUMN_WIDTHS.review + COLUMN_WIDTHS.age; + const separators = 7; + const prefixWidth = 2; + const branchWidth = Math.max(20, totalWidth - (fixedWidth + separators + prefixWidth)); + + const branchHeader = searchQuery ? `Branch/PR: ${searchQuery}_` : "Branch/PR (type to filter)"; + const header = [ + ` ${pad(branchHeader, branchWidth)} ${pad("Diff", COLUMN_WIDTHS.diff)} ${pad("Agent", COLUMN_WIDTHS.agent)} ${pad("PR", COLUMN_WIDTHS.pr)} ${pad("Author", COLUMN_WIDTHS.author)} ${pad("CI", COLUMN_WIDTHS.ci)} ${pad("Review", COLUMN_WIDTHS.review)} ${pad("Age", COLUMN_WIDTHS.age)}`, + "-".repeat(Math.max(24, Math.min(totalWidth, 180))), + ]; + + const body = + rows.length === 0 + ? ["No branches found."] + : rows.map((row, index) => { + const marker = index === selected ? "┃ " : " "; + const display = toDisplayRow(row); + return `${marker}${pad(display.name, branchWidth)} ${pad(display.diff, COLUMN_WIDTHS.diff)} ${pad(display.agent, COLUMN_WIDTHS.agent)} ${pad(display.pr, COLUMN_WIDTHS.pr)} ${pad(display.author, COLUMN_WIDTHS.author)} ${pad(display.ci, COLUMN_WIDTHS.ci)} ${pad(display.review, COLUMN_WIDTHS.review)} ${pad(display.age, COLUMN_WIDTHS.age)}`; + }); + + const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `organization:${organizationId}`, status], `v${CLI_BUILD_ID}`), totalWidth); + + const contentHeight = totalHeight - 1; + const lines = [...header, ...body].map((line) => fitLine(line, totalWidth)); + const page = lines.slice(0, contentHeight); + while (page.length < contentHeight) { + page.push(" ".repeat(totalWidth)); + } + + if (showHelp) { + const popup = helpLines(totalWidth); + const startRow = Math.max(0, Math.floor((contentHeight - popup.length) / 2)); + for (let i = 0; i < popup.length; i += 1) { + const target = startRow + i; + if (target >= page.length) { + break; + } + const popupLine = popup[i] 
?? ""; + const popupLen = Array.from(popupLine).length; + const startCol = Math.max(0, Math.floor((totalWidth - popupLen) / 2)); + page[target] = overlayLine(page[target] ?? "", popupLine, startCol, totalWidth); + } + } + + return [...page, footer].join("\n"); +} + +interface OpenTuiLike { + createCliRenderer?: (options?: Record) => Promise; + TextRenderable?: new ( + ctx: any, + options: { id: string; content: string }, + ) => { + content: unknown; + fg?: string; + bg?: string; + }; + fg?: (color: string) => (input: unknown) => unknown; + bg?: (color: string) => (input: unknown) => unknown; + StyledText?: new (chunks: unknown[]) => unknown; +} + +interface StyledTextApi { + fg: (color: string) => (input: unknown) => unknown; + bg: (color: string) => (input: unknown) => unknown; + StyledText: new (chunks: unknown[]) => unknown; +} + +function buildStyledContent(content: string, theme: TuiTheme, api: StyledTextApi): unknown { + const lines = content.split("\n"); + const chunks: unknown[] = []; + const footerIndex = Math.max(0, lines.length - 1); + + for (let i = 0; i < lines.length; i += 1) { + const line = lines[i] ?? 
""; + + let fgColor = theme.text; + let bgColor: string | undefined; + + if (line.startsWith("┃ ")) { + const marker = "┃ "; + const rest = line.slice(marker.length); + bgColor = theme.highlightBg; + const markerChunk = api.bg(bgColor)(api.fg(theme.selectionBorder)(marker)); + const restChunk = api.bg(bgColor)(api.fg(theme.highlightFg)(rest)); + chunks.push(markerChunk); + chunks.push(restChunk); + if (i < lines.length - 1) { + chunks.push(api.fg(theme.text)("\n")); + } + continue; + } + + if (i === 0) { + fgColor = theme.header; + } else if (i === 1) { + fgColor = theme.muted; + } else if (i === footerIndex) { + fgColor = theme.status; + } else if (line.startsWith("┌") || line.startsWith("│") || line.startsWith("└")) { + fgColor = theme.info; + } + + let chunk: unknown = api.fg(fgColor)(line); + if (bgColor) { + chunk = api.bg(bgColor)(chunk); + } + chunks.push(chunk); + + if (i < lines.length - 1) { + chunks.push(api.fg(theme.text)("\n")); + } + } + + return new api.StyledText(chunks); +} + +export async function runTui(config: AppConfig, organizationId: string): Promise { + const core = (await import("@opentui/core")) as OpenTuiLike; + const createCliRenderer = core.createCliRenderer; + const TextRenderable = core.TextRenderable; + const styleApi = core.fg && core.bg && core.StyledText ? 
{ fg: core.fg, bg: core.bg, StyledText: core.StyledText } : null; + + if (!createCliRenderer || !TextRenderable) { + throw new Error("OpenTUI runtime missing createCliRenderer/TextRenderable exports"); + } + + const themeResolution = resolveTuiTheme(config); + const client = createBackendClientFromConfig(config); + const renderer = await createCliRenderer({ exitOnCtrlC: false }); + const text = new TextRenderable(renderer, { + id: "foundry-switch", + content: "Loading...", + }); + text.fg = themeResolution.theme.text; + text.bg = themeResolution.theme.background; + renderer.root.add(text); + renderer.start(); + + let allRows: TuiTaskRow[] = []; + let filteredRows: TuiTaskRow[] = []; + let selected = 0; + let searchQuery = ""; + let showHelp = false; + let status = "loading..."; + let busy = false; + let closed = false; + let timer: ReturnType | null = null; + + const clampSelected = (): void => { + if (filteredRows.length === 0) { + selected = 0; + return; + } + if (selected < 0) { + selected = 0; + return; + } + if (selected >= filteredRows.length) { + selected = filteredRows.length - 1; + } + }; + + const render = (): void => { + if (closed) { + return; + } + const output = formatRows(filteredRows, selected, organizationId, status, searchQuery, showHelp, { + width: renderer.width ?? process.stdout.columns, + height: renderer.height ?? process.stdout.rows, + }); + text.content = styleApi ? buildStyledContent(output, themeResolution.theme, styleApi) : output; + renderer.requestRender(); + }; + + const refresh = async (): Promise => { + if (closed) { + return; + } + try { + allRows = await listDetailedTasks(client, organizationId); + if (closed) { + return; + } + filteredRows = filterTasks(allRows, searchQuery); + clampSelected(); + status = `tasks=${allRows.length} filtered=${filteredRows.length}`; + } catch (err) { + if (closed) { + return; + } + status = err instanceof Error ? 
err.message : String(err); + } + render(); + }; + + const selectedRow = (): TuiTaskRow | null => { + if (filteredRows.length === 0) { + return null; + } + return filteredRows[selected] ?? null; + }; + + let resolveDone: () => void = () => {}; + const done = new Promise((resolve) => { + resolveDone = () => resolve(); + }); + + const close = (output?: string): void => { + if (closed) { + return; + } + closed = true; + if (timer) { + clearInterval(timer); + timer = null; + } + process.off("SIGINT", handleSignal); + process.off("SIGTERM", handleSignal); + renderer.destroy(); + if (output) { + writeStdout(output); + } + resolveDone(); + }; + + const handleSignal = (): void => { + close(); + }; + + const runActionWithRefresh = async (label: string, fn: () => Promise, success: string): Promise => { + if (busy) { + return; + } + busy = true; + status = `${label}...`; + render(); + try { + await fn(); + status = success; + await refresh(); + } catch (err) { + status = err instanceof Error ? err.message : String(err); + render(); + } finally { + busy = false; + } + }; + + await refresh(); + timer = setInterval(() => { + void refresh(); + }, 10_000); + process.once("SIGINT", handleSignal); + process.once("SIGTERM", handleSignal); + + const keyInput = (renderer.keyInput ?? renderer.keyHandler) as { on: (name: string, cb: (event: KeyEventLike) => void) => void } | undefined; + + if (!keyInput) { + clearInterval(timer); + renderer.destroy(); + throw new Error("OpenTUI key input handler is unavailable"); + } + + keyInput.on("keypress", (event: KeyEventLike) => { + if (closed) { + return; + } + + const name = event.name ?? 
""; + const ctrl = Boolean(event.ctrl); + + if (ctrl && name === "h") { + showHelp = !showHelp; + render(); + return; + } + + if (showHelp) { + if (name === "escape") { + showHelp = false; + render(); + } + return; + } + + if (name === "q" || name === "escape" || (ctrl && name === "c")) { + close(); + return; + } + + if ((ctrl && name === "n") || name === "down") { + if (filteredRows.length > 0) { + selected = selected >= filteredRows.length - 1 ? 0 : selected + 1; + render(); + } + return; + } + + if ((ctrl && name === "p") || name === "up") { + if (filteredRows.length > 0) { + selected = selected <= 0 ? filteredRows.length - 1 : selected - 1; + render(); + } + return; + } + + if (name === "backspace") { + searchQuery = searchQuery.slice(0, -1); + filteredRows = filterTasks(allRows, searchQuery); + selected = 0; + render(); + return; + } + + if (name === "return" || name === "enter") { + const row = selectedRow(); + if (!row || busy) { + return; + } + busy = true; + status = `switching ${row.taskId}...`; + render(); + void (async () => { + try { + const result = await client.switchTask(organizationId, row.repoId, row.taskId); + close(`cd ${result.switchTarget}`); + } catch (err) { + busy = false; + status = err instanceof Error ? err.message : String(err); + render(); + } + })(); + return; + } + + if (ctrl && name === "a") { + const row = selectedRow(); + if (!row || busy) { + return; + } + busy = true; + status = `attaching ${row.taskId}...`; + render(); + void (async () => { + try { + const result = await client.attachTask(organizationId, row.repoId, row.taskId); + close(`target=${result.target} session=${result.sessionId ?? "none"}`); + } catch (err) { + busy = false; + status = err instanceof Error ? 
err.message : String(err); + render(); + } + })(); + return; + } + + if (ctrl && name === "x") { + const row = selectedRow(); + if (!row) { + return; + } + void runActionWithRefresh( + `archiving ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "archive"), + `archived ${row.taskId}`, + ); + return; + } + + if (ctrl && name === "s") { + const row = selectedRow(); + if (!row) { + return; + } + void runActionWithRefresh( + `syncing ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "sync"), + `synced ${row.taskId}`, + ); + return; + } + + if (ctrl && name === "y") { + const row = selectedRow(); + if (!row) { + return; + } + void runActionWithRefresh( + `merging ${row.taskId}`, + async () => { + await client.runAction(organizationId, row.repoId, row.taskId, "merge"); + await client.runAction(organizationId, row.repoId, row.taskId, "archive"); + }, + `merged+archived ${row.taskId}`, + ); + return; + } + + if (ctrl && name === "o") { + const row = selectedRow(); + const prUrl = row?.pullRequest?.url ?? null; + if (!prUrl) { + status = "no PR URL available for this task"; + render(); + return; + } + const openCmd = process.platform === "darwin" ? 
"open" : "xdg-open"; + spawnSync(openCmd, [prUrl], { stdio: "ignore" }); + status = `opened ${prUrl}`; + render(); + return; + } + + if (!ctrl && !event.meta && name.length === 1) { + searchQuery += name; + filteredRows = filterTasks(allRows, searchQuery); + selected = 0; + render(); + } + }); + + await done; +} diff --git a/foundry/packages/cli/test/backend-manager.test.ts b/foundry/packages/cli/test/backend-manager.test.ts new file mode 100644 index 0000000..a6089c5 --- /dev/null +++ b/foundry/packages/cli/test/backend-manager.test.ts @@ -0,0 +1,160 @@ +import { mkdtempSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { EventEmitter } from "node:events"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import type { ChildProcess } from "node:child_process"; + +const { spawnMock, execFileSyncMock } = vi.hoisted(() => ({ + spawnMock: vi.fn(), + execFileSyncMock: vi.fn(), +})); + +vi.mock("node:child_process", async () => { + const actual = await vi.importActual("node:child_process"); + return { + ...actual, + spawn: spawnMock, + execFileSync: execFileSyncMock, + }; +}); + +import { ensureBackendRunning, parseBackendPort } from "../src/backend/manager.js"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; + +function backendStateFile(baseDir: string, host: string, port: number, suffix: string): string { + const sanitized = host + .split("") + .map((ch) => (/[a-zA-Z0-9]/.test(ch) ? 
ch : "-")) + .join(""); + + return join(baseDir, `backend-${sanitized}-${port}.${suffix}`); +} + +function healthyMetadataResponse(): { ok: boolean; json: () => Promise } { + return { + ok: true, + json: async () => ({ + runtime: "rivetkit", + actorNames: { + organization: {}, + }, + }), + }; +} + +function unhealthyMetadataResponse(): { ok: boolean; json: () => Promise } { + return { + ok: false, + json: async () => ({}), + }; +} + +describe("backend manager", () => { + const originalFetch = globalThis.fetch; + const originalStateDir = process.env.HF_BACKEND_STATE_DIR; + const originalBuildId = process.env.HF_BUILD_ID; + + const config: AppConfig = ConfigSchema.parse({ + auto_submit: true, + notify: ["terminal"], + organization: { default: "default" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: "~/.local/share/foundry/task.db", + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + backup_retention_days: 7, + }, + sandboxProviders: { + local: {}, + e2b: {}, + }, + }); + + beforeEach(() => { + process.env.HF_BUILD_ID = "test-build"; + }); + + afterEach(() => { + vi.restoreAllMocks(); + spawnMock.mockReset(); + execFileSyncMock.mockReset(); + globalThis.fetch = originalFetch; + + if (originalStateDir === undefined) { + delete process.env.HF_BACKEND_STATE_DIR; + } else { + process.env.HF_BACKEND_STATE_DIR = originalStateDir; + } + + if (originalBuildId === undefined) { + delete process.env.HF_BUILD_ID; + } else { + process.env.HF_BUILD_ID = originalBuildId; + } + }); + + it("restarts backend when healthy but build is outdated", async () => { + const stateDir = mkdtempSync(join(tmpdir(), "hf-backend-test-")); + process.env.HF_BACKEND_STATE_DIR = stateDir; + + const pidPath = backendStateFile(stateDir, config.backend.host, config.backend.port, "pid"); + const versionPath = backendStateFile(stateDir, config.backend.host, config.backend.port, "version"); + + mkdirSync(stateDir, { recursive: true }); + 
writeFileSync(pidPath, "999999", "utf8"); + writeFileSync(versionPath, "old-build", "utf8"); + + const fetchMock = vi + .fn<() => Promise<{ ok: boolean; json: () => Promise }>>() + .mockResolvedValueOnce(healthyMetadataResponse()) + .mockResolvedValueOnce(unhealthyMetadataResponse()) + .mockResolvedValue(healthyMetadataResponse()); + globalThis.fetch = fetchMock as unknown as typeof fetch; + + const fakeChild = Object.assign(new EventEmitter(), { + pid: process.pid, + unref: vi.fn(), + }) as unknown as ChildProcess; + spawnMock.mockReturnValue(fakeChild); + + await ensureBackendRunning(config); + + expect(spawnMock).toHaveBeenCalledTimes(1); + const launchCommand = spawnMock.mock.calls[0]?.[0]; + const launchArgs = spawnMock.mock.calls[0]?.[1] as string[] | undefined; + expect(launchCommand === "pnpm" || launchCommand === "bun" || (typeof launchCommand === "string" && launchCommand.endsWith("/bun"))).toBe(true); + expect(launchArgs).toEqual(expect.arrayContaining(["start", "--host", config.backend.host, "--port", String(config.backend.port)])); + if (launchCommand === "pnpm") { + expect(launchArgs).toEqual(expect.arrayContaining(["exec", "bun", "src/index.ts"])); + } + expect(readFileSync(pidPath, "utf8").trim()).toBe(String(process.pid)); + expect(readFileSync(versionPath, "utf8").trim()).toBe("test-build"); + }); + + it("does not restart when backend is healthy and build is current", async () => { + const stateDir = mkdtempSync(join(tmpdir(), "hf-backend-test-")); + process.env.HF_BACKEND_STATE_DIR = stateDir; + + const versionPath = backendStateFile(stateDir, config.backend.host, config.backend.port, "version"); + mkdirSync(stateDir, { recursive: true }); + writeFileSync(versionPath, "test-build", "utf8"); + + const fetchMock = vi.fn<() => Promise<{ ok: boolean; json: () => Promise }>>().mockResolvedValue(healthyMetadataResponse()); + globalThis.fetch = fetchMock as unknown as typeof fetch; + + await ensureBackendRunning(config); + + 
expect(spawnMock).not.toHaveBeenCalled(); + }); + + it("validates backend port parsing", () => { + expect(parseBackendPort(undefined, 7741)).toBe(7741); + expect(parseBackendPort("8080", 7741)).toBe(8080); + expect(() => parseBackendPort("0", 7741)).toThrow("Invalid backend port"); + expect(() => parseBackendPort("abc", 7741)).toThrow("Invalid backend port"); + }); +}); diff --git a/foundry/packages/cli/test/organization-config.test.ts b/foundry/packages/cli/test/organization-config.test.ts new file mode 100644 index 0000000..5053ec2 --- /dev/null +++ b/foundry/packages/cli/test/organization-config.test.ts @@ -0,0 +1,29 @@ +import { describe, expect, it } from "vitest"; +import { ConfigSchema } from "@sandbox-agent/foundry-shared"; +import { resolveOrganization } from "../src/organization/config.js"; + +describe("cli organization resolution", () => { + it("uses default organization when no flag", () => { + const config = ConfigSchema.parse({ + auto_submit: true as const, + notify: ["terminal" as const], + organization: { default: "team" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: "~/.local/share/foundry/task.db", + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + backup_retention_days: 7, + }, + sandboxProviders: { + local: {}, + e2b: {}, + }, + }); + + expect(resolveOrganization(undefined, config)).toBe("team"); + expect(resolveOrganization("alpha", config)).toBe("alpha"); + }); +}); diff --git a/foundry/packages/cli/test/task-editor.test.ts b/foundry/packages/cli/test/task-editor.test.ts new file mode 100644 index 0000000..998e9ff --- /dev/null +++ b/foundry/packages/cli/test/task-editor.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, it } from "vitest"; +import { sanitizeEditorTask } from "../src/task-editor.js"; + +describe("task editor helpers", () => { + it("strips comment lines and trims whitespace", () => { + const value = sanitizeEditorTask(` +# comment +Implement feature + +# another comment 
+with more detail +`); + + expect(value).toBe("Implement feature\n\nwith more detail"); + }); + + it("returns empty string when only comments are present", () => { + const value = sanitizeEditorTask(` +# hello +# world +`); + + expect(value).toBe(""); + }); +}); diff --git a/foundry/packages/cli/test/theme.test.ts b/foundry/packages/cli/test/theme.test.ts new file mode 100644 index 0000000..2a0d7e3 --- /dev/null +++ b/foundry/packages/cli/test/theme.test.ts @@ -0,0 +1,105 @@ +import { afterEach, describe, expect, it } from "vitest"; +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; +import { tmpdir } from "node:os"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; +import { resolveTuiTheme } from "../src/theme.js"; + +function withEnv(key: string, value: string | undefined): void { + if (value === undefined) { + delete process.env[key]; + return; + } + process.env[key] = value; +} + +describe("resolveTuiTheme", () => { + let tempDir: string | null = null; + const originalState = process.env.XDG_STATE_HOME; + const originalConfig = process.env.XDG_CONFIG_HOME; + + const baseConfig: AppConfig = ConfigSchema.parse({ + auto_submit: true, + notify: ["terminal"], + organization: { default: "default" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: "~/.local/share/foundry/task.db", + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + backup_retention_days: 7, + }, + sandboxProviders: { + local: {}, + e2b: {}, + }, + }); + + afterEach(() => { + withEnv("XDG_STATE_HOME", originalState); + withEnv("XDG_CONFIG_HOME", originalConfig); + if (tempDir) { + rmSync(tempDir, { recursive: true, force: true }); + tempDir = null; + } + }); + + it("falls back to default theme when no theme sources are present", () => { + tempDir = mkdtempSync(join(tmpdir(), "hf-theme-test-")); + withEnv("XDG_STATE_HOME", join(tempDir, "state")); + 
withEnv("XDG_CONFIG_HOME", join(tempDir, "config")); + + const resolution = resolveTuiTheme(baseConfig, tempDir); + + expect(resolution.name).toBe("opencode-default"); + expect(resolution.source).toBe("default"); + expect(resolution.theme.text).toBe("#ffffff"); + }); + + it("loads theme from opencode state when configured", () => { + tempDir = mkdtempSync(join(tmpdir(), "hf-theme-test-")); + const stateHome = join(tempDir, "state"); + const configHome = join(tempDir, "config"); + withEnv("XDG_STATE_HOME", stateHome); + withEnv("XDG_CONFIG_HOME", configHome); + mkdirSync(join(stateHome, "opencode"), { recursive: true }); + writeFileSync(join(stateHome, "opencode", "kv.json"), JSON.stringify({ theme: "gruvbox", theme_mode: "dark" }), "utf8"); + + const resolution = resolveTuiTheme(baseConfig, tempDir); + + expect(resolution.name).toBe("gruvbox"); + expect(resolution.source).toContain("opencode state"); + expect(resolution.mode).toBe("dark"); + expect(resolution.theme.selectionBorder.toLowerCase()).not.toContain("dark"); + }); + + it("resolves OpenCode token references in theme defs", () => { + tempDir = mkdtempSync(join(tmpdir(), "hf-theme-test-")); + const stateHome = join(tempDir, "state"); + const configHome = join(tempDir, "config"); + withEnv("XDG_STATE_HOME", stateHome); + withEnv("XDG_CONFIG_HOME", configHome); + mkdirSync(join(stateHome, "opencode"), { recursive: true }); + writeFileSync(join(stateHome, "opencode", "kv.json"), JSON.stringify({ theme: "orng", theme_mode: "dark" }), "utf8"); + + const resolution = resolveTuiTheme(baseConfig, tempDir); + + expect(resolution.name).toBe("orng"); + expect(resolution.theme.selectionBorder).toBe("#EE7948"); + expect(resolution.theme.background).toBe("#0a0a0a"); + }); + + it("prefers explicit foundry theme override from config", () => { + tempDir = mkdtempSync(join(tmpdir(), "hf-theme-test-")); + withEnv("XDG_STATE_HOME", join(tempDir, "state")); + withEnv("XDG_CONFIG_HOME", join(tempDir, "config")); + + const config 
= { ...baseConfig, theme: "default" } as AppConfig & { theme: string }; + const resolution = resolveTuiTheme(config, tempDir); + + expect(resolution.name).toBe("opencode-default"); + expect(resolution.source).toBe("foundry config"); + }); +}); diff --git a/foundry/packages/cli/test/tmux.test.ts b/foundry/packages/cli/test/tmux.test.ts new file mode 100644 index 0000000..29b7801 --- /dev/null +++ b/foundry/packages/cli/test/tmux.test.ts @@ -0,0 +1,10 @@ +import { describe, expect, it } from "vitest"; +import { stripStatusPrefix } from "../src/tmux.js"; + +describe("tmux helpers", () => { + it("strips running and idle markers from window names", () => { + expect(stripStatusPrefix("▶ feature/auth")).toBe("feature/auth"); + expect(stripStatusPrefix("✓ feature/auth")).toBe("feature/auth"); + expect(stripStatusPrefix("feature/auth")).toBe("feature/auth"); + }); +}); diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts new file mode 100644 index 0000000..15d3fe8 --- /dev/null +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -0,0 +1,82 @@ +import { describe, expect, it } from "vitest"; +import type { TaskRecord } from "@sandbox-agent/foundry-shared"; +import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client"; +import { formatRows } from "../src/tui.js"; + +const sample = { + organizationId: "default", + repoId: "repo-a", + repoRemote: "https://example.com/repo-a.git", + taskId: "task-1", + branchName: "feature/test", + title: "Test Title", + task: "Do test", + sandboxProviderId: "local", + status: "running", + activeSandboxId: "sandbox-1", + pullRequest: null, + sandboxes: [ + { + sandboxId: "sandbox-1", + sandboxProviderId: "local", + sandboxActorId: null, + switchTarget: "sandbox://local/sandbox-1", + cwd: null, + createdAt: 1, + updatedAt: 1, + }, + ], + createdAt: 1, + updatedAt: 1, +} satisfies TaskRecord & { pullRequest: null; activeSessionId?: null }; + +describe("formatRows", () => { + 
it("renders rust-style table header and empty state", () => { + const output = formatRows([], 0, "default", "ok"); + expect(output).toContain("Branch/PR (type to filter)"); + expect(output).toContain("No branches found."); + expect(output).toContain("Ctrl-H:cheatsheet"); + expect(output).toContain("ok"); + }); + + it("marks selected row with highlight", () => { + const output = formatRows([sample], 0, "default", "ready"); + expect(output).toContain("┃ "); + expect(output).toContain("Test Title"); + expect(output).toContain("Ctrl-H:cheatsheet"); + }); + + it("pins footer to the last terminal row", () => { + const output = formatRows([sample], 0, "default", "ready", "", false, { + width: 80, + height: 12, + }); + const lines = output.split("\n"); + expect(lines).toHaveLength(12); + expect(lines[11]).toContain("Ctrl-H:cheatsheet"); + expect(lines[11]).toContain("v"); + }); +}); + +describe("search", () => { + it("supports ordered fuzzy matching", () => { + expect(fuzzyMatch("feature/test-branch", "ftb")).toBe(true); + expect(fuzzyMatch("feature/test-branch", "fbt")).toBe(false); + }); + + it("filters rows across branch and title", () => { + const rows: TaskRecord[] = [ + sample, + { + ...sample, + taskId: "task-2", + branchName: "docs/update-intro", + title: "Docs Intro Refresh", + status: "idle", + }, + ]; + expect(filterTasks(rows, "doc")).toHaveLength(1); + expect(filterTasks(rows, "h2")).toHaveLength(1); + expect(filterTasks(rows, "test")).toHaveLength(2); + }); +}); diff --git a/foundry/packages/cli/tsconfig.json b/foundry/packages/cli/tsconfig.json new file mode 100644 index 0000000..ae5ba21 --- /dev/null +++ b/foundry/packages/cli/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": ["src", "test"] +} diff --git a/foundry/packages/cli/tsup.config.ts b/foundry/packages/cli/tsup.config.ts new file mode 100644 index 0000000..9ee19a7 --- /dev/null +++ 
b/foundry/packages/cli/tsup.config.ts @@ -0,0 +1,53 @@ +import { execSync } from "node:child_process"; +import { readFileSync } from "node:fs"; +import { resolve } from "node:path"; +import { defineConfig } from "tsup"; + +function packageVersion(): string { + try { + const packageJsonPath = resolve(process.cwd(), "package.json"); + const parsed = JSON.parse(readFileSync(packageJsonPath, "utf8")) as { version?: unknown }; + if (typeof parsed.version === "string" && parsed.version.trim()) { + return parsed.version.trim(); + } + } catch { + // Fall through. + } + return "dev"; +} + +function sourceId(): string { + try { + const raw = execSync("git rev-parse --short HEAD", { + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + }).trim(); + if (raw.length > 0) { + return raw; + } + } catch { + // Fall through. + } + return packageVersion(); +} + +function resolveBuildId(): string { + const override = process.env.HF_BUILD_ID?.trim(); + if (override) { + return override; + } + + // Match sandbox-agent semantics: source id + unique build timestamp. 
+ return `${sourceId()}-${Date.now().toString()}`; +} + +const buildId = resolveBuildId(); + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + define: { + __HF_BUILD_ID__: JSON.stringify(buildId), + }, +}); diff --git a/foundry/packages/client/package.json b/foundry/packages/client/package.json new file mode 100644 index 0000000..fa73dab --- /dev/null +++ b/foundry/packages/client/package.json @@ -0,0 +1,26 @@ +{ + "name": "@sandbox-agent/foundry-client", + "version": "0.1.0", + "private": true, + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsup src/index.ts --format esm --dts --tsconfig tsconfig.build.json", + "typecheck": "tsc --noEmit", + "test": "vitest run", + "test:e2e:full": "HF_ENABLE_DAEMON_FULL_E2E=1 vitest run test/e2e/full-integration-e2e.test.ts", + "test:e2e:workspace": "HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workspace-e2e.test.ts", + "test:e2e:workspace-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workspace-load-e2e.test.ts" + }, + "dependencies": { + "@sandbox-agent/foundry-shared": "workspace:*", + "react": "^19.1.1", + "rivetkit": "2.1.6", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/react": "^19.1.12", + "tsup": "^8.5.0" + } +} diff --git a/foundry/packages/client/src/app-client.ts b/foundry/packages/client/src/app-client.ts new file mode 100644 index 0000000..0bf5526 --- /dev/null +++ b/foundry/packages/client/src/app-client.ts @@ -0,0 +1,69 @@ +import type { + FoundryAppSnapshot, + FoundryBillingPlanId, + FoundryOrganization, + FoundryUser, + UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "./backend-client.js"; +import { getMockFoundryAppClient } from "./mock-app.js"; +import { createRemoteFoundryAppClient } from "./remote/app-client.js"; + +export interface FoundryAppClient { + getSnapshot(): 
FoundryAppSnapshot; + subscribe(listener: () => void): () => void; + signInWithGithub(userId?: string): Promise; + signOut(): Promise; + skipStarterRepo(): Promise; + starStarterRepo(organizationId: string): Promise; + selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; + updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; + triggerGithubSync(organizationId: string): Promise; + completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise; + openBillingPortal(organizationId: string): Promise; + cancelScheduledRenewal(organizationId: string): Promise; + resumeSubscription(organizationId: string): Promise; + reconnectGithub(organizationId: string): Promise; + recordSeatUsage(organizationId: string): Promise; +} + +export interface CreateFoundryAppClientOptions { + mode: "mock" | "remote"; + backend?: BackendClient; +} + +export function createFoundryAppClient(options: CreateFoundryAppClientOptions): FoundryAppClient { + if (options.mode === "mock") { + return getMockFoundryAppClient() as unknown as FoundryAppClient; + } + if (!options.backend) { + throw new Error("Remote app client requires a backend client"); + } + return createRemoteFoundryAppClient({ backend: options.backend }); +} + +export function currentFoundryUser(snapshot: FoundryAppSnapshot): FoundryUser | null { + if (!snapshot.auth.currentUserId) { + return null; + } + return snapshot.users.find((candidate) => candidate.id === snapshot.auth.currentUserId) ?? null; +} + +export function currentFoundryOrganization(snapshot: FoundryAppSnapshot): FoundryOrganization | null { + if (!snapshot.activeOrganizationId) { + return null; + } + return snapshot.organizations.find((candidate) => candidate.id === snapshot.activeOrganizationId) ?? 
null; +} + +export function eligibleFoundryOrganizations(snapshot: FoundryAppSnapshot): FoundryOrganization[] { + const user = currentFoundryUser(snapshot); + if (!user) { + return []; + } + + const eligible = new Set(user.eligibleOrganizationIds); + return snapshot.organizations.filter((organization) => eligible.has(organization.id)); +} diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts new file mode 100644 index 0000000..c2222cc --- /dev/null +++ b/foundry/packages/client/src/backend-client.ts @@ -0,0 +1,1320 @@ +import { createClient } from "rivetkit/client"; +import type { + AgentType, + AppConfig, + FoundryAppSnapshot, + FoundryBillingPlanId, + CreateTaskInput, + AppEvent, + SessionEvent, + SandboxProcessSnapshot, + SandboxProcessesEvent, + TaskRecord, + TaskSummary, + TaskWorkspaceChangeModelInput, + TaskWorkspaceChangeOwnerInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, + TaskEvent, + WorkspaceTaskDetail, + WorkspaceTaskSummary, + WorkspaceSessionDetail, + OrganizationEvent, + OrganizationSummarySnapshot, + AuditLogEvent as HistoryEvent, + HistoryQueryInput, + SandboxProviderId, + RepoOverview, + RepoRecord, + StarSandboxAgentRepoInput, + StarSandboxAgentRepoResult, + SwitchResult, + UpdateFoundryOrganizationProfileInput, + WorkspaceModelGroup, + WorkspaceModelId, +} from "@sandbox-agent/foundry-shared"; +import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; +import { createMockBackendClient } from "./mock/backend-client.js"; +import { taskKey, taskSandboxKey, organizationKey } from "./keys.js"; + +export type TaskAction = "push" | "sync" 
| "merge" | "archive" | "kill"; + +export interface SandboxSessionRecord { + id: string; + agent: string; + agentSessionId: string; + lastConnectionId: string; + createdAt: number; + destroyedAt?: number; + status?: "pending_provision" | "pending_session_create" | "ready" | "running" | "idle" | "error"; +} + +export interface SandboxSessionEventRecord { + id: string; + eventIndex: number; + sessionId: string; + createdAt: number; + connectionId: string; + sender: "client" | "agent"; + payload: unknown; +} + +export type SandboxProcessRecord = SandboxProcessSnapshot; + +export interface ActorConn { + on(event: string, listener: (payload: any) => void): () => void; + onError(listener: (error: unknown) => void): () => void; + dispose(): Promise; +} + +interface AuthSessionScopedInput { + authSessionId?: string; +} + +interface OrganizationHandle { + connect(): ActorConn; + listRepos(input: { organizationId: string }): Promise; + createTask(input: CreateTaskInput): Promise; + listTasks(input: { organizationId: string; repoId?: string }): Promise; + getRepoOverview(input: { organizationId: string; repoId: string }): Promise; + auditLog(input: HistoryQueryInput): Promise; + switchTask(input: { repoId: string; taskId: string }): Promise; + getTask(input: { organizationId: string; repoId: string; taskId: string }): Promise; + attachTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; + pushTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + syncTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + mergeTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + archiveTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + killTask(input: { organizationId: string; repoId: string; taskId: string; reason?: 
string }): Promise; + useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>; + starSandboxAgentRepo(input: StarSandboxAgentRepoInput): Promise; + getOrganizationSummary(input: { organizationId: string }): Promise; + createWorkspaceTask(input: TaskWorkspaceCreateTaskInput & AuthSessionScopedInput): Promise; + markWorkspaceUnread(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + renameWorkspaceTask(input: TaskWorkspaceRenameInput & AuthSessionScopedInput): Promise; + createWorkspaceSession(input: TaskWorkspaceSelectInput & { model?: string } & AuthSessionScopedInput): Promise<{ sessionId: string }>; + renameWorkspaceSession(input: TaskWorkspaceRenameSessionInput & AuthSessionScopedInput): Promise; + selectWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + setWorkspaceSessionUnread(input: TaskWorkspaceSetSessionUnreadInput & AuthSessionScopedInput): Promise; + updateWorkspaceDraft(input: TaskWorkspaceUpdateDraftInput & AuthSessionScopedInput): Promise; + changeWorkspaceModel(input: TaskWorkspaceChangeModelInput & AuthSessionScopedInput): Promise; + sendWorkspaceMessage(input: TaskWorkspaceSendMessageInput & AuthSessionScopedInput): Promise; + stopWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + closeWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + publishWorkspacePr(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + changeWorkspaceTaskOwner(input: TaskWorkspaceChangeOwnerInput & AuthSessionScopedInput): Promise; + revertWorkspaceFile(input: TaskWorkspaceDiffInput & AuthSessionScopedInput): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(input: { repoId: string }): Promise; +} + +interface AppOrganizationHandle { + connect(): ActorConn; + getAppSnapshot(input: { sessionId: string }): Promise; + skipAppStarterRepo(input: { sessionId: string }): 
Promise; + starAppStarterRepo(input: { sessionId: string; organizationId: string }): Promise; + selectAppOrganization(input: { sessionId: string; organizationId: string }): Promise; + setAppDefaultModel(input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise; + updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput & { sessionId: string }): Promise; + triggerAppRepoImport(input: { sessionId: string; organizationId: string }): Promise; + beginAppGithubInstall(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; + createAppCheckoutSession(input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): Promise<{ url: string }>; + createAppBillingPortalSession(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; + cancelAppScheduledRenewal(input: { sessionId: string; organizationId: string }): Promise; + resumeAppSubscription(input: { sessionId: string; organizationId: string }): Promise; + recordAppSeatUsage(input: { sessionId: string; organizationId: string }): Promise; +} + +interface TaskHandle { + getTaskSummary(): Promise; + getTaskDetail(input?: AuthSessionScopedInput): Promise; + getSessionDetail(input: { sessionId: string } & AuthSessionScopedInput): Promise; + connect(): ActorConn; +} + +interface TaskSandboxHandle { + connect(): ActorConn; + createSession(input: { + id?: string; + agent: string; + model?: string; + sessionInit?: { + cwd?: string; + }; + }): Promise<{ id: string }>; + listSessions(input?: { cursor?: string; limit?: number }): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>; + getEvents(input: { sessionId: string; cursor?: string; limit?: number }): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; + createProcess(input: ProcessCreateRequest): Promise; + listProcesses(): Promise<{ processes: SandboxProcessRecord[] }>; + getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise; + 
stopProcess(processId: string, query?: ProcessSignalQuery): Promise; + killProcess(processId: string, query?: ProcessSignalQuery): Promise; + deleteProcess(processId: string): Promise; + rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; + destroySession(sessionId: string): Promise; + sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; + listWorkspaceModelGroups(): Promise; + providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; +} + +interface RivetClient { + organization: { + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): OrganizationHandle; + }; + task: { + get(key?: string | string[]): TaskHandle; + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskHandle; + }; + taskSandbox: { + get(key?: string | string[]): TaskSandboxHandle; + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskSandboxHandle; + getForId(actorId: string): TaskSandboxHandle; + }; +} + +export interface BackendClientOptions { + endpoint: string; + defaultOrganizationId?: string; + mode?: "remote" | "mock"; + encoding?: "json" | "cbor" | "bare"; +} + +export interface BackendClient { + getAppSnapshot(): Promise; + connectOrganization(organizationId: string): Promise; + connectTask(organizationId: string, repoId: string, taskId: string): Promise; + connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; + subscribeApp(listener: () => void): () => void; + signInWithGithub(): Promise; + signOutApp(): Promise; + skipAppStarterRepo(): Promise; + starAppStarterRepo(organizationId: string): Promise; + selectAppOrganization(organizationId: string): Promise; + setAppDefaultModel(defaultModel: WorkspaceModelId): Promise; + updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; + triggerAppRepoImport(organizationId: string): 
Promise; + reconnectAppGithub(organizationId: string): Promise; + completeAppHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise; + openAppBillingPortal(organizationId: string): Promise; + cancelAppScheduledRenewal(organizationId: string): Promise; + resumeAppSubscription(organizationId: string): Promise; + recordAppSeatUsage(organizationId: string): Promise; + listRepos(organizationId: string): Promise; + createTask(input: CreateTaskInput): Promise; + listTasks(organizationId: string, repoId?: string): Promise; + getRepoOverview(organizationId: string, repoId: string): Promise; + getTask(organizationId: string, repoId: string, taskId: string): Promise; + listHistory(input: HistoryQueryInput): Promise; + switchTask(organizationId: string, repoId: string, taskId: string): Promise; + attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; + runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise; + createSandboxSession(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + prompt: string; + cwd?: string; + agent?: AgentType | "opencode"; + }): Promise<{ id: string; status: "running" | "idle" | "error" }>; + listSandboxSessions( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + input?: { cursor?: string; limit?: number }, + ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>; + listSandboxSessionEvents( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + input: { sessionId: string; cursor?: string; limit?: number }, + ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; + createSandboxProcess(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + request: ProcessCreateRequest; + }): Promise; + listSandboxProcesses(organizationId: string, 
sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }>; + getSandboxProcessLogs( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessLogFollowQuery, + ): Promise; + stopSandboxProcess( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessSignalQuery, + ): Promise; + killSandboxProcess( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessSignalQuery, + ): Promise; + deleteSandboxProcess(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise; + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void; + sendSandboxPrompt(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + sessionId: string; + prompt: string; + notification?: boolean; + }): Promise; + sandboxSessionStatus( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + sessionId: string, + ): Promise<{ id: string; status: "running" | "idle" | "error" }>; + sandboxProviderState( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; + getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; + getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; + getOrganizationSummary(organizationId: string): Promise; + getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; + getSessionDetail(organizationId: string, repoId: 
string, taskId: string, sessionId: string): Promise; + getWorkspace(organizationId: string): Promise; + subscribeWorkspace(organizationId: string, listener: () => void): () => void; + createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise; + markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise; + createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }>; + renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise; + selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise; + updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise; + changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise; + sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise; + stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + changeWorkspaceTaskOwner(organizationId: string, input: TaskWorkspaceChangeOwnerInput): Promise; + revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise; + adminReloadGithubOrganization(organizationId: string): Promise; + adminReloadGithubRepository(organizationId: string, repoId: string): Promise; + health(): Promise<{ ok: true }>; + useOrganization(organizationId: string): Promise<{ organizationId: string }>; + starSandboxAgentRepo(organizationId: string): Promise; +} + +export function rivetEndpoint(config: 
AppConfig): string { + return `http://${config.backend.host}:${config.backend.port}/v1/rivet`; +} + +export function createBackendClientFromConfig(config: AppConfig): BackendClient { + return createBackendClient({ + endpoint: rivetEndpoint(config), + defaultOrganizationId: config.organization.default, + }); +} + +export interface BackendHealthCheckOptions { + endpoint: string; + timeoutMs?: number; +} + +export interface BackendMetadata { + clientEndpoint: string; + appEndpoint: string; + rivetEndpoint: string; +} + +export async function checkBackendHealth(options: BackendHealthCheckOptions): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), options.timeoutMs ?? 1_500); + + try { + const response = await fetch(normalizeLegacyBackendEndpoint(options.endpoint), { + method: "GET", + signal: controller.signal, + }); + return response.status < 500; + } catch { + return false; + } finally { + clearTimeout(timeout); + } +} + +export async function readBackendMetadata(options: BackendHealthCheckOptions): Promise { + const endpoints = deriveBackendEndpoints(options.endpoint); + const clientEndpoint = endpoints.rivetEndpoint.replace(/\/v1\/rivet\/?$/, ""); + + return { + clientEndpoint, + appEndpoint: endpoints.appEndpoint, + rivetEndpoint: endpoints.rivetEndpoint, + }; +} + +function stripTrailingSlash(value: string): string { + return value.replace(/\/$/, ""); +} + +function normalizeLegacyBackendEndpoint(endpoint: string): string { + const normalized = stripTrailingSlash(endpoint); + if (normalized.endsWith("/api/rivet")) { + return `${normalized.slice(0, -"/api/rivet".length)}/v1/rivet`; + } + return normalized; +} + +function deriveBackendEndpoints(endpoint: string): { appEndpoint: string; rivetEndpoint: string } { + const normalized = normalizeLegacyBackendEndpoint(endpoint); + if (normalized.endsWith("/rivet")) { + return { + appEndpoint: normalized.slice(0, -"/rivet".length), + rivetEndpoint: normalized, + 
}; + } + return { + appEndpoint: normalized, + rivetEndpoint: `${normalized}/rivet`, + }; +} + +function signedOutAppSnapshot(): FoundryAppSnapshot { + return { + auth: { status: "signed_out", currentUserId: null }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + users: [], + organizations: [], + }; +} + +export function createBackendClient(options: BackendClientOptions): BackendClient { + if (options.mode === "mock") { + return createMockBackendClient(options.defaultOrganizationId); + } + + const endpoints = deriveBackendEndpoints(options.endpoint); + const rivetApiEndpoint = endpoints.rivetEndpoint; + const appApiEndpoint = endpoints.appEndpoint; + const client = createClient({ endpoint: rivetApiEndpoint, encoding: options.encoding }) as unknown as RivetClient; + const workspaceSubscriptions = new Map< + string, + { + listeners: Set<() => void>; + disposeConnPromise: Promise<(() => Promise) | null> | null; + } + >(); + const sandboxProcessSubscriptions = new Map< + string, + { + listeners: Set<() => void>; + disposeConnPromise: Promise<(() => Promise) | null> | null; + } + >(); + const appSubscriptions = { + listeners: new Set<() => void>(), + disposeConnPromise: null as Promise<(() => Promise) | null> | null, + }; + + const appRequest = async (path: string, init?: RequestInit): Promise => { + const headers = new Headers(init?.headers); + if (init?.body && !headers.has("Content-Type")) { + headers.set("Content-Type", "application/json"); + } + + const res = await fetch(`${appApiEndpoint}${path}`, { + ...init, + headers, + credentials: "include", + }); + if (!res.ok) { + throw new Error(`app request failed: ${res.status} ${res.statusText}`); + } + return (await res.json()) as T; + }; + + const getSessionId = async (): Promise => { + const res = await 
fetch(`${appApiEndpoint}/auth/get-session`, { + credentials: "include", + }); + if (res.status === 401) { + return null; + } + if (!res.ok) { + throw new Error(`auth session request failed: ${res.status} ${res.statusText}`); + } + const data = (await res.json().catch(() => null)) as { session?: { id?: string | null } | null } | null; + const sessionId = data?.session?.id; + return typeof sessionId === "string" && sessionId.length > 0 ? sessionId : null; + }; + + const getAuthSessionInput = async (): Promise => { + const authSessionId = await getSessionId(); + return authSessionId ? { authSessionId } : undefined; + }; + + const withAuthSessionInput = async (input: TInput): Promise => { + const authSessionInput = await getAuthSessionInput(); + return authSessionInput ? { ...input, ...authSessionInput } : input; + }; + + const organization = async (organizationId: string): Promise => + client.organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, + }); + + const appOrganization = async (): Promise => + client.organization.getOrCreate(organizationKey("app"), { + createWithInput: "app", + }) as unknown as AppOrganizationHandle; + + // getOrCreate is intentional here — this is the ONLY lazy creation point for + // virtual tasks (PR-driven entries that exist in the org's local tables but + // have no task actor yet). The task actor self-initializes from org data in + // getCurrentRecord(). Backend code must NEVER use getOrCreateTask except in + // createTaskMutation. See backend/CLAUDE.md "Lazy Task Actor Creation". 
+ const task = async (organizationId: string, repoId: string, taskId: string): Promise => + client.task.getOrCreate(taskKey(organizationId, repoId, taskId), { + createWithInput: { organizationId, repoId, taskId }, + }); + + const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise => { + return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); + }; + + function isActorNotFoundError(error: unknown): boolean { + const message = error instanceof Error ? error.message : String(error); + return message.includes("Actor not found"); + } + + const sandboxByActorIdFromTask = async ( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise => { + const ws = await organization(organizationId); + const rows = await ws.listTasks({ organizationId }); + const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt); + + for (const row of candidates) { + try { + const detail = await ws.getTask({ organizationId, repoId: row.repoId, taskId: row.taskId }); + if (detail.sandboxProviderId !== sandboxProviderId) { + continue; + } + const sandboxes = detail.sandboxes as Array<(typeof detail.sandboxes)[number] & { sandboxActorId?: string }>; + const sandbox = sandboxes.find( + (sb) => + sb.sandboxId === sandboxId && sb.sandboxProviderId === sandboxProviderId && typeof sb.sandboxActorId === "string" && sb.sandboxActorId.length > 0, + ); + if (sandbox?.sandboxActorId) { + return (client as any).taskSandbox.getForId(sandbox.sandboxActorId); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + if (!isActorNotFoundError(error) && !message.includes("Unknown task")) { + throw error; + } + // Best effort fallback path; ignore missing task actors here. 
+ } + } + + return null; + }; + + const withSandboxHandle = async ( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + run: (handle: TaskSandboxHandle) => Promise, + ): Promise => { + const handle = await sandboxByKey(organizationId, sandboxProviderId, sandboxId); + try { + return await run(handle); + } catch (error) { + if (!isActorNotFoundError(error)) { + throw error; + } + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); + if (!fallback) { + throw error; + } + return await run(fallback); + } + }; + + const connectOrganization = async (organizationId: string): Promise => { + return (await organization(organizationId)).connect() as ActorConn; + }; + + const connectTask = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).connect() as ActorConn; + }; + + const connectSandbox = async (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise => { + try { + return (await sandboxByKey(organizationId, sandboxProviderId, sandboxId)).connect() as ActorConn; + } catch (error) { + if (!isActorNotFoundError(error)) { + throw error; + } + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); + if (!fallback) { + throw error; + } + return fallback.connect() as ActorConn; + } + }; + + const getTaskDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(await getAuthSessionInput()); + }; + + const getSessionDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getSessionDetail(await withAuthSessionInput({ sessionId })); + }; + + const getWorkspaceCompat = async (organizationId: string): Promise => { 
+ const authSessionInput = await getAuthSessionInput(); + const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId }); + const resolvedTasks = await Promise.all( + summary.taskSummaries.map(async (taskSummary) => { + let detail; + try { + const taskHandle = await task(organizationId, taskSummary.repoId, taskSummary.id); + detail = await taskHandle.getTaskDetail(authSessionInput); + } catch (error) { + if (isActorNotFoundError(error)) { + return null; + } + throw error; + } + const sessionDetails = await Promise.all( + detail.sessionsSummary.map(async (session) => { + try { + const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ + sessionId: session.id, + ...(authSessionInput ?? {}), + }); + return [session.id, full] as const; + } catch (error) { + if (isActorNotFoundError(error)) { + return null; + } + throw error; + } + }), + ); + const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkspaceSessionDetail] => entry !== null)); + return { + id: detail.id, + repoId: detail.repoId, + title: detail.title, + status: detail.status, + repoName: detail.repoName, + updatedAtMs: detail.updatedAtMs, + branch: detail.branch, + pullRequest: detail.pullRequest, + activeSessionId: detail.activeSessionId ?? null, + sessions: detail.sessionsSummary.map((session) => { + const full = sessionDetailsById.get(session.id); + return { + id: session.id, + sessionId: session.sessionId, + sessionName: session.sessionName, + agent: session.agent, + model: session.model, + status: session.status, + thinkingSinceMs: session.thinkingSinceMs, + unread: session.unread, + created: session.created, + draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, + transcript: full?.transcript ?? 
[], + }; + }), + fileChanges: detail.fileChanges, + diffs: detail.diffs, + fileTree: detail.fileTree, + minutesUsed: detail.minutesUsed, + activeSandboxId: detail.activeSandboxId ?? null, + }; + }), + ); + const tasks = resolvedTasks.filter((task): task is Exclude<(typeof resolvedTasks)[number], null> => task !== null); + + const repositories = summary.repos + .map((repo) => ({ + id: repo.id, + label: repo.label, + updatedAtMs: tasks.filter((task) => task.repoId === repo.id).reduce((latest, task) => Math.max(latest, task.updatedAtMs), repo.latestActivityMs), + tasks: tasks.filter((task) => task.repoId === repo.id).sort((left, right) => right.updatedAtMs - left.updatedAtMs), + })) + .filter((repo) => repo.tasks.length > 0); + + return { + organizationId, + repos: summary.repos.map((repo) => ({ id: repo.id, label: repo.label })), + repositories, + tasks: tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs), + }; + }; + + const subscribeWorkspace = (organizationId: string, listener: () => void): (() => void) => { + let entry = workspaceSubscriptions.get(organizationId); + if (!entry) { + entry = { + listeners: new Set(), + disposeConnPromise: null, + }; + workspaceSubscriptions.set(organizationId, entry); + } + + entry.listeners.add(listener); + + if (!entry.disposeConnPromise) { + entry.disposeConnPromise = (async () => { + const handle = await organization(organizationId); + const conn = (handle as any).connect(); + const unsubscribeEvent = conn.on("organizationUpdated", () => { + const current = workspaceSubscriptions.get(organizationId); + if (!current) { + return; + } + for (const currentListener of [...current.listeners]) { + currentListener(); + } + }); + const unsubscribeError = conn.onError(() => {}); + return async () => { + unsubscribeEvent(); + unsubscribeError(); + await conn.dispose(); + }; + })().catch(() => null); + } + + return () => { + const current = workspaceSubscriptions.get(organizationId); + if (!current) { + return; + } + 
current.listeners.delete(listener); + if (current.listeners.size > 0) { + return; + } + + workspaceSubscriptions.delete(organizationId); + void current.disposeConnPromise?.then(async (disposeConn) => { + await disposeConn?.(); + }); + }; + }; + + const sandboxProcessSubscriptionKey = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): string => + `${organizationId}:${sandboxProviderId}:${sandboxId}`; + + const subscribeSandboxProcesses = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): (() => void) => { + const key = sandboxProcessSubscriptionKey(organizationId, sandboxProviderId, sandboxId); + let entry = sandboxProcessSubscriptions.get(key); + if (!entry) { + entry = { + listeners: new Set(), + disposeConnPromise: null, + }; + sandboxProcessSubscriptions.set(key, entry); + } + + entry.listeners.add(listener); + + if (!entry.disposeConnPromise) { + entry.disposeConnPromise = (async () => { + const conn = await connectSandbox(organizationId, sandboxProviderId, sandboxId); + const unsubscribeEvent = conn.on("processesUpdated", () => { + const current = sandboxProcessSubscriptions.get(key); + if (!current) { + return; + } + for (const currentListener of [...current.listeners]) { + currentListener(); + } + }); + const unsubscribeError = conn.onError(() => {}); + return async () => { + unsubscribeEvent(); + unsubscribeError(); + await conn.dispose(); + }; + })().catch(() => null); + } + + return () => { + const current = sandboxProcessSubscriptions.get(key); + if (!current) { + return; + } + current.listeners.delete(listener); + if (current.listeners.size > 0) { + return; + } + + sandboxProcessSubscriptions.delete(key); + void current.disposeConnPromise?.then(async (disposeConn) => { + await disposeConn?.(); + }); + }; + }; + + const subscribeApp = (listener: () => void): (() => void) => { + appSubscriptions.listeners.add(listener); + + if (!appSubscriptions.disposeConnPromise) { + 
appSubscriptions.disposeConnPromise = (async () => { + const handle = await appOrganization(); + const conn = (handle as any).connect(); + const unsubscribeEvent = conn.on("appUpdated", () => { + for (const currentListener of [...appSubscriptions.listeners]) { + currentListener(); + } + }); + const unsubscribeError = conn.onError(() => {}); + return async () => { + unsubscribeEvent(); + unsubscribeError(); + await conn.dispose(); + }; + })().catch(() => null); + } + + return () => { + appSubscriptions.listeners.delete(listener); + if (appSubscriptions.listeners.size > 0) { + return; + } + + void appSubscriptions.disposeConnPromise?.then(async (disposeConn) => { + await disposeConn?.(); + }); + appSubscriptions.disposeConnPromise = null; + }; + }; + + return { + async getAppSnapshot(): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + return signedOutAppSnapshot(); + } + return await (await appOrganization()).getAppSnapshot({ sessionId }); + }, + + async connectOrganization(organizationId: string): Promise { + return await connectOrganization(organizationId); + }, + + async connectTask(organizationId: string, repoId: string, taskIdValue: string): Promise { + return await connectTask(organizationId, repoId, taskIdValue); + }, + + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await connectSandbox(organizationId, sandboxProviderId, sandboxId); + }, + + subscribeApp(listener: () => void): () => void { + return subscribeApp(listener); + }, + + async signInWithGithub(): Promise { + const callbackURL = typeof window !== "undefined" ? 
`${window.location.origin}/organizations` : `${appApiEndpoint.replace(/\/$/, "")}/organizations`; + const response = await appRequest<{ url: string; redirect?: boolean }>("/auth/sign-in/social", { + method: "POST", + body: JSON.stringify({ + provider: "github", + callbackURL, + disableRedirect: true, + }), + }); + if (typeof window !== "undefined") { + window.location.assign(response.url); + } + }, + + async signOutApp(): Promise { + return await appRequest("/app/sign-out", { method: "POST" }); + }, + + async skipAppStarterRepo(): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).skipAppStarterRepo({ sessionId }); + }, + + async starAppStarterRepo(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).starAppStarterRepo({ sessionId, organizationId }); + }, + + async selectAppOrganization(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId }); + }, + + async setAppDefaultModel(defaultModel: WorkspaceModelId): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).setAppDefaultModel({ sessionId, defaultModel }); + }, + + async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).updateAppOrganizationProfile({ + sessionId, + organizationId: input.organizationId, + displayName: input.displayName, + slug: input.slug, + primaryDomain: input.primaryDomain, + }); + 
}, + + async triggerAppRepoImport(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).triggerAppRepoImport({ sessionId, organizationId }); + }, + + async reconnectAppGithub(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + const response = await (await appOrganization()).beginAppGithubInstall({ sessionId, organizationId }); + if (typeof window !== "undefined") { + window.location.assign(response.url); + } + }, + + async completeAppHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + const response = await (await appOrganization()).createAppCheckoutSession({ sessionId, organizationId, planId }); + if (typeof window !== "undefined") { + window.location.assign(response.url); + } + }, + + async openAppBillingPortal(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + const response = await (await appOrganization()).createAppBillingPortalSession({ sessionId, organizationId }); + if (typeof window !== "undefined") { + window.location.assign(response.url); + } + }, + + async cancelAppScheduledRenewal(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).cancelAppScheduledRenewal({ sessionId, organizationId }); + }, + + async resumeAppSubscription(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).resumeAppSubscription({ sessionId, organizationId }); 
+ }, + + async recordAppSeatUsage(organizationId: string): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).recordAppSeatUsage({ sessionId, organizationId }); + }, + + async listRepos(organizationId: string): Promise { + return (await organization(organizationId)).listRepos({ organizationId }); + }, + + async createTask(input: CreateTaskInput): Promise { + return (await organization(input.organizationId)).createTask(input); + }, + + async starSandboxAgentRepo(organizationId: string): Promise { + return (await organization(organizationId)).starSandboxAgentRepo({ organizationId }); + }, + + async listTasks(organizationId: string, repoId?: string): Promise { + return (await organization(organizationId)).listTasks({ organizationId, repoId }); + }, + + async getRepoOverview(organizationId: string, repoId: string): Promise { + return (await organization(organizationId)).getRepoOverview({ organizationId, repoId }); + }, + + async getTask(organizationId: string, repoId: string, taskId: string): Promise { + return (await organization(organizationId)).getTask({ + organizationId, + repoId, + taskId, + }); + }, + + async listHistory(input: HistoryQueryInput): Promise { + return (await organization(input.organizationId)).auditLog(input); + }, + + async switchTask(organizationId: string, repoId: string, taskId: string): Promise { + return (await organization(organizationId)).switchTask({ repoId, taskId }); + }, + + async attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + return (await organization(organizationId)).attachTask({ + organizationId, + repoId, + taskId, + reason: "cli.attach", + }); + }, + + async runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise { + if (action === "push") { + await (await organization(organizationId)).pushTask({ + 
organizationId, + repoId, + taskId, + reason: "cli.push", + }); + return; + } + if (action === "sync") { + await (await organization(organizationId)).syncTask({ + organizationId, + repoId, + taskId, + reason: "cli.sync", + }); + return; + } + if (action === "merge") { + await (await organization(organizationId)).mergeTask({ + organizationId, + repoId, + taskId, + reason: "cli.merge", + }); + return; + } + if (action === "archive") { + await (await organization(organizationId)).archiveTask({ + organizationId, + repoId, + taskId, + reason: "cli.archive", + }); + return; + } + await (await organization(organizationId)).killTask({ + organizationId, + repoId, + taskId, + reason: "cli.kill", + }); + }, + + async createSandboxSession(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + prompt: string; + cwd?: string; + agent?: AgentType | "opencode"; + }): Promise<{ id: string; status: "running" | "idle" | "error" }> { + const created = await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => + handle.createSession({ + agent: input.agent ?? "claude", + sessionInit: { + cwd: input.cwd, + }, + }), + ); + if (input.prompt.trim().length > 0) { + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => + handle.rawSendSessionMethod(created.id, "session/prompt", { + prompt: [{ type: "text", text: input.prompt }], + }), + ); + } + return { + id: created.id, + status: "idle", + }; + }, + + async listSandboxSessions( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + input?: { cursor?: string; limit?: number }, + ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listSessions(input ?? 
{})); + }, + + async listSandboxSessionEvents( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + input: { sessionId: string; cursor?: string; limit?: number }, + ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getEvents(input)); + }, + + async createSandboxProcess(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + request: ProcessCreateRequest; + }): Promise { + return await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.createProcess(input.request)); + }, + + async listSandboxProcesses( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ processes: SandboxProcessRecord[] }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listProcesses()); + }, + + async getSandboxProcessLogs( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessLogFollowQuery, + ): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); + }, + + async stopSandboxProcess( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessSignalQuery, + ): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.stopProcess(processId, query)); + }, + + async killSandboxProcess( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessSignalQuery, + ): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => 
handle.killProcess(processId, query)); + }, + + async deleteSandboxProcess(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise { + await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.deleteProcess(processId)); + }, + + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { + return subscribeSandboxProcesses(organizationId, sandboxProviderId, sandboxId, listener); + }, + + async sendSandboxPrompt(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + sessionId: string; + prompt: string; + notification?: boolean; + }): Promise { + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => + handle.rawSendSessionMethod(input.sessionId, "session/prompt", { + prompt: [{ type: "text", text: input.prompt }], + }), + ); + }, + + async sandboxSessionStatus( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + sessionId: string, + ): Promise<{ id: string; status: "running" | "idle" | "error" }> { + return { + id: sessionId, + status: "idle", + }; + }, + + async sandboxProviderState( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.providerState()); + }, + + async getSandboxAgentConnection( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ endpoint: string; token?: string }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection()); + }, + + async getSandboxWorkspaceModelGroups(organizationId: string, 
sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listWorkspaceModelGroups()); + }, + + async getOrganizationSummary(organizationId: string): Promise { + return (await organization(organizationId)).getOrganizationSummary({ organizationId }); + }, + + async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise { + return await getTaskDetailWithAuth(organizationId, repoId, taskIdValue); + }, + + async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { + return await getSessionDetailWithAuth(organizationId, repoId, taskIdValue, sessionId); + }, + + async getWorkspace(organizationId: string): Promise { + return await getWorkspaceCompat(organizationId); + }, + + subscribeWorkspace(organizationId: string, listener: () => void): () => void { + return subscribeWorkspace(organizationId, listener); + }, + + async createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + return (await organization(organizationId)).createWorkspaceTask(await withAuthSessionInput(input)); + }, + + async markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).markWorkspaceUnread(await withAuthSessionInput(input)); + }, + + async renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await (await organization(organizationId)).renameWorkspaceTask(await withAuthSessionInput(input)); + }, + + async createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + return await (await organization(organizationId)).createWorkspaceSession(await withAuthSessionInput(input)); + }, + + async renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { 
+ await (await organization(organizationId)).renameWorkspaceSession(await withAuthSessionInput(input)); + }, + + async selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).selectWorkspaceSession(await withAuthSessionInput(input)); + }, + + async setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise { + await (await organization(organizationId)).setWorkspaceSessionUnread(await withAuthSessionInput(input)); + }, + + async updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await (await organization(organizationId)).updateWorkspaceDraft(await withAuthSessionInput(input)); + }, + + async changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await (await organization(organizationId)).changeWorkspaceModel(await withAuthSessionInput(input)); + }, + + async sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await (await organization(organizationId)).sendWorkspaceMessage(await withAuthSessionInput(input)); + }, + + async stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).stopWorkspaceSession(await withAuthSessionInput(input)); + }, + + async closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).closeWorkspaceSession(await withAuthSessionInput(input)); + }, + + async publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).publishWorkspacePr(await withAuthSessionInput(input)); + }, + + async changeWorkspaceTaskOwner(organizationId: string, input: TaskWorkspaceChangeOwnerInput): Promise { + await (await organization(organizationId)).changeWorkspaceTaskOwner(await 
withAuthSessionInput(input)); + }, + + async revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise { + await (await organization(organizationId)).revertWorkspaceFile(await withAuthSessionInput(input)); + }, + + async adminReloadGithubOrganization(organizationId: string): Promise { + await (await organization(organizationId)).adminReloadGithubOrganization(); + }, + + async adminReloadGithubRepository(organizationId: string, repoId: string): Promise { + await (await organization(organizationId)).adminReloadGithubRepository({ repoId }); + }, + + async health(): Promise<{ ok: true }> { + const organizationId = options.defaultOrganizationId; + if (!organizationId) { + throw new Error("Backend client default organization is required for health checks"); + } + + await (await organization(organizationId)).useOrganization({ + organizationId, + }); + return { ok: true }; + }, + + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return (await organization(organizationId)).useOrganization({ organizationId }); + }, + }; +} diff --git a/foundry/packages/client/src/index.ts b/foundry/packages/client/src/index.ts new file mode 100644 index 0000000..e28745f --- /dev/null +++ b/foundry/packages/client/src/index.ts @@ -0,0 +1,11 @@ +export * from "./app-client.js"; +export * from "./backend-client.js"; +export * from "./subscription/manager.js"; +export * from "./subscription/mock-manager.js"; +export * from "./subscription/remote-manager.js"; +export * from "./subscription/topics.js"; +export * from "./subscription/use-subscription.js"; +export * from "./keys.js"; +export * from "./mock-app.js"; +export * from "./view-model.js"; +export * from "./workspace-client.js"; diff --git a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts new file mode 100644 index 0000000..7242aae --- /dev/null +++ b/foundry/packages/client/src/keys.ts @@ -0,0 +1,17 @@ +export type ActorKey = string[]; + +export 
function organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; +} + +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "task", repoId, taskId]; +} + +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; +} + +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; +} diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts new file mode 100644 index 0000000..00fd9ca --- /dev/null +++ b/foundry/packages/client/src/mock-app.ts @@ -0,0 +1,755 @@ +import { DEFAULT_WORKSPACE_MODEL_GROUPS, DEFAULT_WORKSPACE_MODEL_ID, type WorkspaceModelId } from "@sandbox-agent/foundry-shared"; + +const claudeModels = DEFAULT_WORKSPACE_MODEL_GROUPS.find((group) => group.agentKind === "Claude")?.models ?? []; +const CLAUDE_SECONDARY_MODEL_ID = claudeModels[1]?.id ?? claudeModels[0]?.id ?? DEFAULT_WORKSPACE_MODEL_ID; +const CLAUDE_TERTIARY_MODEL_ID = claudeModels[2]?.id ?? 
CLAUDE_SECONDARY_MODEL_ID; +import { injectMockLatency } from "./mock/latency.js"; +import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; + +export type MockBillingPlanId = "free" | "team"; +export type MockBillingStatus = "active" | "trialing" | "past_due" | "scheduled_cancel"; +export type MockGithubInstallationStatus = "connected" | "install_required" | "reconnect_required"; +export type MockGithubSyncStatus = "pending" | "syncing" | "synced" | "error"; +export type MockOrganizationKind = "personal" | "organization"; +export type MockStarterRepoStatus = "pending" | "starred" | "skipped"; + +export interface MockFoundryUser { + id: string; + name: string; + email: string; + githubLogin: string; + roleLabel: string; + eligibleOrganizationIds: string[]; + defaultModel: WorkspaceModelId; +} + +export interface MockFoundryOrganizationMember { + id: string; + name: string; + email: string; + role: "owner" | "admin" | "member"; + state: "active" | "invited"; +} + +export interface MockFoundryInvoice { + id: string; + label: string; + issuedAt: string; + amountUsd: number; + status: "paid" | "open"; +} + +export interface MockFoundryBillingState { + planId: MockBillingPlanId; + status: MockBillingStatus; + seatsIncluded: number; + trialEndsAt: string | null; + renewalAt: string | null; + stripeCustomerId: string; + paymentMethodLabel: string; + invoices: MockFoundryInvoice[]; +} + +export interface MockFoundryGithubState { + connectedAccount: string; + installationStatus: MockGithubInstallationStatus; + syncStatus: MockGithubSyncStatus; + importedRepoCount: number; + lastSyncLabel: string; + lastSyncAt: number | null; + lastWebhookAt: number | null; + lastWebhookEvent: string; +} + +export interface MockFoundryOrganizationSettings { + displayName: string; + slug: string; + primaryDomain: string; + seatAccrualMode: "first_prompt"; + autoImportRepos: boolean; +} + +export interface MockFoundryOrganization { + id: string; + 
organizationId: string; + kind: MockOrganizationKind; + settings: MockFoundryOrganizationSettings; + github: MockFoundryGithubState; + billing: MockFoundryBillingState; + members: MockFoundryOrganizationMember[]; + seatAssignments: string[]; + repoCatalog: string[]; +} + +export interface MockFoundryAppSnapshot { + auth: { + status: "signed_out" | "signed_in"; + currentUserId: string | null; + }; + activeOrganizationId: string | null; + onboarding: { + starterRepo: { + repoFullName: string; + repoUrl: string; + status: MockStarterRepoStatus; + starredAt: number | null; + skippedAt: number | null; + }; + }; + users: MockFoundryUser[]; + organizations: MockFoundryOrganization[]; +} + +export interface UpdateMockOrganizationProfileInput { + organizationId: string; + displayName: string; + slug: string; + primaryDomain: string; +} + +export interface MockFoundryAppClient { + getSnapshot(): MockFoundryAppSnapshot; + subscribe(listener: () => void): () => void; + signInWithGithub(userId: string): Promise; + signOut(): Promise; + skipStarterRepo(): Promise; + starStarterRepo(organizationId: string): Promise; + selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; + updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise; + triggerGithubSync(organizationId: string): Promise; + completeHostedCheckout(organizationId: string, planId: MockBillingPlanId): Promise; + openBillingPortal(organizationId: string): Promise; + cancelScheduledRenewal(organizationId: string): Promise; + resumeSubscription(organizationId: string): Promise; + reconnectGithub(organizationId: string): Promise; + recordSeatUsage(organizationId: string): void; +} + +const STORAGE_KEY = "sandbox-agent-foundry:mock-app:v1"; + +function isoDate(daysFromNow: number): string { + const value = new Date(); + value.setDate(value.getDate() + daysFromNow); + return value.toISOString(); +} + +function syncStatusFromLegacy(value: unknown): 
MockGithubSyncStatus { + switch (value) { + case "ready": + case "synced": + return "synced"; + case "importing": + case "syncing": + return "syncing"; + case "error": + return "error"; + default: + return "pending"; + } +} + +/** + * Build the "rivet" mock organization from real public GitHub data. + * Fixture sourced from: scripts/pull-org-data.ts (run against rivet-dev). + * Members that don't exist in the public fixture get synthetic entries + * so the mock still has realistic owner/admin/member role distribution. + */ +function buildRivetOrganization(): MockFoundryOrganization { + const repos = rivetDevFixture.repos.map((r) => r.fullName); + const fixtureMembers: MockFoundryOrganizationMember[] = rivetDevFixture.members.map((m) => ({ + id: `member-rivet-${m.login.toLowerCase()}`, + name: m.login, + email: `${m.login.toLowerCase()}@rivet.dev`, + role: "member" as const, + state: "active" as const, + })); + + // Ensure we have named owner/admin roles for the mock user personas + // that may not appear in the public members list + const knownMembers: MockFoundryOrganizationMember[] = [ + { id: "member-rivet-jamie", name: "Jamie", email: "jamie@rivet.dev", role: "owner", state: "active" }, + { id: "member-rivet-nathan", name: "Nathan", email: "nathan@acme.dev", role: "member", state: "active" }, + ]; + + // Merge: known members take priority, then fixture members not already covered + const knownIds = new Set(knownMembers.map((m) => m.id)); + const members = [...knownMembers, ...fixtureMembers.filter((m) => !knownIds.has(m.id))]; + + return { + id: "rivet", + organizationId: "rivet", + kind: "organization", + settings: { + displayName: rivetDevFixture.name ?? 
rivetDevFixture.login, + slug: "rivet", + primaryDomain: "rivet.dev", + seatAccrualMode: "first_prompt", + autoImportRepos: true, + }, + github: { + connectedAccount: rivetDevFixture.login, + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: repos.length, + lastSyncLabel: "Synced just now", + lastSyncAt: Date.now() - 60_000, + lastWebhookAt: Date.now() - 30_000, + lastWebhookEvent: "push", + }, + billing: { + planId: "team", + status: "trialing", + seatsIncluded: 5, + trialEndsAt: isoDate(12), + renewalAt: isoDate(12), + stripeCustomerId: "cus_mock_rivet_team", + paymentMethodLabel: "Visa ending in 4242", + invoices: [{ id: "inv-rivet-001", label: "Team pilot", issuedAt: "2026-03-04", amountUsd: 0, status: "paid" }], + }, + members, + seatAssignments: ["jamie@rivet.dev"], + repoCatalog: repos, + }; +} + +function buildDefaultSnapshot(): MockFoundryAppSnapshot { + return { + auth: { + status: "signed_out", + currentUserId: null, + }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + users: [ + { + id: "user-nathan", + name: "Nathan", + email: "nathan@acme.dev", + githubLogin: "nathan", + roleLabel: "Founder", + eligibleOrganizationIds: ["personal-nathan", "acme", "rivet"], + defaultModel: DEFAULT_WORKSPACE_MODEL_ID, + }, + { + id: "user-maya", + name: "Maya", + email: "maya@acme.dev", + githubLogin: "maya", + roleLabel: "Staff Engineer", + eligibleOrganizationIds: ["acme"], + defaultModel: CLAUDE_SECONDARY_MODEL_ID, + }, + { + id: "user-jamie", + name: "Jamie", + email: "jamie@rivet.dev", + githubLogin: "jamie", + roleLabel: "Platform Lead", + eligibleOrganizationIds: ["personal-jamie", "rivet"], + defaultModel: CLAUDE_TERTIARY_MODEL_ID, + }, + ], + organizations: [ + { + id: "personal-nathan", + organizationId: "personal-nathan", + kind: "personal", + 
settings: { + displayName: "Nathan", + slug: "nathan", + primaryDomain: "personal", + seatAccrualMode: "first_prompt", + autoImportRepos: true, + }, + github: { + connectedAccount: "nathan", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: 1, + lastSyncLabel: "Synced just now", + lastSyncAt: Date.now() - 60_000, + lastWebhookAt: Date.now() - 120_000, + lastWebhookEvent: "pull_request.opened", + }, + billing: { + planId: "free", + status: "active", + seatsIncluded: 1, + trialEndsAt: null, + renewalAt: null, + stripeCustomerId: "cus_mock_personal_nathan", + paymentMethodLabel: "No card required", + invoices: [], + }, + members: [{ id: "member-nathan", name: "Nathan", email: "nathan@acme.dev", role: "owner", state: "active" }], + seatAssignments: ["nathan@acme.dev"], + repoCatalog: ["nathan/personal-site"], + }, + { + id: "acme", + organizationId: "acme", + kind: "organization", + settings: { + displayName: "Acme", + slug: "acme", + primaryDomain: "acme.dev", + seatAccrualMode: "first_prompt", + autoImportRepos: true, + }, + github: { + connectedAccount: "acme", + installationStatus: "connected", + syncStatus: "pending", + importedRepoCount: 3, + lastSyncLabel: "Waiting for first import", + lastSyncAt: null, + lastWebhookAt: null, + lastWebhookEvent: "", + }, + billing: { + planId: "team", + status: "active", + seatsIncluded: 5, + trialEndsAt: null, + renewalAt: isoDate(18), + stripeCustomerId: "cus_mock_acme_team", + paymentMethodLabel: "Visa ending in 4242", + invoices: [ + { id: "inv-acme-001", label: "March 2026", issuedAt: "2026-03-01", amountUsd: 240, status: "paid" }, + { id: "inv-acme-000", label: "February 2026", issuedAt: "2026-02-01", amountUsd: 240, status: "paid" }, + ], + }, + members: [ + { id: "member-acme-nathan", name: "Nathan", email: "nathan@acme.dev", role: "owner", state: "active" }, + { id: "member-acme-maya", name: "Maya", email: "maya@acme.dev", role: "admin", state: "active" }, + { id: "member-acme-priya", name: 
"Priya", email: "priya@acme.dev", role: "member", state: "active" }, + { id: "member-acme-devon", name: "Devon", email: "devon@acme.dev", role: "member", state: "invited" }, + ], + seatAssignments: ["nathan@acme.dev", "maya@acme.dev"], + repoCatalog: ["acme/backend", "acme/frontend", "acme/infra"], + }, + buildRivetOrganization(), + { + id: "personal-jamie", + organizationId: "personal-jamie", + kind: "personal", + settings: { + displayName: "Jamie", + slug: "jamie", + primaryDomain: "personal", + seatAccrualMode: "first_prompt", + autoImportRepos: true, + }, + github: { + connectedAccount: "jamie", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: 1, + lastSyncLabel: "Synced yesterday", + lastSyncAt: Date.now() - 24 * 60 * 60_000, + lastWebhookAt: Date.now() - 3_600_000, + lastWebhookEvent: "check_run.completed", + }, + billing: { + planId: "free", + status: "active", + seatsIncluded: 1, + trialEndsAt: null, + renewalAt: null, + stripeCustomerId: "cus_mock_personal_jamie", + paymentMethodLabel: "No card required", + invoices: [], + }, + members: [{ id: "member-jamie", name: "Jamie", email: "jamie@rivet.dev", role: "owner", state: "active" }], + seatAssignments: ["jamie@rivet.dev"], + repoCatalog: ["jamie/demo-app"], + }, + ], + }; +} + +function parseStoredSnapshot(): MockFoundryAppSnapshot | null { + if (typeof window === "undefined") { + return null; + } + + const raw = window.localStorage.getItem(STORAGE_KEY); + if (!raw) { + return null; + } + + try { + const parsed = JSON.parse(raw) as MockFoundryAppSnapshot & { + organizations?: Array; + }; + if (!parsed || typeof parsed !== "object") { + return null; + } + return { + ...parsed, + onboarding: { + starterRepo: { + repoFullName: parsed.onboarding?.starterRepo?.repoFullName ?? "rivet-dev/sandbox-agent", + repoUrl: parsed.onboarding?.starterRepo?.repoUrl ?? "https://github.com/rivet-dev/sandbox-agent", + status: parsed.onboarding?.starterRepo?.status ?? 
"pending", + starredAt: parsed.onboarding?.starterRepo?.starredAt ?? null, + skippedAt: parsed.onboarding?.starterRepo?.skippedAt ?? null, + }, + }, + organizations: (parsed.organizations ?? []).map((organization: MockFoundryOrganization & { repoImportStatus?: string }) => ({ + ...organization, + github: { + ...organization.github, + syncStatus: syncStatusFromLegacy(organization.github?.syncStatus ?? organization.repoImportStatus), + lastSyncAt: organization.github?.lastSyncAt ?? null, + lastWebhookAt: organization.github?.lastWebhookAt ?? null, + lastWebhookEvent: organization.github?.lastWebhookEvent ?? "", + }, + })), + }; + } catch { + return null; + } +} + +function saveSnapshot(snapshot: MockFoundryAppSnapshot): void { + if (typeof window === "undefined") { + return; + } + + window.localStorage.setItem(STORAGE_KEY, JSON.stringify(snapshot)); +} + +function planSeatsIncluded(planId: MockBillingPlanId): number { + switch (planId) { + case "free": + return 1; + case "team": + return 5; + } +} + +class MockFoundryAppStore implements MockFoundryAppClient { + private snapshot = parseStoredSnapshot() ?? buildDefaultSnapshot(); + private listeners = new Set<() => void>(); + private importTimers = new Map>(); + + getSnapshot(): MockFoundryAppSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + return () => { + this.listeners.delete(listener); + }; + } + + async signInWithGithub(userId: string): Promise { + await this.injectAsyncLatency(); + const user = this.snapshot.users.find((candidate) => candidate.id === userId); + if (!user) { + throw new Error(`Unknown mock user ${userId}`); + } + + this.updateSnapshot((current) => { + const activeOrganizationId = user.eligibleOrganizationIds.length === 1 ? (user.eligibleOrganizationIds[0] ?? 
null) : null; + return { + ...current, + auth: { + status: "signed_in", + currentUserId: userId, + }, + activeOrganizationId, + }; + }); + + if (user.eligibleOrganizationIds.length === 1) { + await this.selectOrganization(user.eligibleOrganizationIds[0]!); + } + } + + async signOut(): Promise { + await this.injectAsyncLatency(); + this.updateSnapshot((current) => ({ + ...current, + auth: { + status: "signed_out", + currentUserId: null, + }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + ...current.onboarding.starterRepo, + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + })); + } + + async skipStarterRepo(): Promise { + await this.injectAsyncLatency(); + this.updateSnapshot((current) => ({ + ...current, + onboarding: { + starterRepo: { + ...current.onboarding.starterRepo, + status: "skipped", + skippedAt: Date.now(), + starredAt: null, + }, + }, + })); + } + + async starStarterRepo(organizationId: string): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + this.updateSnapshot((current) => ({ + ...current, + onboarding: { + starterRepo: { + ...current.onboarding.starterRepo, + status: "starred", + starredAt: Date.now(), + skippedAt: null, + }, + }, + })); + } + + async selectOrganization(organizationId: string): Promise { + await this.injectAsyncLatency(); + const org = this.requireOrganization(organizationId); + this.updateSnapshot((current) => ({ + ...current, + activeOrganizationId: organizationId, + })); + + if (org.github.syncStatus !== "synced") { + await this.triggerGithubSync(organizationId); + } + } + + async setDefaultModel(model: WorkspaceModelId): Promise { + await this.injectAsyncLatency(); + const currentUserId = this.snapshot.auth.currentUserId; + if (!currentUserId) { + throw new Error("No signed-in mock user"); + } + this.updateSnapshot((current) => ({ + ...current, + users: current.users.map((user) => (user.id === currentUserId ? 
{ ...user, defaultModel: model } : user)), + })); + } + + async updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(input.organizationId); + this.updateOrganization(input.organizationId, (organization) => ({ + ...organization, + settings: { + ...organization.settings, + displayName: input.displayName.trim() || organization.settings.displayName, + slug: input.slug.trim() || organization.settings.slug, + primaryDomain: input.primaryDomain.trim() || organization.settings.primaryDomain, + }, + })); + } + + async triggerGithubSync(organizationId: string): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + const existingTimer = this.importTimers.get(organizationId); + if (existingTimer) { + clearTimeout(existingTimer); + } + + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + github: { + ...organization.github, + syncStatus: "syncing", + lastSyncLabel: "Syncing repositories...", + }, + })); + + const timer = setTimeout(() => { + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + github: { + ...organization.github, + importedRepoCount: organization.repoCatalog.length, + installationStatus: "connected", + syncStatus: "synced", + lastSyncLabel: "Synced just now", + lastSyncAt: Date.now(), + lastWebhookAt: Date.now(), + lastWebhookEvent: "installation_repositories.added", + }, + })); + this.importTimers.delete(organizationId); + }, 1_250); + + this.importTimers.set(organizationId, timer); + } + + async completeHostedCheckout(organizationId: string, planId: MockBillingPlanId): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + billing: { + ...organization.billing, + planId, + status: "active", + seatsIncluded: planSeatsIncluded(planId), + trialEndsAt: null, + 
renewalAt: isoDate(30), + paymentMethodLabel: "Visa ending in 4242", + invoices: [ + { + id: `inv-${organizationId}-${Date.now()}`, + label: `${organization.settings.displayName} ${planId} upgrade`, + issuedAt: new Date().toISOString().slice(0, 10), + amountUsd: planId === "team" ? 240 : 0, + status: "paid", + }, + ...organization.billing.invoices, + ], + }, + })); + } + + async openBillingPortal(_organizationId: string): Promise { + await this.injectAsyncLatency(); + } + + async cancelScheduledRenewal(organizationId: string): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + billing: { + ...organization.billing, + status: "scheduled_cancel", + }, + })); + } + + async resumeSubscription(organizationId: string): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + billing: { + ...organization.billing, + status: "active", + }, + })); + } + + async reconnectGithub(organizationId: string): Promise { + await this.injectAsyncLatency(); + this.requireOrganization(organizationId); + this.updateOrganization(organizationId, (organization) => ({ + ...organization, + github: { + ...organization.github, + installationStatus: "connected", + syncStatus: "pending", + lastSyncLabel: "Reconnected just now", + lastSyncAt: Date.now(), + }, + })); + } + + recordSeatUsage(organizationId: string): void { + const org = this.snapshot.organizations.find((candidate) => candidate.organizationId === organizationId); + const currentUser = currentMockUser(this.snapshot); + if (!org || !currentUser) { + return; + } + + if (org.seatAssignments.includes(currentUser.email)) { + return; + } + + this.updateOrganization(org.id, (organization) => ({ + ...organization, + seatAssignments: [...organization.seatAssignments, currentUser.email], + })); + } + + private 
injectAsyncLatency(): Promise { + return injectMockLatency(); + } + + private updateOrganization(organizationId: string, updater: (organization: MockFoundryOrganization) => MockFoundryOrganization): void { + this.updateSnapshot((current) => ({ + ...current, + organizations: current.organizations.map((organization) => (organization.id === organizationId ? updater(organization) : organization)), + })); + } + + private updateSnapshot(updater: (current: MockFoundryAppSnapshot) => MockFoundryAppSnapshot): void { + this.snapshot = updater(this.snapshot); + saveSnapshot(this.snapshot); + for (const listener of this.listeners) { + listener(); + } + } + + private requireOrganization(organizationId: string): MockFoundryOrganization { + const organization = this.snapshot.organizations.find((candidate) => candidate.id === organizationId); + if (!organization) { + throw new Error(`Unknown mock organization ${organizationId}`); + } + return organization; + } +} + +function currentMockUser(snapshot: MockFoundryAppSnapshot): MockFoundryUser | null { + if (!snapshot.auth.currentUserId) { + return null; + } + return snapshot.users.find((candidate) => candidate.id === snapshot.auth.currentUserId) ?? null; +} + +const mockFoundryAppStore = new MockFoundryAppStore(); + +export function getMockFoundryAppClient(): MockFoundryAppClient { + return mockFoundryAppStore; +} + +export function currentMockFoundryUser(snapshot: MockFoundryAppSnapshot): MockFoundryUser | null { + return currentMockUser(snapshot); +} + +export function currentMockFoundryOrganization(snapshot: MockFoundryAppSnapshot): MockFoundryOrganization | null { + if (!snapshot.activeOrganizationId) { + return null; + } + return snapshot.organizations.find((candidate) => candidate.id === snapshot.activeOrganizationId) ?? 
null; +} + +export function eligibleMockOrganizations(snapshot: MockFoundryAppSnapshot): MockFoundryOrganization[] { + const user = currentMockUser(snapshot); + if (!user) { + return []; + } + + const eligible = new Set(user.eligibleOrganizationIds); + return snapshot.organizations.filter((organization) => eligible.has(organization.id)); +} diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts new file mode 100644 index 0000000..191f68c --- /dev/null +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -0,0 +1,789 @@ +import type { + AppEvent, + CreateTaskInput, + FoundryAppSnapshot, + SandboxProcessesEvent, + SessionEvent, + TaskRecord, + TaskSummary, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, + TaskEvent, + WorkspaceSessionDetail, + WorkspaceModelGroup, + WorkspaceTaskDetail, + WorkspaceTaskSummary, + OrganizationEvent, + OrganizationSummarySnapshot, + AuditLogEvent as HistoryEvent, + HistoryQueryInput, + SandboxProviderId, + RepoOverview, + RepoRecord, + StarSandboxAgentRepoResult, + SwitchResult, +} from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS } from "@sandbox-agent/foundry-shared"; +import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; +import type { ActorConn, BackendClient, SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js"; +import { getSharedMockWorkspaceClient } from "./workspace-client.js"; + +interface MockProcessRecord extends SandboxProcessRecord { + logText: string; +} + +function notSupported(name: 
string): never { + throw new Error(`${name} is not supported by the mock backend client.`); +} + +function encodeBase64Utf8(value: string): string { + if (typeof Buffer !== "undefined") { + return Buffer.from(value, "utf8").toString("base64"); + } + return globalThis.btoa(unescape(encodeURIComponent(value))); +} + +function nowMs(): number { + return Date.now(); +} + +function mockRepoRemote(label: string): string { + return `https://example.test/${label}.git`; +} + +function mockCwd(repoLabel: string, taskId: string): string { + return `/mock/${repoLabel.replace(/\//g, "-")}/${taskId}`; +} + +function unsupportedAppSnapshot(): FoundryAppSnapshot { + return { + auth: { status: "signed_out", currentUserId: null }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + users: [], + organizations: [], + }; +} + +function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskRecord["status"] { + if (archived) { + return "archived"; + } + return status; +} + +export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient { + const workspace = getSharedMockWorkspaceClient(); + const listenersBySandboxId = new Map void>>(); + const processesBySandboxId = new Map(); + const connectionListeners = new Map void>>(); + let nextPid = 4000; + let nextProcessId = 1; + + const requireTask = (taskId: string) => { + const task = workspace.getSnapshot().tasks.find((candidate) => candidate.id === taskId); + if (!task) { + throw new Error(`Unknown mock task ${taskId}`); + } + return task; + }; + + const ensureProcessList = (sandboxId: string): MockProcessRecord[] => { + const existing = processesBySandboxId.get(sandboxId); + if (existing) { + return existing; + } + const created: MockProcessRecord[] = []; + processesBySandboxId.set(sandboxId, created); + return created; 
+ }; + + const notifySandbox = (sandboxId: string): void => { + const listeners = listenersBySandboxId.get(sandboxId); + if (!listeners) { + emitSandboxProcessesUpdate(sandboxId); + return; + } + for (const listener of [...listeners]) { + listener(); + } + emitSandboxProcessesUpdate(sandboxId); + }; + + const connectionChannel = (scope: string, event: string): string => `${scope}:${event}`; + + const emitConnectionEvent = (scope: string, event: string, payload: any): void => { + const listeners = connectionListeners.get(connectionChannel(scope, event)); + if (!listeners) { + return; + } + for (const listener of [...listeners]) { + listener(payload); + } + }; + + const createConn = (scope: string): ActorConn => ({ + on(event: string, listener: (payload: any) => void): () => void { + const channel = connectionChannel(scope, event); + let listeners = connectionListeners.get(channel); + if (!listeners) { + listeners = new Set(); + connectionListeners.set(channel, listeners); + } + listeners.add(listener); + return () => { + const current = connectionListeners.get(channel); + if (!current) { + return; + } + current.delete(listener); + if (current.size === 0) { + connectionListeners.delete(channel); + } + }; + }, + onError(): () => void { + return () => {}; + }, + async dispose(): Promise {}, + }); + + const buildTaskSummary = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskSummary => ({ + id: task.id, + repoId: task.repoId, + title: task.title, + status: task.status, + repoName: task.repoName, + updatedAtMs: task.updatedAtMs, + branch: task.branch, + pullRequest: task.pullRequest, + activeSessionId: task.activeSessionId ?? task.sessions[0]?.id ?? null, + sessionsSummary: task.sessions.map((tab) => ({ + id: tab.id, + sessionId: tab.sessionId, + sandboxSessionId: tab.sandboxSessionId ?? 
tab.sessionId, + sessionName: tab.sessionName, + agent: tab.agent, + model: tab.model, + status: tab.status, + thinkingSinceMs: tab.thinkingSinceMs, + unread: tab.unread, + created: tab.created, + })), + primaryUserLogin: null, + primaryUserAvatarUrl: null, + }); + + const buildTaskDetail = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskDetail => ({ + ...buildTaskSummary(task), + task: task.title, + fileChanges: task.fileChanges, + diffs: task.diffs, + fileTree: task.fileTree, + minutesUsed: task.minutesUsed, + sandboxes: [ + { + sandboxProviderId: "local", + sandboxId: task.id, + cwd: mockCwd(task.repoName, task.id), + url: null, + }, + ], + activeSandboxId: task.id, + }); + + const buildSessionDetail = (task: TaskWorkspaceSnapshot["tasks"][number], sessionId: string): WorkspaceSessionDetail => { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); + if (!tab) { + throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`); + } + return { + sessionId: tab.id, + sandboxSessionId: tab.sandboxSessionId ?? 
tab.sessionId, + sessionName: tab.sessionName, + agent: tab.agent, + model: tab.model, + status: tab.status, + thinkingSinceMs: tab.thinkingSinceMs, + unread: tab.unread, + created: tab.created, + draft: tab.draft, + transcript: tab.transcript, + }; + }; + + const buildOrganizationSummary = (): OrganizationSummarySnapshot => { + const snapshot = workspace.getSnapshot(); + const taskSummaries = snapshot.tasks.map(buildTaskSummary); + return { + organizationId: defaultOrganizationId, + github: { + connectedAccount: "mock", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: snapshot.repos.length, + lastSyncLabel: "Synced just now", + lastSyncAt: nowMs(), + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: snapshot.repos.length, + totalRepositoryCount: snapshot.repos.length, + }, + repos: snapshot.repos.map((repo) => { + const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id); + return { + id: repo.id, + label: repo.label, + taskCount: repoTasks.length, + latestActivityMs: repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0), + }; + }), + taskSummaries, + }; + }; + + const organizationScope = (organizationId: string): string => `organization:${organizationId}`; + const taskScope = (organizationId: string, repoId: string, taskId: string): string => `task:${organizationId}:${repoId}:${taskId}`; + const sandboxScope = (organizationId: string, sandboxProviderId: string, sandboxId: string): string => + `sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`; + + const emitOrganizationSnapshot = (): void => { + emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { + type: "organizationUpdated", + snapshot: buildOrganizationSummary(), + } satisfies OrganizationEvent); + }; + + const emitTaskUpdate = (taskId: string): void => { + const task = requireTask(taskId); + 
emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", { + type: "taskUpdated", + detail: buildTaskDetail(task), + } satisfies TaskEvent); + }; + + const emitSessionUpdate = (taskId: string, sessionId: string): void => { + const task = requireTask(taskId); + emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "sessionUpdated", { + type: "sessionUpdated", + session: buildSessionDetail(task, sessionId), + } satisfies SessionEvent); + }; + + const emitSandboxProcessesUpdate = (sandboxId: string): void => { + emitConnectionEvent(sandboxScope(defaultOrganizationId, "local", sandboxId), "processesUpdated", { + type: "processesUpdated", + processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), + } satisfies SandboxProcessesEvent); + }; + + const buildTaskRecord = (taskId: string): TaskRecord => { + const task = requireTask(taskId); + const cwd = mockCwd(task.repoName, task.id); + const archived = task.status === "archived"; + return { + organizationId: defaultOrganizationId, + repoId: task.repoId, + repoRemote: mockRepoRemote(task.repoName), + taskId: task.id, + branchName: task.branch, + title: task.title, + task: task.title, + sandboxProviderId: "local", + status: toTaskStatus(archived ? "archived" : "running", archived), + pullRequest: null, + activeSandboxId: task.id, + sandboxes: [ + { + sandboxId: task.id, + sandboxProviderId: "local", + sandboxActorId: "mock-sandbox", + switchTarget: `mock://${task.id}`, + cwd, + createdAt: task.updatedAtMs, + updatedAt: task.updatedAtMs, + }, + ], + createdAt: task.updatedAtMs, + updatedAt: task.updatedAtMs, + }; + }; + + const cloneProcess = (process: MockProcessRecord): MockProcessRecord => ({ ...process }); + + const createProcessRecord = (sandboxId: string, cwd: string, request: ProcessCreateRequest): MockProcessRecord => { + const processId = `proc_${nextProcessId++}`; + const createdAtMs = nowMs(); + const args = request.args ?? 
[]; + const interactive = request.interactive ?? false; + const tty = request.tty ?? false; + const statusLine = interactive && tty ? "Mock terminal session created.\nInteractive transport is unavailable in mock mode.\n" : "Mock process created.\n"; + const commandLine = `$ ${[request.command, ...args].join(" ").trim()}\n`; + return { + id: processId, + command: request.command, + args, + createdAtMs, + cwd: request.cwd ?? cwd, + exitCode: null, + exitedAtMs: null, + interactive, + pid: nextPid++, + status: "running", + tty, + logText: `${statusLine}${commandLine}`, + }; + }; + + return { + async getAppSnapshot(): Promise { + return unsupportedAppSnapshot(); + }, + + async connectOrganization(organizationId: string): Promise { + return createConn(organizationScope(organizationId)); + }, + + async connectTask(organizationId: string, repoId: string, taskId: string): Promise { + return createConn(taskScope(organizationId, repoId, taskId)); + }, + + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return createConn(sandboxScope(organizationId, sandboxProviderId, sandboxId)); + }, + + subscribeApp(): () => void { + return () => {}; + }, + + async signInWithGithub(): Promise { + notSupported("signInWithGithub"); + }, + + async signOutApp(): Promise { + return unsupportedAppSnapshot(); + }, + + async skipAppStarterRepo(): Promise { + return unsupportedAppSnapshot(); + }, + + async starAppStarterRepo(): Promise { + return unsupportedAppSnapshot(); + }, + + async selectAppOrganization(): Promise { + return unsupportedAppSnapshot(); + }, + + async setAppDefaultModel(): Promise { + return unsupportedAppSnapshot(); + }, + + async updateAppOrganizationProfile(): Promise { + return unsupportedAppSnapshot(); + }, + + async triggerAppRepoImport(): Promise { + return unsupportedAppSnapshot(); + }, + + async reconnectAppGithub(): Promise { + notSupported("reconnectAppGithub"); + }, + + async 
completeAppHostedCheckout(): Promise { + notSupported("completeAppHostedCheckout"); + }, + + async openAppBillingPortal(): Promise { + notSupported("openAppBillingPortal"); + }, + + async cancelAppScheduledRenewal(): Promise { + return unsupportedAppSnapshot(); + }, + + async resumeAppSubscription(): Promise { + return unsupportedAppSnapshot(); + }, + + async recordAppSeatUsage(): Promise { + return unsupportedAppSnapshot(); + }, + + async listRepos(_organizationId: string): Promise { + return workspace.getSnapshot().repos.map((repo) => ({ + organizationId: defaultOrganizationId, + repoId: repo.id, + remoteUrl: mockRepoRemote(repo.label), + createdAt: nowMs(), + updatedAt: nowMs(), + })); + }, + + async createTask(_input: CreateTaskInput): Promise { + notSupported("createTask"); + }, + + async listTasks(_organizationId: string, repoId?: string): Promise { + return workspace + .getSnapshot() + .tasks.filter((task) => !repoId || task.repoId === repoId) + .map((task) => ({ + organizationId: defaultOrganizationId, + repoId: task.repoId, + taskId: task.id, + branchName: task.branch, + title: task.title, + status: task.status === "archived" ? 
"archived" : "running", + pullRequest: null, + updatedAt: task.updatedAtMs, + })); + }, + + async getRepoOverview(_organizationId: string, _repoId: string): Promise { + notSupported("getRepoOverview"); + }, + async getTask(_organizationId: string, _repoId: string, taskId: string): Promise { + return buildTaskRecord(taskId); + }, + + async listHistory(_input: HistoryQueryInput): Promise { + return []; + }, + + async switchTask(_organizationId: string, _repoId: string, taskId: string): Promise { + return { + organizationId: defaultOrganizationId, + taskId, + sandboxProviderId: "local", + switchTarget: `mock://${taskId}`, + }; + }, + + async attachTask(_organizationId: string, _repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + return { + target: `mock://${taskId}`, + sessionId: requireTask(taskId).sessions[0]?.sessionId ?? null, + }; + }, + + async runAction(_organizationId: string, _repoId: string, _taskId: string): Promise { + notSupported("runAction"); + }, + + async createSandboxSession(): Promise<{ id: string; status: "running" | "idle" | "error" }> { + notSupported("createSandboxSession"); + }, + + async listSandboxSessions(): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }> { + return { items: [] }; + }, + + async listSandboxSessionEvents(): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> { + return { items: [] }; + }, + + async createSandboxProcess(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + request: ProcessCreateRequest; + }): Promise { + const task = requireTask(input.sandboxId); + const processes = ensureProcessList(input.sandboxId); + const created = createProcessRecord(input.sandboxId, mockCwd(task.repoName, task.id), input.request); + processes.unshift(created); + notifySandbox(input.sandboxId); + return cloneProcess(created); + }, + + async listSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, 
sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { + return { + processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), + }; + }, + + async getSandboxProcessLogs( + _organizationId: string, + _providerId: SandboxProviderId, + sandboxId: string, + processId: string, + query?: ProcessLogFollowQuery, + ): Promise { + const process = ensureProcessList(sandboxId).find((candidate) => candidate.id === processId); + if (!process) { + throw new Error(`Unknown mock process ${processId}`); + } + return { + processId, + stream: query?.stream ?? (process.tty ? "pty" : "combined"), + entries: process.logText + ? [ + { + data: encodeBase64Utf8(process.logText), + encoding: "base64", + sequence: 1, + stream: query?.stream ?? (process.tty ? "pty" : "combined"), + timestampMs: process.createdAtMs, + }, + ] + : [], + }; + }, + + async stopSandboxProcess( + _organizationId: string, + _providerId: SandboxProviderId, + sandboxId: string, + processId: string, + _query?: ProcessSignalQuery, + ): Promise { + const process = ensureProcessList(sandboxId).find((candidate) => candidate.id === processId); + if (!process) { + throw new Error(`Unknown mock process ${processId}`); + } + process.status = "exited"; + process.exitCode = 0; + process.exitedAtMs = nowMs(); + process.logText += "\n[stopped]\n"; + notifySandbox(sandboxId); + return cloneProcess(process); + }, + + async killSandboxProcess( + _organizationId: string, + _providerId: SandboxProviderId, + sandboxId: string, + processId: string, + _query?: ProcessSignalQuery, + ): Promise { + const process = ensureProcessList(sandboxId).find((candidate) => candidate.id === processId); + if (!process) { + throw new Error(`Unknown mock process ${processId}`); + } + process.status = "exited"; + process.exitCode = 137; + process.exitedAtMs = nowMs(); + process.logText += "\n[killed]\n"; + notifySandbox(sandboxId); + return cloneProcess(process); + }, + + async deleteSandboxProcess(_organizationId: string, 
_providerId: SandboxProviderId, sandboxId: string, processId: string): Promise { + processesBySandboxId.set( + sandboxId, + ensureProcessList(sandboxId).filter((candidate) => candidate.id !== processId), + ); + notifySandbox(sandboxId); + }, + + subscribeSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { + let listeners = listenersBySandboxId.get(sandboxId); + if (!listeners) { + listeners = new Set(); + listenersBySandboxId.set(sandboxId, listeners); + } + listeners.add(listener); + return () => { + const current = listenersBySandboxId.get(sandboxId); + if (!current) { + return; + } + current.delete(listener); + if (current.size === 0) { + listenersBySandboxId.delete(sandboxId); + } + }; + }, + + async sendSandboxPrompt(): Promise { + notSupported("sendSandboxPrompt"); + }, + + async sandboxSessionStatus(sessionId: string): Promise<{ id: string; status: "running" | "idle" | "error" }> { + return { id: sessionId, status: "idle" }; + }, + + async sandboxProviderState( + _organizationId: string, + _providerId: SandboxProviderId, + sandboxId: string, + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { + return { sandboxProviderId: "local", sandboxId, state: "running", at: nowMs() }; + }, + + async getSandboxAgentConnection(): Promise<{ endpoint: string; token?: string }> { + return { endpoint: "mock://terminal-unavailable" }; + }, + + async getSandboxWorkspaceModelGroups(_organizationId: string, _sandboxProviderId: SandboxProviderId, _sandboxId: string): Promise { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + }, + + async getOrganizationSummary(): Promise { + return buildOrganizationSummary(); + }, + + async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { + return buildTaskDetail(requireTask(taskId)); + }, + + async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): 
Promise { + return buildSessionDetail(requireTask(taskId), sessionId); + }, + + async getWorkspace(): Promise { + return workspace.getSnapshot(); + }, + + subscribeWorkspace(_organizationId: string, listener: () => void): () => void { + return workspace.subscribe(listener); + }, + + async createWorkspaceTask(_organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + const created = await workspace.createTask(input); + emitOrganizationSnapshot(); + emitTaskUpdate(created.taskId); + if (created.sessionId) { + emitSessionUpdate(created.taskId, created.sessionId); + } + return created; + }, + + async markWorkspaceUnread(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.markTaskUnread(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async renameWorkspaceTask(_organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await workspace.renameTask(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async createWorkspaceSession(_organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const created = await workspace.addSession(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, created.sessionId); + return created; + }, + + async renameWorkspaceSession(_organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { + await workspace.renameSession(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async selectWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.selectSession(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async setWorkspaceSessionUnread(_organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): 
Promise { + await workspace.setSessionUnread(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async updateWorkspaceDraft(_organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await workspace.updateDraft(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async changeWorkspaceModel(_organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await workspace.changeModel(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async sendWorkspaceMessage(_organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await workspace.sendMessage(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async stopWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.stopAgent(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async closeWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.closeSession(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async publishWorkspacePr(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.publishPr(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async changeWorkspaceTaskOwner( + _organizationId: string, + input: { repoId: string; taskId: string; targetUserId: string; targetUserName: string; targetUserEmail: string }, + ): Promise { + await workspace.changeOwner(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async revertWorkspaceFile(_organizationId: string, input: 
TaskWorkspaceDiffInput): Promise { + await workspace.revertFile(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + }, + + async adminReloadGithubOrganization(): Promise {}, + async adminReloadGithubRepository(): Promise {}, + + async health(): Promise<{ ok: true }> { + return { ok: true }; + }, + + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return { organizationId }; + }, + + async starSandboxAgentRepo(): Promise { + return { + repo: "rivet-dev/sandbox-agent", + starredAt: nowMs(), + }; + }, + }; +} diff --git a/foundry/packages/client/src/mock/latency.ts b/foundry/packages/client/src/mock/latency.ts new file mode 100644 index 0000000..54d2d58 --- /dev/null +++ b/foundry/packages/client/src/mock/latency.ts @@ -0,0 +1,12 @@ +const MOCK_LATENCY_MIN_MS = 1; +const MOCK_LATENCY_MAX_MS = 200; + +export function randomMockLatencyMs(): number { + return Math.floor(Math.random() * (MOCK_LATENCY_MAX_MS - MOCK_LATENCY_MIN_MS + 1)) + MOCK_LATENCY_MIN_MS; +} + +export function injectMockLatency(): Promise { + return new Promise((resolve) => { + setTimeout(resolve, randomMockLatencyMs()); + }); +} diff --git a/foundry/packages/client/src/mock/workspace-client.ts b/foundry/packages/client/src/mock/workspace-client.ts new file mode 100644 index 0000000..7983e0f --- /dev/null +++ b/foundry/packages/client/src/mock/workspace-client.ts @@ -0,0 +1,471 @@ +import { + MODEL_GROUPS, + buildInitialMockLayoutViewModel, + groupWorkspaceRepositories, + nowMs, + providerAgent, + randomReply, + removeFileTreePath, + slugify, + uid, +} from "../workspace-model.js"; +import { DEFAULT_WORKSPACE_MODEL_ID, workspaceAgentForModel } from "@sandbox-agent/foundry-shared"; +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + 
TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, + WorkspaceSession as AgentSession, + WorkspaceTask as Task, + WorkspaceTranscriptEvent as TranscriptEvent, +} from "@sandbox-agent/foundry-shared"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; + +function buildTranscriptEvent(params: { + sessionId: string; + sender: "client" | "agent"; + createdAt: number; + payload: unknown; + eventIndex: number; +}): TranscriptEvent { + return { + id: uid(), + sessionId: params.sessionId, + sender: params.sender, + createdAt: params.createdAt, + payload: params.payload, + connectionId: "mock-connection", + eventIndex: params.eventIndex, + }; +} + +class MockWorkspaceStore implements TaskWorkspaceClient { + private snapshot = buildInitialMockLayoutViewModel(); + private listeners = new Set<() => void>(); + private pendingTimers = new Map>(); + + getSnapshot(): TaskWorkspaceSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + return () => { + this.listeners.delete(listener); + }; + } + + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { + const id = uid(); + const sessionId = `session-${id}`; + const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId); + if (!repo) { + throw new Error(`Cannot create mock task for unknown repo ${input.repoId}`); + } + const nextTask: Task = { + id, + repoId: repo.id, + title: input.title?.trim() || "New Task", + status: "init_enqueue_provision", + repoName: repo.label, + updatedAtMs: nowMs(), + branch: input.branch?.trim() || null, + pullRequest: null, + activeSessionId: sessionId, + sessions: [ + { + id: sessionId, + sessionId: sessionId, + sessionName: "Session 1", + agent: workspaceAgentForModel(input.model ?? DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: input.model ?? 
DEFAULT_WORKSPACE_MODEL_ID, + status: "idle", + thinkingSinceMs: null, + unread: false, + created: false, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + }; + + this.updateState((current) => ({ + ...current, + tasks: [nextTask, ...current.tasks], + })); + return { taskId: id, sessionId }; + } + + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { + this.updateTask(input.taskId, (task) => { + const targetSession = task.sessions[task.sessions.length - 1] ?? null; + if (!targetSession) { + return task; + } + + return { + ...task, + sessions: task.sessions.map((session) => (session.id === targetSession.id ? { ...session, unread: true } : session)), + }; + }); + } + + async renameTask(input: TaskWorkspaceRenameInput): Promise { + const value = input.value.trim(); + if (!value) { + throw new Error(`Cannot rename task ${input.taskId} to an empty title`); + } + this.updateTask(input.taskId, (task) => ({ ...task, title: value, updatedAtMs: nowMs() })); + } + + async archiveTask(input: TaskWorkspaceSelectInput): Promise { + this.updateTask(input.taskId, (task) => ({ ...task, status: "archived", updatedAtMs: nowMs() })); + } + + async publishPr(input: TaskWorkspaceSelectInput): Promise { + const nextPrNumber = Math.max(0, ...this.snapshot.tasks.map((task) => task.pullRequest?.number ?? 0)) + 1; + this.updateTask(input.taskId, (task) => ({ + ...task, + updatedAtMs: nowMs(), + pullRequest: { + number: nextPrNumber, + status: "ready", + title: task.title, + state: "open", + url: `https://example.test/pr/${nextPrNumber}`, + headRefName: task.branch ?? 
`task/${task.id}`, + baseRefName: "main", + repoFullName: task.repoName, + authorLogin: "mock", + isDraft: false, + updatedAtMs: nowMs(), + }, + })); + } + + async revertFile(input: TaskWorkspaceDiffInput): Promise { + this.updateTask(input.taskId, (task) => { + const file = task.fileChanges.find((entry) => entry.path === input.path); + const nextDiffs = { ...task.diffs }; + delete nextDiffs[input.path]; + + return { + ...task, + fileChanges: task.fileChanges.filter((entry) => entry.path !== input.path), + diffs: nextDiffs, + fileTree: file?.type === "A" ? removeFileTreePath(task.fileTree, input.path) : task.fileTree, + }; + }); + } + + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise { + this.assertSession(input.taskId, input.sessionId); + this.updateTask(input.taskId, (task) => ({ + ...task, + updatedAtMs: nowMs(), + sessions: task.sessions.map((tab) => + tab.id === input.sessionId + ? { + ...tab, + draft: { + text: input.text, + attachments: input.attachments, + updatedAtMs: nowMs(), + }, + } + : tab, + ), + })); + } + + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { + const text = input.text.trim(); + if (!text) { + throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`); + } + + this.assertSession(input.taskId, input.sessionId); + const startedAtMs = nowMs(); + + this.updateTask(input.taskId, (currentTask) => { + const isFirstOnTask = String(currentTask.status).startsWith("init_"); + const newTitle = isFirstOnTask ? (text.length > 50 ? `${text.slice(0, 47)}...` : text) : currentTask.title; + const newBranch = isFirstOnTask ? 
`feat/${slugify(newTitle)}` : currentTask.branch; + const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)]; + const userEvent = buildTranscriptEvent({ + sessionId: input.sessionId, + sender: "client", + createdAt: startedAtMs, + eventIndex: candidateEventIndex(currentTask, input.sessionId), + payload: { + method: "session/prompt", + params: { + prompt: userMessageLines.map((line) => ({ type: "text", text: line })), + }, + }, + }); + + return { + ...currentTask, + title: newTitle, + branch: newBranch, + status: "running", + updatedAtMs: startedAtMs, + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId + ? { + ...candidate, + created: true, + status: "running", + unread: false, + thinkingSinceMs: startedAtMs, + draft: { text: "", attachments: [], updatedAtMs: startedAtMs }, + transcript: [...candidate.transcript, userEvent], + } + : candidate, + ), + }; + }); + + const existingTimer = this.pendingTimers.get(input.sessionId); + if (existingTimer) { + clearTimeout(existingTimer); + } + + const timer = setTimeout(() => { + const task = this.requireTask(input.taskId); + this.requireSession(task, input.sessionId); + const completedAtMs = nowMs(); + const replyEvent = buildTranscriptEvent({ + sessionId: input.sessionId, + sender: "agent", + createdAt: completedAtMs, + eventIndex: candidateEventIndex(task, input.sessionId), + payload: { + result: { + text: randomReply(), + durationMs: completedAtMs - startedAtMs, + }, + }, + }); + + this.updateTask(input.taskId, (currentTask) => { + const updatedTabs = currentTask.sessions.map((candidate) => { + if (candidate.id !== input.sessionId) { + return candidate; + } + + return { + ...candidate, + status: "idle" as const, + thinkingSinceMs: null, + unread: true, + transcript: [...candidate.transcript, replyEvent], + }; + }); + const anyRunning = updatedTabs.some((candidate) => candidate.status === "running"); + + return { + 
...currentTask, + updatedAtMs: completedAtMs, + sessions: updatedTabs, + status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle", + }; + }); + + this.pendingTimers.delete(input.sessionId); + }, 2_500); + + this.pendingTimers.set(input.sessionId, timer); + } + + async stopAgent(input: TaskWorkspaceSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + const existing = this.pendingTimers.get(input.sessionId); + if (existing) { + clearTimeout(existing); + this.pendingTimers.delete(input.sessionId); + } + + this.updateTask(input.taskId, (currentTask) => { + const updatedTabs = currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate, + ); + const anyRunning = updatedTabs.some((candidate) => candidate.status === "running"); + + return { + ...currentTask, + updatedAtMs: nowMs(), + sessions: updatedTabs, + status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle", + }; + }); + } + + async selectSession(input: TaskWorkspaceSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + activeSessionId: input.sessionId, + })); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? { ...candidate, unread: input.unread } : candidate)), + })); + } + + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { + const title = input.title.trim(); + if (!title) { + throw new Error(`Cannot rename session ${input.sessionId} to an empty title`); + } + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? 
{ ...candidate, sessionName: title } : candidate)), + })); + } + + async closeSession(input: TaskWorkspaceSessionInput): Promise { + this.updateTask(input.taskId, (currentTask) => { + if (currentTask.sessions.length <= 1) { + return currentTask; + } + + return { + ...currentTask, + activeSessionId: + currentTask.activeSessionId === input.sessionId + ? (currentTask.sessions.find((candidate) => candidate.id !== input.sessionId)?.id ?? null) + : currentTask.activeSessionId, + sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId), + }; + }); + } + + async addSession(input: TaskWorkspaceSelectInput): Promise { + this.assertTask(input.taskId); + const nextSessionId = uid(); + const nextSession: AgentSession = { + id: nextSessionId, + sessionId: nextSessionId, + sandboxSessionId: null, + sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`, + agent: workspaceAgentForModel(DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: DEFAULT_WORKSPACE_MODEL_ID, + status: "idle", + thinkingSinceMs: null, + unread: false, + created: false, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }; + + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + updatedAtMs: nowMs(), + activeSessionId: nextSession.id, + sessions: [...currentTask.sessions, nextSession], + })); + return { sessionId: nextSession.id }; + } + + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { + const group = MODEL_GROUPS.find((candidate) => candidate.models.some((entry) => entry.id === input.model)); + if (!group) { + throw new Error(`Unable to resolve model provider for ${input.model}`); + } + + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? 
{ ...candidate, model: input.model, agent: workspaceAgentForModel(input.model, MODEL_GROUPS) } : candidate, + ), + })); + } + + async changeOwner(input: { repoId: string; taskId: string; targetUserId: string; targetUserName: string; targetUserEmail: string }): Promise { + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + primaryUserLogin: input.targetUserName, + primaryUserAvatarUrl: null, + })); + } + + private updateState(updater: (current: TaskWorkspaceSnapshot) => TaskWorkspaceSnapshot): void { + const nextSnapshot = updater(this.snapshot); + this.snapshot = { + ...nextSnapshot, + repositories: groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), + }; + this.notify(); + } + + private updateTask(taskId: string, updater: (task: Task) => Task): void { + this.assertTask(taskId); + this.updateState((current) => ({ + ...current, + tasks: current.tasks.map((task) => (task.id === taskId ? updater(task) : task)), + })); + } + + private notify(): void { + for (const listener of this.listeners) { + listener(); + } + } + + private assertTask(taskId: string): void { + this.requireTask(taskId); + } + + private assertSession(taskId: string, sessionId: string): void { + const task = this.requireTask(taskId); + this.requireSession(task, sessionId); + } + + private requireTask(taskId: string): Task { + const task = this.snapshot.tasks.find((candidate) => candidate.id === taskId); + if (!task) { + throw new Error(`Unable to find mock task ${taskId}`); + } + return task; + } + + private requireSession(task: Task, sessionId: string): AgentSession { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (!session) { + throw new Error(`Unable to find mock session ${sessionId} in task ${task.id}`); + } + return session; + } +} + +function candidateEventIndex(task: Task, sessionId: string): number { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + return (session?.transcript.length ?? 
0) + 1; +} + +let sharedMockWorkspaceClient: TaskWorkspaceClient | null = null; + +export function getSharedMockWorkspaceClient(): TaskWorkspaceClient { + if (!sharedMockWorkspaceClient) { + sharedMockWorkspaceClient = new MockWorkspaceStore(); + } + return sharedMockWorkspaceClient; +} diff --git a/foundry/packages/client/src/remote/app-client.ts b/foundry/packages/client/src/remote/app-client.ts new file mode 100644 index 0000000..f1cb908 --- /dev/null +++ b/foundry/packages/client/src/remote/app-client.ts @@ -0,0 +1,151 @@ +import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../backend-client.js"; +import type { FoundryAppClient } from "../app-client.js"; + +export interface RemoteFoundryAppClientOptions { + backend: BackendClient; +} + +class RemoteFoundryAppStore implements FoundryAppClient { + private readonly backend: BackendClient; + private snapshot: FoundryAppSnapshot = { + auth: { status: "signed_out", currentUserId: null }, + activeOrganizationId: null, + onboarding: { + starterRepo: { + repoFullName: "rivet-dev/sandbox-agent", + repoUrl: "https://github.com/rivet-dev/sandbox-agent", + status: "pending", + starredAt: null, + skippedAt: null, + }, + }, + users: [], + organizations: [], + }; + private readonly listeners = new Set<() => void>(); + private refreshPromise: Promise | null = null; + private unsubscribeApp: (() => void) | null = null; + + constructor(options: RemoteFoundryAppClientOptions) { + this.backend = options.backend; + } + + getSnapshot(): FoundryAppSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + this.ensureStarted(); + return () => { + this.listeners.delete(listener); + if (this.listeners.size === 0 && this.unsubscribeApp) { + this.unsubscribeApp(); + this.unsubscribeApp = null; + } + }; + } + + async signInWithGithub(userId?: 
string): Promise { + void userId; + await this.backend.signInWithGithub(); + } + + async signOut(): Promise { + this.snapshot = await this.backend.signOutApp(); + this.notify(); + } + + async skipStarterRepo(): Promise { + this.snapshot = await this.backend.skipAppStarterRepo(); + this.notify(); + } + + async starStarterRepo(organizationId: string): Promise { + this.snapshot = await this.backend.starAppStarterRepo(organizationId); + this.notify(); + } + + async selectOrganization(organizationId: string): Promise { + this.snapshot = await this.backend.selectAppOrganization(organizationId); + this.notify(); + } + + async setDefaultModel(model: WorkspaceModelId): Promise { + this.snapshot = await this.backend.setAppDefaultModel(model); + this.notify(); + } + + async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { + this.snapshot = await this.backend.updateAppOrganizationProfile(input); + this.notify(); + } + + async triggerGithubSync(organizationId: string): Promise { + this.snapshot = await this.backend.triggerAppRepoImport(organizationId); + this.notify(); + } + + async completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise { + await this.backend.completeAppHostedCheckout(organizationId, planId); + } + + async openBillingPortal(organizationId: string): Promise { + await this.backend.openAppBillingPortal(organizationId); + } + + async cancelScheduledRenewal(organizationId: string): Promise { + this.snapshot = await this.backend.cancelAppScheduledRenewal(organizationId); + this.notify(); + } + + async resumeSubscription(organizationId: string): Promise { + this.snapshot = await this.backend.resumeAppSubscription(organizationId); + this.notify(); + } + + async reconnectGithub(organizationId: string): Promise { + await this.backend.reconnectAppGithub(organizationId); + } + + async recordSeatUsage(organizationId: string): Promise { + this.snapshot = await this.backend.recordAppSeatUsage(organizationId); + 
this.notify(); + } + + private ensureStarted(): void { + if (!this.unsubscribeApp) { + this.unsubscribeApp = this.backend.subscribeApp(() => { + void this.refresh(); + }); + } + void this.refresh(); + } + + private async refresh(): Promise { + if (this.refreshPromise) { + await this.refreshPromise; + return; + } + + this.refreshPromise = (async () => { + this.snapshot = await this.backend.getAppSnapshot(); + this.notify(); + })().finally(() => { + this.refreshPromise = null; + }); + + await this.refreshPromise; + } + + private notify(): void { + for (const listener of [...this.listeners]) { + listener(); + } + } +} + +export function createRemoteFoundryAppClient(options: RemoteFoundryAppClientOptions): FoundryAppClient { + return new RemoteFoundryAppStore(options); +} diff --git a/foundry/packages/client/src/remote/workspace-client.ts b/foundry/packages/client/src/remote/workspace-client.ts new file mode 100644 index 0000000..2a11f51 --- /dev/null +++ b/foundry/packages/client/src/remote/workspace-client.ts @@ -0,0 +1,204 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceChangeOwnerInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../backend-client.js"; +import { groupWorkspaceRepositories } from "../workspace-model.js"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; + +export interface RemoteWorkspaceClientOptions { + backend: BackendClient; + organizationId: string; +} + +class RemoteWorkspaceStore implements TaskWorkspaceClient { + private readonly backend: BackendClient; + private readonly organizationId: string; + 
private snapshot: TaskWorkspaceSnapshot; + private readonly listeners = new Set<() => void>(); + private unsubscribeWorkspace: (() => void) | null = null; + private refreshPromise: Promise | null = null; + private refreshRetryTimeout: ReturnType | null = null; + + constructor(options: RemoteWorkspaceClientOptions) { + this.backend = options.backend; + this.organizationId = options.organizationId; + this.snapshot = { + organizationId: options.organizationId, + repos: [], + repositories: [], + tasks: [], + }; + } + + getSnapshot(): TaskWorkspaceSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + this.ensureStarted(); + return () => { + this.listeners.delete(listener); + if (this.listeners.size === 0 && this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + if (this.listeners.size === 0 && this.unsubscribeWorkspace) { + this.unsubscribeWorkspace(); + this.unsubscribeWorkspace = null; + } + }; + } + + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { + const created = await this.backend.createWorkspaceTask(this.organizationId, input); + await this.refresh(); + return created; + } + + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { + await this.backend.markWorkspaceUnread(this.organizationId, input); + await this.refresh(); + } + + async renameTask(input: TaskWorkspaceRenameInput): Promise { + await this.backend.renameWorkspaceTask(this.organizationId, input); + await this.refresh(); + } + + async archiveTask(input: TaskWorkspaceSelectInput): Promise { + await this.backend.runAction(this.organizationId, input.repoId, input.taskId, "archive"); + await this.refresh(); + } + + async publishPr(input: TaskWorkspaceSelectInput): Promise { + await this.backend.publishWorkspacePr(this.organizationId, input); + await this.refresh(); + } + + async revertFile(input: TaskWorkspaceDiffInput): Promise { + await 
this.backend.revertWorkspaceFile(this.organizationId, input); + await this.refresh(); + } + + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise { + await this.backend.updateWorkspaceDraft(this.organizationId, input); + // Skip refresh — the server broadcast will trigger it, and the frontend + // holds local draft state to avoid the round-trip overwriting user input. + } + + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { + await this.backend.sendWorkspaceMessage(this.organizationId, input); + await this.refresh(); + } + + async stopAgent(input: TaskWorkspaceSessionInput): Promise { + await this.backend.stopWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async selectSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.selectWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { + await this.backend.setWorkspaceSessionUnread(this.organizationId, input); + await this.refresh(); + } + + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { + await this.backend.renameWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async closeSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.closeWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async addSession(input: TaskWorkspaceSelectInput): Promise { + const created = await this.backend.createWorkspaceSession(this.organizationId, input); + await this.refresh(); + return created; + } + + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { + await this.backend.changeWorkspaceModel(this.organizationId, input); + await this.refresh(); + } + + async changeOwner(input: TaskWorkspaceChangeOwnerInput): Promise { + await this.backend.changeWorkspaceTaskOwner(this.organizationId, input); + await this.refresh(); + } + + private ensureStarted(): void 
{ + if (!this.unsubscribeWorkspace) { + this.unsubscribeWorkspace = this.backend.subscribeWorkspace(this.organizationId, () => { + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }); + } + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + } + + private scheduleRefreshRetry(): void { + if (this.refreshRetryTimeout || this.listeners.size === 0) { + return; + } + + this.refreshRetryTimeout = setTimeout(() => { + this.refreshRetryTimeout = null; + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }, 1_000); + } + + private async refresh(): Promise { + if (this.refreshPromise) { + await this.refreshPromise; + return; + } + + this.refreshPromise = (async () => { + const nextSnapshot = await this.backend.getWorkspace(this.organizationId); + if (this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + this.snapshot = { + ...nextSnapshot, + repositories: nextSnapshot.repositories ?? 
groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), + }; + for (const listener of [...this.listeners]) { + listener(); + } + })().finally(() => { + this.refreshPromise = null; + }); + + await this.refreshPromise; + } +} + +export function createRemoteWorkspaceClient(options: RemoteWorkspaceClientOptions): TaskWorkspaceClient { + return new RemoteWorkspaceStore(options); +} diff --git a/foundry/packages/client/src/subscription/manager.ts b/foundry/packages/client/src/subscription/manager.ts new file mode 100644 index 0000000..b9bee0b --- /dev/null +++ b/foundry/packages/client/src/subscription/manager.ts @@ -0,0 +1,33 @@ +import type { TopicData, TopicKey, TopicParams } from "./topics.js"; + +export type TopicStatus = "loading" | "connected" | "error"; + +export interface DebugSubscriptionTopic { + topicKey: TopicKey; + cacheKey: string; + listenerCount: number; + status: TopicStatus; + lastRefreshAt: number | null; +} + +export interface TopicState { + data: TopicData | undefined; + status: TopicStatus; + error: Error | null; +} + +/** + * The SubscriptionManager owns all realtime actor connections and cached state. + * + * Multiple subscribers to the same topic share one connection and one cache + * entry. After the last subscriber leaves, a short grace period keeps the + * connection warm so navigation does not thrash actor connections. 
+ */ +export interface SubscriptionManager { + subscribe(topicKey: K, params: TopicParams, listener: () => void): () => void; + getSnapshot(topicKey: K, params: TopicParams): TopicData | undefined; + getStatus(topicKey: K, params: TopicParams): TopicStatus; + getError(topicKey: K, params: TopicParams): Error | null; + listDebugTopics(): DebugSubscriptionTopic[]; + dispose(): void; +} diff --git a/foundry/packages/client/src/subscription/mock-manager.ts b/foundry/packages/client/src/subscription/mock-manager.ts new file mode 100644 index 0000000..bcdb389 --- /dev/null +++ b/foundry/packages/client/src/subscription/mock-manager.ts @@ -0,0 +1,12 @@ +import { createMockBackendClient } from "../mock/backend-client.js"; +import { RemoteSubscriptionManager } from "./remote-manager.js"; + +/** + * Mock implementation shares the same subscription-manager harness as the remote + * path, but uses the in-memory mock backend that synthesizes actor events. + */ +export class MockSubscriptionManager extends RemoteSubscriptionManager { + constructor() { + super(createMockBackendClient()); + } +} diff --git a/foundry/packages/client/src/subscription/remote-manager.ts b/foundry/packages/client/src/subscription/remote-manager.ts new file mode 100644 index 0000000..ae774c6 --- /dev/null +++ b/foundry/packages/client/src/subscription/remote-manager.ts @@ -0,0 +1,275 @@ +import type { BackendClient } from "../backend-client.js"; +import type { DebugSubscriptionTopic, SubscriptionManager, TopicStatus } from "./manager.js"; +import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js"; + +const GRACE_PERIOD_MS = 30_000; + +/** Initial retry delay in ms. */ +const RETRY_BASE_MS = 1_000; +/** Maximum retry delay in ms. */ +const RETRY_MAX_MS = 30_000; + +/** + * Remote implementation of SubscriptionManager. + * Each cache entry owns one actor connection plus one materialized snapshot. 
+ */ +export class RemoteSubscriptionManager implements SubscriptionManager { + private entries = new Map>(); + + constructor(private readonly backend: BackendClient) {} + + subscribe(topicKey: K, params: TopicParams, listener: () => void): () => void { + const definition = topicDefinitions[topicKey] as unknown as TopicDefinition; + const cacheKey = definition.key(params as any); + let entry = this.entries.get(cacheKey); + + if (!entry) { + entry = new TopicEntry(topicKey, cacheKey, definition, this.backend, params as any); + this.entries.set(cacheKey, entry); + } + + entry.cancelTeardown(); + entry.addListener(listener); + entry.ensureStarted(); + + return () => { + const current = this.entries.get(cacheKey); + if (!current) { + return; + } + current.removeListener(listener); + if (current.listenerCount === 0) { + current.scheduleTeardown(GRACE_PERIOD_MS, () => { + this.entries.delete(cacheKey); + }); + } + }; + } + + getSnapshot(topicKey: K, params: TopicParams): TopicData | undefined { + return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.data as TopicData | undefined; + } + + getStatus(topicKey: K, params: TopicParams): TopicStatus { + return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.status ?? "loading"; + } + + getError(topicKey: K, params: TopicParams): Error | null { + return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.error ?? 
null; + } + + listDebugTopics(): DebugSubscriptionTopic[] { + return [...this.entries.values()] + .filter((entry) => entry.listenerCount > 0) + .map((entry) => entry.getDebugTopic()) + .sort((left, right) => left.cacheKey.localeCompare(right.cacheKey)); + } + + dispose(): void { + for (const entry of this.entries.values()) { + entry.dispose(); + } + this.entries.clear(); + } +} + +class TopicEntry { + data: TData | undefined; + status: TopicStatus = "loading"; + error: Error | null = null; + listenerCount = 0; + lastRefreshAt: number | null = null; + + private readonly listeners = new Set<() => void>(); + private conn: Awaited["connect"]>> | null = null; + private unsubscribeEvent: (() => void) | null = null; + private unsubscribeError: (() => void) | null = null; + private teardownTimer: ReturnType | null = null; + private retryTimer: ReturnType | null = null; + private retryAttempt = 0; + private startPromise: Promise | null = null; + private eventPromise: Promise = Promise.resolve(); + private started = false; + private disposed = false; + + constructor( + private readonly topicKey: TopicKey, + private readonly cacheKey: string, + private readonly definition: TopicDefinition, + private readonly backend: BackendClient, + private readonly params: TParams, + ) {} + + getDebugTopic(): DebugSubscriptionTopic { + return { + topicKey: this.topicKey, + cacheKey: this.cacheKey, + listenerCount: this.listenerCount, + status: this.status, + lastRefreshAt: this.lastRefreshAt, + }; + } + + addListener(listener: () => void): void { + this.listeners.add(listener); + this.listenerCount = this.listeners.size; + } + + removeListener(listener: () => void): void { + this.listeners.delete(listener); + this.listenerCount = this.listeners.size; + } + + ensureStarted(): void { + if (this.started || this.startPromise) { + return; + } + this.startPromise = this.start().finally(() => { + this.startPromise = null; + }); + } + + scheduleTeardown(ms: number, onTeardown: () => void): void { + 
this.teardownTimer = setTimeout(() => { + this.dispose(); + onTeardown(); + }, ms); + } + + cancelTeardown(): void { + if (this.teardownTimer) { + clearTimeout(this.teardownTimer); + this.teardownTimer = null; + } + } + + dispose(): void { + this.disposed = true; + this.cancelTeardown(); + this.cancelRetry(); + this.unsubscribeEvent?.(); + this.unsubscribeError?.(); + if (this.conn) { + void this.conn.dispose(); + } + this.conn = null; + this.data = undefined; + this.status = "loading"; + this.error = null; + this.lastRefreshAt = null; + this.started = false; + this.retryAttempt = 0; + } + + private cancelRetry(): void { + if (this.retryTimer) { + clearTimeout(this.retryTimer); + this.retryTimer = null; + } + } + + /** + * Schedules a retry with exponential backoff. Cleans up any existing + * connection state before reconnecting. + */ + private scheduleRetry(): void { + if (this.disposed || this.listenerCount === 0) { + return; + } + + const delay = Math.min(RETRY_BASE_MS * 2 ** this.retryAttempt, RETRY_MAX_MS); + this.retryAttempt++; + + this.retryTimer = setTimeout(() => { + this.retryTimer = null; + if (this.disposed || this.listenerCount === 0) { + return; + } + + // Tear down the old connection before retrying + this.cleanupConnection(); + this.started = false; + this.startPromise = this.start().finally(() => { + this.startPromise = null; + }); + }, delay); + } + + /** + * Cleans up connection resources without resetting data/status/retry state. 
+ */ + private cleanupConnection(): void { + this.unsubscribeEvent?.(); + this.unsubscribeError?.(); + this.unsubscribeEvent = null; + this.unsubscribeError = null; + if (this.conn) { + void this.conn.dispose(); + } + this.conn = null; + } + + private async start(): Promise { + this.status = "loading"; + this.error = null; + this.notify(); + + try { + this.conn = await this.definition.connect(this.backend, this.params); + this.unsubscribeEvent = this.conn.on(this.definition.event, (event: TEvent) => { + void this.applyEvent(event); + }); + this.unsubscribeError = this.conn.onError((error: unknown) => { + this.status = "error"; + this.error = error instanceof Error ? error : new Error(String(error)); + this.notify(); + this.scheduleRetry(); + }); + this.data = await this.definition.fetchInitial(this.backend, this.params); + this.status = "connected"; + this.lastRefreshAt = Date.now(); + this.started = true; + this.retryAttempt = 0; + this.notify(); + } catch (error) { + this.status = "error"; + this.error = error instanceof Error ? error : new Error(String(error)); + this.started = false; + this.notify(); + this.scheduleRetry(); + } + } + + private applyEvent(event: TEvent): Promise { + this.eventPromise = this.eventPromise + .then(async () => { + if (!this.started || this.data === undefined) { + return; + } + + const nextData = await this.definition.applyEvent(this.backend, this.params, this.data, event); + if (!this.started) { + return; + } + + this.data = nextData; + this.status = "connected"; + this.error = null; + this.lastRefreshAt = Date.now(); + this.notify(); + }) + .catch((error) => { + this.status = "error"; + this.error = error instanceof Error ? 
error : new Error(String(error)); + this.notify(); + }); + + return this.eventPromise; + } + + private notify(): void { + for (const listener of [...this.listeners]) { + listener(); + } + } +} diff --git a/foundry/packages/client/src/subscription/topics.ts b/foundry/packages/client/src/subscription/topics.ts new file mode 100644 index 0000000..bbda118 --- /dev/null +++ b/foundry/packages/client/src/subscription/topics.ts @@ -0,0 +1,106 @@ +import type { + AppEvent, + FoundryAppSnapshot, + SandboxProviderId, + SandboxProcessesEvent, + SessionEvent, + TaskEvent, + WorkspaceSessionDetail, + WorkspaceTaskDetail, + OrganizationEvent, + OrganizationSummarySnapshot, +} from "@sandbox-agent/foundry-shared"; +import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-client.js"; + +/** + * Topic definitions for the subscription manager. + * + * Each topic describes one actor connection plus one materialized read model. + * Some topics can apply broadcast payloads directly, while others refetch + * through BackendClient so auth-scoped state stays user-specific. 
+ */ +export interface TopicDefinition { + key: (params: TParams) => string; + event: string; + connect: (backend: BackendClient, params: TParams) => Promise; + fetchInitial: (backend: BackendClient, params: TParams) => Promise; + applyEvent: (backend: BackendClient, params: TParams, current: TData, event: TEvent) => Promise | TData; +} + +export interface AppTopicParams {} +export interface OrganizationTopicParams { + organizationId: string; +} +export interface TaskTopicParams { + organizationId: string; + repoId: string; + taskId: string; +} +export interface SessionTopicParams { + organizationId: string; + repoId: string; + taskId: string; + sessionId: string; +} +export interface SandboxProcessesTopicParams { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; +} + +export const topicDefinitions = { + app: { + key: () => "app", + event: "appUpdated", + connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"), + fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(), + applyEvent: (_backend: BackendClient, _params: AppTopicParams, _current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, + } satisfies TopicDefinition, + + organization: { + key: (params: OrganizationTopicParams) => `organization:${params.organizationId}`, + event: "organizationUpdated", + connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId), + fetchInitial: (backend: BackendClient, params: OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId), + applyEvent: (_backend: BackendClient, _params: OrganizationTopicParams, _current: OrganizationSummarySnapshot, event: OrganizationEvent) => + event.snapshot, + } satisfies TopicDefinition, + + task: { + key: (params: TaskTopicParams) => `task:${params.organizationId}:${params.taskId}`, + event: "taskUpdated", + connect: (backend: 
BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), + fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), + applyEvent: (backend: BackendClient, params: TaskTopicParams, _current: WorkspaceTaskDetail, _event: TaskEvent) => + backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), + } satisfies TopicDefinition, + + session: { + key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`, + event: "sessionUpdated", + connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), + fetchInitial: (backend: BackendClient, params: SessionTopicParams) => + backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId), + applyEvent: async (backend: BackendClient, params: SessionTopicParams, current: WorkspaceSessionDetail, event: SessionEvent) => { + if (event.session.sessionId !== params.sessionId) { + return current; + } + return await backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId); + }, + } satisfies TopicDefinition, + + sandboxProcesses: { + key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`, + event: "processesUpdated", + connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => + backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId), + fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) => + (await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes, + applyEvent: (_backend: BackendClient, _params: SandboxProcessesTopicParams, _current: SandboxProcessRecord[], event: 
SandboxProcessesEvent) => + event.processes, + } satisfies TopicDefinition, +} as const; + +export type TopicKey = keyof typeof topicDefinitions; +export type TopicParams = Parameters<(typeof topicDefinitions)[K]["fetchInitial"]>[1]; +export type TopicData = Awaited>; diff --git a/foundry/packages/client/src/subscription/use-subscription.ts b/foundry/packages/client/src/subscription/use-subscription.ts new file mode 100644 index 0000000..c83148a --- /dev/null +++ b/foundry/packages/client/src/subscription/use-subscription.ts @@ -0,0 +1,56 @@ +import { useMemo, useRef, useSyncExternalStore } from "react"; +import type { SubscriptionManager, TopicState } from "./manager.js"; +import { topicDefinitions, type TopicKey, type TopicParams } from "./topics.js"; + +/** + * React bridge for the subscription manager. + * + * `null` params disable the subscription entirely, which is how screens express + * conditional subscription in task/session/sandbox topics. + */ +export function useSubscription(manager: SubscriptionManager, topicKey: K, params: TopicParams | null): TopicState { + const paramsKey = params ? (topicDefinitions[topicKey] as any).key(params) : null; + const paramsRef = useRef | null>(params); + paramsRef.current = params; + + const subscribe = useMemo(() => { + return (listener: () => void) => { + const currentParams = paramsRef.current; + if (!currentParams) { + return () => {}; + } + return manager.subscribe(topicKey, currentParams, listener); + }; + }, [manager, topicKey, paramsKey]); + + const getSnapshot = useMemo(() => { + let lastSnapshot: TopicState | null = null; + + return (): TopicState => { + const currentParams = paramsRef.current; + const nextSnapshot: TopicState = currentParams + ? 
{ + data: manager.getSnapshot(topicKey, currentParams), + status: manager.getStatus(topicKey, currentParams), + error: manager.getError(topicKey, currentParams), + } + : { + data: undefined, + status: "loading", + error: null, + }; + + // `useSyncExternalStore` requires referentially-stable snapshots when the + // underlying store has not changed. Reuse the previous object whenever + // the topic data/status/error triplet is unchanged. + if (lastSnapshot && lastSnapshot.data === nextSnapshot.data && lastSnapshot.status === nextSnapshot.status && lastSnapshot.error === nextSnapshot.error) { + return lastSnapshot; + } + + lastSnapshot = nextSnapshot; + return nextSnapshot; + }; + }, [manager, topicKey, paramsKey]); + + return useSyncExternalStore(subscribe, getSnapshot, getSnapshot); +} diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts new file mode 100644 index 0000000..bd7a98c --- /dev/null +++ b/foundry/packages/client/src/view-model.ts @@ -0,0 +1,98 @@ +import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared"; + +export const TASK_STATUS_GROUPS = ["queued", "running", "idle", "archived", "killed", "error"] as const; + +export type TaskStatusGroup = (typeof TASK_STATUS_GROUPS)[number]; + +const QUEUED_STATUSES = new Set([ + "init_bootstrap_db", + "init_enqueue_provision", + "init_ensure_name", + "init_assert_name", + "init_complete", + "archive_stop_status_sync", + "archive_release_sandbox", + "archive_finalize", + "kill_destroy_sandbox", + "kill_finalize", +]); + +export function groupTaskStatus(status: TaskStatus): TaskStatusGroup { + if (status === "running") return "running"; + if (status === "idle") return "idle"; + if (status === "archived") return "archived"; + if (status === "killed") return "killed"; + if (status === "error") return "error"; + if (QUEUED_STATUSES.has(status)) return "queued"; + return "queued"; +} + +function emptyStatusCounts(): Record { + return { + queued: 0, + 
running: 0, + idle: 0, + archived: 0, + killed: 0, + error: 0, + }; +} + +export interface TaskSummary { + total: number; + byStatus: Record; + byProvider: Record; +} + +export function fuzzyMatch(target: string, query: string): boolean { + const haystack = target.toLowerCase(); + const needle = query.toLowerCase(); + let i = 0; + for (const ch of needle) { + i = haystack.indexOf(ch, i); + if (i < 0) { + return false; + } + i += 1; + } + return true; +} + +export function filterTasks(rows: TaskRecord[], query: string): TaskRecord[] { + const q = query.trim(); + if (!q) { + return rows; + } + + return rows.filter((row) => { + const fields = [row.branchName ?? "", row.title ?? "", row.taskId, row.task]; + return fields.some((field) => fuzzyMatch(field, q)); + }); +} + +export function formatRelativeAge(updatedAt: number, now = Date.now()): string { + const deltaSeconds = Math.max(0, Math.floor((now - updatedAt) / 1000)); + if (deltaSeconds < 60) return `${deltaSeconds}s`; + const minutes = Math.floor(deltaSeconds / 60); + if (minutes < 60) return `${minutes}m`; + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h`; + const days = Math.floor(hours / 24); + return `${days}d`; +} + +export function summarizeTasks(rows: TaskRecord[]): TaskSummary { + const byStatus = emptyStatusCounts(); + const byProvider: Record = {}; + + for (const row of rows) { + byStatus[groupTaskStatus(row.status)] += 1; + byProvider[row.sandboxProviderId] = (byProvider[row.sandboxProviderId] ?? 
0) + 1; + } + + return { + total: rows.length, + byStatus, + byProvider, + }; +} diff --git a/foundry/packages/client/src/workspace-client.ts b/foundry/packages/client/src/workspace-client.ts new file mode 100644 index 0000000..6662352 --- /dev/null +++ b/foundry/packages/client/src/workspace-client.ts @@ -0,0 +1,66 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceChangeOwnerInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "./backend-client.js"; +import { getSharedMockWorkspaceClient } from "./mock/workspace-client.js"; +import { createRemoteWorkspaceClient } from "./remote/workspace-client.js"; + +export type TaskWorkspaceClientMode = "mock" | "remote"; + +export interface CreateTaskWorkspaceClientOptions { + mode: TaskWorkspaceClientMode; + backend?: BackendClient; + organizationId?: string; +} + +export interface TaskWorkspaceClient { + getSnapshot(): TaskWorkspaceSnapshot; + subscribe(listener: () => void): () => void; + createTask(input: TaskWorkspaceCreateTaskInput): Promise; + markTaskUnread(input: TaskWorkspaceSelectInput): Promise; + renameTask(input: TaskWorkspaceRenameInput): Promise; + archiveTask(input: TaskWorkspaceSelectInput): Promise; + publishPr(input: TaskWorkspaceSelectInput): Promise; + revertFile(input: TaskWorkspaceDiffInput): Promise; + updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise; + sendMessage(input: TaskWorkspaceSendMessageInput): Promise; + stopAgent(input: TaskWorkspaceSessionInput): Promise; + selectSession(input: TaskWorkspaceSessionInput): Promise; + setSessionUnread(input: 
TaskWorkspaceSetSessionUnreadInput): Promise; + renameSession(input: TaskWorkspaceRenameSessionInput): Promise; + closeSession(input: TaskWorkspaceSessionInput): Promise; + addSession(input: TaskWorkspaceSelectInput): Promise; + changeModel(input: TaskWorkspaceChangeModelInput): Promise; + changeOwner(input: TaskWorkspaceChangeOwnerInput): Promise; +} + +export function createTaskWorkspaceClient(options: CreateTaskWorkspaceClientOptions): TaskWorkspaceClient { + if (options.mode === "mock") { + return getSharedMockWorkspaceClient(); + } + + if (!options.backend) { + throw new Error("Remote task workspace client requires a backend client"); + } + if (!options.organizationId) { + throw new Error("Remote task workspace client requires a organization id"); + } + + return createRemoteWorkspaceClient({ + backend: options.backend, + organizationId: options.organizationId, + }); +} diff --git a/foundry/packages/client/src/workspace-model.ts b/foundry/packages/client/src/workspace-model.ts new file mode 100644 index 0000000..290794b --- /dev/null +++ b/foundry/packages/client/src/workspace-model.ts @@ -0,0 +1,1418 @@ +import { + DEFAULT_WORKSPACE_MODEL_ID, + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; +import type { + WorkspaceAgentKind as AgentKind, + WorkspaceSession as AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + TaskWorkspaceSnapshot, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection, + WorkspaceRepo, + WorkspaceTranscriptEvent as TranscriptEvent, +} from "@sandbox-agent/foundry-shared"; +import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; + +export const MODEL_GROUPS: 
ModelGroup[] = SharedModelGroups; +export const DEFAULT_MODEL_ID: ModelId = DEFAULT_WORKSPACE_MODEL_ID; + +const MOCK_REPLIES = [ + "Got it. I'll work on that now. Let me start by examining the relevant files...", + "I've analyzed the codebase and found the relevant code. Making the changes now...", + "Working on it. I'll update you once I have the implementation ready.", + "Let me look into that. I'll trace through the code to understand the current behavior...", + "Starting on this now. I'll need to modify a few files to implement this properly.", +]; + +let nextId = 100; + +export function uid(): string { + return String(++nextId); +} + +export function nowMs(): number { + return Date.now(); +} + +export function formatThinkingDuration(durationMs: number): string { + const totalSeconds = Math.max(0, Math.floor(durationMs / 1000)); + const minutes = Math.floor(totalSeconds / 60); + const seconds = totalSeconds % 60; + return `${minutes}:${String(seconds).padStart(2, "0")}`; +} + +export function formatMessageDuration(durationMs: number): string { + const totalSeconds = Math.max(1, Math.round(durationMs / 1000)); + if (totalSeconds < 60) { + return `${totalSeconds}s`; + } + + const minutes = Math.floor(totalSeconds / 60); + const seconds = totalSeconds % 60; + return `${minutes}m ${String(seconds).padStart(2, "0")}s`; +} + +export function modelLabel(id: ModelId): string { + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); +} + +export function providerAgent(provider: string): AgentKind { + return sharedWorkspaceProviderAgent(provider); +} + +export function slugify(text: string): string { + return text + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+|-+$/g, "") + .slice(0, 40); +} + +export function randomReply(): string { + return MOCK_REPLIES[Math.floor(Math.random() * MOCK_REPLIES.length)]!; +} + +const DIFF_PREFIX = "diff:"; + +export function isDiffTab(id: string): boolean { + return id.startsWith(DIFF_PREFIX); +} + +export function 
diffPath(id: string): string { + return id.slice(DIFF_PREFIX.length); +} + +export function diffTabId(path: string): string { + return `${DIFF_PREFIX}${path}`; +} + +export function fileName(path: string): string { + return path.split("/").pop() ?? path; +} + +function messageOrder(id: string): number { + const match = id.match(/\d+/); + return match ? Number(match[0]) : 0; +} + +interface LegacyMessage { + id: string; + role: "agent" | "user"; + agent: string | null; + createdAtMs: number; + lines: string[]; + durationMs?: number; +} + +function transcriptText(payload: unknown): string { + if (!payload || typeof payload !== "object") { + return String(payload ?? ""); + } + + const envelope = payload as { + method?: unknown; + params?: unknown; + result?: unknown; + error?: unknown; + }; + + if (envelope.params && typeof envelope.params === "object") { + const prompt = (envelope.params as { prompt?: unknown }).prompt; + if (Array.isArray(prompt)) { + const text = prompt + .map((item) => (item && typeof item === "object" ? (item as { text?: unknown }).text : null)) + .filter((value): value is string => typeof value === "string" && value.trim().length > 0) + .join("\n"); + if (text) { + return text; + } + } + + const paramsText = (envelope.params as { text?: unknown }).text; + if (typeof paramsText === "string" && paramsText.trim().length > 0) { + return paramsText.trim(); + } + } + + if (envelope.result && typeof envelope.result === "object") { + const resultText = (envelope.result as { text?: unknown }).text; + if (typeof resultText === "string" && resultText.trim().length > 0) { + return resultText.trim(); + } + } + + if (envelope.error) { + return JSON.stringify(envelope.error); + } + + if (typeof envelope.method === "string") { + return envelope.method; + } + + return JSON.stringify(payload); +} + +function historyPreview(event: TranscriptEvent): string { + const content = transcriptText(event.payload).trim() || "Untitled event"; + return content.length > 42 ? 
`${content.slice(0, 39)}...` : content; +} + +function historyDetail(event: TranscriptEvent): string { + const content = transcriptText(event.payload).trim(); + return content || "Untitled event"; +} + +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript + .filter((event) => event.sender === "client") + .map((event) => ({ + id: `history-${session.id}-${event.id}`, + messageId: event.id, + preview: historyPreview(event), + sessionName: session.sessionName, + sessionId: session.id, + createdAtMs: event.createdAt, + detail: historyDetail(event), + })), + ) + .sort((left, right) => messageOrder(left.messageId) - messageOrder(right.messageId)); +} + +function buildPullRequestSummary(params: { + number: number; + title: string; + branch: string; + repoName: string; + updatedAtMs: number; + status: "ready" | "draft"; +}) { + return { + number: params.number, + status: params.status, + title: params.title, + state: "open", + url: `https://github.com/${params.repoName}/pull/${params.number}`, + headRefName: params.branch, + baseRefName: "main", + repoFullName: params.repoName, + authorLogin: "mock", + isDraft: params.status === "draft", + updatedAtMs: params.updatedAtMs, + }; +} + +function transcriptFromLegacyMessages(sessionId: string, messages: LegacyMessage[]): TranscriptEvent[] { + return messages.map((message, index) => ({ + id: message.id, + eventIndex: index + 1, + sessionId, + createdAt: message.createdAtMs, + connectionId: "mock-connection", + sender: message.role === "user" ? "client" : "agent", + payload: + message.role === "user" + ? 
{ + method: "session/prompt", + params: { + prompt: message.lines.map((line) => ({ type: "text", text: line })), + }, + } + : { + result: { + text: message.lines.join("\n"), + durationMs: message.durationMs, + }, + }, + })); +} + +const NOW_MS = Date.now(); + +function minutesAgo(minutes: number): number { + return NOW_MS - minutes * 60_000; +} + +function buildTranscriptStressMessages(pairCount: number): LegacyMessage[] { + const startedAtMs = NOW_MS - pairCount * 8_000; + const messages: LegacyMessage[] = []; + + for (let index = 0; index < pairCount; index++) { + const sequence = index + 1; + const createdAtMs = startedAtMs + index * 8_000; + + messages.push({ + id: `stress-user-${sequence}`, + role: "user", + agent: null, + createdAtMs, + lines: [ + `Stress prompt ${sequence}: summarize the current state of the transcript virtualizer.`, + `Keep the answer focused on scroll position, render cost, and preserved expansion state.`, + ], + }); + + messages.push({ + id: `stress-agent-${sequence}`, + role: "agent", + agent: "codex", + createdAtMs: createdAtMs + 3_000, + lines: [ + `Stress reply ${sequence}: the list should only render visible rows plus overscan while preserving scroll anchoring near the bottom.`, + `Grouping, minimap navigation, and per-row UI should remain stable even as older rows unmount.`, + ], + durationMs: 2_500, + }); + } + + return messages; +} + +export function parseDiffLines(diff: string): ParsedDiffLine[] { + return diff.split("\n").map((text, index) => { + if (text.startsWith("@@")) { + return { kind: "hunk", lineNumber: index + 1, text }; + } + if (text.startsWith("+")) { + return { kind: "add", lineNumber: index + 1, text }; + } + if (text.startsWith("-")) { + return { kind: "remove", lineNumber: index + 1, text }; + } + return { kind: "context", lineNumber: index + 1, text }; + }); +} + +export function removeFileTreePath(nodes: FileTreeNode[], targetPath: string): FileTreeNode[] { + return nodes.flatMap((node) => { + if (node.path === 
targetPath) { + return []; + } + + if (!node.children) { + return [node]; + } + + const nextChildren = removeFileTreePath(node.children, targetPath); + if (node.isDir && nextChildren.length === 0) { + return []; + } + + return [{ ...node, children: nextChildren }]; + }); +} + +export function buildInitialTasks(): Task[] { + return [ + // ── rivet-dev/sandbox-agent ── + { + id: "h1", + repoId: "sandbox-agent", + title: "Normalize Pi ACP bootstrap payloads", + status: "idle", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(8), + branch: "NathanFlurry/pi-bootstrap-fix", + pullRequest: buildPullRequestSummary({ + number: 227, + title: "Normalize Pi ACP bootstrap payloads", + branch: "NathanFlurry/pi-bootstrap-fix", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(8), + status: "ready", + }), + sessions: [ + { + id: "t1", + sessionId: "t1", + sessionName: "Pi payload fix", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t1", [ + { + id: "m1", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(18), + lines: [ + "I'll fix the Pi agent ACP bootstrap payloads. The `initialize` method sends `protocolVersion` as a string but Pi expects a number. Let me examine `acp_proxy_runtime.rs`.", + "", + "Found the issue — the ACP proxy forwards the raw JSON-RPC payload without normalizing field types per-agent. Adding a `normalize_payload_for_agent` pass before dispatch.", + ], + durationMs: 14_000, + }, + { + id: "m2", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(15), + lines: [ + "Done. 
Added `normalize_pi_payload()` in `acp_proxy_runtime.rs` that converts `protocolVersion` from string to number for `initialize`, and ensures `mcpServers` is present in `session/new` params.", + ], + durationMs: 22_000, + }, + { + id: "m3", + role: "user", + agent: null, + createdAtMs: minutesAgo(12), + lines: ['Does this also handle the case where protocolVersion is a float string like "2.0"?'], + }, + { + id: "m4", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(11), + lines: ['Yes — the `parse_json_number` helper tries u64, then i64, then f64 parsing in order. So "2.0" becomes `2.0` as a JSON number.'], + durationMs: 8_000, + }, + ]), + }, + { + id: "t2", + sessionId: "t2", + sessionName: "Test coverage", + agent: "Codex", + model: "gpt-5.3-codex", + status: "idle", + thinkingSinceMs: null, + unread: true, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t2", [ + { + id: "m5", + role: "agent", + agent: "codex", + createdAtMs: minutesAgo(20), + lines: ["Analyzed the normalize_pi_payload function. It handles `initialize` and `session/new` methods. 
I'll add unit tests for edge cases."], + durationMs: 18_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "server/packages/sandbox-agent/src/acp_proxy_runtime.rs", added: 51, removed: 0, type: "M" }, + { path: "server/packages/sandbox-agent/src/acp_proxy_runtime_test.rs", added: 38, removed: 0, type: "A" }, + ], + diffs: { + "server/packages/sandbox-agent/src/acp_proxy_runtime.rs": [ + "@@ -134,6 +134,8 @@ impl AcpProxyRuntime {", + ' "acp_proxy: instance resolved"', + " );", + " ", + "+ let payload = normalize_payload_for_agent(instance.agent, payload);", + "+", + " match instance.runtime.post(payload).await {", + "@@ -510,6 +512,57 @@ fn map_adapter_error(err: AdapterError) -> SandboxError {", + " }", + " ", + "+fn normalize_payload_for_agent(agent: AgentId, payload: Value) -> Value {", + "+ if agent != AgentId::Pi {", + "+ return payload;", + "+ }", + "+ normalize_pi_payload(payload)", + "+}", + "+", + "+fn normalize_pi_payload(mut payload: Value) -> Value {", + "+ let method = payload", + '+ .get("method")', + "+ .and_then(Value::as_str)", + "+ .unwrap_or_default();", + "+", + "+ match method {", + '+ "initialize" => {', + '+ if let Some(protocol) = payload.pointer_mut("/params/protocolVersion") {', + "+ if let Some(raw) = protocol.as_str() {", + "+ if let Some(number) = parse_json_number(raw) {", + "+ *protocol = Value::Number(number);", + "+ }", + "+ }", + "+ }", + "+ }", + '+ "session/new" => {', + '+ if let Some(params) = payload.get_mut("params").and_then(Value::as_object_mut) {', + '+ params.entry("mcpServers".to_string())', + "+ .or_insert_with(|| Value::Array(Vec::new()));", + "+ }", + "+ }", + "+ _ => {}", + "+ }", + "+ payload", + "+}", + ].join("\n"), + }, + fileTree: [ + { + name: "server", + path: "server", + isDir: true, + children: [ + { + name: "packages", + path: "server/packages", + isDir: true, + children: [ + { + name: "sandbox-agent", + path: "server/packages/sandbox-agent", + isDir: true, + children: [ + { + name: "src", + path: 
"server/packages/sandbox-agent/src", + isDir: true, + children: [ + { name: "acp_proxy_runtime.rs", path: "server/packages/sandbox-agent/src/acp_proxy_runtime.rs", isDir: false }, + { name: "acp_proxy_runtime_test.rs", path: "server/packages/sandbox-agent/src/acp_proxy_runtime_test.rs", isDir: false }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + minutesUsed: 42, + }, + { + id: "h2", + repoId: "sandbox-agent", + title: "Auto-inject builtin agent skills at startup", + status: "running", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(3), + branch: "feat/builtin-agent-skills", + pullRequest: buildPullRequestSummary({ + number: 223, + title: "Auto-inject builtin agent skills at startup", + branch: "feat/builtin-agent-skills", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(3), + status: "draft", + }), + sessions: [ + { + id: "t3", + sessionId: "t3", + sessionName: "Skills injection", + agent: "Claude", + model: "opus", + status: "running", + thinkingSinceMs: NOW_MS - 45_000, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t3", [ + { + id: "m10", + role: "user", + agent: null, + createdAtMs: minutesAgo(30), + lines: ["Add builtin skill injection to agent startup. Skills should be loaded from the skills registry and written to the agent's CLAUDE.md."], + }, + { + id: "m11", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(28), + lines: [ + "I'll implement this in the agent management package. The approach:", + "1. Load skills from the registry during agent install", + "2. Inject skill definitions into the agent's working directory as `.claude/skills/`", + "3. 
Append skill references to CLAUDE.md if present", + "", + "Working on `server/packages/agent-management/src/agents/install.rs` now...", + ], + durationMs: 32_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "server/packages/agent-management/src/agents/install.rs", added: 87, removed: 12, type: "M" }, + { path: "server/packages/agent-management/src/skills/mod.rs", added: 145, removed: 0, type: "A" }, + { path: "server/packages/agent-management/src/skills/registry.rs", added: 63, removed: 0, type: "A" }, + ], + diffs: {}, + fileTree: [ + { + name: "server", + path: "server", + isDir: true, + children: [ + { + name: "packages", + path: "server/packages", + isDir: true, + children: [ + { + name: "agent-management", + path: "server/packages/agent-management", + isDir: true, + children: [ + { + name: "src", + path: "server/packages/agent-management/src", + isDir: true, + children: [ + { + name: "agents", + path: "server/packages/agent-management/src/agents", + isDir: true, + children: [{ name: "install.rs", path: "server/packages/agent-management/src/agents/install.rs", isDir: false }], + }, + { + name: "skills", + path: "server/packages/agent-management/src/skills", + isDir: true, + children: [ + { name: "mod.rs", path: "server/packages/agent-management/src/skills/mod.rs", isDir: false }, + { name: "registry.rs", path: "server/packages/agent-management/src/skills/registry.rs", isDir: false }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + minutesUsed: 187, + }, + { + id: "h3", + repoId: "sandbox-agent", + title: "Add hooks example for Claude, Codex, and OpenCode", + status: "idle", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(45), + branch: "hooks-example", + pullRequest: buildPullRequestSummary({ + number: 225, + title: "Add hooks example for Claude, Codex, and OpenCode", + branch: "hooks-example", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(45), + status: "ready", + }), + sessions: [ + { + id: "t4", + sessionId: 
"t4", + sessionName: "Example docs", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t4", [ + { + id: "m20", + role: "user", + agent: null, + createdAtMs: minutesAgo(60), + lines: ["Create an example showing how to use hooks with Claude, Codex, and OpenCode agents."], + }, + { + id: "m21", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(58), + lines: [ + "Done. Created `examples/hooks/` with a TypeScript example that demonstrates lifecycle hooks for all three agents. Includes `onPermissionRequest`, `onSessionEvent`, and `onAgentOutput` hooks.", + ], + durationMs: 16_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "examples/hooks/src/index.ts", added: 120, removed: 0, type: "A" }, + { path: "examples/hooks/package.json", added: 18, removed: 0, type: "A" }, + { path: "examples/hooks/tsconfig.json", added: 12, removed: 0, type: "A" }, + ], + diffs: {}, + fileTree: [ + { + name: "examples", + path: "examples", + isDir: true, + children: [ + { + name: "hooks", + path: "examples/hooks", + isDir: true, + children: [ + { name: "package.json", path: "examples/hooks/package.json", isDir: false }, + { name: "tsconfig.json", path: "examples/hooks/tsconfig.json", isDir: false }, + { + name: "src", + path: "examples/hooks/src", + isDir: true, + children: [{ name: "index.ts", path: "examples/hooks/src/index.ts", isDir: false }], + }, + ], + }, + ], + }, + ], + minutesUsed: 23, + }, + // ── rivet-dev/rivet ── + { + id: "h4", + repoId: "rivet", + title: "Add actor reschedule endpoint", + status: "idle", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(15), + branch: "actor-reschedule-endpoint", + pullRequest: buildPullRequestSummary({ + number: 4400, + title: "Add actor reschedule endpoint", + branch: "actor-reschedule-endpoint", + repoName: "rivet-dev/rivet", + updatedAtMs: 
minutesAgo(15), + status: "ready", + }), + sessions: [ + { + id: "t5", + sessionId: "t5", + sessionName: "Reschedule API", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t5", [ + { + id: "m30", + role: "user", + agent: null, + createdAtMs: minutesAgo(90), + lines: ["Implement a POST /actors/{actor_id}/reschedule endpoint that signals the actor workflow to reschedule."], + }, + { + id: "m31", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(87), + lines: [ + "I'll add the reschedule endpoint to `api-peer`. The flow is:", + "1. Resolve actor by ID and verify namespace ownership", + "2. Send `Reschedule` signal to the actor workflow", + "3. Return 200 on success, 404 if actor not found", + "", + "Created `engine/packages/api-peer/src/actors/reschedule.rs` and wired it into the router.", + ], + durationMs: 28_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "engine/packages/api-peer/src/actors/reschedule.rs", added: 64, removed: 0, type: "A" }, + { path: "engine/packages/api-peer/src/actors/mod.rs", added: 1, removed: 0, type: "M" }, + { path: "engine/packages/api-peer/src/router.rs", added: 12, removed: 3, type: "M" }, + { path: "engine/packages/api-types/src/actors/reschedule.rs", added: 24, removed: 0, type: "A" }, + ], + diffs: { + "engine/packages/api-peer/src/actors/reschedule.rs": [ + "@@ -0,0 +1,64 @@", + "+use anyhow::Result;", + "+use gas::prelude::*;", + "+use rivet_api_builder::ApiCtx;", + "+use rivet_api_types::actors::reschedule::*;", + "+use rivet_util::Id;", + "+", + "+#[utoipa::path(", + "+ post,", + '+ operation_id = "actors_reschedule",', + '+ path = "/actors/{actor_id}/reschedule",', + "+)]", + "+#[tracing::instrument(skip_all)]", + "+pub async fn reschedule(", + "+ ctx: ApiCtx,", + "+ path: ReschedulePath,", + "+ query: RescheduleQuery,", + "+) -> Result {", + "+ 
let actors_res = ctx.op(pegboard::ops::actor::get::Input {", + "+ actor_ids: vec![path.actor_id],", + "+ fetch_error: false,", + "+ }).await?;", + "+", + "+ let actor = actors_res.actors.into_iter().next()", + "+ .ok_or_else(|| pegboard::errors::Actor::NotFound.build())?;", + "+", + "+ ctx.signal(pegboard::workflows::actor::Reschedule {", + "+ reset_rescheduling: true,", + "+ })", + "+ .to_workflow::()", + '+ .tag("actor_id", path.actor_id)', + "+ .send().await?;", + "+", + "+ Ok(RescheduleResponse {})", + "+}", + ].join("\n"), + }, + fileTree: [ + { + name: "engine", + path: "engine", + isDir: true, + children: [ + { + name: "packages", + path: "engine/packages", + isDir: true, + children: [ + { + name: "api-peer", + path: "engine/packages/api-peer", + isDir: true, + children: [ + { + name: "src", + path: "engine/packages/api-peer/src", + isDir: true, + children: [ + { + name: "actors", + path: "engine/packages/api-peer/src/actors", + isDir: true, + children: [ + { name: "mod.rs", path: "engine/packages/api-peer/src/actors/mod.rs", isDir: false }, + { name: "reschedule.rs", path: "engine/packages/api-peer/src/actors/reschedule.rs", isDir: false }, + ], + }, + { name: "router.rs", path: "engine/packages/api-peer/src/router.rs", isDir: false }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + minutesUsed: 5, + }, + { + id: "h5", + repoId: "rivet", + title: "Dynamic actors", + status: "idle", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(35), + branch: "feat/dynamic-actors", + pullRequest: buildPullRequestSummary({ + number: 4395, + title: "Dynamic actors", + branch: "feat/dynamic-actors", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(35), + status: "draft", + }), + sessions: [ + { + id: "t6", + sessionId: "t6", + sessionName: "Dynamic actors impl", + agent: "Claude", + model: "opus", + status: "idle", + thinkingSinceMs: null, + unread: true, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: 
transcriptFromLegacyMessages("t6", [ + { + id: "m40", + role: "user", + agent: null, + createdAtMs: minutesAgo(120), + lines: ["Implement dynamic actor support — actors that can be created at runtime without pre-registration in the registry."], + }, + { + id: "m41", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(115), + lines: [ + "This is a large change spanning the RivetKit runtime, the engine scheduler, and the SDK. I'll start with the core runtime changes and work outward.", + "", + "Key design decisions:", + "- Dynamic actors use a special `__dynamic` registry entry", + "- They receive their behavior module at creation time via `createDynamic()`", + "- State persistence works identically to registered actors", + ], + durationMs: 45_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "rivetkit-typescript/packages/rivetkit/src/dynamic.ts", added: 280, removed: 0, type: "A" }, + { path: "rivetkit-typescript/packages/rivetkit/src/registry.ts", added: 45, removed: 12, type: "M" }, + { path: "engine/packages/pegboard/src/workflows/actor.rs", added: 120, removed: 30, type: "M" }, + ], + diffs: {}, + fileTree: [], + minutesUsed: 312, + }, + // ── rivet-dev/vbare ── + { + id: "h6", + repoId: "vbare", + title: "Use full cloud run pool name for routing", + status: "idle", + repoName: "rivet-dev/vbare", + updatedAtMs: minutesAgo(25), + branch: "fix-use-full-cloud-run-pool-name", + pullRequest: buildPullRequestSummary({ + number: 235, + title: "Use full cloud run pool name for routing", + branch: "fix-use-full-cloud-run-pool-name", + repoName: "rivet-dev/vbare", + updatedAtMs: minutesAgo(25), + status: "ready", + }), + sessions: [ + { + id: "t7", + sessionId: "t7", + sessionName: "Pool routing fix", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t7", [ + { + id: "m50", + role: "agent", + 
agent: "claude", + createdAtMs: minutesAgo(40), + lines: [ + "Fixed the managed pool routing issue. The Cloud Run service was using a truncated pool name for routing, causing 404s on pools with long names. Updated the gateway routing endpoint to use the full pool name.", + ], + durationMs: 24_000, + }, + { + id: "m51", + role: "user", + agent: null, + createdAtMs: minutesAgo(38), + lines: ["Does this also update the SDK type exports?"], + }, + { + id: "m52", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(36), + lines: [ + "Yes — the `Registry` type is now exported from `actors/index.ts` so downstream consumers can reference it. Also bumped rivetkit to `2.0.4-rc.1` in pnpm overrides.", + ], + durationMs: 11_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "packages/api/src/actors/index.ts", added: 4, removed: 2, type: "M" }, + { path: "package.json", added: 2, removed: 1, type: "M" }, + { path: "packages/api/scripts/managed-pools-e2e.ts", added: 2, removed: 2, type: "M" }, + ], + diffs: { + "packages/api/src/actors/index.ts": [ + "@@ -28,6 +28,8 @@ export const registry = setup({", + " inspector: {},", + " });", + " ", + "+export type Registry = typeof registry;", + "+", + " export type ActorClient = ReturnType;", + " ", + " let _client: ActorClient | null = null;", + "@@ -37,7 +39,7 @@ function createActorClient() {", + " const managerPort = process.env.RIVETKIT_MANAGER_PORT", + " ? 
Number.parseInt(process.env.RIVETKIT_MANAGER_PORT, 10)", + " : 6420;", + "- return createClient({", + "+ return createClient({", + " endpoint: `http://127.0.0.1:${managerPort}`,", + ].join("\n"), + }, + fileTree: [ + { + name: "packages", + path: "packages", + isDir: true, + children: [ + { + name: "api", + path: "packages/api", + isDir: true, + children: [ + { + name: "src", + path: "packages/api/src", + isDir: true, + children: [ + { + name: "actors", + path: "packages/api/src/actors", + isDir: true, + children: [{ name: "index.ts", path: "packages/api/src/actors/index.ts", isDir: false }], + }, + ], + }, + ], + }, + ], + }, + ], + minutesUsed: 0, + }, + // ── rivet-dev/skills ── + { + id: "h7", + repoId: "skills", + title: "Route compute gateway path correctly", + status: "idle", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(50), + branch: "fix-guard-support-https-targets", + pullRequest: buildPullRequestSummary({ + number: 125, + title: "Route compute gateway path correctly", + branch: "fix-guard-support-https-targets", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(50), + status: "ready", + }), + sessions: [ + { + id: "t8", + sessionId: "t8", + sessionName: "Guard routing", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t8", [ + { + id: "m60", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(65), + lines: [ + "Fixed the guard proxy to support HTTPS targets and correct compute gateway path routing. The proxy was using an HTTP-only connector — switched to `hyper_tls::HttpsConnector`. 
Also fixed path-based routing to strip the `/compute/gateway` prefix before forwarding.", + ], + durationMs: 30_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "engine/packages/guard-core/src/proxy_service.rs", added: 8, removed: 4, type: "M" }, + { path: "engine/packages/guard/src/routing/compute_gateway.rs", added: 42, removed: 8, type: "M" }, + { path: "engine/packages/guard-core/Cargo.toml", added: 1, removed: 0, type: "M" }, + { path: "Cargo.lock", added: 37, removed: 5, type: "M" }, + ], + diffs: { + "engine/packages/guard-core/src/proxy_service.rs": [ + "@@ -309,15 +309,19 @@ pub struct ProxyService {", + " remote_addr: SocketAddr,", + "- client: Client>,", + "+ client: Client<", + "+ hyper_tls::HttpsConnector,", + "+ Full,", + "+ >,", + " }", + " ", + " impl ProxyService {", + " pub fn new(state: Arc, remote_addr: SocketAddr) -> Self {", + "+ let https_connector = hyper_tls::HttpsConnector::new();", + " let client = Client::builder(TokioExecutor::new())", + " .pool_idle_timeout(Duration::from_secs(30))", + "- .build_http();", + "+ .build(https_connector);", + ].join("\n"), + }, + fileTree: [ + { + name: "engine", + path: "engine", + isDir: true, + children: [ + { + name: "packages", + path: "engine/packages", + isDir: true, + children: [ + { + name: "guard-core", + path: "engine/packages/guard-core", + isDir: true, + children: [ + { name: "Cargo.toml", path: "engine/packages/guard-core/Cargo.toml", isDir: false }, + { + name: "src", + path: "engine/packages/guard-core/src", + isDir: true, + children: [{ name: "proxy_service.rs", path: "engine/packages/guard-core/src/proxy_service.rs", isDir: false }], + }, + ], + }, + { + name: "guard", + path: "engine/packages/guard", + isDir: true, + children: [ + { + name: "src", + path: "engine/packages/guard/src", + isDir: true, + children: [ + { + name: "routing", + path: "engine/packages/guard/src/routing", + isDir: true, + children: [{ name: "compute_gateway.rs", path: 
"engine/packages/guard/src/routing/compute_gateway.rs", isDir: false }], + }, + ], + }, + ], + }, + ], + }, + ], + }, + ], + minutesUsed: 78, + }, + // ── rivet-dev/skills (archived) ── + { + id: "h8", + repoId: "skills", + title: "Move compute gateway to guard", + status: "archived", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(2 * 24 * 60), + branch: "chore-move-compute-gateway-to", + pullRequest: buildPullRequestSummary({ + number: 123, + title: "Move compute gateway to guard", + branch: "chore-move-compute-gateway-to", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(2 * 24 * 60), + status: "ready", + }), + sessions: [ + { + id: "t9", + sessionId: "t9", + sessionName: "Gateway migration", + agent: "Claude", + model: "sonnet", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t9", [ + { + id: "m70", + role: "agent", + agent: "claude", + createdAtMs: minutesAgo(2 * 24 * 60 + 30), + lines: ["Migrated the compute gateway from its standalone service into the guard package. 
Removed 469 lines of duplicated routing logic."], + durationMs: 38_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "engine/packages/guard/src/routing/compute_gateway.rs", added: 180, removed: 0, type: "A" }, + { path: "engine/packages/compute-gateway/src/lib.rs", added: 0, removed: 320, type: "D" }, + ], + diffs: {}, + fileTree: [], + minutesUsed: 15, + }, + // ── rivet-dev/deploy-action ── + { + id: "h9", + repoId: "deploy-action", + title: "Harden namespace isolation for nested containers", + status: "idle", + repoName: "rivet-dev/deploy-action", + updatedAtMs: minutesAgo(90), + branch: "fix/namespace-isolation", + pullRequest: null, + sessions: [ + { + id: "t10", + sessionId: "t10", + sessionName: "Namespace fix", + agent: "Codex", + model: "gpt-5.3-codex", + status: "idle", + thinkingSinceMs: null, + unread: true, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("t10", [ + { + id: "m80", + role: "user", + agent: null, + createdAtMs: minutesAgo(100), + lines: [ + "Audit and harden the namespace isolation for nested container execution. Make sure PID, network, and mount namespaces are correctly unshared.", + ], + }, + { + id: "m81", + role: "agent", + agent: "codex", + createdAtMs: minutesAgo(97), + lines: [ + "Audited the sandbox creation path. Found that the PID namespace was shared with the host in certain fallback paths. Fixed by always calling `unshare(CLONE_NEWPID)` before `fork()`. 
Also tightened the seccomp filter to block `setns` calls from within the sandbox.", + ], + durationMs: 42_000, + }, + ]), + }, + ], + fileChanges: [ + { path: "src/sandbox/namespace.ts", added: 35, removed: 8, type: "M" }, + { path: "src/sandbox/seccomp.ts", added: 12, removed: 2, type: "M" }, + ], + diffs: {}, + fileTree: [], + minutesUsed: 3, + }, + + // ── Status demo tasks ────────────────────────────────────────────── + { + id: "status-error", + repoId: "sandbox-agent", + title: "Fix broken auth middleware (error demo)", + status: "error", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(2), + branch: "fix/auth-middleware", + pullRequest: null, + sessions: [ + { + id: "status-error-session", + sessionId: "status-error-session", + sessionName: "Auth fix", + agent: "Claude", + model: "sonnet", + status: "error", + thinkingSinceMs: null, + unread: false, + created: true, + errorMessage: "Sandbox process exited unexpectedly (exit code 137). The sandbox may have run out of memory.", + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 1, + }, + { + id: "status-provisioning", + repoId: "sandbox-agent", + title: "Add rate limiting to API gateway (provisioning demo)", + status: "init_enqueue_provision", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(0), + branch: null, + pullRequest: null, + sessions: [ + { + id: "status-prov-session", + sessionId: "status-prov-session", + sandboxSessionId: null, + sessionName: "Session 1", + agent: "Claude", + model: "sonnet", + status: "pending_provision", + thinkingSinceMs: null, + unread: false, + created: false, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + }, + { + id: "stress-transcript", + repoId: "sandbox-agent", + title: "Transcript virtualization stress test", + status: "idle", + 
repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(40), + branch: "perf/transcript-virtualizer", + pullRequest: null, + sessions: [ + { + id: "stress-transcript-tab", + sessionId: "stress-transcript-session", + sessionName: "Virtualizer stress session", + agent: "Codex", + model: "gpt-5.3-codex", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("stress-transcript-tab", buildTranscriptStressMessages(1600)), + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 18, + }, + { + id: "status-running", + repoId: "sandbox-agent", + title: "Refactor WebSocket handler (running demo)", + status: "running", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(1), + branch: "refactor/ws-handler", + pullRequest: null, + sessions: [ + { + id: "status-run-session", + sessionId: "status-run-session", + sessionName: "WS refactor", + agent: "Codex", + model: "gpt-5.3-codex", + status: "running", + thinkingSinceMs: Date.now() - 12_000, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("status-run-session", [ + { + id: "sr1", + role: "user", + agent: null, + createdAtMs: minutesAgo(3), + lines: ["Refactor the WebSocket handler to use a connection pool pattern."], + }, + ]), + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 2, + }, + ]; +} + +/** + * Build repos list from the rivet-dev fixture data (scripts/data/rivet-dev.json). + * Uses real public repos so the mock sidebar matches what an actual rivet-dev + * organization would show after a GitHub sync. + */ +function buildMockRepos(): WorkspaceRepo[] { + return rivetDevFixture.repos.map((r) => ({ + id: repoIdFromFullName(r.fullName), + label: r.fullName, + })); +} + +/** Derive a stable short id from a "org/repo" full name (e.g. 
"rivet-dev/rivet" → "rivet"). */ +function repoIdFromFullName(fullName: string): string { + const parts = fullName.split("/"); + return parts[parts.length - 1] ?? fullName; +} + +export function buildInitialMockLayoutViewModel(): TaskWorkspaceSnapshot { + const repos = buildMockRepos(); + const tasks = buildInitialTasks(); + return { + organizationId: "default", + repos, + repositories: groupWorkspaceRepositories(repos, tasks), + tasks, + }; +} + +export function groupWorkspaceRepositories(repos: WorkspaceRepo[], tasks: Task[]): WorkspaceRepositorySection[] { + const grouped = new Map(); + + for (const repo of repos) { + grouped.set(repo.id, { + id: repo.id, + label: repo.label, + updatedAtMs: 0, + tasks: [], + }); + } + + for (const task of tasks) { + const existing = grouped.get(task.repoId) ?? { + id: task.repoId, + label: task.repoName, + updatedAtMs: 0, + tasks: [], + }; + + existing.tasks.push(task); + existing.updatedAtMs = Math.max(existing.updatedAtMs, task.updatedAtMs); + grouped.set(task.repoId, existing); + } + + return [...grouped.values()] + .map((repository) => ({ + ...repository, + tasks: [...repository.tasks].sort((a, b) => b.updatedAtMs - a.updatedAtMs), + updatedAtMs: repository.tasks.length > 0 ? 
Math.max(...repository.tasks.map((task) => task.updatedAtMs)) : repository.updatedAtMs, + })) + .filter((repository) => repository.tasks.length > 0) + .sort((a, b) => b.updatedAtMs - a.updatedAtMs); +} diff --git a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts new file mode 100644 index 0000000..21eaf6b --- /dev/null +++ b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts @@ -0,0 +1,146 @@ +import { randomUUID } from "node:crypto"; +import { describe, expect, it } from "vitest"; +import type { AuditLogEvent as HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; +import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; + +const RUN_FULL_E2E = process.env.HF_ENABLE_DAEMON_FULL_E2E === "1"; + +function requiredEnv(name: string): string { + const value = process.env[name]?.trim(); + if (!value) { + throw new Error(`Missing required env var: ${name}`); + } + return value; +} + +function parseGithubRepo(input: string): { fullName: string } { + const trimmed = input.trim(); + const shorthand = trimmed.match(/^([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+)$/); + if (shorthand) { + return { fullName: `${shorthand[1]}/${shorthand[2]}` }; + } + + const url = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`); + const parts = url.pathname.replace(/^\/+/, "").split("/").filter(Boolean); + if (url.hostname.toLowerCase().includes("github.com") && parts.length >= 2) { + return { fullName: `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/, "")}` }; + } + + throw new Error(`Unable to parse GitHub repo from: ${input}`); +} + +async function sleep(ms: number): Promise { + await new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function poll(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise, isDone: (value: T) => boolean): Promise { + const start = Date.now(); + let last: T; + for (;;) { + last = await fn(); + if (isDone(last)) { + return last; + } + if (Date.now() - start > timeoutMs) { + throw new Error(`timed out waiting for ${label}`); + } + await sleep(intervalMs); + } +} + +function parseHistoryPayload(event: HistoryEvent): Record { + try { + return JSON.parse(event.payloadJson) as Record; + } catch { + return {}; + } +} + +async function githubApi(token: string, path: string, init?: RequestInit): Promise { + const url = `https://api.github.com/${path.replace(/^\/+/, "")}`; + return await fetch(url, { + ...init, + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${token}`, + "X-GitHub-Api-Version": "2022-11-28", + ...(init?.headers ?? 
{}), + }, + }); +} + +async function ensureRemoteBranchExists(token: string, fullName: string, branchName: string): Promise { + const repoRes = await githubApi(token, `repos/${fullName}`, { method: "GET" }); + if (!repoRes.ok) { + throw new Error(`GitHub repo lookup failed: ${repoRes.status} ${await repoRes.text()}`); + } + const repo = (await repoRes.json()) as { default_branch?: string }; + const defaultBranch = repo.default_branch; + if (!defaultBranch) { + throw new Error(`GitHub repo default branch is missing for ${fullName}`); + } + + const defaultRefRes = await githubApi(token, `repos/${fullName}/git/ref/heads/${encodeURIComponent(defaultBranch)}`, { method: "GET" }); + if (!defaultRefRes.ok) { + throw new Error(`GitHub default ref lookup failed: ${defaultRefRes.status} ${await defaultRefRes.text()}`); + } + const defaultRef = (await defaultRefRes.json()) as { object?: { sha?: string } }; + const sha = defaultRef.object?.sha; + if (!sha) { + throw new Error(`GitHub default ref sha missing for ${fullName}:${defaultBranch}`); + } + + const createRefRes = await githubApi(token, `repos/${fullName}/git/refs`, { + method: "POST", + body: JSON.stringify({ + ref: `refs/heads/${branchName}`, + sha, + }), + headers: { "Content-Type": "application/json" }, + }); + if (createRefRes.ok || createRefRes.status === 422) { + return; + } + + throw new Error(`GitHub create ref failed: ${createRefRes.status} ${await createRefRes.text()}`); +} + +describe("e2e(client): full integration stack workflow", () => { + it.skipIf(!RUN_FULL_E2E)("uses an imported repo, loads branch graph, and executes a stack restack action", { timeout: 8 * 60_000 }, async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const githubToken = requiredEnv("GITHUB_TOKEN"); + const { fullName } = 
parseGithubRepo(repoRemote); + const normalizedRepoRemote = `https://github.com/${fullName}.git`; + const seededBranch = `e2e/full-seed-${Date.now().toString(36)}-${randomUUID().slice(0, 8)}`; + + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); + + try { + await ensureRemoteBranchExists(githubToken, fullName, seededBranch); + + const repo = await requireImportedRepo(client, organizationId, repoRemote); + expect(repo.remoteUrl).toBe(normalizedRepoRemote); + + const overview = await poll( + "repo overview includes seeded branch", + 90_000, + 1_000, + async () => client.getRepoOverview(organizationId, repo.repoId), + (value) => value.branches.some((row: RepoOverview["branches"][number]) => row.branchName === seededBranch), + ); + + const postActionOverview = await client.getRepoOverview(organizationId, repo.repoId); + const seededRow = postActionOverview.branches.find((row: RepoOverview["branches"][number]) => row.branchName === seededBranch); + expect(Boolean(seededRow)).toBe(true); + expect(postActionOverview.fetchedAt).toBeGreaterThanOrEqual(overview.fetchedAt); + } finally { + await githubApi(githubToken, `repos/${fullName}/git/refs/heads/${encodeURIComponent(seededBranch)}`, { method: "DELETE" }).catch(() => {}); + } + }); +}); diff --git a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts new file mode 100644 index 0000000..89dd638 --- /dev/null +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -0,0 +1,333 @@ +import { describe, expect, it } from "vitest"; +import type { AuditLogEvent as HistoryEvent, TaskRecord } from "@sandbox-agent/foundry-shared"; +import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; + +const RUN_E2E = process.env.HF_ENABLE_DAEMON_E2E === "1"; + +function requiredEnv(name: string): string { + const value = process.env[name]?.trim(); + if (!value) { + 
throw new Error(`Missing required env var: ${name}`); + } + return value; +} + +function parseGithubRepo(input: string): { owner: string; repo: string; fullName: string } { + const trimmed = input.trim(); + if (!trimmed) { + throw new Error("HF_E2E_GITHUB_REPO is empty"); + } + + // owner/repo shorthand + const shorthand = trimmed.match(/^([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+)$/); + if (shorthand) { + const owner = shorthand[1]!; + const repo = shorthand[2]!; + return { owner, repo, fullName: `${owner}/${repo}` }; + } + + // https://github.com/owner/repo(.git)?(/...)? + try { + const url = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`); + const parts = url.pathname.replace(/^\/+/, "").split("/").filter(Boolean); + if (url.hostname.toLowerCase().includes("github.com") && parts.length >= 2) { + const owner = parts[0]!; + const repo = (parts[1] ?? "").replace(/\.git$/, ""); + if (owner && repo) { + return { owner, repo, fullName: `${owner}/${repo}` }; + } + } + } catch { + // fall through + } + + throw new Error(`Unable to parse GitHub repo from: ${input}`); +} + +async function sleep(ms: number): Promise { + await new Promise((r) => setTimeout(r, ms)); +} + +async function poll( + label: string, + timeoutMs: number, + intervalMs: number, + fn: () => Promise, + isDone: (value: T) => boolean, + onTick?: (value: T) => void, +): Promise { + const start = Date.now(); + let last: T; + for (;;) { + last = await fn(); + onTick?.(last); + if (isDone(last)) { + return last; + } + if (Date.now() - start > timeoutMs) { + throw new Error(`timed out waiting for ${label}`); + } + await sleep(intervalMs); + } +} + +function parseHistoryPayload(event: HistoryEvent): Record { + try { + return JSON.parse(event.payloadJson) as Record; + } catch { + return {}; + } +} + +async function debugDump(client: ReturnType, organizationId: string, repoId: string, taskId: string): Promise { + try { + const task = await client.getTask(organizationId, repoId, taskId); + const 
detail = await client.getTaskDetail(organizationId, repoId, taskId).catch(() => null); + const history = await client.listHistory({ organizationId, taskId, limit: 80 }).catch(() => []); + const historySummary = history + .slice(0, 20) + .map((e) => `${new Date(e.createdAt).toISOString()} ${e.kind}`) + .join("\n"); + + let sessionEventsSummary = ""; + const activeSessionId = detail?.activeSessionId ?? null; + if (task.activeSandboxId && activeSessionId) { + const events = await client + .listSandboxSessionEvents(organizationId, task.sandboxProviderId, task.activeSandboxId, { + sessionId: activeSessionId, + limit: 50, + }) + .then((r) => r.items) + .catch(() => []); + sessionEventsSummary = events + .slice(-12) + .map((e) => `${new Date(e.createdAt).toISOString()} ${e.sender}`) + .join("\n"); + } + + return [ + "=== task ===", + JSON.stringify( + { + status: task.status, + title: task.title, + branchName: task.branchName, + activeSandboxId: task.activeSandboxId, + activeSessionId, + pullRequestUrl: detail?.pullRequest?.url ?? null, + }, + null, + 2, + ), + "=== history (most recent first) ===", + historySummary || "(none)", + "=== session events (tail) ===", + sessionEventsSummary || "(none)", + ].join("\n"); + } catch (err) { + return `debug dump failed: ${err instanceof Error ? err.message : String(err)}`; + } +} + +async function githubApi(token: string, path: string, init?: RequestInit): Promise { + const url = `https://api.github.com/${path.replace(/^\/+/, "")}`; + return await fetch(url, { + ...init, + headers: { + Accept: "application/vnd.github+json", + Authorization: `Bearer ${token}`, + "X-GitHub-Api-Version": "2022-11-28", + ...(init?.headers ?? 
{}), + }, + }); +} + +describe("e2e: backend -> sandbox-agent -> git -> PR", () => { + it.skipIf(!RUN_E2E)("creates a task, waits for agent to implement, and opens a PR", { timeout: 15 * 60_000 }, async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const githubToken = requiredEnv("GITHUB_TOKEN"); + + const { fullName } = parseGithubRepo(repoRemote); + const runId = `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`; + const expectedFile = `e2e/${runId}.txt`; + + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); + + const repo = await requireImportedRepo(client, organizationId, repoRemote); + + const created = await client.createTask({ + organizationId, + repoId: repo.repoId, + task: [ + "E2E test task:", + `1. Create a new file at ${expectedFile} containing the single line: ${runId}`, + "2. git add the file", + `3. git commit -m \"test(e2e): ${runId}\"`, + "4. git push the branch to origin", + "5. Stop when done (agent should go idle).", + ].join("\n"), + sandboxProviderId: "local", + explicitTitle: `test(e2e): ${runId}`, + explicitBranchName: `e2e/${runId}`, + }); + + let prNumber: number | null = null; + let branchName: string | null = null; + let sandboxId: string | null = null; + let sessionId: string | null = null; + let lastStatus: string | null = null; + + try { + const namedAndProvisioned = await poll( + "task naming + sandbox provisioning", + // Cold local sandbox startup can exceed a few minutes on first run. 
+ 8 * 60_000, + 1_000, + async () => client.getTask(organizationId, repo.repoId, created.taskId), + (h) => Boolean(h.title && h.branchName && h.activeSandboxId), + (h) => { + if (h.status !== lastStatus) { + lastStatus = h.status; + } + if (h.status === "error") { + throw new Error("task entered error state during provisioning"); + } + }, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); + }); + + branchName = namedAndProvisioned.branchName!; + sandboxId = namedAndProvisioned.activeSandboxId!; + + const withSession = await poll>>( + "task to create active session", + 3 * 60_000, + 1_500, + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), + (h) => Boolean(h.activeSessionId), + (h) => { + if (h.status === "error") { + throw new Error("task entered error state while waiting for active session"); + } + }, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); + }); + + sessionId = withSession.activeSessionId!; + + await poll<{ id: string }[]>( + "session transcript bootstrap events", + 2 * 60_000, + 2_000, + async () => + ( + await client.listSandboxSessionEvents(organizationId, namedAndProvisioned.sandboxProviderId, sandboxId!, { + sessionId: sessionId!, + limit: 40, + }) + ).items, + (events) => events.length > 0, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); + }); + + await poll( + "task to reach idle state", + 8 * 60_000, + 2_000, + async () => client.getTask(organizationId, repo.repoId, created.taskId), + (h) => h.status === "idle", + (h) => { + if (h.status === "error") { + throw new Error("task entered error state while waiting for idle"); + } + }, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); + }); + + const prCreatedEvent = await poll( + "PR creation history event", + 3 * 60_000, + 2_000, + async () => client.listHistory({ organizationId, taskId: created.taskId, limit: 200 }), + (events) => events.some((e) => e.kind === "task.pr_created"), + ) + .catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); + }) + .then((events) => events.find((e) => e.kind === "task.pr_created")!); + + const payload = parseHistoryPayload(prCreatedEvent); + prNumber = Number(payload.prNumber); + const prUrl = String(payload.prUrl ?? ""); + + expect(prNumber).toBeGreaterThan(0); + expect(prUrl).toContain("/pull/"); + + const prFilesRes = await githubApi(githubToken, `repos/${fullName}/pulls/${prNumber}/files?per_page=100`, { method: "GET" }); + if (!prFilesRes.ok) { + const body = await prFilesRes.text(); + throw new Error(`GitHub PR files request failed: ${prFilesRes.status} ${body}`); + } + const prFiles = (await prFilesRes.json()) as Array<{ filename: string }>; + expect(prFiles.some((f) => f.filename === expectedFile)).toBe(true); + + // Close the task and assert the sandbox is released (stopped). 
+ await client.runAction(organizationId, repo.repoId, created.taskId, "archive"); + + await poll>>( + "task to become archived (session released)", + 60_000, + 1_000, + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), + (h) => h.status === "archived" && h.activeSessionId === null, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); + }); + + if (sandboxId) { + await poll<{ sandboxProviderId: string; sandboxId: string; state: string; at: number }>( + "sandbox to stop", + 2 * 60_000, + 2_000, + async () => client.sandboxProviderState(organizationId, "local", sandboxId!), + (s) => { + const st = String(s.state).toLowerCase(); + return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); + }, + ).catch(async (err) => { + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); + const state = await client.sandboxProviderState(organizationId, "local", sandboxId!).catch(() => null); + throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); + }); + } + } finally { + if (prNumber && Number.isFinite(prNumber)) { + await githubApi(githubToken, `repos/${fullName}/pulls/${prNumber}`, { + method: "PATCH", + body: JSON.stringify({ state: "closed" }), + headers: { "Content-Type": "application/json" }, + }).catch(() => {}); + } + + if (branchName) { + await githubApi(githubToken, `repos/${fullName}/git/refs/heads/${encodeURIComponent(branchName)}`, { method: "DELETE" }).catch(() => {}); + } + } + }); +}); diff --git a/foundry/packages/client/test/e2e/helpers.ts b/foundry/packages/client/test/e2e/helpers.ts new file mode 100644 index 0000000..0e15c51 --- /dev/null +++ b/foundry/packages/client/test/e2e/helpers.ts @@ -0,0 +1,84 @@ +import type { RepoRecord } from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../../src/backend-client.js"; + +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. 
+ } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +export async function requireImportedRepo(client: BackendClient, organizationId: string, repoSelector: string): Promise<RepoRecord> { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing repo selector"); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. 
Create it in GitHub first, then sync repos in Foundry before running this test.`, + ); + } + + return match; +} diff --git a/foundry/packages/client/test/e2e/workspace-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-e2e.test.ts new file mode 100644 index 0000000..1de2065 --- /dev/null +++ b/foundry/packages/client/test/e2e/workspace-e2e.test.ts @@ -0,0 +1,307 @@ +import { describe, expect, it } from "vitest"; +import type { TaskWorkspaceSnapshot, WorkspaceSession, WorkspaceTask, WorkspaceModelId, WorkspaceTranscriptEvent } from "@sandbox-agent/foundry-shared"; +import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; + +const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1"; + +function requiredEnv(name: string): string { + const value = process.env[name]?.trim(); + if (!value) { + throw new Error(`Missing required env var: ${name}`); + } + return value; +} + +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { + const value = process.env[name]?.trim(); + return value && value.length > 0 ? 
value : fallback; +} + +async function sleep(ms: number): Promise<void> { + await new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function poll<T>(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise<T>, isDone: (value: T) => boolean): Promise<T> { + const startedAt = Date.now(); + let lastValue: T; + + for (;;) { + lastValue = await fn(); + if (isDone(lastValue)) { + return lastValue; + } + if (Date.now() - startedAt > timeoutMs) { + throw new Error(`timed out waiting for ${label}`); + } + await sleep(intervalMs); + } +} + +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { + const task = snapshot.tasks.find((candidate) => candidate.id === taskId); + if (!task) { + throw new Error(`task ${taskId} missing from snapshot`); + } + return task; +} + +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); + if (!tab) { + throw new Error(`tab ${sessionId} missing from task ${task.id}`); + } + return tab; +} + +function extractEventText(event: WorkspaceTranscriptEvent): string { + const payload = event.payload; + if (!payload || typeof payload !== "object") { + return String(payload ?? 
""); + } + + const envelope = payload as { + method?: unknown; + params?: unknown; + result?: unknown; + error?: unknown; + }; + + const params = envelope.params; + if (params && typeof params === "object") { + const update = (params as { update?: unknown }).update; + if (update && typeof update === "object") { + const content = (update as { content?: unknown }).content; + if (content && typeof content === "object") { + const chunkText = (content as { text?: unknown }).text; + if (typeof chunkText === "string") { + return chunkText; + } + } + } + + const text = (params as { text?: unknown }).text; + if (typeof text === "string" && text.trim()) { + return text.trim(); + } + const prompt = (params as { prompt?: Array<{ text?: unknown }> }).prompt; + if (Array.isArray(prompt)) { + const value = prompt + .map((item) => (typeof item?.text === "string" ? item.text.trim() : "")) + .filter(Boolean) + .join("\n"); + if (value) { + return value; + } + } + } + + const result = envelope.result; + if (result && typeof result === "object") { + const text = (result as { text?: unknown }).text; + if (typeof text === "string" && text.trim()) { + return text.trim(); + } + } + + if (envelope.error) { + return JSON.stringify(envelope.error); + } + + if (typeof envelope.method === "string") { + return envelope.method; + } + + return JSON.stringify(payload); +} + +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { + return transcript + .filter((event) => event.sender === "agent") + .map((event) => extractEventText(event)) + .join("") + .includes(expectedText); +} + +describe("e2e(client): workspace flows", () => { + it.skipIf(!RUN_WORKBENCH_E2E)( + "creates a task from an imported repo, adds sessions, exchanges messages, and manages workspace state", + { timeout: 20 * 60_000 }, + async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = 
process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const runId = `wb-${Date.now().toString(36)}`; + const expectedFile = `${runId}.txt`; + const expectedInitialReply = `WORKBENCH_READY_${runId}`; + const expectedReply = `WORKBENCH_ACK_${runId}`; + + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); + + const repo = await requireImportedRepo(client, organizationId, repoRemote); + const created = await client.createWorkspaceTask(organizationId, { + repoId: repo.repoId, + title: `Workspace E2E ${runId}`, + branch: `e2e/${runId}`, + model, + task: `Reply with exactly: ${expectedInitialReply}`, + }); + + const provisioned = await poll( + "task provisioning", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => task.branch === `e2e/${runId}` && task.sessions.length > 0, + ); + + const primaryTab = provisioned.sessions[0]!; + + const initialCompleted = await poll( + "initial agent response", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => { + const tab = findTab(task, primaryTab.id); + return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); + }, + ); + + expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); + expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); + + await client.renameWorkspaceTask(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + value: `Workspace E2E ${runId} Renamed`, + }); + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: primaryTab.id, + title: "Primary Session", + }); + + const secondTab = await 
client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + model, + }); + + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: secondTab.sessionId, + title: "Follow-up Session", + }); + + await client.updateWorkspaceDraft(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const drafted = findTask(await client.getWorkspace(organizationId), created.taskId); + expect(findTab(drafted, secondTab.sessionId).draft.text).toContain(expectedReply); + expect(findTab(drafted, secondTab.sessionId).draft.attachments).toHaveLength(1); + + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const withSecondReply = await poll( + "follow-up session response", + 10 * 60_000, + 2_000, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => { + const tab = findTab(task, secondTab.sessionId); + return ( + tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) + ); + }, + ); + + const secondTranscript = findTab(withSecondReply, secondTab.sessionId).transcript; + 
expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); + expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); + + await client.setWorkspaceSessionUnread(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: secondTab.sessionId, + unread: false, + }); + await client.markWorkspaceUnread(organizationId, { repoId: repo.repoId, taskId: created.taskId }); + + const unreadSnapshot = findTask(await client.getWorkspace(organizationId), created.taskId); + expect(unreadSnapshot.sessions.some((tab) => tab.unread)).toBe(true); + + await client.closeWorkspaceSession(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: secondTab.sessionId, + }); + + const closedSnapshot = await poll( + "secondary session closed", + 30_000, + 1_000, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => !task.sessions.some((tab) => tab.id === secondTab.sessionId), + ); + expect(closedSnapshot.sessions).toHaveLength(1); + + await client.revertWorkspaceFile(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + path: expectedFile, + }); + + const revertedSnapshot = await poll( + "file revert reflected in workspace", + 30_000, + 1_000, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => !task.fileChanges.some((file) => file.path === expectedFile), + ); + + expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); + expect(revertedSnapshot.title).toBe(`Workspace E2E ${runId} Renamed`); + expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); + }, + ); +}); diff --git a/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts new file mode 100644 index 0000000..f9fc244 --- /dev/null +++ b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts @@ -0,0 +1,314 
@@ +import { describe, expect, it } from "vitest"; +import { + createFoundryLogger, + type TaskWorkspaceSnapshot, + type WorkspaceSession, + type WorkspaceTask, + type WorkspaceModelId, + type WorkspaceTranscriptEvent, +} from "@sandbox-agent/foundry-shared"; +import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; + +const RUN_WORKBENCH_LOAD_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E === "1"; +const logger = createFoundryLogger({ + service: "foundry-client-e2e", + bindings: { + suite: "workspace-load", + }, +}); + +function requiredEnv(name: string): string { + const value = process.env[name]?.trim(); + if (!value) { + throw new Error(`Missing required env var: ${name}`); + } + return value; +} + +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { + const value = process.env[name]?.trim(); + return value && value.length > 0 ? value : fallback; +} + +function intEnv(name: string, fallback: number): number { + const raw = process.env[name]?.trim(); + if (!raw) { + return fallback; + } + const value = Number.parseInt(raw, 10); + return Number.isFinite(value) && value > 0 ? 
value : fallback; +} + +async function sleep(ms: number): Promise<void> { + await new Promise((resolve) => setTimeout(resolve, ms)); +} + +async function poll<T>(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise<T>, isDone: (value: T) => boolean): Promise<T> { + const startedAt = Date.now(); + let lastValue: T; + + for (;;) { + lastValue = await fn(); + if (isDone(lastValue)) { + return lastValue; + } + if (Date.now() - startedAt > timeoutMs) { + throw new Error(`timed out waiting for ${label}`); + } + await sleep(intervalMs); + } +} + +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { + const task = snapshot.tasks.find((candidate) => candidate.id === taskId); + if (!task) { + throw new Error(`task ${taskId} missing from snapshot`); + } + return task; +} + +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); + if (!tab) { + throw new Error(`tab ${sessionId} missing from task ${task.id}`); + } + return tab; +} + +function extractEventText(event: WorkspaceTranscriptEvent): string { + const payload = event.payload; + if (!payload || typeof payload !== "object") { + return String(payload ?? 
""); + } + + const envelope = payload as { + method?: unknown; + params?: unknown; + result?: unknown; + }; + + const params = envelope.params; + if (params && typeof params === "object") { + const update = (params as { update?: unknown }).update; + if (update && typeof update === "object") { + const content = (update as { content?: unknown }).content; + if (content && typeof content === "object") { + const chunkText = (content as { text?: unknown }).text; + if (typeof chunkText === "string") { + return chunkText; + } + } + } + + const text = (params as { text?: unknown }).text; + if (typeof text === "string" && text.trim()) { + return text.trim(); + } + + const prompt = (params as { prompt?: Array<{ text?: unknown }> }).prompt; + if (Array.isArray(prompt)) { + return prompt + .map((item) => (typeof item?.text === "string" ? item.text.trim() : "")) + .filter(Boolean) + .join("\n"); + } + } + + const result = envelope.result; + if (result && typeof result === "object") { + const text = (result as { text?: unknown }).text; + if (typeof text === "string" && text.trim()) { + return text.trim(); + } + } + + return typeof envelope.method === "string" ? 
envelope.method : JSON.stringify(payload); +} + +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { + return transcript + .filter((event) => event.sender === "agent") + .map((event) => extractEventText(event)) + .join("") + .includes(expectedText); +} + +function average(values: number[]): number { + return values.reduce((sum, value) => sum + value, 0) / Math.max(values.length, 1); +} + +async function measureWorkspaceSnapshot( + client: ReturnType<typeof createBackendClient>, + organizationId: string, + iterations: number, +): Promise<{ + avgMs: number; + maxMs: number; + payloadBytes: number; + taskCount: number; + tabCount: number; + transcriptEventCount: number; +}> { + const durations: number[] = []; + let snapshot: TaskWorkspaceSnapshot | null = null; + + for (let index = 0; index < iterations; index += 1) { + const startedAt = performance.now(); + snapshot = await client.getWorkspace(organizationId); + durations.push(performance.now() - startedAt); + } + + const finalSnapshot = snapshot ?? 
{ + organizationId, + repos: [], + repositories: [], + tasks: [], + }; + const payloadBytes = Buffer.byteLength(JSON.stringify(finalSnapshot), "utf8"); + const tabCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.length, 0); + const transcriptEventCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.reduce((tabSum, tab) => tabSum + tab.transcript.length, 0), 0); + + return { + avgMs: Math.round(average(durations)), + maxMs: Math.round(Math.max(...durations, 0)), + payloadBytes, + taskCount: finalSnapshot.tasks.length, + tabCount, + transcriptEventCount, + }; +} + +describe("e2e(client): workspace load", () => { + it.skipIf(!RUN_WORKBENCH_LOAD_E2E)("runs a simple sequential load profile against the real backend", { timeout: 30 * 60_000 }, async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); + const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2); + const pollIntervalMs = intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000); + + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); + + const repo = await requireImportedRepo(client, organizationId, repoRemote); + const createTaskLatencies: number[] = []; + const provisionLatencies: number[] = []; + const createSessionLatencies: number[] = []; + const messageRoundTripLatencies: number[] = []; + const snapshotSeries: Array<{ + taskCount: number; + avgMs: number; + maxMs: number; + payloadBytes: number; + tabCount: number; + transcriptEventCount: number; + }> = []; + + snapshotSeries.push(await measureWorkspaceSnapshot(client, organizationId, 2)); + + for (let taskIndex = 0; taskIndex < taskCount; taskIndex += 1) { + const runId = 
`load-${taskIndex}-${Date.now().toString(36)}`; + const initialReply = `LOAD_INIT_${runId}`; + + const createStartedAt = performance.now(); + const created = await client.createWorkspaceTask(organizationId, { + repoId: repo.repoId, + title: `Workspace Load ${runId}`, + branch: `load/${runId}`, + model, + task: `Reply with exactly: ${initialReply}`, + }); + createTaskLatencies.push(performance.now() - createStartedAt); + + const provisionStartedAt = performance.now(); + const provisioned = await poll( + `task ${runId} provisioning`, + 12 * 60_000, + pollIntervalMs, + async () => findTask(await client.getWorkspace(organizationId), created.taskId), + (task) => { + const tab = task.sessions[0]; + return Boolean(tab && task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, initialReply)); + }, + ); + provisionLatencies.push(performance.now() - provisionStartedAt); + + expect(provisioned.sessions.length).toBeGreaterThan(0); + const primaryTab = provisioned.sessions[0]!; + expect(transcriptIncludesAgentText(primaryTab.transcript, initialReply)).toBe(true); + + for (let sessionIndex = 0; sessionIndex < extraSessionCount; sessionIndex += 1) { + const expectedReply = `LOAD_REPLY_${runId}_${sessionIndex}`; + const createSessionStartedAt = performance.now(); + const createdSession = await client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + model, + }); + createSessionLatencies.push(performance.now() - createSessionStartedAt); + + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, + taskId: created.taskId, + sessionId: createdSession.sessionId, + text: `Run pwd in the repo, then reply with exactly: ${expectedReply}`, + attachments: [], + }); + + const messageStartedAt = performance.now(); + const withReply = await poll( + `task ${runId} session ${sessionIndex} reply`, + 10 * 60_000, + pollIntervalMs, + async () => findTask(await client.getWorkspace(organizationId), 
created.taskId), + (task) => { + const tab = findTab(task, createdSession.sessionId); + return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); + }, + ); + messageRoundTripLatencies.push(performance.now() - messageStartedAt); + + expect(transcriptIncludesAgentText(findTab(withReply, createdSession.sessionId).transcript, expectedReply)).toBe(true); + } + + const snapshotMetrics = await measureWorkspaceSnapshot(client, organizationId, 3); + snapshotSeries.push(snapshotMetrics); + logger.info( + { + taskIndex: taskIndex + 1, + ...snapshotMetrics, + }, + "workspace_load_snapshot", + ); + } + + const firstSnapshot = snapshotSeries[0]!; + const lastSnapshot = snapshotSeries[snapshotSeries.length - 1]!; + const summary = { + taskCount, + extraSessionCount, + createTaskAvgMs: Math.round(average(createTaskLatencies)), + provisionAvgMs: Math.round(average(provisionLatencies)), + createSessionAvgMs: Math.round(average(createSessionLatencies)), + messageRoundTripAvgMs: Math.round(average(messageRoundTripLatencies)), + snapshotReadBaselineAvgMs: firstSnapshot.avgMs, + snapshotReadFinalAvgMs: lastSnapshot.avgMs, + snapshotReadFinalMaxMs: lastSnapshot.maxMs, + snapshotPayloadBaselineBytes: firstSnapshot.payloadBytes, + snapshotPayloadFinalBytes: lastSnapshot.payloadBytes, + snapshotTabFinalCount: lastSnapshot.tabCount, + snapshotTranscriptFinalCount: lastSnapshot.transcriptEventCount, + }; + + logger.info(summary, "workspace_load_summary"); + + expect(createTaskLatencies.length).toBe(taskCount); + expect(provisionLatencies.length).toBe(taskCount); + expect(createSessionLatencies.length).toBe(taskCount * extraSessionCount); + expect(messageRoundTripLatencies.length).toBe(taskCount * extraSessionCount); + }); +}); diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts new file mode 100644 index 0000000..6b93ec1 --- /dev/null +++ b/foundry/packages/client/test/keys.test.ts @@ -0,0 +1,13 @@ +import { 
describe, expect, it } from "vitest"; +import { auditLogKey, organizationKey, taskKey, taskSandboxKey } from "../src/keys.js"; + +describe("actor keys", () => { + it("prefixes every key with organization namespace", () => { + const keys = [organizationKey("default"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), auditLogKey("default")]; + + for (const key of keys) { + expect(key[0]).toBe("org"); + expect(key[1]).toBe("default"); + } + }); +}); diff --git a/foundry/packages/client/test/subscription-manager.test.ts b/foundry/packages/client/test/subscription-manager.test.ts new file mode 100644 index 0000000..f0a29c2 --- /dev/null +++ b/foundry/packages/client/test/subscription-manager.test.ts @@ -0,0 +1,225 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import type { OrganizationEvent, OrganizationSummarySnapshot } from "@sandbox-agent/foundry-shared"; +import type { ActorConn, BackendClient } from "../src/backend-client.js"; +import { RemoteSubscriptionManager } from "../src/subscription/remote-manager.js"; + +class FakeActorConn implements ActorConn { + private readonly listeners = new Map<string, Set<(payload: any) => void>>(); + private readonly errorListeners = new Set<(error: unknown) => void>(); + disposeCount = 0; + + on(event: string, listener: (payload: any) => void): () => void { + let current = this.listeners.get(event); + if (!current) { + current = new Set(); + this.listeners.set(event, current); + } + current.add(listener); + return () => { + current?.delete(listener); + if (current?.size === 0) { + this.listeners.delete(event); + } + }; + } + + onError(listener: (error: unknown) => void): () => void { + this.errorListeners.add(listener); + return () => { + this.errorListeners.delete(listener); + }; + } + + emit(event: string, payload: unknown): void { + for (const listener of this.listeners.get(event) ?? 
[]) { + listener(payload); + } + } + + emitError(error: unknown): void { + for (const listener of this.errorListeners) { + listener(error); + } + } + + async dispose(): Promise { + this.disposeCount += 1; + } +} + +function organizationSnapshot(): OrganizationSummarySnapshot { + return { + organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: 1, + lastSyncLabel: "Synced just now", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: 1, + totalRepositoryCount: 1, + }, + repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }], + taskSummaries: [ + { + id: "task-1", + repoId: "repo-1", + title: "Initial task", + status: "idle", + repoName: "repo-1", + updatedAtMs: 10, + branch: "main", + pullRequest: null, + activeSessionId: null, + sessionsSummary: [], + primaryUserLogin: null, + primaryUserAvatarUrl: null, + }, + ], + }; +} + +function createBackend(conn: FakeActorConn, snapshot: OrganizationSummarySnapshot): BackendClient { + return { + connectOrganization: vi.fn(async () => conn), + getOrganizationSummary: vi.fn(async () => snapshot), + } as unknown as BackendClient; +} + +async function flushAsyncWork(): Promise { + await Promise.resolve(); + await Promise.resolve(); +} + +describe("RemoteSubscriptionManager", () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it("shares one connection per topic key and applies incoming events", async () => { + const conn = new FakeActorConn(); + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; + const listenerA = vi.fn(); + const listenerB = vi.fn(); + + const unsubscribeA = manager.subscribe("organization", params, listenerA); + const unsubscribeB = 
manager.subscribe("organization", params, listenerB); + await flushAsyncWork(); + + expect(backend.connectOrganization).toHaveBeenCalledTimes(1); + expect(backend.getOrganizationSummary).toHaveBeenCalledTimes(1); + expect(manager.getStatus("organization", params)).toBe("connected"); + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Initial task"); + expect(manager.listDebugTopics()).toEqual([ + expect.objectContaining({ + topicKey: "organization", + cacheKey: "organization:org-1", + listenerCount: 2, + status: "connected", + }), + ]); + + conn.emit("organizationUpdated", { + type: "organizationUpdated", + snapshot: { + organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: "connected", + syncStatus: "syncing", + importedRepoCount: 1, + lastSyncLabel: "Syncing repositories...", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 2, + syncPhase: "syncing_branches", + processedRepositoryCount: 1, + totalRepositoryCount: 3, + }, + repos: [], + taskSummaries: [ + { + id: "task-1", + repoId: "repo-1", + title: "Updated task", + status: "running", + repoName: "repo-1", + updatedAtMs: 20, + branch: "feature/live", + pullRequest: null, + activeSessionId: null, + sessionsSummary: [], + primaryUserLogin: null, + primaryUserAvatarUrl: null, + }, + ], + }, + } satisfies OrganizationEvent); + + // applyEvent chains onto an internal promise — flush the microtask queue + await flushAsyncWork(); + + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Updated task"); + expect(listenerA).toHaveBeenCalled(); + expect(listenerB).toHaveBeenCalled(); + expect(manager.listDebugTopics()[0]?.lastRefreshAt).toEqual(expect.any(Number)); + + unsubscribeA(); + unsubscribeB(); + manager.dispose(); + }); + + it("keeps a topic warm during the grace period and tears it down afterwards", async () => { + const conn = new FakeActorConn(); + const backend = 
createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; + + const unsubscribeA = manager.subscribe("organization", params, () => {}); + await flushAsyncWork(); + unsubscribeA(); + + vi.advanceTimersByTime(29_000); + expect(manager.listDebugTopics()).toEqual([]); + + const unsubscribeB = manager.subscribe("organization", params, () => {}); + await flushAsyncWork(); + + expect(backend.connectOrganization).toHaveBeenCalledTimes(1); + expect(conn.disposeCount).toBe(0); + + unsubscribeB(); + expect(manager.listDebugTopics()).toEqual([]); + vi.advanceTimersByTime(30_000); + + expect(conn.disposeCount).toBe(1); + expect(manager.getSnapshot("organization", params)).toBeUndefined(); + }); + + it("surfaces connection errors to subscribers", async () => { + const conn = new FakeActorConn(); + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; + + manager.subscribe("organization", params, () => {}); + await flushAsyncWork(); + + conn.emitError(new Error("socket dropped")); + + expect(manager.getStatus("organization", params)).toBe("error"); + expect(manager.getError("organization", params)?.message).toBe("socket dropped"); + }); +}); diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts new file mode 100644 index 0000000..d418c2f --- /dev/null +++ b/foundry/packages/client/test/view-model.test.ts @@ -0,0 +1,75 @@ +import { describe, expect, it } from "vitest"; +import type { TaskRecord } from "@sandbox-agent/foundry-shared"; +import { filterTasks, formatRelativeAge, fuzzyMatch, summarizeTasks } from "../src/view-model.js"; + +const sample: TaskRecord = { + organizationId: "default", + repoId: "repo-a", + repoRemote: "https://example.com/repo-a.git", + taskId: "task-1", + branchName: "feature/test", + 
title: "Test Title", + task: "Do test", + sandboxProviderId: "local", + status: "running", + activeSandboxId: "sandbox-1", + pullRequest: null, + sandboxes: [ + { + sandboxId: "sandbox-1", + sandboxProviderId: "local", + sandboxActorId: null, + switchTarget: "sandbox://local/sandbox-1", + cwd: null, + createdAt: 1, + updatedAt: 1, + }, + ], + createdAt: 1, + updatedAt: 1, +}; + +describe("search helpers", () => { + it("supports ordered fuzzy matching", () => { + expect(fuzzyMatch("feature/test-branch", "ftb")).toBe(true); + expect(fuzzyMatch("feature/test-branch", "fbt")).toBe(false); + }); + + it("filters rows across branch and title", () => { + const rows: TaskRecord[] = [ + sample, + { + ...sample, + taskId: "task-2", + branchName: "docs/update-intro", + title: "Docs Intro Refresh", + status: "idle", + }, + ]; + expect(filterTasks(rows, "doc")).toHaveLength(1); + expect(filterTasks(rows, "intro")).toHaveLength(1); + expect(filterTasks(rows, "test")).toHaveLength(2); + }); +}); + +describe("summary helpers", () => { + it("formats relative age", () => { + expect(formatRelativeAge(9_000, 10_000)).toBe("1s"); + expect(formatRelativeAge(0, 120_000)).toBe("2m"); + }); + + it("summarizes by status and provider", () => { + const rows: TaskRecord[] = [ + sample, + { ...sample, taskId: "task-2", status: "idle", sandboxProviderId: "local" }, + { ...sample, taskId: "task-3", status: "error", sandboxProviderId: "local" }, + ]; + + const summary = summarizeTasks(rows); + expect(summary.total).toBe(3); + expect(summary.byStatus.running).toBe(1); + expect(summary.byStatus.idle).toBe(1); + expect(summary.byStatus.error).toBe(1); + expect(summary.byProvider.local).toBe(3); + }); +}); diff --git a/foundry/packages/client/tsconfig.build.json b/foundry/packages/client/tsconfig.build.json new file mode 100644 index 0000000..35bcdb2 --- /dev/null +++ b/foundry/packages/client/tsconfig.build.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + 
"ignoreDeprecations": "6.0" + } +} diff --git a/foundry/packages/client/tsconfig.json b/foundry/packages/client/tsconfig.json new file mode 100644 index 0000000..ae5ba21 --- /dev/null +++ b/foundry/packages/client/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": ["src", "test"] +} diff --git a/foundry/packages/desktop/package.json b/foundry/packages/desktop/package.json new file mode 100644 index 0000000..fee1a61 --- /dev/null +++ b/foundry/packages/desktop/package.json @@ -0,0 +1,23 @@ +{ + "name": "@sandbox-agent/foundry-desktop", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "dev": "tauri dev", + "build": "tauri build", + "build:sidecar": "tsx scripts/build-sidecar.ts", + "build:frontend": "tsx scripts/build-frontend.ts", + "build:all": "pnpm build:sidecar && pnpm build:frontend && pnpm build", + "tauri": "tauri" + }, + "devDependencies": { + "@tauri-apps/cli": "^2", + "tsx": "^4" + }, + "dependencies": { + "@sandbox-agent/foundry-shared": "workspace:*", + "@tauri-apps/api": "^2", + "@tauri-apps/plugin-shell": "^2" + } +} diff --git a/foundry/packages/desktop/scripts/build-frontend.ts b/foundry/packages/desktop/scripts/build-frontend.ts new file mode 100644 index 0000000..742231e --- /dev/null +++ b/foundry/packages/desktop/scripts/build-frontend.ts @@ -0,0 +1,49 @@ +import { execSync } from "node:child_process"; +import { cpSync, readFileSync, writeFileSync, rmSync, existsSync } from "node:fs"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import { createFoundryLogger } from "@sandbox-agent/foundry-shared"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const desktopRoot = resolve(__dirname, ".."); +const repoRoot = resolve(desktopRoot, "../../.."); +const frontendDist = resolve(desktopRoot, "../frontend/dist"); +const destDir = resolve(desktopRoot, "frontend-dist"); +const logger = 
createFoundryLogger({ + service: "foundry-desktop-build", + bindings: { + script: "build-frontend", + }, +}); + +function run(cmd: string, opts?: { cwd?: string; env?: NodeJS.ProcessEnv }) { + logger.info({ command: cmd, cwd: opts?.cwd ?? repoRoot }, "run_command"); + execSync(cmd, { + stdio: "inherit", + cwd: opts?.cwd ?? repoRoot, + env: { ...process.env, ...opts?.env }, + }); +} + +// Step 1: Build the frontend with the desktop-specific backend endpoint +logger.info("building_frontend"); +run("pnpm --filter @sandbox-agent/foundry-frontend build", { + env: { + VITE_HF_BACKEND_ENDPOINT: "http://127.0.0.1:7741/v1/rivet", + }, +}); + +// Step 2: Copy dist to frontend-dist/ +logger.info({ frontendDist, destDir }, "copying_frontend_dist"); +if (existsSync(destDir)) { + rmSync(destDir, { recursive: true }); +} +cpSync(frontendDist, destDir, { recursive: true }); + +// Step 3: Strip react-scan script from index.html (it loads unconditionally) +const indexPath = resolve(destDir, "index.html"); +let html = readFileSync(indexPath, "utf-8"); +html = html.replace(/]*><\/script>\s*/g, ""); +writeFileSync(indexPath, html); + +logger.info({ indexPath }, "frontend_build_complete"); diff --git a/foundry/packages/desktop/scripts/build-sidecar.ts b/foundry/packages/desktop/scripts/build-sidecar.ts new file mode 100644 index 0000000..58ef4b0 --- /dev/null +++ b/foundry/packages/desktop/scripts/build-sidecar.ts @@ -0,0 +1,75 @@ +import { execSync } from "node:child_process"; +import { mkdirSync, existsSync } from "node:fs"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import { createFoundryLogger } from "@sandbox-agent/foundry-shared"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const desktopRoot = resolve(__dirname, ".."); +const sidecarDir = resolve(desktopRoot, "src-tauri/sidecars"); +const logger = createFoundryLogger({ + service: "foundry-desktop-build", + bindings: { + script: "build-sidecar", + }, +}); + +const 
isDev = process.argv.includes("--dev"); + +// Detect current architecture +function currentTarget(): string { + const arch = process.arch === "arm64" ? "aarch64" : "x86_64"; + return `${arch}-apple-darwin`; +} + +// Target triples to build +const targets: Array<{ bunTarget: string; tripleTarget: string }> = isDev + ? [ + { + bunTarget: process.arch === "arm64" ? "bun-darwin-arm64" : "bun-darwin-x64", + tripleTarget: currentTarget(), + }, + ] + : [ + { + bunTarget: "bun-darwin-arm64", + tripleTarget: "aarch64-apple-darwin", + }, + { + bunTarget: "bun-darwin-x64", + tripleTarget: "x86_64-apple-darwin", + }, + ]; + +function run(cmd: string, opts?: { cwd?: string; env?: NodeJS.ProcessEnv }) { + logger.info({ command: cmd, cwd: opts?.cwd ?? desktopRoot }, "run_command"); + execSync(cmd, { + stdio: "inherit", + cwd: opts?.cwd ?? desktopRoot, + env: { ...process.env, ...opts?.env }, + }); +} + +// Step 1: Build the backend with tsup +logger.info("building_backend"); +run("pnpm --filter @sandbox-agent/foundry-backend build", { + cwd: resolve(desktopRoot, "../../.."), +}); + +// Step 2: Compile standalone binaries with bun +mkdirSync(sidecarDir, { recursive: true }); + +const backendEntry = resolve(desktopRoot, "../backend/dist/index.js"); + +if (!existsSync(backendEntry)) { + logger.error({ backendEntry }, "backend_build_output_not_found"); + process.exit(1); +} + +for (const { bunTarget, tripleTarget } of targets) { + const outfile = resolve(sidecarDir, `foundry-backend-${tripleTarget}`); + logger.info({ bunTarget, tripleTarget, outfile }, "compiling_sidecar"); + run(`bun build --compile --target ${bunTarget} ${backendEntry} --outfile ${outfile}`); +} + +logger.info({ targets: targets.map((target) => target.tripleTarget) }, "sidecar_build_complete"); diff --git a/foundry/packages/desktop/src-tauri/Cargo.toml b/foundry/packages/desktop/src-tauri/Cargo.toml new file mode 100644 index 0000000..bf86188 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/Cargo.toml @@ -0,0 
+1,15 @@ +[package] +name = "foundry" +version = "0.1.0" +edition = "2021" + +[build-dependencies] +tauri-build = { version = "2", features = [] } + +[dependencies] +tauri = { version = "2", features = [] } +tauri-plugin-shell = "2" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +reqwest = { version = "0.12", features = ["json"] } +tokio = { version = "1", features = ["time"] } diff --git a/foundry/packages/desktop/src-tauri/build.rs b/foundry/packages/desktop/src-tauri/build.rs new file mode 100644 index 0000000..d860e1e --- /dev/null +++ b/foundry/packages/desktop/src-tauri/build.rs @@ -0,0 +1,3 @@ +fn main() { + tauri_build::build() +} diff --git a/foundry/packages/desktop/src-tauri/capabilities/default.json b/foundry/packages/desktop/src-tauri/capabilities/default.json new file mode 100644 index 0000000..1275b58 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/capabilities/default.json @@ -0,0 +1,30 @@ +{ + "identifier": "default", + "description": "Default capability for Foundry desktop", + "windows": ["main"], + "permissions": [ + "core:default", + "core:window:allow-start-dragging", + "shell:allow-open", + { + "identifier": "shell:allow-execute", + "allow": [ + { + "name": "sidecars/foundry-backend", + "sidecar": true, + "args": true + } + ] + }, + { + "identifier": "shell:allow-spawn", + "allow": [ + { + "name": "sidecars/foundry-backend", + "sidecar": true, + "args": true + } + ] + } + ] +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json new file mode 100644 index 0000000..6844932 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json @@ -0,0 +1,1922 @@ +{ + "core": { + "default_permission": { + "identifier": "default", + "description": "Default core plugins set.", + "permissions": [ + "core:path:default", + "core:event:default", + "core:window:default", + "core:webview:default", + "core:app:default", 
+ "core:image:default", + "core:resources:default", + "core:menu:default", + "core:tray:default" + ] + }, + "permissions": {}, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:app": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-version", + "allow-name", + "allow-tauri-version", + "allow-identifier", + "allow-bundle-type", + "allow-register-listener", + "allow-remove-listener" + ] + }, + "permissions": { + "allow-app-hide": { + "identifier": "allow-app-hide", + "description": "Enables the app_hide command without any pre-configured scope.", + "commands": { "allow": ["app_hide"], "deny": [] } + }, + "allow-app-show": { + "identifier": "allow-app-show", + "description": "Enables the app_show command without any pre-configured scope.", + "commands": { "allow": ["app_show"], "deny": [] } + }, + "allow-bundle-type": { + "identifier": "allow-bundle-type", + "description": "Enables the bundle_type command without any pre-configured scope.", + "commands": { "allow": ["bundle_type"], "deny": [] } + }, + "allow-default-window-icon": { + "identifier": "allow-default-window-icon", + "description": "Enables the default_window_icon command without any pre-configured scope.", + "commands": { "allow": ["default_window_icon"], "deny": [] } + }, + "allow-fetch-data-store-identifiers": { + "identifier": "allow-fetch-data-store-identifiers", + "description": "Enables the fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": ["fetch_data_store_identifiers"], "deny": [] } + }, + "allow-identifier": { + "identifier": "allow-identifier", + "description": "Enables the identifier command without any pre-configured scope.", + "commands": { "allow": ["identifier"], "deny": [] } + }, + "allow-name": { + "identifier": "allow-name", + "description": "Enables the name command without any pre-configured scope.", + "commands": { "allow": 
["name"], "deny": [] } + }, + "allow-register-listener": { + "identifier": "allow-register-listener", + "description": "Enables the register_listener command without any pre-configured scope.", + "commands": { "allow": ["register_listener"], "deny": [] } + }, + "allow-remove-data-store": { + "identifier": "allow-remove-data-store", + "description": "Enables the remove_data_store command without any pre-configured scope.", + "commands": { "allow": ["remove_data_store"], "deny": [] } + }, + "allow-remove-listener": { + "identifier": "allow-remove-listener", + "description": "Enables the remove_listener command without any pre-configured scope.", + "commands": { "allow": ["remove_listener"], "deny": [] } + }, + "allow-set-app-theme": { + "identifier": "allow-set-app-theme", + "description": "Enables the set_app_theme command without any pre-configured scope.", + "commands": { "allow": ["set_app_theme"], "deny": [] } + }, + "allow-set-dock-visibility": { + "identifier": "allow-set-dock-visibility", + "description": "Enables the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": ["set_dock_visibility"], "deny": [] } + }, + "allow-tauri-version": { + "identifier": "allow-tauri-version", + "description": "Enables the tauri_version command without any pre-configured scope.", + "commands": { "allow": ["tauri_version"], "deny": [] } + }, + "allow-version": { + "identifier": "allow-version", + "description": "Enables the version command without any pre-configured scope.", + "commands": { "allow": ["version"], "deny": [] } + }, + "deny-app-hide": { + "identifier": "deny-app-hide", + "description": "Denies the app_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_hide"] } + }, + "deny-app-show": { + "identifier": "deny-app-show", + "description": "Denies the app_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_show"] } + }, + "deny-bundle-type": { + 
"identifier": "deny-bundle-type", + "description": "Denies the bundle_type command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["bundle_type"] } + }, + "deny-default-window-icon": { + "identifier": "deny-default-window-icon", + "description": "Denies the default_window_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["default_window_icon"] } + }, + "deny-fetch-data-store-identifiers": { + "identifier": "deny-fetch-data-store-identifiers", + "description": "Denies the fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["fetch_data_store_identifiers"] } + }, + "deny-identifier": { + "identifier": "deny-identifier", + "description": "Denies the identifier command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["identifier"] } + }, + "deny-name": { + "identifier": "deny-name", + "description": "Denies the name command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["name"] } + }, + "deny-register-listener": { + "identifier": "deny-register-listener", + "description": "Denies the register_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["register_listener"] } + }, + "deny-remove-data-store": { + "identifier": "deny-remove-data-store", + "description": "Denies the remove_data_store command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_data_store"] } + }, + "deny-remove-listener": { + "identifier": "deny-remove-listener", + "description": "Denies the remove_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_listener"] } + }, + "deny-set-app-theme": { + "identifier": "deny-set-app-theme", + "description": "Denies the set_app_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_app_theme"] } + }, + "deny-set-dock-visibility": { + 
"identifier": "deny-set-dock-visibility", + "description": "Denies the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_dock_visibility"] } + }, + "deny-tauri-version": { + "identifier": "deny-tauri-version", + "description": "Denies the tauri_version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["tauri_version"] } + }, + "deny-version": { + "identifier": "deny-version", + "description": "Denies the version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["version"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:event": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-listen", "allow-unlisten", "allow-emit", "allow-emit-to"] + }, + "permissions": { + "allow-emit": { + "identifier": "allow-emit", + "description": "Enables the emit command without any pre-configured scope.", + "commands": { "allow": ["emit"], "deny": [] } + }, + "allow-emit-to": { + "identifier": "allow-emit-to", + "description": "Enables the emit_to command without any pre-configured scope.", + "commands": { "allow": ["emit_to"], "deny": [] } + }, + "allow-listen": { + "identifier": "allow-listen", + "description": "Enables the listen command without any pre-configured scope.", + "commands": { "allow": ["listen"], "deny": [] } + }, + "allow-unlisten": { + "identifier": "allow-unlisten", + "description": "Enables the unlisten command without any pre-configured scope.", + "commands": { "allow": ["unlisten"], "deny": [] } + }, + "deny-emit": { + "identifier": "deny-emit", + "description": "Denies the emit command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["emit"] } + }, + "deny-emit-to": { + "identifier": "deny-emit-to", + "description": "Denies the emit_to command without any pre-configured scope.", + 
"commands": { "allow": [], "deny": ["emit_to"] } + }, + "deny-listen": { + "identifier": "deny-listen", + "description": "Denies the listen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["listen"] } + }, + "deny-unlisten": { + "identifier": "deny-unlisten", + "description": "Denies the unlisten command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unlisten"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:image": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-new", "allow-from-bytes", "allow-from-path", "allow-rgba", "allow-size"] + }, + "permissions": { + "allow-from-bytes": { + "identifier": "allow-from-bytes", + "description": "Enables the from_bytes command without any pre-configured scope.", + "commands": { "allow": ["from_bytes"], "deny": [] } + }, + "allow-from-path": { + "identifier": "allow-from-path", + "description": "Enables the from_path command without any pre-configured scope.", + "commands": { "allow": ["from_path"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-rgba": { + "identifier": "allow-rgba", + "description": "Enables the rgba command without any pre-configured scope.", + "commands": { "allow": ["rgba"], "deny": [] } + }, + "allow-size": { + "identifier": "allow-size", + "description": "Enables the size command without any pre-configured scope.", + "commands": { "allow": ["size"], "deny": [] } + }, + "deny-from-bytes": { + "identifier": "deny-from-bytes", + "description": "Denies the from_bytes command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_bytes"] } + }, + "deny-from-path": { + "identifier": "deny-from-path", + "description": "Denies the 
from_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_path"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-rgba": { + "identifier": "deny-rgba", + "description": "Denies the rgba command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["rgba"] } + }, + "deny-size": { + "identifier": "deny-size", + "description": "Denies the size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:menu": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-append", + "allow-prepend", + "allow-insert", + "allow-remove", + "allow-remove-at", + "allow-items", + "allow-get", + "allow-popup", + "allow-create-default", + "allow-set-as-app-menu", + "allow-set-as-window-menu", + "allow-text", + "allow-set-text", + "allow-is-enabled", + "allow-set-enabled", + "allow-set-accelerator", + "allow-set-as-windows-menu-for-nsapp", + "allow-set-as-help-menu-for-nsapp", + "allow-is-checked", + "allow-set-checked", + "allow-set-icon" + ] + }, + "permissions": { + "allow-append": { + "identifier": "allow-append", + "description": "Enables the append command without any pre-configured scope.", + "commands": { "allow": ["append"], "deny": [] } + }, + "allow-create-default": { + "identifier": "allow-create-default", + "description": "Enables the create_default command without any pre-configured scope.", + "commands": { "allow": ["create_default"], "deny": [] } + }, + "allow-get": { + "identifier": "allow-get", + "description": "Enables the get command without any pre-configured scope.", + "commands": { "allow": ["get"], "deny": [] } + }, + "allow-insert": { 
+ "identifier": "allow-insert", + "description": "Enables the insert command without any pre-configured scope.", + "commands": { "allow": ["insert"], "deny": [] } + }, + "allow-is-checked": { + "identifier": "allow-is-checked", + "description": "Enables the is_checked command without any pre-configured scope.", + "commands": { "allow": ["is_checked"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-items": { + "identifier": "allow-items", + "description": "Enables the items command without any pre-configured scope.", + "commands": { "allow": ["items"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-popup": { + "identifier": "allow-popup", + "description": "Enables the popup command without any pre-configured scope.", + "commands": { "allow": ["popup"], "deny": [] } + }, + "allow-prepend": { + "identifier": "allow-prepend", + "description": "Enables the prepend command without any pre-configured scope.", + "commands": { "allow": ["prepend"], "deny": [] } + }, + "allow-remove": { + "identifier": "allow-remove", + "description": "Enables the remove command without any pre-configured scope.", + "commands": { "allow": ["remove"], "deny": [] } + }, + "allow-remove-at": { + "identifier": "allow-remove-at", + "description": "Enables the remove_at command without any pre-configured scope.", + "commands": { "allow": ["remove_at"], "deny": [] } + }, + "allow-set-accelerator": { + "identifier": "allow-set-accelerator", + "description": "Enables the set_accelerator command without any pre-configured scope.", + "commands": { "allow": ["set_accelerator"], "deny": [] } + }, + "allow-set-as-app-menu": { + "identifier": "allow-set-as-app-menu", + 
"description": "Enables the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_app_menu"], "deny": [] } + }, + "allow-set-as-help-menu-for-nsapp": { + "identifier": "allow-set-as-help-menu-for-nsapp", + "description": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_help_menu_for_nsapp"], "deny": [] } + }, + "allow-set-as-window-menu": { + "identifier": "allow-set-as-window-menu", + "description": "Enables the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_window_menu"], "deny": [] } + }, + "allow-set-as-windows-menu-for-nsapp": { + "identifier": "allow-set-as-windows-menu-for-nsapp", + "description": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_windows_menu_for_nsapp"], "deny": [] } + }, + "allow-set-checked": { + "identifier": "allow-set-checked", + "description": "Enables the set_checked command without any pre-configured scope.", + "commands": { "allow": ["set_checked"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-text": { + "identifier": "allow-set-text", + "description": "Enables the set_text command without any pre-configured scope.", + "commands": { "allow": ["set_text"], "deny": [] } + }, + "allow-text": { + "identifier": "allow-text", + "description": "Enables the text command without any pre-configured scope.", + "commands": { "allow": ["text"], "deny": [] } + }, + "deny-append": { + "identifier": "deny-append", + "description": "Denies 
the append command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["append"] } + }, + "deny-create-default": { + "identifier": "deny-create-default", + "description": "Denies the create_default command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_default"] } + }, + "deny-get": { + "identifier": "deny-get", + "description": "Denies the get command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get"] } + }, + "deny-insert": { + "identifier": "deny-insert", + "description": "Denies the insert command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["insert"] } + }, + "deny-is-checked": { + "identifier": "deny-is-checked", + "description": "Denies the is_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_checked"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-items": { + "identifier": "deny-items", + "description": "Denies the items command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["items"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-popup": { + "identifier": "deny-popup", + "description": "Denies the popup command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["popup"] } + }, + "deny-prepend": { + "identifier": "deny-prepend", + "description": "Denies the prepend command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["prepend"] } + }, + "deny-remove": { + "identifier": "deny-remove", + "description": "Denies the remove command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove"] } + }, + 
"deny-remove-at": { + "identifier": "deny-remove-at", + "description": "Denies the remove_at command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_at"] } + }, + "deny-set-accelerator": { + "identifier": "deny-set-accelerator", + "description": "Denies the set_accelerator command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_accelerator"] } + }, + "deny-set-as-app-menu": { + "identifier": "deny-set-as-app-menu", + "description": "Denies the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_app_menu"] } + }, + "deny-set-as-help-menu-for-nsapp": { + "identifier": "deny-set-as-help-menu-for-nsapp", + "description": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_help_menu_for_nsapp"] } + }, + "deny-set-as-window-menu": { + "identifier": "deny-set-as-window-menu", + "description": "Denies the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_window_menu"] } + }, + "deny-set-as-windows-menu-for-nsapp": { + "identifier": "deny-set-as-windows-menu-for-nsapp", + "description": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_windows_menu_for_nsapp"] } + }, + "deny-set-checked": { + "identifier": "deny-set-checked", + "description": "Denies the set_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_checked"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["set_icon"] } + }, + "deny-set-text": { + "identifier": "deny-set-text", + "description": "Denies the set_text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_text"] } + }, + "deny-text": { + "identifier": "deny-text", + "description": "Denies the text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["text"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:path": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-resolve-directory", + "allow-resolve", + "allow-normalize", + "allow-join", + "allow-dirname", + "allow-extname", + "allow-basename", + "allow-is-absolute" + ] + }, + "permissions": { + "allow-basename": { + "identifier": "allow-basename", + "description": "Enables the basename command without any pre-configured scope.", + "commands": { "allow": ["basename"], "deny": [] } + }, + "allow-dirname": { + "identifier": "allow-dirname", + "description": "Enables the dirname command without any pre-configured scope.", + "commands": { "allow": ["dirname"], "deny": [] } + }, + "allow-extname": { + "identifier": "allow-extname", + "description": "Enables the extname command without any pre-configured scope.", + "commands": { "allow": ["extname"], "deny": [] } + }, + "allow-is-absolute": { + "identifier": "allow-is-absolute", + "description": "Enables the is_absolute command without any pre-configured scope.", + "commands": { "allow": ["is_absolute"], "deny": [] } + }, + "allow-join": { + "identifier": "allow-join", + "description": "Enables the join command without any pre-configured scope.", + "commands": { "allow": ["join"], "deny": [] } + }, + "allow-normalize": { + "identifier": "allow-normalize", + "description": "Enables the normalize command without any pre-configured scope.", + "commands": { "allow": ["normalize"], "deny": [] } + }, + 
"allow-resolve": { + "identifier": "allow-resolve", + "description": "Enables the resolve command without any pre-configured scope.", + "commands": { "allow": ["resolve"], "deny": [] } + }, + "allow-resolve-directory": { + "identifier": "allow-resolve-directory", + "description": "Enables the resolve_directory command without any pre-configured scope.", + "commands": { "allow": ["resolve_directory"], "deny": [] } + }, + "deny-basename": { + "identifier": "deny-basename", + "description": "Denies the basename command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["basename"] } + }, + "deny-dirname": { + "identifier": "deny-dirname", + "description": "Denies the dirname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["dirname"] } + }, + "deny-extname": { + "identifier": "deny-extname", + "description": "Denies the extname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["extname"] } + }, + "deny-is-absolute": { + "identifier": "deny-is-absolute", + "description": "Denies the is_absolute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_absolute"] } + }, + "deny-join": { + "identifier": "deny-join", + "description": "Denies the join command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["join"] } + }, + "deny-normalize": { + "identifier": "deny-normalize", + "description": "Denies the normalize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["normalize"] } + }, + "deny-resolve": { + "identifier": "deny-resolve", + "description": "Denies the resolve command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve"] } + }, + "deny-resolve-directory": { + "identifier": "deny-resolve-directory", + "description": "Denies the resolve_directory command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve_directory"] } + } + }, + 
"permission_sets": {}, + "global_scope_schema": null + }, + "core:resources": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-close"] + }, + "permissions": { + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:tray": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-get-by-id", + "allow-remove-by-id", + "allow-set-icon", + "allow-set-menu", + "allow-set-tooltip", + "allow-set-title", + "allow-set-visible", + "allow-set-temp-dir-path", + "allow-set-icon-as-template", + "allow-set-show-menu-on-left-click" + ] + }, + "permissions": { + "allow-get-by-id": { + "identifier": "allow-get-by-id", + "description": "Enables the get_by_id command without any pre-configured scope.", + "commands": { "allow": ["get_by_id"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-remove-by-id": { + "identifier": "allow-remove-by-id", + "description": "Enables the remove_by_id command without any pre-configured scope.", + "commands": { "allow": ["remove_by_id"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-icon-as-template": { + 
"identifier": "allow-set-icon-as-template", + "description": "Enables the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": ["set_icon_as_template"], "deny": [] } + }, + "allow-set-menu": { + "identifier": "allow-set-menu", + "description": "Enables the set_menu command without any pre-configured scope.", + "commands": { "allow": ["set_menu"], "deny": [] } + }, + "allow-set-show-menu-on-left-click": { + "identifier": "allow-set-show-menu-on-left-click", + "description": "Enables the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": ["set_show_menu_on_left_click"], "deny": [] } + }, + "allow-set-temp-dir-path": { + "identifier": "allow-set-temp-dir-path", + "description": "Enables the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": ["set_temp_dir_path"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-tooltip": { + "identifier": "allow-set-tooltip", + "description": "Enables the set_tooltip command without any pre-configured scope.", + "commands": { "allow": ["set_tooltip"], "deny": [] } + }, + "allow-set-visible": { + "identifier": "allow-set-visible", + "description": "Enables the set_visible command without any pre-configured scope.", + "commands": { "allow": ["set_visible"], "deny": [] } + }, + "deny-get-by-id": { + "identifier": "deny-get-by-id", + "description": "Denies the get_by_id command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_by_id"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-remove-by-id": { + "identifier": "deny-remove-by-id", + "description": "Denies the remove_by_id 
command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_by_id"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-icon-as-template": { + "identifier": "deny-set-icon-as-template", + "description": "Denies the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon_as_template"] } + }, + "deny-set-menu": { + "identifier": "deny-set-menu", + "description": "Denies the set_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_menu"] } + }, + "deny-set-show-menu-on-left-click": { + "identifier": "deny-set-show-menu-on-left-click", + "description": "Denies the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_show_menu_on_left_click"] } + }, + "deny-set-temp-dir-path": { + "identifier": "deny-set-temp-dir-path", + "description": "Denies the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_temp_dir_path"] } + }, + "deny-set-title": { + "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-tooltip": { + "identifier": "deny-set-tooltip", + "description": "Denies the set_tooltip command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_tooltip"] } + }, + "deny-set-visible": { + "identifier": "deny-set-visible", + "description": "Denies the set_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:webview": { + "default_permission": { + "identifier": "default", + "description": "Default 
permissions for the plugin.", + "permissions": ["allow-get-all-webviews", "allow-webview-position", "allow-webview-size", "allow-internal-toggle-devtools"] + }, + "permissions": { + "allow-clear-all-browsing-data": { + "identifier": "allow-clear-all-browsing-data", + "description": "Enables the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": ["clear_all_browsing_data"], "deny": [] } + }, + "allow-create-webview": { + "identifier": "allow-create-webview", + "description": "Enables the create_webview command without any pre-configured scope.", + "commands": { "allow": ["create_webview"], "deny": [] } + }, + "allow-create-webview-window": { + "identifier": "allow-create-webview-window", + "description": "Enables the create_webview_window command without any pre-configured scope.", + "commands": { "allow": ["create_webview_window"], "deny": [] } + }, + "allow-get-all-webviews": { + "identifier": "allow-get-all-webviews", + "description": "Enables the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": ["get_all_webviews"], "deny": [] } + }, + "allow-internal-toggle-devtools": { + "identifier": "allow-internal-toggle-devtools", + "description": "Enables the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_devtools"], "deny": [] } + }, + "allow-print": { + "identifier": "allow-print", + "description": "Enables the print command without any pre-configured scope.", + "commands": { "allow": ["print"], "deny": [] } + }, + "allow-reparent": { + "identifier": "allow-reparent", + "description": "Enables the reparent command without any pre-configured scope.", + "commands": { "allow": ["reparent"], "deny": [] } + }, + "allow-set-webview-auto-resize": { + "identifier": "allow-set-webview-auto-resize", + "description": "Enables the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": 
["set_webview_auto_resize"], "deny": [] } + }, + "allow-set-webview-background-color": { + "identifier": "allow-set-webview-background-color", + "description": "Enables the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_webview_background_color"], "deny": [] } + }, + "allow-set-webview-focus": { + "identifier": "allow-set-webview-focus", + "description": "Enables the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": ["set_webview_focus"], "deny": [] } + }, + "allow-set-webview-position": { + "identifier": "allow-set-webview-position", + "description": "Enables the set_webview_position command without any pre-configured scope.", + "commands": { "allow": ["set_webview_position"], "deny": [] } + }, + "allow-set-webview-size": { + "identifier": "allow-set-webview-size", + "description": "Enables the set_webview_size command without any pre-configured scope.", + "commands": { "allow": ["set_webview_size"], "deny": [] } + }, + "allow-set-webview-zoom": { + "identifier": "allow-set-webview-zoom", + "description": "Enables the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": ["set_webview_zoom"], "deny": [] } + }, + "allow-webview-close": { + "identifier": "allow-webview-close", + "description": "Enables the webview_close command without any pre-configured scope.", + "commands": { "allow": ["webview_close"], "deny": [] } + }, + "allow-webview-hide": { + "identifier": "allow-webview-hide", + "description": "Enables the webview_hide command without any pre-configured scope.", + "commands": { "allow": ["webview_hide"], "deny": [] } + }, + "allow-webview-position": { + "identifier": "allow-webview-position", + "description": "Enables the webview_position command without any pre-configured scope.", + "commands": { "allow": ["webview_position"], "deny": [] } + }, + "allow-webview-show": { + "identifier": "allow-webview-show", + "description": 
"Enables the webview_show command without any pre-configured scope.", + "commands": { "allow": ["webview_show"], "deny": [] } + }, + "allow-webview-size": { + "identifier": "allow-webview-size", + "description": "Enables the webview_size command without any pre-configured scope.", + "commands": { "allow": ["webview_size"], "deny": [] } + }, + "deny-clear-all-browsing-data": { + "identifier": "deny-clear-all-browsing-data", + "description": "Denies the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["clear_all_browsing_data"] } + }, + "deny-create-webview": { + "identifier": "deny-create-webview", + "description": "Denies the create_webview command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview"] } + }, + "deny-create-webview-window": { + "identifier": "deny-create-webview-window", + "description": "Denies the create_webview_window command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview_window"] } + }, + "deny-get-all-webviews": { + "identifier": "deny-get-all-webviews", + "description": "Denies the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_all_webviews"] } + }, + "deny-internal-toggle-devtools": { + "identifier": "deny-internal-toggle-devtools", + "description": "Denies the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["internal_toggle_devtools"] } + }, + "deny-print": { + "identifier": "deny-print", + "description": "Denies the print command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["print"] } + }, + "deny-reparent": { + "identifier": "deny-reparent", + "description": "Denies the reparent command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["reparent"] } + }, + "deny-set-webview-auto-resize": { + "identifier": "deny-set-webview-auto-resize", + 
"description": "Denies the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_auto_resize"] } + }, + "deny-set-webview-background-color": { + "identifier": "deny-set-webview-background-color", + "description": "Denies the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_background_color"] } + }, + "deny-set-webview-focus": { + "identifier": "deny-set-webview-focus", + "description": "Denies the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_focus"] } + }, + "deny-set-webview-position": { + "identifier": "deny-set-webview-position", + "description": "Denies the set_webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_position"] } + }, + "deny-set-webview-size": { + "identifier": "deny-set-webview-size", + "description": "Denies the set_webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_size"] } + }, + "deny-set-webview-zoom": { + "identifier": "deny-set-webview-zoom", + "description": "Denies the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_zoom"] } + }, + "deny-webview-close": { + "identifier": "deny-webview-close", + "description": "Denies the webview_close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_close"] } + }, + "deny-webview-hide": { + "identifier": "deny-webview-hide", + "description": "Denies the webview_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_hide"] } + }, + "deny-webview-position": { + "identifier": "deny-webview-position", + "description": "Denies the webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_position"] 
} + }, + "deny-webview-show": { + "identifier": "deny-webview-show", + "description": "Denies the webview_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_show"] } + }, + "deny-webview-size": { + "identifier": "deny-webview-size", + "description": "Denies the webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:window": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-get-all-windows", + "allow-scale-factor", + "allow-inner-position", + "allow-outer-position", + "allow-inner-size", + "allow-outer-size", + "allow-is-fullscreen", + "allow-is-minimized", + "allow-is-maximized", + "allow-is-focused", + "allow-is-decorated", + "allow-is-resizable", + "allow-is-maximizable", + "allow-is-minimizable", + "allow-is-closable", + "allow-is-visible", + "allow-is-enabled", + "allow-title", + "allow-current-monitor", + "allow-primary-monitor", + "allow-monitor-from-point", + "allow-available-monitors", + "allow-cursor-position", + "allow-theme", + "allow-is-always-on-top", + "allow-internal-toggle-maximize" + ] + }, + "permissions": { + "allow-available-monitors": { + "identifier": "allow-available-monitors", + "description": "Enables the available_monitors command without any pre-configured scope.", + "commands": { "allow": ["available_monitors"], "deny": [] } + }, + "allow-center": { + "identifier": "allow-center", + "description": "Enables the center command without any pre-configured scope.", + "commands": { "allow": ["center"], "deny": [] } + }, + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "allow-create": { + "identifier": "allow-create", + "description": "Enables 
the create command without any pre-configured scope.", + "commands": { "allow": ["create"], "deny": [] } + }, + "allow-current-monitor": { + "identifier": "allow-current-monitor", + "description": "Enables the current_monitor command without any pre-configured scope.", + "commands": { "allow": ["current_monitor"], "deny": [] } + }, + "allow-cursor-position": { + "identifier": "allow-cursor-position", + "description": "Enables the cursor_position command without any pre-configured scope.", + "commands": { "allow": ["cursor_position"], "deny": [] } + }, + "allow-destroy": { + "identifier": "allow-destroy", + "description": "Enables the destroy command without any pre-configured scope.", + "commands": { "allow": ["destroy"], "deny": [] } + }, + "allow-get-all-windows": { + "identifier": "allow-get-all-windows", + "description": "Enables the get_all_windows command without any pre-configured scope.", + "commands": { "allow": ["get_all_windows"], "deny": [] } + }, + "allow-hide": { + "identifier": "allow-hide", + "description": "Enables the hide command without any pre-configured scope.", + "commands": { "allow": ["hide"], "deny": [] } + }, + "allow-inner-position": { + "identifier": "allow-inner-position", + "description": "Enables the inner_position command without any pre-configured scope.", + "commands": { "allow": ["inner_position"], "deny": [] } + }, + "allow-inner-size": { + "identifier": "allow-inner-size", + "description": "Enables the inner_size command without any pre-configured scope.", + "commands": { "allow": ["inner_size"], "deny": [] } + }, + "allow-internal-toggle-maximize": { + "identifier": "allow-internal-toggle-maximize", + "description": "Enables the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_maximize"], "deny": [] } + }, + "allow-is-always-on-top": { + "identifier": "allow-is-always-on-top", + "description": "Enables the is_always_on_top command without any pre-configured 
scope.", + "commands": { "allow": ["is_always_on_top"], "deny": [] } + }, + "allow-is-closable": { + "identifier": "allow-is-closable", + "description": "Enables the is_closable command without any pre-configured scope.", + "commands": { "allow": ["is_closable"], "deny": [] } + }, + "allow-is-decorated": { + "identifier": "allow-is-decorated", + "description": "Enables the is_decorated command without any pre-configured scope.", + "commands": { "allow": ["is_decorated"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-is-focused": { + "identifier": "allow-is-focused", + "description": "Enables the is_focused command without any pre-configured scope.", + "commands": { "allow": ["is_focused"], "deny": [] } + }, + "allow-is-fullscreen": { + "identifier": "allow-is-fullscreen", + "description": "Enables the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["is_fullscreen"], "deny": [] } + }, + "allow-is-maximizable": { + "identifier": "allow-is-maximizable", + "description": "Enables the is_maximizable command without any pre-configured scope.", + "commands": { "allow": ["is_maximizable"], "deny": [] } + }, + "allow-is-maximized": { + "identifier": "allow-is-maximized", + "description": "Enables the is_maximized command without any pre-configured scope.", + "commands": { "allow": ["is_maximized"], "deny": [] } + }, + "allow-is-minimizable": { + "identifier": "allow-is-minimizable", + "description": "Enables the is_minimizable command without any pre-configured scope.", + "commands": { "allow": ["is_minimizable"], "deny": [] } + }, + "allow-is-minimized": { + "identifier": "allow-is-minimized", + "description": "Enables the is_minimized command without any pre-configured scope.", + "commands": { "allow": ["is_minimized"], "deny": [] } + }, + 
"allow-is-resizable": { + "identifier": "allow-is-resizable", + "description": "Enables the is_resizable command without any pre-configured scope.", + "commands": { "allow": ["is_resizable"], "deny": [] } + }, + "allow-is-visible": { + "identifier": "allow-is-visible", + "description": "Enables the is_visible command without any pre-configured scope.", + "commands": { "allow": ["is_visible"], "deny": [] } + }, + "allow-maximize": { + "identifier": "allow-maximize", + "description": "Enables the maximize command without any pre-configured scope.", + "commands": { "allow": ["maximize"], "deny": [] } + }, + "allow-minimize": { + "identifier": "allow-minimize", + "description": "Enables the minimize command without any pre-configured scope.", + "commands": { "allow": ["minimize"], "deny": [] } + }, + "allow-monitor-from-point": { + "identifier": "allow-monitor-from-point", + "description": "Enables the monitor_from_point command without any pre-configured scope.", + "commands": { "allow": ["monitor_from_point"], "deny": [] } + }, + "allow-outer-position": { + "identifier": "allow-outer-position", + "description": "Enables the outer_position command without any pre-configured scope.", + "commands": { "allow": ["outer_position"], "deny": [] } + }, + "allow-outer-size": { + "identifier": "allow-outer-size", + "description": "Enables the outer_size command without any pre-configured scope.", + "commands": { "allow": ["outer_size"], "deny": [] } + }, + "allow-primary-monitor": { + "identifier": "allow-primary-monitor", + "description": "Enables the primary_monitor command without any pre-configured scope.", + "commands": { "allow": ["primary_monitor"], "deny": [] } + }, + "allow-request-user-attention": { + "identifier": "allow-request-user-attention", + "description": "Enables the request_user_attention command without any pre-configured scope.", + "commands": { "allow": ["request_user_attention"], "deny": [] } + }, + "allow-scale-factor": { + "identifier": 
"allow-scale-factor", + "description": "Enables the scale_factor command without any pre-configured scope.", + "commands": { "allow": ["scale_factor"], "deny": [] } + }, + "allow-set-always-on-bottom": { + "identifier": "allow-set-always-on-bottom", + "description": "Enables the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_bottom"], "deny": [] } + }, + "allow-set-always-on-top": { + "identifier": "allow-set-always-on-top", + "description": "Enables the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_top"], "deny": [] } + }, + "allow-set-background-color": { + "identifier": "allow-set-background-color", + "description": "Enables the set_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_background_color"], "deny": [] } + }, + "allow-set-badge-count": { + "identifier": "allow-set-badge-count", + "description": "Enables the set_badge_count command without any pre-configured scope.", + "commands": { "allow": ["set_badge_count"], "deny": [] } + }, + "allow-set-badge-label": { + "identifier": "allow-set-badge-label", + "description": "Enables the set_badge_label command without any pre-configured scope.", + "commands": { "allow": ["set_badge_label"], "deny": [] } + }, + "allow-set-closable": { + "identifier": "allow-set-closable", + "description": "Enables the set_closable command without any pre-configured scope.", + "commands": { "allow": ["set_closable"], "deny": [] } + }, + "allow-set-content-protected": { + "identifier": "allow-set-content-protected", + "description": "Enables the set_content_protected command without any pre-configured scope.", + "commands": { "allow": ["set_content_protected"], "deny": [] } + }, + "allow-set-cursor-grab": { + "identifier": "allow-set-cursor-grab", + "description": "Enables the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": 
["set_cursor_grab"], "deny": [] } + }, + "allow-set-cursor-icon": { + "identifier": "allow-set-cursor-icon", + "description": "Enables the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_icon"], "deny": [] } + }, + "allow-set-cursor-position": { + "identifier": "allow-set-cursor-position", + "description": "Enables the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_position"], "deny": [] } + }, + "allow-set-cursor-visible": { + "identifier": "allow-set-cursor-visible", + "description": "Enables the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_visible"], "deny": [] } + }, + "allow-set-decorations": { + "identifier": "allow-set-decorations", + "description": "Enables the set_decorations command without any pre-configured scope.", + "commands": { "allow": ["set_decorations"], "deny": [] } + }, + "allow-set-effects": { + "identifier": "allow-set-effects", + "description": "Enables the set_effects command without any pre-configured scope.", + "commands": { "allow": ["set_effects"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-focus": { + "identifier": "allow-set-focus", + "description": "Enables the set_focus command without any pre-configured scope.", + "commands": { "allow": ["set_focus"], "deny": [] } + }, + "allow-set-focusable": { + "identifier": "allow-set-focusable", + "description": "Enables the set_focusable command without any pre-configured scope.", + "commands": { "allow": ["set_focusable"], "deny": [] } + }, + "allow-set-fullscreen": { + "identifier": "allow-set-fullscreen", + "description": "Enables the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_fullscreen"], 
"deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-ignore-cursor-events": { + "identifier": "allow-set-ignore-cursor-events", + "description": "Enables the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": ["set_ignore_cursor_events"], "deny": [] } + }, + "allow-set-max-size": { + "identifier": "allow-set-max-size", + "description": "Enables the set_max_size command without any pre-configured scope.", + "commands": { "allow": ["set_max_size"], "deny": [] } + }, + "allow-set-maximizable": { + "identifier": "allow-set-maximizable", + "description": "Enables the set_maximizable command without any pre-configured scope.", + "commands": { "allow": ["set_maximizable"], "deny": [] } + }, + "allow-set-min-size": { + "identifier": "allow-set-min-size", + "description": "Enables the set_min_size command without any pre-configured scope.", + "commands": { "allow": ["set_min_size"], "deny": [] } + }, + "allow-set-minimizable": { + "identifier": "allow-set-minimizable", + "description": "Enables the set_minimizable command without any pre-configured scope.", + "commands": { "allow": ["set_minimizable"], "deny": [] } + }, + "allow-set-overlay-icon": { + "identifier": "allow-set-overlay-icon", + "description": "Enables the set_overlay_icon command without any pre-configured scope.", + "commands": { "allow": ["set_overlay_icon"], "deny": [] } + }, + "allow-set-position": { + "identifier": "allow-set-position", + "description": "Enables the set_position command without any pre-configured scope.", + "commands": { "allow": ["set_position"], "deny": [] } + }, + "allow-set-progress-bar": { + "identifier": "allow-set-progress-bar", + "description": "Enables the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": ["set_progress_bar"], 
"deny": [] } + }, + "allow-set-resizable": { + "identifier": "allow-set-resizable", + "description": "Enables the set_resizable command without any pre-configured scope.", + "commands": { "allow": ["set_resizable"], "deny": [] } + }, + "allow-set-shadow": { + "identifier": "allow-set-shadow", + "description": "Enables the set_shadow command without any pre-configured scope.", + "commands": { "allow": ["set_shadow"], "deny": [] } + }, + "allow-set-simple-fullscreen": { + "identifier": "allow-set-simple-fullscreen", + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_simple_fullscreen"], "deny": [] } + }, + "allow-set-size": { + "identifier": "allow-set-size", + "description": "Enables the set_size command without any pre-configured scope.", + "commands": { "allow": ["set_size"], "deny": [] } + }, + "allow-set-size-constraints": { + "identifier": "allow-set-size-constraints", + "description": "Enables the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": ["set_size_constraints"], "deny": [] } + }, + "allow-set-skip-taskbar": { + "identifier": "allow-set-skip-taskbar", + "description": "Enables the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": ["set_skip_taskbar"], "deny": [] } + }, + "allow-set-theme": { + "identifier": "allow-set-theme", + "description": "Enables the set_theme command without any pre-configured scope.", + "commands": { "allow": ["set_theme"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-title-bar-style": { + "identifier": "allow-set-title-bar-style", + "description": "Enables the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": ["set_title_bar_style"], "deny": [] } + }, + 
"allow-set-visible-on-all-workspaces": { + "identifier": "allow-set-visible-on-all-workspaces", + "description": "Enables the set_visible_on_all_workspaces command without any pre-configured scope.", + "commands": { "allow": ["set_visible_on_all_workspaces"], "deny": [] } + }, + "allow-show": { + "identifier": "allow-show", + "description": "Enables the show command without any pre-configured scope.", + "commands": { "allow": ["show"], "deny": [] } + }, + "allow-start-dragging": { + "identifier": "allow-start-dragging", + "description": "Enables the start_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_dragging"], "deny": [] } + }, + "allow-start-resize-dragging": { + "identifier": "allow-start-resize-dragging", + "description": "Enables the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_resize_dragging"], "deny": [] } + }, + "allow-theme": { + "identifier": "allow-theme", + "description": "Enables the theme command without any pre-configured scope.", + "commands": { "allow": ["theme"], "deny": [] } + }, + "allow-title": { + "identifier": "allow-title", + "description": "Enables the title command without any pre-configured scope.", + "commands": { "allow": ["title"], "deny": [] } + }, + "allow-toggle-maximize": { + "identifier": "allow-toggle-maximize", + "description": "Enables the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["toggle_maximize"], "deny": [] } + }, + "allow-unmaximize": { + "identifier": "allow-unmaximize", + "description": "Enables the unmaximize command without any pre-configured scope.", + "commands": { "allow": ["unmaximize"], "deny": [] } + }, + "allow-unminimize": { + "identifier": "allow-unminimize", + "description": "Enables the unminimize command without any pre-configured scope.", + "commands": { "allow": ["unminimize"], "deny": [] } + }, + "deny-available-monitors": { + "identifier":
"deny-available-monitors", + "description": "Denies the available_monitors command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["available_monitors"] } + }, + "deny-center": { + "identifier": "deny-center", + "description": "Denies the center command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["center"] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + }, + "deny-create": { + "identifier": "deny-create", + "description": "Denies the create command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create"] } + }, + "deny-current-monitor": { + "identifier": "deny-current-monitor", + "description": "Denies the current_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["current_monitor"] } + }, + "deny-cursor-position": { + "identifier": "deny-cursor-position", + "description": "Denies the cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["cursor_position"] } + }, + "deny-destroy": { + "identifier": "deny-destroy", + "description": "Denies the destroy command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["destroy"] } + }, + "deny-get-all-windows": { + "identifier": "deny-get-all-windows", + "description": "Denies the get_all_windows command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_all_windows"] } + }, + "deny-hide": { + "identifier": "deny-hide", + "description": "Denies the hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["hide"] } + }, + "deny-inner-position": { + "identifier": "deny-inner-position", + "description": "Denies the inner_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_position"] } + }, + 
"deny-inner-size": { + "identifier": "deny-inner-size", + "description": "Denies the inner_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_size"] } + }, + "deny-internal-toggle-maximize": { + "identifier": "deny-internal-toggle-maximize", + "description": "Denies the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["internal_toggle_maximize"] } + }, + "deny-is-always-on-top": { + "identifier": "deny-is-always-on-top", + "description": "Denies the is_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_always_on_top"] } + }, + "deny-is-closable": { + "identifier": "deny-is-closable", + "description": "Denies the is_closable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_closable"] } + }, + "deny-is-decorated": { + "identifier": "deny-is-decorated", + "description": "Denies the is_decorated command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_decorated"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-is-focused": { + "identifier": "deny-is-focused", + "description": "Denies the is_focused command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_focused"] } + }, + "deny-is-fullscreen": { + "identifier": "deny-is-fullscreen", + "description": "Denies the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_fullscreen"] } + }, + "deny-is-maximizable": { + "identifier": "deny-is-maximizable", + "description": "Denies the is_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximizable"] } + }, + "deny-is-maximized": { + "identifier": "deny-is-maximized", + 
"description": "Denies the is_maximized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximized"] } + }, + "deny-is-minimizable": { + "identifier": "deny-is-minimizable", + "description": "Denies the is_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimizable"] } + }, + "deny-is-minimized": { + "identifier": "deny-is-minimized", + "description": "Denies the is_minimized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimized"] } + }, + "deny-is-resizable": { + "identifier": "deny-is-resizable", + "description": "Denies the is_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_resizable"] } + }, + "deny-is-visible": { + "identifier": "deny-is-visible", + "description": "Denies the is_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_visible"] } + }, + "deny-maximize": { + "identifier": "deny-maximize", + "description": "Denies the maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["maximize"] } + }, + "deny-minimize": { + "identifier": "deny-minimize", + "description": "Denies the minimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["minimize"] } + }, + "deny-monitor-from-point": { + "identifier": "deny-monitor-from-point", + "description": "Denies the monitor_from_point command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["monitor_from_point"] } + }, + "deny-outer-position": { + "identifier": "deny-outer-position", + "description": "Denies the outer_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["outer_position"] } + }, + "deny-outer-size": { + "identifier": "deny-outer-size", + "description": "Denies the outer_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["outer_size"] } + }, + "deny-primary-monitor": { + "identifier": "deny-primary-monitor", + "description": "Denies the primary_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["primary_monitor"] } + }, + "deny-request-user-attention": { + "identifier": "deny-request-user-attention", + "description": "Denies the request_user_attention command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["request_user_attention"] } + }, + "deny-scale-factor": { + "identifier": "deny-scale-factor", + "description": "Denies the scale_factor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["scale_factor"] } + }, + "deny-set-always-on-bottom": { + "identifier": "deny-set-always-on-bottom", + "description": "Denies the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_bottom"] } + }, + "deny-set-always-on-top": { + "identifier": "deny-set-always-on-top", + "description": "Denies the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_top"] } + }, + "deny-set-background-color": { + "identifier": "deny-set-background-color", + "description": "Denies the set_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_background_color"] } + }, + "deny-set-badge-count": { + "identifier": "deny-set-badge-count", + "description": "Denies the set_badge_count command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_count"] } + }, + "deny-set-badge-label": { + "identifier": "deny-set-badge-label", + "description": "Denies the set_badge_label command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_label"] } + }, + "deny-set-closable": { + "identifier": "deny-set-closable", + "description": "Denies the set_closable command without any pre-configured 
scope.", + "commands": { "allow": [], "deny": ["set_closable"] } + }, + "deny-set-content-protected": { + "identifier": "deny-set-content-protected", + "description": "Denies the set_content_protected command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_content_protected"] } + }, + "deny-set-cursor-grab": { + "identifier": "deny-set-cursor-grab", + "description": "Denies the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_grab"] } + }, + "deny-set-cursor-icon": { + "identifier": "deny-set-cursor-icon", + "description": "Denies the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_icon"] } + }, + "deny-set-cursor-position": { + "identifier": "deny-set-cursor-position", + "description": "Denies the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_position"] } + }, + "deny-set-cursor-visible": { + "identifier": "deny-set-cursor-visible", + "description": "Denies the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_visible"] } + }, + "deny-set-decorations": { + "identifier": "deny-set-decorations", + "description": "Denies the set_decorations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_decorations"] } + }, + "deny-set-effects": { + "identifier": "deny-set-effects", + "description": "Denies the set_effects command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_effects"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-focus": { + "identifier": "deny-set-focus", + "description": "Denies the set_focus command without any pre-configured scope.", + 
"commands": { "allow": [], "deny": ["set_focus"] } + }, + "deny-set-focusable": { + "identifier": "deny-set-focusable", + "description": "Denies the set_focusable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_focusable"] } + }, + "deny-set-fullscreen": { + "identifier": "deny-set-fullscreen", + "description": "Denies the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_fullscreen"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-ignore-cursor-events": { + "identifier": "deny-set-ignore-cursor-events", + "description": "Denies the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_ignore_cursor_events"] } + }, + "deny-set-max-size": { + "identifier": "deny-set-max-size", + "description": "Denies the set_max_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_max_size"] } + }, + "deny-set-maximizable": { + "identifier": "deny-set-maximizable", + "description": "Denies the set_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_maximizable"] } + }, + "deny-set-min-size": { + "identifier": "deny-set-min-size", + "description": "Denies the set_min_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_min_size"] } + }, + "deny-set-minimizable": { + "identifier": "deny-set-minimizable", + "description": "Denies the set_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_minimizable"] } + }, + "deny-set-overlay-icon": { + "identifier": "deny-set-overlay-icon", + "description": "Denies the set_overlay_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["set_overlay_icon"] } + }, + "deny-set-position": { + "identifier": "deny-set-position", + "description": "Denies the set_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_position"] } + }, + "deny-set-progress-bar": { + "identifier": "deny-set-progress-bar", + "description": "Denies the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_progress_bar"] } + }, + "deny-set-resizable": { + "identifier": "deny-set-resizable", + "description": "Denies the set_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_resizable"] } + }, + "deny-set-shadow": { + "identifier": "deny-set-shadow", + "description": "Denies the set_shadow command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_shadow"] } + }, + "deny-set-simple-fullscreen": { + "identifier": "deny-set-simple-fullscreen", + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_simple_fullscreen"] } + }, + "deny-set-size": { + "identifier": "deny-set-size", + "description": "Denies the set_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size"] } + }, + "deny-set-size-constraints": { + "identifier": "deny-set-size-constraints", + "description": "Denies the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size_constraints"] } + }, + "deny-set-skip-taskbar": { + "identifier": "deny-set-skip-taskbar", + "description": "Denies the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_skip_taskbar"] } + }, + "deny-set-theme": { + "identifier": "deny-set-theme", + "description": "Denies the set_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_theme"] } + }, + "deny-set-title": { 
+ "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-title-bar-style": { + "identifier": "deny-set-title-bar-style", + "description": "Denies the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title_bar_style"] } + }, + "deny-set-visible-on-all-organizations": { + "identifier": "deny-set-visible-on-all-organizations", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible_on_all_organizations"] } + }, + "deny-show": { + "identifier": "deny-show", + "description": "Denies the show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["show"] } + }, + "deny-start-dragging": { + "identifier": "deny-start-dragging", + "description": "Denies the start_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_dragging"] } + }, + "deny-start-resize-dragging": { + "identifier": "deny-start-resize-dragging", + "description": "Denies the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_resize_dragging"] } + }, + "deny-theme": { + "identifier": "deny-theme", + "description": "Denies the theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["theme"] } + }, + "deny-title": { + "identifier": "deny-title", + "description": "Denies the title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["title"] } + }, + "deny-toggle-maximize": { + "identifier": "deny-toggle-maximize", + "description": "Denies the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["toggle_maximize"] } + }, + "deny-unmaximize": { + "identifier": "deny-unmaximize", + "description": 
"Denies the unmaximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unmaximize"] } + }, + "deny-unminimize": { + "identifier": "deny-unminimize", + "description": "Denies the unminimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unminimize"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "shell": { + "default_permission": { + "identifier": "default", + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n", + "permissions": ["allow-open"] + }, + "permissions": { + "allow-execute": { + "identifier": "allow-execute", + "description": "Enables the execute command without any pre-configured scope.", + "commands": { "allow": ["execute"], "deny": [] } + }, + "allow-kill": { + "identifier": "allow-kill", + "description": "Enables the kill command without any pre-configured scope.", + "commands": { "allow": ["kill"], "deny": [] } + }, + "allow-open": { + "identifier": "allow-open", + "description": "Enables the open command without any pre-configured scope.", + "commands": { "allow": ["open"], "deny": [] } + }, + "allow-spawn": { + "identifier": "allow-spawn", + "description": "Enables the spawn command without any pre-configured scope.", + "commands": { "allow": ["spawn"], "deny": [] } + }, + "allow-stdin-write": { + "identifier": "allow-stdin-write", + "description": "Enables the stdin_write command without any pre-configured scope.", + "commands": { "allow": ["stdin_write"], "deny": [] } + }, + "deny-execute": { + "identifier": "deny-execute", + "description": "Denies the execute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["execute"] } + }, + "deny-kill": { + "identifier": "deny-kill", + "description": "Denies 
the kill command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["kill"] } + }, + "deny-open": { + "identifier": "deny-open", + "description": "Denies the open command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["open"] } + }, + "deny-spawn": { + "identifier": "deny-spawn", + "description": "Denies the spawn command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["spawn"] } + }, + "deny-stdin-write": { + "identifier": "deny-stdin-write", + "description": "Denies the stdin_write command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["stdin_write"] } + } + }, + "permission_sets": {}, + "global_scope_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "required": ["cmd", "name"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." 
}, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { "description": "If this command is a sidecar command.", "type": "boolean" } + }, + "required": ["name", "sidecar"], + "type": "object" + } + ], + "definitions": { + "ShellScopeEntryAllowedArg": { + "anyOf": [ + { "description": "A non-configurable argument that is passed to the command in the order it was specified.", "type": "string" }, + { + "additionalProperties": false, + "description": "A variable that is set while calling the command from the webview API.", + "properties": { + "raw": { + "default": false, + "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.", + "type": "boolean" + }, + "validator": { + "description": "[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ", + "type": "string" + } + }, + "required": ["validator"], + "type": "object" + } + ], + "description": "A command argument allowed to be executed by the webview API." 
+ }, + "ShellScopeEntryAllowedArgs": { + "anyOf": [ + { "description": "Use a simple boolean to allow all or disable all arguments to this command configuration.", "type": "boolean" }, + { + "description": "A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.", + "items": { "$ref": "#/definitions/ShellScopeEntryAllowedArg" }, + "type": "array" + } + ], + "description": "A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration." + } + }, + "description": "Shell scope entry.", + "title": "ShellScopeEntry" + } + } +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/capabilities.json b/foundry/packages/desktop/src-tauri/gen/schemas/capabilities.json new file mode 100644 index 0000000..fe3a6f3 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/gen/schemas/capabilities.json @@ -0,0 +1 @@ +{"default":{"identifier":"default","description":"Default capability for Foundry desktop","local":true,"windows":["main"],"permissions":["core:default","core:window:allow-start-dragging","shell:allow-open",{"identifier":"shell:allow-execute","allow":[{"args":true,"name":"sidecars/foundry-backend","sidecar":true}]},{"identifier":"shell:allow-spawn","allow":[{"args":true,"name":"sidecars/foundry-backend","sidecar":true}]}]}} \ No newline at end of file diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json new file mode 100644 index 0000000..34f0a61 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json @@ -0,0 +1,2522 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "CapabilityFile", + "description": "Capability 
formats accepted in a capability file.", + "anyOf": [ + { + "description": "A single capability.", + "allOf": [ + { + "$ref": "#/definitions/Capability" + } + ] + }, + { + "description": "A list of capabilities.", + "type": "array", + "items": { + "$ref": "#/definitions/Capability" + } + }, + { + "description": "A list of capabilities.", + "type": "object", + "required": ["capabilities"], + "properties": { + "capabilities": { + "description": "The list of capabilities.", + "type": "array", + "items": { + "$ref": "#/definitions/Capability" + } + } + } + } + ], + "definitions": { + "Capability": { + "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. 
A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", + "type": "object", + "required": ["identifier", "permissions"], + "properties": { + "identifier": { + "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", + "type": "string" + }, + "description": { + "description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.", + "default": "", + "type": "string" + }, + "remote": { + "description": "Configure remote URLs that can use the capability permissions.\n\nThis setting is optional and defaults to not being set, as our default use case is that the content is served from our local application.\n\n:::caution Make sure you understand the security implications of providing remote sources with local system access. :::\n\n## Example\n\n```json { \"urls\": [\"https://*.mydomain.dev\"] } ```", + "anyOf": [ + { + "$ref": "#/definitions/CapabilityRemote" + }, + { + "type": "null" + } + ] + }, + "local": { + "description": "Whether this capability is enabled for local app URLs or not. Defaults to `true`.", + "default": true, + "type": "boolean" + }, + "windows": { + "description": "List of windows that are affected by this capability. 
Can be a glob pattern.\n\nIf a window label matches any of the patterns in this list, the capability will be enabled on all the webviews of that window, regardless of the value of [`Self::webviews`].\n\nOn multiwebview windows, prefer specifying [`Self::webviews`] and omitting [`Self::windows`] for a fine grained access control.\n\n## Example\n\n`[\"main\"]`", + "type": "array", + "items": { + "type": "string" + } + }, + "webviews": { + "description": "List of webviews that are affected by this capability. Can be a glob pattern.\n\nThe capability will be enabled on all the webviews whose label matches any of the patterns in this list, regardless of whether the webview's window label matches a pattern in [`Self::windows`].\n\n## Example\n\n`[\"sub-webview-one\", \"sub-webview-two\"]`", + "type": "array", + "items": { + "type": "string" + } + }, + "permissions": { + "description": "List of permissions attached to this capability.\n\nMust include the plugin name as prefix in the form of `${plugin-name}:${permission-name}`. 
For commands directly implemented in the application itself only `${permission-name}` is required.\n\n## Example\n\n```json [ \"core:default\", \"shell:allow-open\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] } ] ```", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionEntry" + }, + "uniqueItems": true + }, + "platforms": { + "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "CapabilityRemote": { + "description": "Configuration for remote URLs that are associated with the capability.", + "type": "object", + "required": ["urls"], + "properties": { + "urls": { + "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionEntry": { + "description": "An entry for a permission value in a [`Capability`] can be either a raw permission [`Identifier`] or an object that references a permission and extends its scope.", + "anyOf": [ + { + "description": "Reference a permission or permission set by identifier.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + }, + { + "description": "Reference a permission or permission set by identifier and extends its scope.", + "type": "object", + "allOf": [ + { + "if": { + "properties": { + "identifier": { + "anyOf": [ + { + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`", + "type": "string", + "const": "shell:default", + "markdownDescription": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`" + }, + { + "description": "Enables the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-execute", + "markdownDescription": "Enables the execute command without any pre-configured scope." + }, + { + "description": "Enables the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-kill", + "markdownDescription": "Enables the kill command without any pre-configured scope." + }, + { + "description": "Enables the open command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-open", + "markdownDescription": "Enables the open command without any pre-configured scope." + }, + { + "description": "Enables the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-spawn", + "markdownDescription": "Enables the spawn command without any pre-configured scope." + }, + { + "description": "Enables the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-stdin-write", + "markdownDescription": "Enables the stdin_write command without any pre-configured scope." + }, + { + "description": "Denies the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-execute", + "markdownDescription": "Denies the execute command without any pre-configured scope." 
+ }, + { + "description": "Denies the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-kill", + "markdownDescription": "Denies the kill command without any pre-configured scope." + }, + { + "description": "Denies the open command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-open", + "markdownDescription": "Denies the open command without any pre-configured scope." + }, + { + "description": "Denies the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-spawn", + "markdownDescription": "Denies the spawn command without any pre-configured scope." + }, + { + "description": "Denies the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-stdin-write", + "markdownDescription": "Denies the stdin_write command without any pre-configured scope." + } + ] + } + } + }, + "then": { + "properties": { + "allow": { + "items": { + "title": "ShellScopeEntry", + "description": "Shell scope entry.", + "anyOf": [ + { + "type": "object", + "required": ["cmd", "name"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. 
The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": ["name", "sidecar"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { + "description": "If this command is a sidecar command.", + "type": "boolean" + } + }, + "additionalProperties": false + } + ] + } + }, + "deny": { + "items": { + "title": "ShellScopeEntry", + "description": "Shell scope entry.", + "anyOf": [ + { + "type": "object", + "required": ["cmd", "name"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. 
The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": ["name", "sidecar"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { + "description": "If this command is a sidecar command.", + "type": "boolean" + } + }, + "additionalProperties": false + } + ] + } + } + } + }, + "properties": { + "identifier": { + "description": "Identifier of the permission or permission set.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + } + } + }, + { + "properties": { + "identifier": { + "description": "Identifier of the permission or permission set.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + }, + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Value" + } + } + } + } + ], + "required": ["identifier"] + } + ] + }, + "Identifier": { + "description": "Permission identifier", + "oneOf": [ + { + "description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`", + "type": "string", + "const": "core:default", + "markdownDescription": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`" + }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`", + "type": "string", + "const": "core:app:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`" + }, + { + "description": "Enables the app_hide command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-app-hide", + "markdownDescription": "Enables the app_hide command without any pre-configured scope." 
+ }, + { + "description": "Enables the app_show command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-app-show", + "markdownDescription": "Enables the app_show command without any pre-configured scope." + }, + { + "description": "Enables the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-bundle-type", + "markdownDescription": "Enables the bundle_type command without any pre-configured scope." + }, + { + "description": "Enables the default_window_icon command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-default-window-icon", + "markdownDescription": "Enables the default_window_icon command without any pre-configured scope." + }, + { + "description": "Enables the fetch_data_store_identifiers command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-fetch-data-store-identifiers", + "markdownDescription": "Enables the fetch_data_store_identifiers command without any pre-configured scope." + }, + { + "description": "Enables the identifier command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-identifier", + "markdownDescription": "Enables the identifier command without any pre-configured scope." + }, + { + "description": "Enables the name command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-name", + "markdownDescription": "Enables the name command without any pre-configured scope." + }, + { + "description": "Enables the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-register-listener", + "markdownDescription": "Enables the register_listener command without any pre-configured scope." 
+ }, + { + "description": "Enables the remove_data_store command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-data-store", + "markdownDescription": "Enables the remove_data_store command without any pre-configured scope." + }, + { + "description": "Enables the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-listener", + "markdownDescription": "Enables the remove_listener command without any pre-configured scope." + }, + { + "description": "Enables the set_app_theme command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-set-app-theme", + "markdownDescription": "Enables the set_app_theme command without any pre-configured scope." + }, + { + "description": "Enables the set_dock_visibility command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-set-dock-visibility", + "markdownDescription": "Enables the set_dock_visibility command without any pre-configured scope." + }, + { + "description": "Enables the tauri_version command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-tauri-version", + "markdownDescription": "Enables the tauri_version command without any pre-configured scope." + }, + { + "description": "Enables the version command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-version", + "markdownDescription": "Enables the version command without any pre-configured scope." + }, + { + "description": "Denies the app_hide command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-app-hide", + "markdownDescription": "Denies the app_hide command without any pre-configured scope." 
+ }, + { + "description": "Denies the app_show command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-app-show", + "markdownDescription": "Denies the app_show command without any pre-configured scope." + }, + { + "description": "Denies the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-bundle-type", + "markdownDescription": "Denies the bundle_type command without any pre-configured scope." + }, + { + "description": "Denies the default_window_icon command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-default-window-icon", + "markdownDescription": "Denies the default_window_icon command without any pre-configured scope." + }, + { + "description": "Denies the fetch_data_store_identifiers command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-fetch-data-store-identifiers", + "markdownDescription": "Denies the fetch_data_store_identifiers command without any pre-configured scope." + }, + { + "description": "Denies the identifier command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-identifier", + "markdownDescription": "Denies the identifier command without any pre-configured scope." + }, + { + "description": "Denies the name command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-name", + "markdownDescription": "Denies the name command without any pre-configured scope." + }, + { + "description": "Denies the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-register-listener", + "markdownDescription": "Denies the register_listener command without any pre-configured scope." 
+ }, + { + "description": "Denies the remove_data_store command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-data-store", + "markdownDescription": "Denies the remove_data_store command without any pre-configured scope." + }, + { + "description": "Denies the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-listener", + "markdownDescription": "Denies the remove_listener command without any pre-configured scope." + }, + { + "description": "Denies the set_app_theme command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-set-app-theme", + "markdownDescription": "Denies the set_app_theme command without any pre-configured scope." + }, + { + "description": "Denies the set_dock_visibility command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-set-dock-visibility", + "markdownDescription": "Denies the set_dock_visibility command without any pre-configured scope." + }, + { + "description": "Denies the tauri_version command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-tauri-version", + "markdownDescription": "Denies the tauri_version command without any pre-configured scope." + }, + { + "description": "Denies the version command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-version", + "markdownDescription": "Denies the version command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-listen`\n- `allow-unlisten`\n- `allow-emit`\n- `allow-emit-to`", + "type": "string", + "const": "core:event:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-listen`\n- `allow-unlisten`\n- `allow-emit`\n- `allow-emit-to`" + }, + { + "description": "Enables the emit command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-emit", + "markdownDescription": "Enables the emit command without any pre-configured scope." + }, + { + "description": "Enables the emit_to command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-emit-to", + "markdownDescription": "Enables the emit_to command without any pre-configured scope." + }, + { + "description": "Enables the listen command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-listen", + "markdownDescription": "Enables the listen command without any pre-configured scope." + }, + { + "description": "Enables the unlisten command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-unlisten", + "markdownDescription": "Enables the unlisten command without any pre-configured scope." + }, + { + "description": "Denies the emit command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-emit", + "markdownDescription": "Denies the emit command without any pre-configured scope." + }, + { + "description": "Denies the emit_to command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-emit-to", + "markdownDescription": "Denies the emit_to command without any pre-configured scope." 
+ }, + { + "description": "Denies the listen command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-listen", + "markdownDescription": "Denies the listen command without any pre-configured scope." + }, + { + "description": "Denies the unlisten command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-unlisten", + "markdownDescription": "Denies the unlisten command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-from-bytes`\n- `allow-from-path`\n- `allow-rgba`\n- `allow-size`", + "type": "string", + "const": "core:image:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-from-bytes`\n- `allow-from-path`\n- `allow-rgba`\n- `allow-size`" + }, + { + "description": "Enables the from_bytes command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-from-bytes", + "markdownDescription": "Enables the from_bytes command without any pre-configured scope." + }, + { + "description": "Enables the from_path command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-from-path", + "markdownDescription": "Enables the from_path command without any pre-configured scope." + }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the rgba command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-rgba", + "markdownDescription": "Enables the rgba command without any pre-configured scope." 
+ }, + { + "description": "Enables the size command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-size", + "markdownDescription": "Enables the size command without any pre-configured scope." + }, + { + "description": "Denies the from_bytes command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-from-bytes", + "markdownDescription": "Denies the from_bytes command without any pre-configured scope." + }, + { + "description": "Denies the from_path command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-from-path", + "markdownDescription": "Denies the from_path command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the rgba command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-rgba", + "markdownDescription": "Denies the rgba command without any pre-configured scope." + }, + { + "description": "Denies the size command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-size", + "markdownDescription": "Denies the size command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-append`\n- `allow-prepend`\n- `allow-insert`\n- `allow-remove`\n- `allow-remove-at`\n- `allow-items`\n- `allow-get`\n- `allow-popup`\n- `allow-create-default`\n- `allow-set-as-app-menu`\n- `allow-set-as-window-menu`\n- `allow-text`\n- `allow-set-text`\n- `allow-is-enabled`\n- `allow-set-enabled`\n- `allow-set-accelerator`\n- `allow-set-as-windows-menu-for-nsapp`\n- `allow-set-as-help-menu-for-nsapp`\n- `allow-is-checked`\n- `allow-set-checked`\n- `allow-set-icon`", + "type": "string", + "const": "core:menu:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-append`\n- `allow-prepend`\n- `allow-insert`\n- `allow-remove`\n- `allow-remove-at`\n- `allow-items`\n- `allow-get`\n- `allow-popup`\n- `allow-create-default`\n- `allow-set-as-app-menu`\n- `allow-set-as-window-menu`\n- `allow-text`\n- `allow-set-text`\n- `allow-is-enabled`\n- `allow-set-enabled`\n- `allow-set-accelerator`\n- `allow-set-as-windows-menu-for-nsapp`\n- `allow-set-as-help-menu-for-nsapp`\n- `allow-is-checked`\n- `allow-set-checked`\n- `allow-set-icon`" + }, + { + "description": "Enables the append command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-append", + "markdownDescription": "Enables the append command without any pre-configured scope." + }, + { + "description": "Enables the create_default command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-create-default", + "markdownDescription": "Enables the create_default command without any pre-configured scope." 
+ }, + { + "description": "Enables the get command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-get", + "markdownDescription": "Enables the get command without any pre-configured scope." + }, + { + "description": "Enables the insert command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-insert", + "markdownDescription": "Enables the insert command without any pre-configured scope." + }, + { + "description": "Enables the is_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-is-checked", + "markdownDescription": "Enables the is_checked command without any pre-configured scope." + }, + { + "description": "Enables the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-is-enabled", + "markdownDescription": "Enables the is_enabled command without any pre-configured scope." + }, + { + "description": "Enables the items command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-items", + "markdownDescription": "Enables the items command without any pre-configured scope." + }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the popup command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-popup", + "markdownDescription": "Enables the popup command without any pre-configured scope." + }, + { + "description": "Enables the prepend command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-prepend", + "markdownDescription": "Enables the prepend command without any pre-configured scope." 
+ }, + { + "description": "Enables the remove command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-remove", + "markdownDescription": "Enables the remove command without any pre-configured scope." + }, + { + "description": "Enables the remove_at command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-remove-at", + "markdownDescription": "Enables the remove_at command without any pre-configured scope." + }, + { + "description": "Enables the set_accelerator command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-accelerator", + "markdownDescription": "Enables the set_accelerator command without any pre-configured scope." + }, + { + "description": "Enables the set_as_app_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-app-menu", + "markdownDescription": "Enables the set_as_app_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-help-menu-for-nsapp", + "markdownDescription": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Enables the set_as_window_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-window-menu", + "markdownDescription": "Enables the set_as_window_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-windows-menu-for-nsapp", + "markdownDescription": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-checked", + "markdownDescription": "Enables the set_checked command without any pre-configured scope." + }, + { + "description": "Enables the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-enabled", + "markdownDescription": "Enables the set_enabled command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-text", + "markdownDescription": "Enables the set_text command without any pre-configured scope." + }, + { + "description": "Enables the text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-text", + "markdownDescription": "Enables the text command without any pre-configured scope." + }, + { + "description": "Denies the append command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-append", + "markdownDescription": "Denies the append command without any pre-configured scope." + }, + { + "description": "Denies the create_default command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-create-default", + "markdownDescription": "Denies the create_default command without any pre-configured scope." + }, + { + "description": "Denies the get command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-get", + "markdownDescription": "Denies the get command without any pre-configured scope." 
+ }, + { + "description": "Denies the insert command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-insert", + "markdownDescription": "Denies the insert command without any pre-configured scope." + }, + { + "description": "Denies the is_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-is-checked", + "markdownDescription": "Denies the is_checked command without any pre-configured scope." + }, + { + "description": "Denies the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-is-enabled", + "markdownDescription": "Denies the is_enabled command without any pre-configured scope." + }, + { + "description": "Denies the items command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-items", + "markdownDescription": "Denies the items command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the popup command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-popup", + "markdownDescription": "Denies the popup command without any pre-configured scope." + }, + { + "description": "Denies the prepend command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-prepend", + "markdownDescription": "Denies the prepend command without any pre-configured scope." + }, + { + "description": "Denies the remove command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-remove", + "markdownDescription": "Denies the remove command without any pre-configured scope." 
+ }, + { + "description": "Denies the remove_at command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-remove-at", + "markdownDescription": "Denies the remove_at command without any pre-configured scope." + }, + { + "description": "Denies the set_accelerator command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-accelerator", + "markdownDescription": "Denies the set_accelerator command without any pre-configured scope." + }, + { + "description": "Denies the set_as_app_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-app-menu", + "markdownDescription": "Denies the set_as_app_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-help-menu-for-nsapp", + "markdownDescription": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Denies the set_as_window_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-window-menu", + "markdownDescription": "Denies the set_as_window_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-windows-menu-for-nsapp", + "markdownDescription": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Denies the set_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-checked", + "markdownDescription": "Denies the set_checked command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-enabled", + "markdownDescription": "Denies the set_enabled command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-text", + "markdownDescription": "Denies the set_text command without any pre-configured scope." + }, + { + "description": "Denies the text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-text", + "markdownDescription": "Denies the text command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-resolve-directory`\n- `allow-resolve`\n- `allow-normalize`\n- `allow-join`\n- `allow-dirname`\n- `allow-extname`\n- `allow-basename`\n- `allow-is-absolute`", + "type": "string", + "const": "core:path:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-resolve-directory`\n- `allow-resolve`\n- `allow-normalize`\n- `allow-join`\n- `allow-dirname`\n- `allow-extname`\n- `allow-basename`\n- `allow-is-absolute`" + }, + { + "description": "Enables the basename command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-basename", + "markdownDescription": "Enables the basename command without any pre-configured scope." 
+ }, + { + "description": "Enables the dirname command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-dirname", + "markdownDescription": "Enables the dirname command without any pre-configured scope." + }, + { + "description": "Enables the extname command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-extname", + "markdownDescription": "Enables the extname command without any pre-configured scope." + }, + { + "description": "Enables the is_absolute command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-is-absolute", + "markdownDescription": "Enables the is_absolute command without any pre-configured scope." + }, + { + "description": "Enables the join command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-join", + "markdownDescription": "Enables the join command without any pre-configured scope." + }, + { + "description": "Enables the normalize command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-normalize", + "markdownDescription": "Enables the normalize command without any pre-configured scope." + }, + { + "description": "Enables the resolve command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-resolve", + "markdownDescription": "Enables the resolve command without any pre-configured scope." + }, + { + "description": "Enables the resolve_directory command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-resolve-directory", + "markdownDescription": "Enables the resolve_directory command without any pre-configured scope." + }, + { + "description": "Denies the basename command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-basename", + "markdownDescription": "Denies the basename command without any pre-configured scope." 
+ }, + { + "description": "Denies the dirname command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-dirname", + "markdownDescription": "Denies the dirname command without any pre-configured scope." + }, + { + "description": "Denies the extname command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-extname", + "markdownDescription": "Denies the extname command without any pre-configured scope." + }, + { + "description": "Denies the is_absolute command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-is-absolute", + "markdownDescription": "Denies the is_absolute command without any pre-configured scope." + }, + { + "description": "Denies the join command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-join", + "markdownDescription": "Denies the join command without any pre-configured scope." + }, + { + "description": "Denies the normalize command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-normalize", + "markdownDescription": "Denies the normalize command without any pre-configured scope." + }, + { + "description": "Denies the resolve command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-resolve", + "markdownDescription": "Denies the resolve command without any pre-configured scope." + }, + { + "description": "Denies the resolve_directory command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-resolve-directory", + "markdownDescription": "Denies the resolve_directory command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-close`", + "type": "string", + "const": "core:resources:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-close`" + }, + { + "description": "Enables the close command without any pre-configured scope.", + "type": "string", + "const": "core:resources:allow-close", + "markdownDescription": "Enables the close command without any pre-configured scope." + }, + { + "description": "Denies the close command without any pre-configured scope.", + "type": "string", + "const": "core:resources:deny-close", + "markdownDescription": "Denies the close command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-get-by-id`\n- `allow-remove-by-id`\n- `allow-set-icon`\n- `allow-set-menu`\n- `allow-set-tooltip`\n- `allow-set-title`\n- `allow-set-visible`\n- `allow-set-temp-dir-path`\n- `allow-set-icon-as-template`\n- `allow-set-show-menu-on-left-click`", + "type": "string", + "const": "core:tray:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-get-by-id`\n- `allow-remove-by-id`\n- `allow-set-icon`\n- `allow-set-menu`\n- `allow-set-tooltip`\n- `allow-set-title`\n- `allow-set-visible`\n- `allow-set-temp-dir-path`\n- `allow-set-icon-as-template`\n- `allow-set-show-menu-on-left-click`" + }, + { + "description": "Enables the get_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-get-by-id", + "markdownDescription": "Enables the get_by_id command without any pre-configured scope." 
+ }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the remove_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-remove-by-id", + "markdownDescription": "Enables the remove_by_id command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_icon_as_template command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-icon-as-template", + "markdownDescription": "Enables the set_icon_as_template command without any pre-configured scope." + }, + { + "description": "Enables the set_menu command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-menu", + "markdownDescription": "Enables the set_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_show_menu_on_left_click command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-show-menu-on-left-click", + "markdownDescription": "Enables the set_show_menu_on_left_click command without any pre-configured scope." + }, + { + "description": "Enables the set_temp_dir_path command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-temp-dir-path", + "markdownDescription": "Enables the set_temp_dir_path command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-title", + "markdownDescription": "Enables the set_title command without any pre-configured scope." + }, + { + "description": "Enables the set_tooltip command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-tooltip", + "markdownDescription": "Enables the set_tooltip command without any pre-configured scope." + }, + { + "description": "Enables the set_visible command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-visible", + "markdownDescription": "Enables the set_visible command without any pre-configured scope." + }, + { + "description": "Denies the get_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-get-by-id", + "markdownDescription": "Denies the get_by_id command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the remove_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-remove-by-id", + "markdownDescription": "Denies the remove_by_id command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_icon_as_template command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-icon-as-template", + "markdownDescription": "Denies the set_icon_as_template command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_menu command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-menu", + "markdownDescription": "Denies the set_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_show_menu_on_left_click command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-show-menu-on-left-click", + "markdownDescription": "Denies the set_show_menu_on_left_click command without any pre-configured scope." + }, + { + "description": "Denies the set_temp_dir_path command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-temp-dir-path", + "markdownDescription": "Denies the set_temp_dir_path command without any pre-configured scope." + }, + { + "description": "Denies the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-title", + "markdownDescription": "Denies the set_title command without any pre-configured scope." + }, + { + "description": "Denies the set_tooltip command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-tooltip", + "markdownDescription": "Denies the set_tooltip command without any pre-configured scope." + }, + { + "description": "Denies the set_visible command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-visible", + "markdownDescription": "Denies the set_visible command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-webviews`\n- `allow-webview-position`\n- `allow-webview-size`\n- `allow-internal-toggle-devtools`", + "type": "string", + "const": "core:webview:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-webviews`\n- `allow-webview-position`\n- `allow-webview-size`\n- `allow-internal-toggle-devtools`" + }, + { + "description": "Enables the clear_all_browsing_data command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-clear-all-browsing-data", + "markdownDescription": "Enables the clear_all_browsing_data command without any pre-configured scope." + }, + { + "description": "Enables the create_webview command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-create-webview", + "markdownDescription": "Enables the create_webview command without any pre-configured scope." + }, + { + "description": "Enables the create_webview_window command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-create-webview-window", + "markdownDescription": "Enables the create_webview_window command without any pre-configured scope." + }, + { + "description": "Enables the get_all_webviews command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-get-all-webviews", + "markdownDescription": "Enables the get_all_webviews command without any pre-configured scope." + }, + { + "description": "Enables the internal_toggle_devtools command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-internal-toggle-devtools", + "markdownDescription": "Enables the internal_toggle_devtools command without any pre-configured scope." 
+ }, + { + "description": "Enables the print command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-print", + "markdownDescription": "Enables the print command without any pre-configured scope." + }, + { + "description": "Enables the reparent command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-reparent", + "markdownDescription": "Enables the reparent command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_auto_resize command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-auto-resize", + "markdownDescription": "Enables the set_webview_auto_resize command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-background-color", + "markdownDescription": "Enables the set_webview_background_color command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_focus command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-focus", + "markdownDescription": "Enables the set_webview_focus command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-position", + "markdownDescription": "Enables the set_webview_position command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-size", + "markdownDescription": "Enables the set_webview_size command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_webview_zoom command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-zoom", + "markdownDescription": "Enables the set_webview_zoom command without any pre-configured scope." + }, + { + "description": "Enables the webview_close command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-close", + "markdownDescription": "Enables the webview_close command without any pre-configured scope." + }, + { + "description": "Enables the webview_hide command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-hide", + "markdownDescription": "Enables the webview_hide command without any pre-configured scope." + }, + { + "description": "Enables the webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-position", + "markdownDescription": "Enables the webview_position command without any pre-configured scope." + }, + { + "description": "Enables the webview_show command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-show", + "markdownDescription": "Enables the webview_show command without any pre-configured scope." + }, + { + "description": "Enables the webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-size", + "markdownDescription": "Enables the webview_size command without any pre-configured scope." + }, + { + "description": "Denies the clear_all_browsing_data command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-clear-all-browsing-data", + "markdownDescription": "Denies the clear_all_browsing_data command without any pre-configured scope." 
+ }, + { + "description": "Denies the create_webview command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-create-webview", + "markdownDescription": "Denies the create_webview command without any pre-configured scope." + }, + { + "description": "Denies the create_webview_window command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-create-webview-window", + "markdownDescription": "Denies the create_webview_window command without any pre-configured scope." + }, + { + "description": "Denies the get_all_webviews command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-get-all-webviews", + "markdownDescription": "Denies the get_all_webviews command without any pre-configured scope." + }, + { + "description": "Denies the internal_toggle_devtools command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-internal-toggle-devtools", + "markdownDescription": "Denies the internal_toggle_devtools command without any pre-configured scope." + }, + { + "description": "Denies the print command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-print", + "markdownDescription": "Denies the print command without any pre-configured scope." + }, + { + "description": "Denies the reparent command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-reparent", + "markdownDescription": "Denies the reparent command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_auto_resize command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-auto-resize", + "markdownDescription": "Denies the set_webview_auto_resize command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_webview_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-background-color", + "markdownDescription": "Denies the set_webview_background_color command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_focus command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-focus", + "markdownDescription": "Denies the set_webview_focus command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-position", + "markdownDescription": "Denies the set_webview_position command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-size", + "markdownDescription": "Denies the set_webview_size command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_zoom command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-zoom", + "markdownDescription": "Denies the set_webview_zoom command without any pre-configured scope." + }, + { + "description": "Denies the webview_close command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-close", + "markdownDescription": "Denies the webview_close command without any pre-configured scope." + }, + { + "description": "Denies the webview_hide command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-hide", + "markdownDescription": "Denies the webview_hide command without any pre-configured scope." 
+ }, + { + "description": "Denies the webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-position", + "markdownDescription": "Denies the webview_position command without any pre-configured scope." + }, + { + "description": "Denies the webview_show command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-show", + "markdownDescription": "Denies the webview_show command without any pre-configured scope." + }, + { + "description": "Denies the webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-size", + "markdownDescription": "Denies the webview_size command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-windows`\n- `allow-scale-factor`\n- `allow-inner-position`\n- `allow-outer-position`\n- `allow-inner-size`\n- `allow-outer-size`\n- `allow-is-fullscreen`\n- `allow-is-minimized`\n- `allow-is-maximized`\n- `allow-is-focused`\n- `allow-is-decorated`\n- `allow-is-resizable`\n- `allow-is-maximizable`\n- `allow-is-minimizable`\n- `allow-is-closable`\n- `allow-is-visible`\n- `allow-is-enabled`\n- `allow-title`\n- `allow-current-monitor`\n- `allow-primary-monitor`\n- `allow-monitor-from-point`\n- `allow-available-monitors`\n- `allow-cursor-position`\n- `allow-theme`\n- `allow-is-always-on-top`\n- `allow-internal-toggle-maximize`", + "type": "string", + "const": "core:window:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-windows`\n- `allow-scale-factor`\n- `allow-inner-position`\n- `allow-outer-position`\n- `allow-inner-size`\n- `allow-outer-size`\n- `allow-is-fullscreen`\n- `allow-is-minimized`\n- `allow-is-maximized`\n- `allow-is-focused`\n- `allow-is-decorated`\n- `allow-is-resizable`\n- 
`allow-is-maximizable`\n- `allow-is-minimizable`\n- `allow-is-closable`\n- `allow-is-visible`\n- `allow-is-enabled`\n- `allow-title`\n- `allow-current-monitor`\n- `allow-primary-monitor`\n- `allow-monitor-from-point`\n- `allow-available-monitors`\n- `allow-cursor-position`\n- `allow-theme`\n- `allow-is-always-on-top`\n- `allow-internal-toggle-maximize`" + }, + { + "description": "Enables the available_monitors command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-available-monitors", + "markdownDescription": "Enables the available_monitors command without any pre-configured scope." + }, + { + "description": "Enables the center command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-center", + "markdownDescription": "Enables the center command without any pre-configured scope." + }, + { + "description": "Enables the close command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-close", + "markdownDescription": "Enables the close command without any pre-configured scope." + }, + { + "description": "Enables the create command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-create", + "markdownDescription": "Enables the create command without any pre-configured scope." + }, + { + "description": "Enables the current_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-current-monitor", + "markdownDescription": "Enables the current_monitor command without any pre-configured scope." + }, + { + "description": "Enables the cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-cursor-position", + "markdownDescription": "Enables the cursor_position command without any pre-configured scope." 
+ }, + { + "description": "Enables the destroy command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-destroy", + "markdownDescription": "Enables the destroy command without any pre-configured scope." + }, + { + "description": "Enables the get_all_windows command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-get-all-windows", + "markdownDescription": "Enables the get_all_windows command without any pre-configured scope." + }, + { + "description": "Enables the hide command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-hide", + "markdownDescription": "Enables the hide command without any pre-configured scope." + }, + { + "description": "Enables the inner_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-inner-position", + "markdownDescription": "Enables the inner_position command without any pre-configured scope." + }, + { + "description": "Enables the inner_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-inner-size", + "markdownDescription": "Enables the inner_size command without any pre-configured scope." + }, + { + "description": "Enables the internal_toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-internal-toggle-maximize", + "markdownDescription": "Enables the internal_toggle_maximize command without any pre-configured scope." + }, + { + "description": "Enables the is_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-always-on-top", + "markdownDescription": "Enables the is_always_on_top command without any pre-configured scope." 
+ }, + { + "description": "Enables the is_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-closable", + "markdownDescription": "Enables the is_closable command without any pre-configured scope." + }, + { + "description": "Enables the is_decorated command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-decorated", + "markdownDescription": "Enables the is_decorated command without any pre-configured scope." + }, + { + "description": "Enables the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-enabled", + "markdownDescription": "Enables the is_enabled command without any pre-configured scope." + }, + { + "description": "Enables the is_focused command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-focused", + "markdownDescription": "Enables the is_focused command without any pre-configured scope." + }, + { + "description": "Enables the is_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-fullscreen", + "markdownDescription": "Enables the is_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the is_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-maximizable", + "markdownDescription": "Enables the is_maximizable command without any pre-configured scope." + }, + { + "description": "Enables the is_maximized command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-maximized", + "markdownDescription": "Enables the is_maximized command without any pre-configured scope." 
+ }, + { + "description": "Enables the is_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-minimizable", + "markdownDescription": "Enables the is_minimizable command without any pre-configured scope." + }, + { + "description": "Enables the is_minimized command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-minimized", + "markdownDescription": "Enables the is_minimized command without any pre-configured scope." + }, + { + "description": "Enables the is_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-resizable", + "markdownDescription": "Enables the is_resizable command without any pre-configured scope." + }, + { + "description": "Enables the is_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-visible", + "markdownDescription": "Enables the is_visible command without any pre-configured scope." + }, + { + "description": "Enables the maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-maximize", + "markdownDescription": "Enables the maximize command without any pre-configured scope." + }, + { + "description": "Enables the minimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-minimize", + "markdownDescription": "Enables the minimize command without any pre-configured scope." + }, + { + "description": "Enables the monitor_from_point command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-monitor-from-point", + "markdownDescription": "Enables the monitor_from_point command without any pre-configured scope." 
+ }, + { + "description": "Enables the outer_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-outer-position", + "markdownDescription": "Enables the outer_position command without any pre-configured scope." + }, + { + "description": "Enables the outer_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-outer-size", + "markdownDescription": "Enables the outer_size command without any pre-configured scope." + }, + { + "description": "Enables the primary_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-primary-monitor", + "markdownDescription": "Enables the primary_monitor command without any pre-configured scope." + }, + { + "description": "Enables the request_user_attention command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-request-user-attention", + "markdownDescription": "Enables the request_user_attention command without any pre-configured scope." + }, + { + "description": "Enables the scale_factor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-scale-factor", + "markdownDescription": "Enables the scale_factor command without any pre-configured scope." + }, + { + "description": "Enables the set_always_on_bottom command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-always-on-bottom", + "markdownDescription": "Enables the set_always_on_bottom command without any pre-configured scope." + }, + { + "description": "Enables the set_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-always-on-top", + "markdownDescription": "Enables the set_always_on_top command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-background-color", + "markdownDescription": "Enables the set_background_color command without any pre-configured scope." + }, + { + "description": "Enables the set_badge_count command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-badge-count", + "markdownDescription": "Enables the set_badge_count command without any pre-configured scope." + }, + { + "description": "Enables the set_badge_label command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-badge-label", + "markdownDescription": "Enables the set_badge_label command without any pre-configured scope." + }, + { + "description": "Enables the set_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-closable", + "markdownDescription": "Enables the set_closable command without any pre-configured scope." + }, + { + "description": "Enables the set_content_protected command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-content-protected", + "markdownDescription": "Enables the set_content_protected command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_grab command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-grab", + "markdownDescription": "Enables the set_cursor_grab command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-icon", + "markdownDescription": "Enables the set_cursor_icon command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-position", + "markdownDescription": "Enables the set_cursor_position command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-visible", + "markdownDescription": "Enables the set_cursor_visible command without any pre-configured scope." + }, + { + "description": "Enables the set_decorations command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-decorations", + "markdownDescription": "Enables the set_decorations command without any pre-configured scope." + }, + { + "description": "Enables the set_effects command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-effects", + "markdownDescription": "Enables the set_effects command without any pre-configured scope." + }, + { + "description": "Enables the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-enabled", + "markdownDescription": "Enables the set_enabled command without any pre-configured scope." + }, + { + "description": "Enables the set_focus command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focus", + "markdownDescription": "Enables the set_focus command without any pre-configured scope." + }, + { + "description": "Enables the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focusable", + "markdownDescription": "Enables the set_focusable command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-fullscreen", + "markdownDescription": "Enables the set_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_ignore_cursor_events command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-ignore-cursor-events", + "markdownDescription": "Enables the set_ignore_cursor_events command without any pre-configured scope." + }, + { + "description": "Enables the set_max_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-max-size", + "markdownDescription": "Enables the set_max_size command without any pre-configured scope." + }, + { + "description": "Enables the set_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-maximizable", + "markdownDescription": "Enables the set_maximizable command without any pre-configured scope." + }, + { + "description": "Enables the set_min_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-min-size", + "markdownDescription": "Enables the set_min_size command without any pre-configured scope." + }, + { + "description": "Enables the set_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-minimizable", + "markdownDescription": "Enables the set_minimizable command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_overlay_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-overlay-icon", + "markdownDescription": "Enables the set_overlay_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-position", + "markdownDescription": "Enables the set_position command without any pre-configured scope." + }, + { + "description": "Enables the set_progress_bar command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-progress-bar", + "markdownDescription": "Enables the set_progress_bar command without any pre-configured scope." + }, + { + "description": "Enables the set_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-resizable", + "markdownDescription": "Enables the set_resizable command without any pre-configured scope." + }, + { + "description": "Enables the set_shadow command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-shadow", + "markdownDescription": "Enables the set_shadow command without any pre-configured scope." + }, + { + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-simple-fullscreen", + "markdownDescription": "Enables the set_simple_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the set_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-size", + "markdownDescription": "Enables the set_size command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_size_constraints command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-size-constraints", + "markdownDescription": "Enables the set_size_constraints command without any pre-configured scope." + }, + { + "description": "Enables the set_skip_taskbar command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-skip-taskbar", + "markdownDescription": "Enables the set_skip_taskbar command without any pre-configured scope." + }, + { + "description": "Enables the set_theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-theme", + "markdownDescription": "Enables the set_theme command without any pre-configured scope." + }, + { + "description": "Enables the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-title", + "markdownDescription": "Enables the set_title command without any pre-configured scope." + }, + { + "description": "Enables the set_title_bar_style command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-title-bar-style", + "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." + }, + { + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." + }, + { + "description": "Enables the show command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-show", + "markdownDescription": "Enables the show command without any pre-configured scope." 
+ }, + { + "description": "Enables the start_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-start-dragging", + "markdownDescription": "Enables the start_dragging command without any pre-configured scope." + }, + { + "description": "Enables the start_resize_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-start-resize-dragging", + "markdownDescription": "Enables the start_resize_dragging command without any pre-configured scope." + }, + { + "description": "Enables the theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-theme", + "markdownDescription": "Enables the theme command without any pre-configured scope." + }, + { + "description": "Enables the title command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-title", + "markdownDescription": "Enables the title command without any pre-configured scope." + }, + { + "description": "Enables the toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-toggle-maximize", + "markdownDescription": "Enables the toggle_maximize command without any pre-configured scope." + }, + { + "description": "Enables the unmaximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-unmaximize", + "markdownDescription": "Enables the unmaximize command without any pre-configured scope." + }, + { + "description": "Enables the unminimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-unminimize", + "markdownDescription": "Enables the unminimize command without any pre-configured scope." 
+ }, + { + "description": "Denies the available_monitors command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-available-monitors", + "markdownDescription": "Denies the available_monitors command without any pre-configured scope." + }, + { + "description": "Denies the center command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-center", + "markdownDescription": "Denies the center command without any pre-configured scope." + }, + { + "description": "Denies the close command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-close", + "markdownDescription": "Denies the close command without any pre-configured scope." + }, + { + "description": "Denies the create command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-create", + "markdownDescription": "Denies the create command without any pre-configured scope." + }, + { + "description": "Denies the current_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-current-monitor", + "markdownDescription": "Denies the current_monitor command without any pre-configured scope." + }, + { + "description": "Denies the cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-cursor-position", + "markdownDescription": "Denies the cursor_position command without any pre-configured scope." + }, + { + "description": "Denies the destroy command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-destroy", + "markdownDescription": "Denies the destroy command without any pre-configured scope." + }, + { + "description": "Denies the get_all_windows command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-get-all-windows", + "markdownDescription": "Denies the get_all_windows command without any pre-configured scope." 
+ }, + { + "description": "Denies the hide command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-hide", + "markdownDescription": "Denies the hide command without any pre-configured scope." + }, + { + "description": "Denies the inner_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-inner-position", + "markdownDescription": "Denies the inner_position command without any pre-configured scope." + }, + { + "description": "Denies the inner_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-inner-size", + "markdownDescription": "Denies the inner_size command without any pre-configured scope." + }, + { + "description": "Denies the internal_toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-internal-toggle-maximize", + "markdownDescription": "Denies the internal_toggle_maximize command without any pre-configured scope." + }, + { + "description": "Denies the is_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-always-on-top", + "markdownDescription": "Denies the is_always_on_top command without any pre-configured scope." + }, + { + "description": "Denies the is_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-closable", + "markdownDescription": "Denies the is_closable command without any pre-configured scope." + }, + { + "description": "Denies the is_decorated command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-decorated", + "markdownDescription": "Denies the is_decorated command without any pre-configured scope." 
+ }, + { + "description": "Denies the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-enabled", + "markdownDescription": "Denies the is_enabled command without any pre-configured scope." + }, + { + "description": "Denies the is_focused command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-focused", + "markdownDescription": "Denies the is_focused command without any pre-configured scope." + }, + { + "description": "Denies the is_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-fullscreen", + "markdownDescription": "Denies the is_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the is_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-maximizable", + "markdownDescription": "Denies the is_maximizable command without any pre-configured scope." + }, + { + "description": "Denies the is_maximized command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-maximized", + "markdownDescription": "Denies the is_maximized command without any pre-configured scope." + }, + { + "description": "Denies the is_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-minimizable", + "markdownDescription": "Denies the is_minimizable command without any pre-configured scope." + }, + { + "description": "Denies the is_minimized command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-minimized", + "markdownDescription": "Denies the is_minimized command without any pre-configured scope." 
+ }, + { + "description": "Denies the is_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-resizable", + "markdownDescription": "Denies the is_resizable command without any pre-configured scope." + }, + { + "description": "Denies the is_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-visible", + "markdownDescription": "Denies the is_visible command without any pre-configured scope." + }, + { + "description": "Denies the maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-maximize", + "markdownDescription": "Denies the maximize command without any pre-configured scope." + }, + { + "description": "Denies the minimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-minimize", + "markdownDescription": "Denies the minimize command without any pre-configured scope." + }, + { + "description": "Denies the monitor_from_point command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-monitor-from-point", + "markdownDescription": "Denies the monitor_from_point command without any pre-configured scope." + }, + { + "description": "Denies the outer_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-outer-position", + "markdownDescription": "Denies the outer_position command without any pre-configured scope." + }, + { + "description": "Denies the outer_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-outer-size", + "markdownDescription": "Denies the outer_size command without any pre-configured scope." 
+ }, + { + "description": "Denies the primary_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-primary-monitor", + "markdownDescription": "Denies the primary_monitor command without any pre-configured scope." + }, + { + "description": "Denies the request_user_attention command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-request-user-attention", + "markdownDescription": "Denies the request_user_attention command without any pre-configured scope." + }, + { + "description": "Denies the scale_factor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-scale-factor", + "markdownDescription": "Denies the scale_factor command without any pre-configured scope." + }, + { + "description": "Denies the set_always_on_bottom command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-always-on-bottom", + "markdownDescription": "Denies the set_always_on_bottom command without any pre-configured scope." + }, + { + "description": "Denies the set_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-always-on-top", + "markdownDescription": "Denies the set_always_on_top command without any pre-configured scope." + }, + { + "description": "Denies the set_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-background-color", + "markdownDescription": "Denies the set_background_color command without any pre-configured scope." + }, + { + "description": "Denies the set_badge_count command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-badge-count", + "markdownDescription": "Denies the set_badge_count command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_badge_label command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-badge-label", + "markdownDescription": "Denies the set_badge_label command without any pre-configured scope." + }, + { + "description": "Denies the set_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-closable", + "markdownDescription": "Denies the set_closable command without any pre-configured scope." + }, + { + "description": "Denies the set_content_protected command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-content-protected", + "markdownDescription": "Denies the set_content_protected command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_grab command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-grab", + "markdownDescription": "Denies the set_cursor_grab command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-icon", + "markdownDescription": "Denies the set_cursor_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-position", + "markdownDescription": "Denies the set_cursor_position command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-visible", + "markdownDescription": "Denies the set_cursor_visible command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_decorations command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-decorations", + "markdownDescription": "Denies the set_decorations command without any pre-configured scope." + }, + { + "description": "Denies the set_effects command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-effects", + "markdownDescription": "Denies the set_effects command without any pre-configured scope." + }, + { + "description": "Denies the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-enabled", + "markdownDescription": "Denies the set_enabled command without any pre-configured scope." + }, + { + "description": "Denies the set_focus command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focus", + "markdownDescription": "Denies the set_focus command without any pre-configured scope." + }, + { + "description": "Denies the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focusable", + "markdownDescription": "Denies the set_focusable command without any pre-configured scope." + }, + { + "description": "Denies the set_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-fullscreen", + "markdownDescription": "Denies the set_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_ignore_cursor_events command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-ignore-cursor-events", + "markdownDescription": "Denies the set_ignore_cursor_events command without any pre-configured scope." + }, + { + "description": "Denies the set_max_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-max-size", + "markdownDescription": "Denies the set_max_size command without any pre-configured scope." + }, + { + "description": "Denies the set_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-maximizable", + "markdownDescription": "Denies the set_maximizable command without any pre-configured scope." + }, + { + "description": "Denies the set_min_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-min-size", + "markdownDescription": "Denies the set_min_size command without any pre-configured scope." + }, + { + "description": "Denies the set_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-minimizable", + "markdownDescription": "Denies the set_minimizable command without any pre-configured scope." + }, + { + "description": "Denies the set_overlay_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-overlay-icon", + "markdownDescription": "Denies the set_overlay_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-position", + "markdownDescription": "Denies the set_position command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_progress_bar command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-progress-bar", + "markdownDescription": "Denies the set_progress_bar command without any pre-configured scope." + }, + { + "description": "Denies the set_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-resizable", + "markdownDescription": "Denies the set_resizable command without any pre-configured scope." + }, + { + "description": "Denies the set_shadow command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-shadow", + "markdownDescription": "Denies the set_shadow command without any pre-configured scope." + }, + { + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-simple-fullscreen", + "markdownDescription": "Denies the set_simple_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the set_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-size", + "markdownDescription": "Denies the set_size command without any pre-configured scope." + }, + { + "description": "Denies the set_size_constraints command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-size-constraints", + "markdownDescription": "Denies the set_size_constraints command without any pre-configured scope." + }, + { + "description": "Denies the set_skip_taskbar command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-skip-taskbar", + "markdownDescription": "Denies the set_skip_taskbar command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-theme", + "markdownDescription": "Denies the set_theme command without any pre-configured scope." + }, + { + "description": "Denies the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-title", + "markdownDescription": "Denies the set_title command without any pre-configured scope." + }, + { + "description": "Denies the set_title_bar_style command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-title-bar-style", + "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." + }, + { + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." + }, + { + "description": "Denies the show command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-show", + "markdownDescription": "Denies the show command without any pre-configured scope." + }, + { + "description": "Denies the start_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-start-dragging", + "markdownDescription": "Denies the start_dragging command without any pre-configured scope." + }, + { + "description": "Denies the start_resize_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-start-resize-dragging", + "markdownDescription": "Denies the start_resize_dragging command without any pre-configured scope." 
+ }, + { + "description": "Denies the theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-theme", + "markdownDescription": "Denies the theme command without any pre-configured scope." + }, + { + "description": "Denies the title command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-title", + "markdownDescription": "Denies the title command without any pre-configured scope." + }, + { + "description": "Denies the toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-toggle-maximize", + "markdownDescription": "Denies the toggle_maximize command without any pre-configured scope." + }, + { + "description": "Denies the unmaximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-unmaximize", + "markdownDescription": "Denies the unmaximize command without any pre-configured scope." + }, + { + "description": "Denies the unminimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-unminimize", + "markdownDescription": "Denies the unminimize command without any pre-configured scope." + }, + { + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`", + "type": "string", + "const": "shell:default", + "markdownDescription": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`" + }, + { + "description": "Enables the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-execute", + "markdownDescription": "Enables the execute command without any pre-configured scope." + }, + { + "description": "Enables the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-kill", + "markdownDescription": "Enables the kill command without any pre-configured scope." + }, + { + "description": "Enables the open command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-open", + "markdownDescription": "Enables the open command without any pre-configured scope." + }, + { + "description": "Enables the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-spawn", + "markdownDescription": "Enables the spawn command without any pre-configured scope." + }, + { + "description": "Enables the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-stdin-write", + "markdownDescription": "Enables the stdin_write command without any pre-configured scope." + }, + { + "description": "Denies the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-execute", + "markdownDescription": "Denies the execute command without any pre-configured scope." + }, + { + "description": "Denies the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-kill", + "markdownDescription": "Denies the kill command without any pre-configured scope." + }, + { + "description": "Denies the open command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-open", + "markdownDescription": "Denies the open command without any pre-configured scope." 
+ }, + { + "description": "Denies the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-spawn", + "markdownDescription": "Denies the spawn command without any pre-configured scope." + }, + { + "description": "Denies the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-stdin-write", + "markdownDescription": "Denies the stdin_write command without any pre-configured scope." + } + ] + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": ["macOS"] + }, + { + "description": "Windows.", + "type": "string", + "enum": ["windows"] + }, + { + "description": "Linux.", + "type": "string", + "enum": ["linux"] + }, + { + "description": "Android.", + "type": "string", + "enum": ["android"] + }, + { + "description": "iOS.", + "type": "string", + "enum": ["iOS"] + } + ] + }, + "ShellScopeEntryAllowedArg": { + "description": "A command argument allowed 
to be executed by the webview API.", + "anyOf": [ + { + "description": "A non-configurable argument that is passed to the command in the order it was specified.", + "type": "string" + }, + { + "description": "A variable that is set while calling the command from the webview API.", + "type": "object", + "required": ["validator"], + "properties": { + "raw": { + "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.", + "default": false, + "type": "boolean" + }, + "validator": { + "description": "[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ", + "type": "string" + } + }, + "additionalProperties": false + } + ] + }, + "ShellScopeEntryAllowedArgs": { + "description": "A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. 
A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration.", + "anyOf": [ + { + "description": "Use a simple boolean to allow all or disable all arguments to this command configuration.", + "type": "boolean" + }, + { + "description": "A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.", + "type": "array", + "items": { + "$ref": "#/definitions/ShellScopeEntryAllowedArg" + } + } + ] + } + } +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json new file mode 100644 index 0000000..34f0a61 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json @@ -0,0 +1,2522 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "CapabilityFile", + "description": "Capability formats accepted in a capability file.", + "anyOf": [ + { + "description": "A single capability.", + "allOf": [ + { + "$ref": "#/definitions/Capability" + } + ] + }, + { + "description": "A list of capabilities.", + "type": "array", + "items": { + "$ref": "#/definitions/Capability" + } + }, + { + "description": "A list of capabilities.", + "type": "object", + "required": ["capabilities"], + "properties": { + "capabilities": { + "description": "The list of capabilities.", + "type": "array", + "items": { + "$ref": "#/definitions/Capability" + } + } + } + } + ], + "definitions": { + "Capability": { + "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. 
If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", + "type": "object", + "required": ["identifier", "permissions"], + "properties": { + "identifier": { + "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", + "type": "string" + }, + "description": { + "description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.", + "default": "", + "type": "string" + }, + "remote": { + "description": "Configure remote URLs that can use the capability permissions.\n\nThis setting is optional and defaults to not being set, as our default use case is that the content is served from our local application.\n\n:::caution Make sure you understand the security implications of providing remote sources with local system access. 
:::\n\n## Example\n\n```json { \"urls\": [\"https://*.mydomain.dev\"] } ```", + "anyOf": [ + { + "$ref": "#/definitions/CapabilityRemote" + }, + { + "type": "null" + } + ] + }, + "local": { + "description": "Whether this capability is enabled for local app URLs or not. Defaults to `true`.", + "default": true, + "type": "boolean" + }, + "windows": { + "description": "List of windows that are affected by this capability. Can be a glob pattern.\n\nIf a window label matches any of the patterns in this list, the capability will be enabled on all the webviews of that window, regardless of the value of [`Self::webviews`].\n\nOn multiwebview windows, prefer specifying [`Self::webviews`] and omitting [`Self::windows`] for a fine grained access control.\n\n## Example\n\n`[\"main\"]`", + "type": "array", + "items": { + "type": "string" + } + }, + "webviews": { + "description": "List of webviews that are affected by this capability. Can be a glob pattern.\n\nThe capability will be enabled on all the webviews whose label matches any of the patterns in this list, regardless of whether the webview's window label matches a pattern in [`Self::windows`].\n\n## Example\n\n`[\"sub-webview-one\", \"sub-webview-two\"]`", + "type": "array", + "items": { + "type": "string" + } + }, + "permissions": { + "description": "List of permissions attached to this capability.\n\nMust include the plugin name as prefix in the form of `${plugin-name}:${permission-name}`. 
For commands directly implemented in the application itself only `${permission-name}` is required.\n\n## Example\n\n```json [ \"core:default\", \"shell:allow-open\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] } ] ```", + "type": "array", + "items": { + "$ref": "#/definitions/PermissionEntry" + }, + "uniqueItems": true + }, + "platforms": { + "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Target" + } + } + } + }, + "CapabilityRemote": { + "description": "Configuration for remote URLs that are associated with the capability.", + "type": "object", + "required": ["urls"], + "properties": { + "urls": { + "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "PermissionEntry": { + "description": "An entry for a permission value in a [`Capability`] can be either a raw permission [`Identifier`] or an object that references a permission and extends its scope.", + "anyOf": [ + { + "description": "Reference a permission or permission set by identifier.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + }, + { + "description": "Reference a permission or permission set by identifier and extends its scope.", + "type": "object", + "allOf": [ + { + "if": { + "properties": { + "identifier": { + "anyOf": [ + { + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`", + "type": "string", + "const": "shell:default", + "markdownDescription": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`" + }, + { + "description": "Enables the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-execute", + "markdownDescription": "Enables the execute command without any pre-configured scope." + }, + { + "description": "Enables the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-kill", + "markdownDescription": "Enables the kill command without any pre-configured scope." + }, + { + "description": "Enables the open command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-open", + "markdownDescription": "Enables the open command without any pre-configured scope." + }, + { + "description": "Enables the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-spawn", + "markdownDescription": "Enables the spawn command without any pre-configured scope." + }, + { + "description": "Enables the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-stdin-write", + "markdownDescription": "Enables the stdin_write command without any pre-configured scope." + }, + { + "description": "Denies the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-execute", + "markdownDescription": "Denies the execute command without any pre-configured scope." 
+ }, + { + "description": "Denies the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-kill", + "markdownDescription": "Denies the kill command without any pre-configured scope." + }, + { + "description": "Denies the open command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-open", + "markdownDescription": "Denies the open command without any pre-configured scope." + }, + { + "description": "Denies the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-spawn", + "markdownDescription": "Denies the spawn command without any pre-configured scope." + }, + { + "description": "Denies the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-stdin-write", + "markdownDescription": "Denies the stdin_write command without any pre-configured scope." + } + ] + } + } + }, + "then": { + "properties": { + "allow": { + "items": { + "title": "ShellScopeEntry", + "description": "Shell scope entry.", + "anyOf": [ + { + "type": "object", + "required": ["cmd", "name"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. 
The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": ["name", "sidecar"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { + "description": "If this command is a sidecar command.", + "type": "boolean" + } + }, + "additionalProperties": false + } + ] + } + }, + "deny": { + "items": { + "title": "ShellScopeEntry", + "description": "Shell scope entry.", + "anyOf": [ + { + "type": "object", + "required": ["cmd", "name"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. 
The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": ["name", "sidecar"], + "properties": { + "args": { + "description": "The allowed arguments for the command execution.", + "allOf": [ + { + "$ref": "#/definitions/ShellScopeEntryAllowedArgs" + } + ] + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { + "description": "If this command is a sidecar command.", + "type": "boolean" + } + }, + "additionalProperties": false + } + ] + } + } + } + }, + "properties": { + "identifier": { + "description": "Identifier of the permission or permission set.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + } + } + }, + { + "properties": { + "identifier": { + "description": "Identifier of the permission or permission set.", + "allOf": [ + { + "$ref": "#/definitions/Identifier" + } + ] + }, + "allow": { + "description": "Data that defines what is allowed by the scope.", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Value" + } + }, + "deny": { + "description": "Data that defines what is denied by the scope. 
This should be prioritized by validation logic.", + "type": ["array", "null"], + "items": { + "$ref": "#/definitions/Value" + } + } + } + } + ], + "required": ["identifier"] + } + ] + }, + "Identifier": { + "description": "Permission identifier", + "oneOf": [ + { + "description": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`", + "type": "string", + "const": "core:default", + "markdownDescription": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`" + }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`", + "type": "string", + "const": "core:app:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`" + }, + { + "description": "Enables the app_hide command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-app-hide", + "markdownDescription": "Enables the app_hide command without any pre-configured scope." 
+ }, + { + "description": "Enables the app_show command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-app-show", + "markdownDescription": "Enables the app_show command without any pre-configured scope." + }, + { + "description": "Enables the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-bundle-type", + "markdownDescription": "Enables the bundle_type command without any pre-configured scope." + }, + { + "description": "Enables the default_window_icon command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-default-window-icon", + "markdownDescription": "Enables the default_window_icon command without any pre-configured scope." + }, + { + "description": "Enables the fetch_data_store_identifiers command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-fetch-data-store-identifiers", + "markdownDescription": "Enables the fetch_data_store_identifiers command without any pre-configured scope." + }, + { + "description": "Enables the identifier command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-identifier", + "markdownDescription": "Enables the identifier command without any pre-configured scope." + }, + { + "description": "Enables the name command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-name", + "markdownDescription": "Enables the name command without any pre-configured scope." + }, + { + "description": "Enables the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-register-listener", + "markdownDescription": "Enables the register_listener command without any pre-configured scope." 
+ }, + { + "description": "Enables the remove_data_store command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-data-store", + "markdownDescription": "Enables the remove_data_store command without any pre-configured scope." + }, + { + "description": "Enables the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-listener", + "markdownDescription": "Enables the remove_listener command without any pre-configured scope." + }, + { + "description": "Enables the set_app_theme command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-set-app-theme", + "markdownDescription": "Enables the set_app_theme command without any pre-configured scope." + }, + { + "description": "Enables the set_dock_visibility command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-set-dock-visibility", + "markdownDescription": "Enables the set_dock_visibility command without any pre-configured scope." + }, + { + "description": "Enables the tauri_version command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-tauri-version", + "markdownDescription": "Enables the tauri_version command without any pre-configured scope." + }, + { + "description": "Enables the version command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-version", + "markdownDescription": "Enables the version command without any pre-configured scope." + }, + { + "description": "Denies the app_hide command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-app-hide", + "markdownDescription": "Denies the app_hide command without any pre-configured scope." 
+ }, + { + "description": "Denies the app_show command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-app-show", + "markdownDescription": "Denies the app_show command without any pre-configured scope." + }, + { + "description": "Denies the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-bundle-type", + "markdownDescription": "Denies the bundle_type command without any pre-configured scope." + }, + { + "description": "Denies the default_window_icon command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-default-window-icon", + "markdownDescription": "Denies the default_window_icon command without any pre-configured scope." + }, + { + "description": "Denies the fetch_data_store_identifiers command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-fetch-data-store-identifiers", + "markdownDescription": "Denies the fetch_data_store_identifiers command without any pre-configured scope." + }, + { + "description": "Denies the identifier command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-identifier", + "markdownDescription": "Denies the identifier command without any pre-configured scope." + }, + { + "description": "Denies the name command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-name", + "markdownDescription": "Denies the name command without any pre-configured scope." + }, + { + "description": "Denies the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-register-listener", + "markdownDescription": "Denies the register_listener command without any pre-configured scope." 
+ }, + { + "description": "Denies the remove_data_store command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-data-store", + "markdownDescription": "Denies the remove_data_store command without any pre-configured scope." + }, + { + "description": "Denies the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-listener", + "markdownDescription": "Denies the remove_listener command without any pre-configured scope." + }, + { + "description": "Denies the set_app_theme command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-set-app-theme", + "markdownDescription": "Denies the set_app_theme command without any pre-configured scope." + }, + { + "description": "Denies the set_dock_visibility command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-set-dock-visibility", + "markdownDescription": "Denies the set_dock_visibility command without any pre-configured scope." + }, + { + "description": "Denies the tauri_version command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-tauri-version", + "markdownDescription": "Denies the tauri_version command without any pre-configured scope." + }, + { + "description": "Denies the version command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-version", + "markdownDescription": "Denies the version command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-listen`\n- `allow-unlisten`\n- `allow-emit`\n- `allow-emit-to`", + "type": "string", + "const": "core:event:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-listen`\n- `allow-unlisten`\n- `allow-emit`\n- `allow-emit-to`" + }, + { + "description": "Enables the emit command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-emit", + "markdownDescription": "Enables the emit command without any pre-configured scope." + }, + { + "description": "Enables the emit_to command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-emit-to", + "markdownDescription": "Enables the emit_to command without any pre-configured scope." + }, + { + "description": "Enables the listen command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-listen", + "markdownDescription": "Enables the listen command without any pre-configured scope." + }, + { + "description": "Enables the unlisten command without any pre-configured scope.", + "type": "string", + "const": "core:event:allow-unlisten", + "markdownDescription": "Enables the unlisten command without any pre-configured scope." + }, + { + "description": "Denies the emit command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-emit", + "markdownDescription": "Denies the emit command without any pre-configured scope." + }, + { + "description": "Denies the emit_to command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-emit-to", + "markdownDescription": "Denies the emit_to command without any pre-configured scope." 
+ }, + { + "description": "Denies the listen command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-listen", + "markdownDescription": "Denies the listen command without any pre-configured scope." + }, + { + "description": "Denies the unlisten command without any pre-configured scope.", + "type": "string", + "const": "core:event:deny-unlisten", + "markdownDescription": "Denies the unlisten command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-from-bytes`\n- `allow-from-path`\n- `allow-rgba`\n- `allow-size`", + "type": "string", + "const": "core:image:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-from-bytes`\n- `allow-from-path`\n- `allow-rgba`\n- `allow-size`" + }, + { + "description": "Enables the from_bytes command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-from-bytes", + "markdownDescription": "Enables the from_bytes command without any pre-configured scope." + }, + { + "description": "Enables the from_path command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-from-path", + "markdownDescription": "Enables the from_path command without any pre-configured scope." + }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the rgba command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-rgba", + "markdownDescription": "Enables the rgba command without any pre-configured scope." 
+ }, + { + "description": "Enables the size command without any pre-configured scope.", + "type": "string", + "const": "core:image:allow-size", + "markdownDescription": "Enables the size command without any pre-configured scope." + }, + { + "description": "Denies the from_bytes command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-from-bytes", + "markdownDescription": "Denies the from_bytes command without any pre-configured scope." + }, + { + "description": "Denies the from_path command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-from-path", + "markdownDescription": "Denies the from_path command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the rgba command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-rgba", + "markdownDescription": "Denies the rgba command without any pre-configured scope." + }, + { + "description": "Denies the size command without any pre-configured scope.", + "type": "string", + "const": "core:image:deny-size", + "markdownDescription": "Denies the size command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-append`\n- `allow-prepend`\n- `allow-insert`\n- `allow-remove`\n- `allow-remove-at`\n- `allow-items`\n- `allow-get`\n- `allow-popup`\n- `allow-create-default`\n- `allow-set-as-app-menu`\n- `allow-set-as-window-menu`\n- `allow-text`\n- `allow-set-text`\n- `allow-is-enabled`\n- `allow-set-enabled`\n- `allow-set-accelerator`\n- `allow-set-as-windows-menu-for-nsapp`\n- `allow-set-as-help-menu-for-nsapp`\n- `allow-is-checked`\n- `allow-set-checked`\n- `allow-set-icon`", + "type": "string", + "const": "core:menu:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-append`\n- `allow-prepend`\n- `allow-insert`\n- `allow-remove`\n- `allow-remove-at`\n- `allow-items`\n- `allow-get`\n- `allow-popup`\n- `allow-create-default`\n- `allow-set-as-app-menu`\n- `allow-set-as-window-menu`\n- `allow-text`\n- `allow-set-text`\n- `allow-is-enabled`\n- `allow-set-enabled`\n- `allow-set-accelerator`\n- `allow-set-as-windows-menu-for-nsapp`\n- `allow-set-as-help-menu-for-nsapp`\n- `allow-is-checked`\n- `allow-set-checked`\n- `allow-set-icon`" + }, + { + "description": "Enables the append command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-append", + "markdownDescription": "Enables the append command without any pre-configured scope." + }, + { + "description": "Enables the create_default command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-create-default", + "markdownDescription": "Enables the create_default command without any pre-configured scope." 
+ }, + { + "description": "Enables the get command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-get", + "markdownDescription": "Enables the get command without any pre-configured scope." + }, + { + "description": "Enables the insert command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-insert", + "markdownDescription": "Enables the insert command without any pre-configured scope." + }, + { + "description": "Enables the is_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-is-checked", + "markdownDescription": "Enables the is_checked command without any pre-configured scope." + }, + { + "description": "Enables the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-is-enabled", + "markdownDescription": "Enables the is_enabled command without any pre-configured scope." + }, + { + "description": "Enables the items command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-items", + "markdownDescription": "Enables the items command without any pre-configured scope." + }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the popup command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-popup", + "markdownDescription": "Enables the popup command without any pre-configured scope." + }, + { + "description": "Enables the prepend command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-prepend", + "markdownDescription": "Enables the prepend command without any pre-configured scope." 
+ }, + { + "description": "Enables the remove command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-remove", + "markdownDescription": "Enables the remove command without any pre-configured scope." + }, + { + "description": "Enables the remove_at command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-remove-at", + "markdownDescription": "Enables the remove_at command without any pre-configured scope." + }, + { + "description": "Enables the set_accelerator command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-accelerator", + "markdownDescription": "Enables the set_accelerator command without any pre-configured scope." + }, + { + "description": "Enables the set_as_app_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-app-menu", + "markdownDescription": "Enables the set_as_app_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-help-menu-for-nsapp", + "markdownDescription": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Enables the set_as_window_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-window-menu", + "markdownDescription": "Enables the set_as_window_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-as-windows-menu-for-nsapp", + "markdownDescription": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-checked", + "markdownDescription": "Enables the set_checked command without any pre-configured scope." + }, + { + "description": "Enables the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-enabled", + "markdownDescription": "Enables the set_enabled command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-set-text", + "markdownDescription": "Enables the set_text command without any pre-configured scope." + }, + { + "description": "Enables the text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:allow-text", + "markdownDescription": "Enables the text command without any pre-configured scope." + }, + { + "description": "Denies the append command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-append", + "markdownDescription": "Denies the append command without any pre-configured scope." + }, + { + "description": "Denies the create_default command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-create-default", + "markdownDescription": "Denies the create_default command without any pre-configured scope." + }, + { + "description": "Denies the get command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-get", + "markdownDescription": "Denies the get command without any pre-configured scope." 
+ }, + { + "description": "Denies the insert command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-insert", + "markdownDescription": "Denies the insert command without any pre-configured scope." + }, + { + "description": "Denies the is_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-is-checked", + "markdownDescription": "Denies the is_checked command without any pre-configured scope." + }, + { + "description": "Denies the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-is-enabled", + "markdownDescription": "Denies the is_enabled command without any pre-configured scope." + }, + { + "description": "Denies the items command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-items", + "markdownDescription": "Denies the items command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the popup command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-popup", + "markdownDescription": "Denies the popup command without any pre-configured scope." + }, + { + "description": "Denies the prepend command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-prepend", + "markdownDescription": "Denies the prepend command without any pre-configured scope." + }, + { + "description": "Denies the remove command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-remove", + "markdownDescription": "Denies the remove command without any pre-configured scope." 
+ }, + { + "description": "Denies the remove_at command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-remove-at", + "markdownDescription": "Denies the remove_at command without any pre-configured scope." + }, + { + "description": "Denies the set_accelerator command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-accelerator", + "markdownDescription": "Denies the set_accelerator command without any pre-configured scope." + }, + { + "description": "Denies the set_as_app_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-app-menu", + "markdownDescription": "Denies the set_as_app_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-help-menu-for-nsapp", + "markdownDescription": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Denies the set_as_window_menu command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-window-menu", + "markdownDescription": "Denies the set_as_window_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-as-windows-menu-for-nsapp", + "markdownDescription": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope." + }, + { + "description": "Denies the set_checked command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-checked", + "markdownDescription": "Denies the set_checked command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-enabled", + "markdownDescription": "Denies the set_enabled command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-set-text", + "markdownDescription": "Denies the set_text command without any pre-configured scope." + }, + { + "description": "Denies the text command without any pre-configured scope.", + "type": "string", + "const": "core:menu:deny-text", + "markdownDescription": "Denies the text command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-resolve-directory`\n- `allow-resolve`\n- `allow-normalize`\n- `allow-join`\n- `allow-dirname`\n- `allow-extname`\n- `allow-basename`\n- `allow-is-absolute`", + "type": "string", + "const": "core:path:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-resolve-directory`\n- `allow-resolve`\n- `allow-normalize`\n- `allow-join`\n- `allow-dirname`\n- `allow-extname`\n- `allow-basename`\n- `allow-is-absolute`" + }, + { + "description": "Enables the basename command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-basename", + "markdownDescription": "Enables the basename command without any pre-configured scope." 
+ }, + { + "description": "Enables the dirname command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-dirname", + "markdownDescription": "Enables the dirname command without any pre-configured scope." + }, + { + "description": "Enables the extname command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-extname", + "markdownDescription": "Enables the extname command without any pre-configured scope." + }, + { + "description": "Enables the is_absolute command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-is-absolute", + "markdownDescription": "Enables the is_absolute command without any pre-configured scope." + }, + { + "description": "Enables the join command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-join", + "markdownDescription": "Enables the join command without any pre-configured scope." + }, + { + "description": "Enables the normalize command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-normalize", + "markdownDescription": "Enables the normalize command without any pre-configured scope." + }, + { + "description": "Enables the resolve command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-resolve", + "markdownDescription": "Enables the resolve command without any pre-configured scope." + }, + { + "description": "Enables the resolve_directory command without any pre-configured scope.", + "type": "string", + "const": "core:path:allow-resolve-directory", + "markdownDescription": "Enables the resolve_directory command without any pre-configured scope." + }, + { + "description": "Denies the basename command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-basename", + "markdownDescription": "Denies the basename command without any pre-configured scope." 
+ }, + { + "description": "Denies the dirname command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-dirname", + "markdownDescription": "Denies the dirname command without any pre-configured scope." + }, + { + "description": "Denies the extname command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-extname", + "markdownDescription": "Denies the extname command without any pre-configured scope." + }, + { + "description": "Denies the is_absolute command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-is-absolute", + "markdownDescription": "Denies the is_absolute command without any pre-configured scope." + }, + { + "description": "Denies the join command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-join", + "markdownDescription": "Denies the join command without any pre-configured scope." + }, + { + "description": "Denies the normalize command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-normalize", + "markdownDescription": "Denies the normalize command without any pre-configured scope." + }, + { + "description": "Denies the resolve command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-resolve", + "markdownDescription": "Denies the resolve command without any pre-configured scope." + }, + { + "description": "Denies the resolve_directory command without any pre-configured scope.", + "type": "string", + "const": "core:path:deny-resolve-directory", + "markdownDescription": "Denies the resolve_directory command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-close`", + "type": "string", + "const": "core:resources:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-close`" + }, + { + "description": "Enables the close command without any pre-configured scope.", + "type": "string", + "const": "core:resources:allow-close", + "markdownDescription": "Enables the close command without any pre-configured scope." + }, + { + "description": "Denies the close command without any pre-configured scope.", + "type": "string", + "const": "core:resources:deny-close", + "markdownDescription": "Denies the close command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-get-by-id`\n- `allow-remove-by-id`\n- `allow-set-icon`\n- `allow-set-menu`\n- `allow-set-tooltip`\n- `allow-set-title`\n- `allow-set-visible`\n- `allow-set-temp-dir-path`\n- `allow-set-icon-as-template`\n- `allow-set-show-menu-on-left-click`", + "type": "string", + "const": "core:tray:default", + "markdownDescription": "Default permissions for the plugin, which enables all commands.\n#### This default permission set includes:\n\n- `allow-new`\n- `allow-get-by-id`\n- `allow-remove-by-id`\n- `allow-set-icon`\n- `allow-set-menu`\n- `allow-set-tooltip`\n- `allow-set-title`\n- `allow-set-visible`\n- `allow-set-temp-dir-path`\n- `allow-set-icon-as-template`\n- `allow-set-show-menu-on-left-click`" + }, + { + "description": "Enables the get_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-get-by-id", + "markdownDescription": "Enables the get_by_id command without any pre-configured scope." 
+ }, + { + "description": "Enables the new command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-new", + "markdownDescription": "Enables the new command without any pre-configured scope." + }, + { + "description": "Enables the remove_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-remove-by-id", + "markdownDescription": "Enables the remove_by_id command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_icon_as_template command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-icon-as-template", + "markdownDescription": "Enables the set_icon_as_template command without any pre-configured scope." + }, + { + "description": "Enables the set_menu command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-menu", + "markdownDescription": "Enables the set_menu command without any pre-configured scope." + }, + { + "description": "Enables the set_show_menu_on_left_click command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-show-menu-on-left-click", + "markdownDescription": "Enables the set_show_menu_on_left_click command without any pre-configured scope." + }, + { + "description": "Enables the set_temp_dir_path command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-temp-dir-path", + "markdownDescription": "Enables the set_temp_dir_path command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-title", + "markdownDescription": "Enables the set_title command without any pre-configured scope." + }, + { + "description": "Enables the set_tooltip command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-tooltip", + "markdownDescription": "Enables the set_tooltip command without any pre-configured scope." + }, + { + "description": "Enables the set_visible command without any pre-configured scope.", + "type": "string", + "const": "core:tray:allow-set-visible", + "markdownDescription": "Enables the set_visible command without any pre-configured scope." + }, + { + "description": "Denies the get_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-get-by-id", + "markdownDescription": "Denies the get_by_id command without any pre-configured scope." + }, + { + "description": "Denies the new command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-new", + "markdownDescription": "Denies the new command without any pre-configured scope." + }, + { + "description": "Denies the remove_by_id command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-remove-by-id", + "markdownDescription": "Denies the remove_by_id command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_icon_as_template command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-icon-as-template", + "markdownDescription": "Denies the set_icon_as_template command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_menu command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-menu", + "markdownDescription": "Denies the set_menu command without any pre-configured scope." + }, + { + "description": "Denies the set_show_menu_on_left_click command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-show-menu-on-left-click", + "markdownDescription": "Denies the set_show_menu_on_left_click command without any pre-configured scope." + }, + { + "description": "Denies the set_temp_dir_path command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-temp-dir-path", + "markdownDescription": "Denies the set_temp_dir_path command without any pre-configured scope." + }, + { + "description": "Denies the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-title", + "markdownDescription": "Denies the set_title command without any pre-configured scope." + }, + { + "description": "Denies the set_tooltip command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-tooltip", + "markdownDescription": "Denies the set_tooltip command without any pre-configured scope." + }, + { + "description": "Denies the set_visible command without any pre-configured scope.", + "type": "string", + "const": "core:tray:deny-set-visible", + "markdownDescription": "Denies the set_visible command without any pre-configured scope." 
+ }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-webviews`\n- `allow-webview-position`\n- `allow-webview-size`\n- `allow-internal-toggle-devtools`", + "type": "string", + "const": "core:webview:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-webviews`\n- `allow-webview-position`\n- `allow-webview-size`\n- `allow-internal-toggle-devtools`" + }, + { + "description": "Enables the clear_all_browsing_data command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-clear-all-browsing-data", + "markdownDescription": "Enables the clear_all_browsing_data command without any pre-configured scope." + }, + { + "description": "Enables the create_webview command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-create-webview", + "markdownDescription": "Enables the create_webview command without any pre-configured scope." + }, + { + "description": "Enables the create_webview_window command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-create-webview-window", + "markdownDescription": "Enables the create_webview_window command without any pre-configured scope." + }, + { + "description": "Enables the get_all_webviews command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-get-all-webviews", + "markdownDescription": "Enables the get_all_webviews command without any pre-configured scope." + }, + { + "description": "Enables the internal_toggle_devtools command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-internal-toggle-devtools", + "markdownDescription": "Enables the internal_toggle_devtools command without any pre-configured scope." 
+ }, + { + "description": "Enables the print command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-print", + "markdownDescription": "Enables the print command without any pre-configured scope." + }, + { + "description": "Enables the reparent command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-reparent", + "markdownDescription": "Enables the reparent command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_auto_resize command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-auto-resize", + "markdownDescription": "Enables the set_webview_auto_resize command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-background-color", + "markdownDescription": "Enables the set_webview_background_color command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_focus command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-focus", + "markdownDescription": "Enables the set_webview_focus command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-position", + "markdownDescription": "Enables the set_webview_position command without any pre-configured scope." + }, + { + "description": "Enables the set_webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-size", + "markdownDescription": "Enables the set_webview_size command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_webview_zoom command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-set-webview-zoom", + "markdownDescription": "Enables the set_webview_zoom command without any pre-configured scope." + }, + { + "description": "Enables the webview_close command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-close", + "markdownDescription": "Enables the webview_close command without any pre-configured scope." + }, + { + "description": "Enables the webview_hide command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-hide", + "markdownDescription": "Enables the webview_hide command without any pre-configured scope." + }, + { + "description": "Enables the webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-position", + "markdownDescription": "Enables the webview_position command without any pre-configured scope." + }, + { + "description": "Enables the webview_show command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-show", + "markdownDescription": "Enables the webview_show command without any pre-configured scope." + }, + { + "description": "Enables the webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:allow-webview-size", + "markdownDescription": "Enables the webview_size command without any pre-configured scope." + }, + { + "description": "Denies the clear_all_browsing_data command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-clear-all-browsing-data", + "markdownDescription": "Denies the clear_all_browsing_data command without any pre-configured scope." 
+ }, + { + "description": "Denies the create_webview command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-create-webview", + "markdownDescription": "Denies the create_webview command without any pre-configured scope." + }, + { + "description": "Denies the create_webview_window command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-create-webview-window", + "markdownDescription": "Denies the create_webview_window command without any pre-configured scope." + }, + { + "description": "Denies the get_all_webviews command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-get-all-webviews", + "markdownDescription": "Denies the get_all_webviews command without any pre-configured scope." + }, + { + "description": "Denies the internal_toggle_devtools command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-internal-toggle-devtools", + "markdownDescription": "Denies the internal_toggle_devtools command without any pre-configured scope." + }, + { + "description": "Denies the print command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-print", + "markdownDescription": "Denies the print command without any pre-configured scope." + }, + { + "description": "Denies the reparent command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-reparent", + "markdownDescription": "Denies the reparent command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_auto_resize command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-auto-resize", + "markdownDescription": "Denies the set_webview_auto_resize command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_webview_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-background-color", + "markdownDescription": "Denies the set_webview_background_color command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_focus command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-focus", + "markdownDescription": "Denies the set_webview_focus command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-position", + "markdownDescription": "Denies the set_webview_position command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-size", + "markdownDescription": "Denies the set_webview_size command without any pre-configured scope." + }, + { + "description": "Denies the set_webview_zoom command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-set-webview-zoom", + "markdownDescription": "Denies the set_webview_zoom command without any pre-configured scope." + }, + { + "description": "Denies the webview_close command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-close", + "markdownDescription": "Denies the webview_close command without any pre-configured scope." + }, + { + "description": "Denies the webview_hide command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-hide", + "markdownDescription": "Denies the webview_hide command without any pre-configured scope." 
+ }, + { + "description": "Denies the webview_position command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-position", + "markdownDescription": "Denies the webview_position command without any pre-configured scope." + }, + { + "description": "Denies the webview_show command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-show", + "markdownDescription": "Denies the webview_show command without any pre-configured scope." + }, + { + "description": "Denies the webview_size command without any pre-configured scope.", + "type": "string", + "const": "core:webview:deny-webview-size", + "markdownDescription": "Denies the webview_size command without any pre-configured scope." + }, + { + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-windows`\n- `allow-scale-factor`\n- `allow-inner-position`\n- `allow-outer-position`\n- `allow-inner-size`\n- `allow-outer-size`\n- `allow-is-fullscreen`\n- `allow-is-minimized`\n- `allow-is-maximized`\n- `allow-is-focused`\n- `allow-is-decorated`\n- `allow-is-resizable`\n- `allow-is-maximizable`\n- `allow-is-minimizable`\n- `allow-is-closable`\n- `allow-is-visible`\n- `allow-is-enabled`\n- `allow-title`\n- `allow-current-monitor`\n- `allow-primary-monitor`\n- `allow-monitor-from-point`\n- `allow-available-monitors`\n- `allow-cursor-position`\n- `allow-theme`\n- `allow-is-always-on-top`\n- `allow-internal-toggle-maximize`", + "type": "string", + "const": "core:window:default", + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-get-all-windows`\n- `allow-scale-factor`\n- `allow-inner-position`\n- `allow-outer-position`\n- `allow-inner-size`\n- `allow-outer-size`\n- `allow-is-fullscreen`\n- `allow-is-minimized`\n- `allow-is-maximized`\n- `allow-is-focused`\n- `allow-is-decorated`\n- `allow-is-resizable`\n- 
`allow-is-maximizable`\n- `allow-is-minimizable`\n- `allow-is-closable`\n- `allow-is-visible`\n- `allow-is-enabled`\n- `allow-title`\n- `allow-current-monitor`\n- `allow-primary-monitor`\n- `allow-monitor-from-point`\n- `allow-available-monitors`\n- `allow-cursor-position`\n- `allow-theme`\n- `allow-is-always-on-top`\n- `allow-internal-toggle-maximize`" + }, + { + "description": "Enables the available_monitors command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-available-monitors", + "markdownDescription": "Enables the available_monitors command without any pre-configured scope." + }, + { + "description": "Enables the center command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-center", + "markdownDescription": "Enables the center command without any pre-configured scope." + }, + { + "description": "Enables the close command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-close", + "markdownDescription": "Enables the close command without any pre-configured scope." + }, + { + "description": "Enables the create command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-create", + "markdownDescription": "Enables the create command without any pre-configured scope." + }, + { + "description": "Enables the current_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-current-monitor", + "markdownDescription": "Enables the current_monitor command without any pre-configured scope." + }, + { + "description": "Enables the cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-cursor-position", + "markdownDescription": "Enables the cursor_position command without any pre-configured scope." 
+ }, + { + "description": "Enables the destroy command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-destroy", + "markdownDescription": "Enables the destroy command without any pre-configured scope." + }, + { + "description": "Enables the get_all_windows command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-get-all-windows", + "markdownDescription": "Enables the get_all_windows command without any pre-configured scope." + }, + { + "description": "Enables the hide command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-hide", + "markdownDescription": "Enables the hide command without any pre-configured scope." + }, + { + "description": "Enables the inner_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-inner-position", + "markdownDescription": "Enables the inner_position command without any pre-configured scope." + }, + { + "description": "Enables the inner_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-inner-size", + "markdownDescription": "Enables the inner_size command without any pre-configured scope." + }, + { + "description": "Enables the internal_toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-internal-toggle-maximize", + "markdownDescription": "Enables the internal_toggle_maximize command without any pre-configured scope." + }, + { + "description": "Enables the is_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-always-on-top", + "markdownDescription": "Enables the is_always_on_top command without any pre-configured scope." 
+ }, + { + "description": "Enables the is_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-closable", + "markdownDescription": "Enables the is_closable command without any pre-configured scope." + }, + { + "description": "Enables the is_decorated command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-decorated", + "markdownDescription": "Enables the is_decorated command without any pre-configured scope." + }, + { + "description": "Enables the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-enabled", + "markdownDescription": "Enables the is_enabled command without any pre-configured scope." + }, + { + "description": "Enables the is_focused command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-focused", + "markdownDescription": "Enables the is_focused command without any pre-configured scope." + }, + { + "description": "Enables the is_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-fullscreen", + "markdownDescription": "Enables the is_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the is_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-maximizable", + "markdownDescription": "Enables the is_maximizable command without any pre-configured scope." + }, + { + "description": "Enables the is_maximized command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-maximized", + "markdownDescription": "Enables the is_maximized command without any pre-configured scope." 
+ }, + { + "description": "Enables the is_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-minimizable", + "markdownDescription": "Enables the is_minimizable command without any pre-configured scope." + }, + { + "description": "Enables the is_minimized command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-minimized", + "markdownDescription": "Enables the is_minimized command without any pre-configured scope." + }, + { + "description": "Enables the is_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-resizable", + "markdownDescription": "Enables the is_resizable command without any pre-configured scope." + }, + { + "description": "Enables the is_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-is-visible", + "markdownDescription": "Enables the is_visible command without any pre-configured scope." + }, + { + "description": "Enables the maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-maximize", + "markdownDescription": "Enables the maximize command without any pre-configured scope." + }, + { + "description": "Enables the minimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-minimize", + "markdownDescription": "Enables the minimize command without any pre-configured scope." + }, + { + "description": "Enables the monitor_from_point command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-monitor-from-point", + "markdownDescription": "Enables the monitor_from_point command without any pre-configured scope." 
+ }, + { + "description": "Enables the outer_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-outer-position", + "markdownDescription": "Enables the outer_position command without any pre-configured scope." + }, + { + "description": "Enables the outer_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-outer-size", + "markdownDescription": "Enables the outer_size command without any pre-configured scope." + }, + { + "description": "Enables the primary_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-primary-monitor", + "markdownDescription": "Enables the primary_monitor command without any pre-configured scope." + }, + { + "description": "Enables the request_user_attention command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-request-user-attention", + "markdownDescription": "Enables the request_user_attention command without any pre-configured scope." + }, + { + "description": "Enables the scale_factor command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-scale-factor", + "markdownDescription": "Enables the scale_factor command without any pre-configured scope." + }, + { + "description": "Enables the set_always_on_bottom command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-always-on-bottom", + "markdownDescription": "Enables the set_always_on_bottom command without any pre-configured scope." + }, + { + "description": "Enables the set_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-always-on-top", + "markdownDescription": "Enables the set_always_on_top command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-background-color", + "markdownDescription": "Enables the set_background_color command without any pre-configured scope." + }, + { + "description": "Enables the set_badge_count command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-badge-count", + "markdownDescription": "Enables the set_badge_count command without any pre-configured scope." + }, + { + "description": "Enables the set_badge_label command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-badge-label", + "markdownDescription": "Enables the set_badge_label command without any pre-configured scope." + }, + { + "description": "Enables the set_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-closable", + "markdownDescription": "Enables the set_closable command without any pre-configured scope." + }, + { + "description": "Enables the set_content_protected command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-content-protected", + "markdownDescription": "Enables the set_content_protected command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_grab command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-grab", + "markdownDescription": "Enables the set_cursor_grab command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-icon", + "markdownDescription": "Enables the set_cursor_icon command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-position", + "markdownDescription": "Enables the set_cursor_position command without any pre-configured scope." + }, + { + "description": "Enables the set_cursor_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-cursor-visible", + "markdownDescription": "Enables the set_cursor_visible command without any pre-configured scope." + }, + { + "description": "Enables the set_decorations command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-decorations", + "markdownDescription": "Enables the set_decorations command without any pre-configured scope." + }, + { + "description": "Enables the set_effects command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-effects", + "markdownDescription": "Enables the set_effects command without any pre-configured scope." + }, + { + "description": "Enables the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-enabled", + "markdownDescription": "Enables the set_enabled command without any pre-configured scope." + }, + { + "description": "Enables the set_focus command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focus", + "markdownDescription": "Enables the set_focus command without any pre-configured scope." + }, + { + "description": "Enables the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focusable", + "markdownDescription": "Enables the set_focusable command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-fullscreen", + "markdownDescription": "Enables the set_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-icon", + "markdownDescription": "Enables the set_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_ignore_cursor_events command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-ignore-cursor-events", + "markdownDescription": "Enables the set_ignore_cursor_events command without any pre-configured scope." + }, + { + "description": "Enables the set_max_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-max-size", + "markdownDescription": "Enables the set_max_size command without any pre-configured scope." + }, + { + "description": "Enables the set_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-maximizable", + "markdownDescription": "Enables the set_maximizable command without any pre-configured scope." + }, + { + "description": "Enables the set_min_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-min-size", + "markdownDescription": "Enables the set_min_size command without any pre-configured scope." + }, + { + "description": "Enables the set_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-minimizable", + "markdownDescription": "Enables the set_minimizable command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_overlay_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-overlay-icon", + "markdownDescription": "Enables the set_overlay_icon command without any pre-configured scope." + }, + { + "description": "Enables the set_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-position", + "markdownDescription": "Enables the set_position command without any pre-configured scope." + }, + { + "description": "Enables the set_progress_bar command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-progress-bar", + "markdownDescription": "Enables the set_progress_bar command without any pre-configured scope." + }, + { + "description": "Enables the set_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-resizable", + "markdownDescription": "Enables the set_resizable command without any pre-configured scope." + }, + { + "description": "Enables the set_shadow command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-shadow", + "markdownDescription": "Enables the set_shadow command without any pre-configured scope." + }, + { + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-simple-fullscreen", + "markdownDescription": "Enables the set_simple_fullscreen command without any pre-configured scope." + }, + { + "description": "Enables the set_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-size", + "markdownDescription": "Enables the set_size command without any pre-configured scope." 
+ }, + { + "description": "Enables the set_size_constraints command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-size-constraints", + "markdownDescription": "Enables the set_size_constraints command without any pre-configured scope." + }, + { + "description": "Enables the set_skip_taskbar command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-skip-taskbar", + "markdownDescription": "Enables the set_skip_taskbar command without any pre-configured scope." + }, + { + "description": "Enables the set_theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-theme", + "markdownDescription": "Enables the set_theme command without any pre-configured scope." + }, + { + "description": "Enables the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-title", + "markdownDescription": "Enables the set_title command without any pre-configured scope." + }, + { + "description": "Enables the set_title_bar_style command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-title-bar-style", + "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." + }, + { + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." + }, + { + "description": "Enables the show command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-show", + "markdownDescription": "Enables the show command without any pre-configured scope." 
+ }, + { + "description": "Enables the start_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-start-dragging", + "markdownDescription": "Enables the start_dragging command without any pre-configured scope." + }, + { + "description": "Enables the start_resize_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-start-resize-dragging", + "markdownDescription": "Enables the start_resize_dragging command without any pre-configured scope." + }, + { + "description": "Enables the theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-theme", + "markdownDescription": "Enables the theme command without any pre-configured scope." + }, + { + "description": "Enables the title command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-title", + "markdownDescription": "Enables the title command without any pre-configured scope." + }, + { + "description": "Enables the toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-toggle-maximize", + "markdownDescription": "Enables the toggle_maximize command without any pre-configured scope." + }, + { + "description": "Enables the unmaximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-unmaximize", + "markdownDescription": "Enables the unmaximize command without any pre-configured scope." + }, + { + "description": "Enables the unminimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-unminimize", + "markdownDescription": "Enables the unminimize command without any pre-configured scope." 
+ }, + { + "description": "Denies the available_monitors command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-available-monitors", + "markdownDescription": "Denies the available_monitors command without any pre-configured scope." + }, + { + "description": "Denies the center command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-center", + "markdownDescription": "Denies the center command without any pre-configured scope." + }, + { + "description": "Denies the close command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-close", + "markdownDescription": "Denies the close command without any pre-configured scope." + }, + { + "description": "Denies the create command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-create", + "markdownDescription": "Denies the create command without any pre-configured scope." + }, + { + "description": "Denies the current_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-current-monitor", + "markdownDescription": "Denies the current_monitor command without any pre-configured scope." + }, + { + "description": "Denies the cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-cursor-position", + "markdownDescription": "Denies the cursor_position command without any pre-configured scope." + }, + { + "description": "Denies the destroy command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-destroy", + "markdownDescription": "Denies the destroy command without any pre-configured scope." + }, + { + "description": "Denies the get_all_windows command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-get-all-windows", + "markdownDescription": "Denies the get_all_windows command without any pre-configured scope." 
+ }, + { + "description": "Denies the hide command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-hide", + "markdownDescription": "Denies the hide command without any pre-configured scope." + }, + { + "description": "Denies the inner_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-inner-position", + "markdownDescription": "Denies the inner_position command without any pre-configured scope." + }, + { + "description": "Denies the inner_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-inner-size", + "markdownDescription": "Denies the inner_size command without any pre-configured scope." + }, + { + "description": "Denies the internal_toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-internal-toggle-maximize", + "markdownDescription": "Denies the internal_toggle_maximize command without any pre-configured scope." + }, + { + "description": "Denies the is_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-always-on-top", + "markdownDescription": "Denies the is_always_on_top command without any pre-configured scope." + }, + { + "description": "Denies the is_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-closable", + "markdownDescription": "Denies the is_closable command without any pre-configured scope." + }, + { + "description": "Denies the is_decorated command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-decorated", + "markdownDescription": "Denies the is_decorated command without any pre-configured scope." 
+ }, + { + "description": "Denies the is_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-enabled", + "markdownDescription": "Denies the is_enabled command without any pre-configured scope." + }, + { + "description": "Denies the is_focused command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-focused", + "markdownDescription": "Denies the is_focused command without any pre-configured scope." + }, + { + "description": "Denies the is_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-fullscreen", + "markdownDescription": "Denies the is_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the is_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-maximizable", + "markdownDescription": "Denies the is_maximizable command without any pre-configured scope." + }, + { + "description": "Denies the is_maximized command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-maximized", + "markdownDescription": "Denies the is_maximized command without any pre-configured scope." + }, + { + "description": "Denies the is_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-minimizable", + "markdownDescription": "Denies the is_minimizable command without any pre-configured scope." + }, + { + "description": "Denies the is_minimized command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-minimized", + "markdownDescription": "Denies the is_minimized command without any pre-configured scope." 
+ }, + { + "description": "Denies the is_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-resizable", + "markdownDescription": "Denies the is_resizable command without any pre-configured scope." + }, + { + "description": "Denies the is_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-is-visible", + "markdownDescription": "Denies the is_visible command without any pre-configured scope." + }, + { + "description": "Denies the maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-maximize", + "markdownDescription": "Denies the maximize command without any pre-configured scope." + }, + { + "description": "Denies the minimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-minimize", + "markdownDescription": "Denies the minimize command without any pre-configured scope." + }, + { + "description": "Denies the monitor_from_point command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-monitor-from-point", + "markdownDescription": "Denies the monitor_from_point command without any pre-configured scope." + }, + { + "description": "Denies the outer_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-outer-position", + "markdownDescription": "Denies the outer_position command without any pre-configured scope." + }, + { + "description": "Denies the outer_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-outer-size", + "markdownDescription": "Denies the outer_size command without any pre-configured scope." 
+ }, + { + "description": "Denies the primary_monitor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-primary-monitor", + "markdownDescription": "Denies the primary_monitor command without any pre-configured scope." + }, + { + "description": "Denies the request_user_attention command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-request-user-attention", + "markdownDescription": "Denies the request_user_attention command without any pre-configured scope." + }, + { + "description": "Denies the scale_factor command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-scale-factor", + "markdownDescription": "Denies the scale_factor command without any pre-configured scope." + }, + { + "description": "Denies the set_always_on_bottom command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-always-on-bottom", + "markdownDescription": "Denies the set_always_on_bottom command without any pre-configured scope." + }, + { + "description": "Denies the set_always_on_top command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-always-on-top", + "markdownDescription": "Denies the set_always_on_top command without any pre-configured scope." + }, + { + "description": "Denies the set_background_color command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-background-color", + "markdownDescription": "Denies the set_background_color command without any pre-configured scope." + }, + { + "description": "Denies the set_badge_count command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-badge-count", + "markdownDescription": "Denies the set_badge_count command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_badge_label command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-badge-label", + "markdownDescription": "Denies the set_badge_label command without any pre-configured scope." + }, + { + "description": "Denies the set_closable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-closable", + "markdownDescription": "Denies the set_closable command without any pre-configured scope." + }, + { + "description": "Denies the set_content_protected command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-content-protected", + "markdownDescription": "Denies the set_content_protected command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_grab command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-grab", + "markdownDescription": "Denies the set_cursor_grab command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-icon", + "markdownDescription": "Denies the set_cursor_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-position", + "markdownDescription": "Denies the set_cursor_position command without any pre-configured scope." + }, + { + "description": "Denies the set_cursor_visible command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-cursor-visible", + "markdownDescription": "Denies the set_cursor_visible command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_decorations command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-decorations", + "markdownDescription": "Denies the set_decorations command without any pre-configured scope." + }, + { + "description": "Denies the set_effects command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-effects", + "markdownDescription": "Denies the set_effects command without any pre-configured scope." + }, + { + "description": "Denies the set_enabled command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-enabled", + "markdownDescription": "Denies the set_enabled command without any pre-configured scope." + }, + { + "description": "Denies the set_focus command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focus", + "markdownDescription": "Denies the set_focus command without any pre-configured scope." + }, + { + "description": "Denies the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focusable", + "markdownDescription": "Denies the set_focusable command without any pre-configured scope." + }, + { + "description": "Denies the set_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-fullscreen", + "markdownDescription": "Denies the set_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the set_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-icon", + "markdownDescription": "Denies the set_icon command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_ignore_cursor_events command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-ignore-cursor-events", + "markdownDescription": "Denies the set_ignore_cursor_events command without any pre-configured scope." + }, + { + "description": "Denies the set_max_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-max-size", + "markdownDescription": "Denies the set_max_size command without any pre-configured scope." + }, + { + "description": "Denies the set_maximizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-maximizable", + "markdownDescription": "Denies the set_maximizable command without any pre-configured scope." + }, + { + "description": "Denies the set_min_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-min-size", + "markdownDescription": "Denies the set_min_size command without any pre-configured scope." + }, + { + "description": "Denies the set_minimizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-minimizable", + "markdownDescription": "Denies the set_minimizable command without any pre-configured scope." + }, + { + "description": "Denies the set_overlay_icon command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-overlay-icon", + "markdownDescription": "Denies the set_overlay_icon command without any pre-configured scope." + }, + { + "description": "Denies the set_position command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-position", + "markdownDescription": "Denies the set_position command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_progress_bar command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-progress-bar", + "markdownDescription": "Denies the set_progress_bar command without any pre-configured scope." + }, + { + "description": "Denies the set_resizable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-resizable", + "markdownDescription": "Denies the set_resizable command without any pre-configured scope." + }, + { + "description": "Denies the set_shadow command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-shadow", + "markdownDescription": "Denies the set_shadow command without any pre-configured scope." + }, + { + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-simple-fullscreen", + "markdownDescription": "Denies the set_simple_fullscreen command without any pre-configured scope." + }, + { + "description": "Denies the set_size command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-size", + "markdownDescription": "Denies the set_size command without any pre-configured scope." + }, + { + "description": "Denies the set_size_constraints command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-size-constraints", + "markdownDescription": "Denies the set_size_constraints command without any pre-configured scope." + }, + { + "description": "Denies the set_skip_taskbar command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-skip-taskbar", + "markdownDescription": "Denies the set_skip_taskbar command without any pre-configured scope." 
+ }, + { + "description": "Denies the set_theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-theme", + "markdownDescription": "Denies the set_theme command without any pre-configured scope." + }, + { + "description": "Denies the set_title command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-title", + "markdownDescription": "Denies the set_title command without any pre-configured scope." + }, + { + "description": "Denies the set_title_bar_style command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-title-bar-style", + "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." + }, + { + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." + }, + { + "description": "Denies the show command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-show", + "markdownDescription": "Denies the show command without any pre-configured scope." + }, + { + "description": "Denies the start_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-start-dragging", + "markdownDescription": "Denies the start_dragging command without any pre-configured scope." + }, + { + "description": "Denies the start_resize_dragging command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-start-resize-dragging", + "markdownDescription": "Denies the start_resize_dragging command without any pre-configured scope." 
+ }, + { + "description": "Denies the theme command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-theme", + "markdownDescription": "Denies the theme command without any pre-configured scope." + }, + { + "description": "Denies the title command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-title", + "markdownDescription": "Denies the title command without any pre-configured scope." + }, + { + "description": "Denies the toggle_maximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-toggle-maximize", + "markdownDescription": "Denies the toggle_maximize command without any pre-configured scope." + }, + { + "description": "Denies the unmaximize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-unmaximize", + "markdownDescription": "Denies the unmaximize command without any pre-configured scope." + }, + { + "description": "Denies the unminimize command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-unminimize", + "markdownDescription": "Denies the unminimize command without any pre-configured scope." + }, + { + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`", + "type": "string", + "const": "shell:default", + "markdownDescription": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n\n#### This default permission set includes:\n\n- `allow-open`" + }, + { + "description": "Enables the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-execute", + "markdownDescription": "Enables the execute command without any pre-configured scope." + }, + { + "description": "Enables the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-kill", + "markdownDescription": "Enables the kill command without any pre-configured scope." + }, + { + "description": "Enables the open command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-open", + "markdownDescription": "Enables the open command without any pre-configured scope." + }, + { + "description": "Enables the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-spawn", + "markdownDescription": "Enables the spawn command without any pre-configured scope." + }, + { + "description": "Enables the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:allow-stdin-write", + "markdownDescription": "Enables the stdin_write command without any pre-configured scope." + }, + { + "description": "Denies the execute command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-execute", + "markdownDescription": "Denies the execute command without any pre-configured scope." + }, + { + "description": "Denies the kill command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-kill", + "markdownDescription": "Denies the kill command without any pre-configured scope." + }, + { + "description": "Denies the open command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-open", + "markdownDescription": "Denies the open command without any pre-configured scope." 
+ }, + { + "description": "Denies the spawn command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-spawn", + "markdownDescription": "Denies the spawn command without any pre-configured scope." + }, + { + "description": "Denies the stdin_write command without any pre-configured scope.", + "type": "string", + "const": "shell:deny-stdin-write", + "markdownDescription": "Denies the stdin_write command without any pre-configured scope." + } + ] + }, + "Value": { + "description": "All supported ACL values.", + "anyOf": [ + { + "description": "Represents a null JSON value.", + "type": "null" + }, + { + "description": "Represents a [`bool`].", + "type": "boolean" + }, + { + "description": "Represents a valid ACL [`Number`].", + "allOf": [ + { + "$ref": "#/definitions/Number" + } + ] + }, + { + "description": "Represents a [`String`].", + "type": "string" + }, + { + "description": "Represents a list of other [`Value`]s.", + "type": "array", + "items": { + "$ref": "#/definitions/Value" + } + }, + { + "description": "Represents a map of [`String`] keys to [`Value`]s.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Value" + } + } + ] + }, + "Number": { + "description": "A valid ACL number.", + "anyOf": [ + { + "description": "Represents an [`i64`].", + "type": "integer", + "format": "int64" + }, + { + "description": "Represents a [`f64`].", + "type": "number", + "format": "double" + } + ] + }, + "Target": { + "description": "Platform target.", + "oneOf": [ + { + "description": "MacOS.", + "type": "string", + "enum": ["macOS"] + }, + { + "description": "Windows.", + "type": "string", + "enum": ["windows"] + }, + { + "description": "Linux.", + "type": "string", + "enum": ["linux"] + }, + { + "description": "Android.", + "type": "string", + "enum": ["android"] + }, + { + "description": "iOS.", + "type": "string", + "enum": ["iOS"] + } + ] + }, + "ShellScopeEntryAllowedArg": { + "description": "A command argument allowed 
to be executed by the webview API.", + "anyOf": [ + { + "description": "A non-configurable argument that is passed to the command in the order it was specified.", + "type": "string" + }, + { + "description": "A variable that is set while calling the command from the webview API.", + "type": "object", + "required": ["validator"], + "properties": { + "raw": { + "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.", + "default": false, + "type": "boolean" + }, + "validator": { + "description": "[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ", + "type": "string" + } + }, + "additionalProperties": false + } + ] + }, + "ShellScopeEntryAllowedArgs": { + "description": "A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. 
A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration.", + "anyOf": [ + { + "description": "Use a simple boolean to allow all or disable all arguments to this command configuration.", + "type": "boolean" + }, + { + "description": "A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.", + "type": "array", + "items": { + "$ref": "#/definitions/ShellScopeEntryAllowedArg" + } + } + ] + } + } +} diff --git a/foundry/packages/desktop/src-tauri/icons/128x128.png b/foundry/packages/desktop/src-tauri/icons/128x128.png new file mode 100644 index 0000000..40cb79f Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/128x128.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/128x128@2x.png b/foundry/packages/desktop/src-tauri/icons/128x128@2x.png new file mode 100644 index 0000000..de99a94 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/128x128@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/32x32.png b/foundry/packages/desktop/src-tauri/icons/32x32.png new file mode 100644 index 0000000..0b9b8d9 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/32x32.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/64x64.png b/foundry/packages/desktop/src-tauri/icons/64x64.png new file mode 100644 index 0000000..4849c59 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/64x64.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square107x107Logo.png b/foundry/packages/desktop/src-tauri/icons/Square107x107Logo.png new file mode 100644 index 0000000..4a7904a Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square107x107Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square142x142Logo.png b/foundry/packages/desktop/src-tauri/icons/Square142x142Logo.png new file mode 100644 index 
0000000..598a4e0 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square142x142Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square150x150Logo.png b/foundry/packages/desktop/src-tauri/icons/Square150x150Logo.png new file mode 100644 index 0000000..1c33be7 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square150x150Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square284x284Logo.png b/foundry/packages/desktop/src-tauri/icons/Square284x284Logo.png new file mode 100644 index 0000000..67eb476 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square284x284Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square30x30Logo.png b/foundry/packages/desktop/src-tauri/icons/Square30x30Logo.png new file mode 100644 index 0000000..276c779 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square30x30Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square310x310Logo.png b/foundry/packages/desktop/src-tauri/icons/Square310x310Logo.png new file mode 100644 index 0000000..41a103e Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square310x310Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square44x44Logo.png b/foundry/packages/desktop/src-tauri/icons/Square44x44Logo.png new file mode 100644 index 0000000..ead9269 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square44x44Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square71x71Logo.png b/foundry/packages/desktop/src-tauri/icons/Square71x71Logo.png new file mode 100644 index 0000000..ae7086c Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/Square71x71Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/Square89x89Logo.png b/foundry/packages/desktop/src-tauri/icons/Square89x89Logo.png new file mode 100644 index 0000000..c38274b Binary files 
/dev/null and b/foundry/packages/desktop/src-tauri/icons/Square89x89Logo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/StoreLogo.png b/foundry/packages/desktop/src-tauri/icons/StoreLogo.png new file mode 100644 index 0000000..bfedcec Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/StoreLogo.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-anydpi-v26/ic_launcher.xml b/foundry/packages/desktop/src-tauri/icons/android/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 0000000..2ffbf24 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/icons/android/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000..8300460 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_foreground.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_foreground.png new file mode 100644 index 0000000..3fd9ebd Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_foreground.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_round.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 0000000..8300460 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-hdpi/ic_launcher_round.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000..7d4c93b Binary files /dev/null and 
b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_foreground.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_foreground.png new file mode 100644 index 0000000..0e46d02 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_foreground.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_round.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 0000000..7d4c93b Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-mdpi/ic_launcher_round.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 0000000..fec86d5 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_foreground.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000..1eae667 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_foreground.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_round.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 0000000..fec86d5 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_round.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher.png 
new file mode 100644 index 0000000..e45a2dc Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_foreground.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000..d3bb642 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_foreground.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_round.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 0000000..e45a2dc Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_round.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 0000000..d72589c Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_foreground.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_foreground.png new file mode 100644 index 0000000..d290579 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_foreground.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_round.png b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 0000000..d72589c Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git 
a/foundry/packages/desktop/src-tauri/icons/android/values/ic_launcher_background.xml b/foundry/packages/desktop/src-tauri/icons/android/values/ic_launcher_background.xml new file mode 100644 index 0000000..ea9c223 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/icons/android/values/ic_launcher_background.xml @@ -0,0 +1,4 @@ + + + #fff + \ No newline at end of file diff --git a/foundry/packages/desktop/src-tauri/icons/icon-source.svg b/foundry/packages/desktop/src-tauri/icons/icon-source.svg new file mode 100644 index 0000000..7db083d --- /dev/null +++ b/foundry/packages/desktop/src-tauri/icons/icon-source.svg @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/foundry/packages/desktop/src-tauri/icons/icon.icns b/foundry/packages/desktop/src-tauri/icons/icon.icns new file mode 100644 index 0000000..5757cea Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/icon.icns differ diff --git a/foundry/packages/desktop/src-tauri/icons/icon.ico b/foundry/packages/desktop/src-tauri/icons/icon.ico new file mode 100644 index 0000000..2dc8306 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/icon.ico differ diff --git a/foundry/packages/desktop/src-tauri/icons/icon.png b/foundry/packages/desktop/src-tauri/icons/icon.png new file mode 100644 index 0000000..c11a109 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/icon.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@1x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@1x.png new file mode 100644 index 0000000..db892f7 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@1x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x-1.png 
b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x-1.png new file mode 100644 index 0000000..1fe129d Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x-1.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x.png new file mode 100644 index 0000000..1fe129d Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@3x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@3x.png new file mode 100644 index 0000000..866c461 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-20x20@3x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@1x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@1x.png new file mode 100644 index 0000000..6eca363 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@1x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x-1.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x-1.png new file mode 100644 index 0000000..092aab8 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x-1.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x.png new file mode 100644 index 0000000..092aab8 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@3x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@3x.png new file mode 100644 index 0000000..119d3ca Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-29x29@3x.png differ diff --git 
a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@1x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@1x.png new file mode 100644 index 0000000..1fe129d Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@1x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x-1.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x-1.png new file mode 100644 index 0000000..8b10e8a Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x-1.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x.png new file mode 100644 index 0000000..8b10e8a Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@3x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@3x.png new file mode 100644 index 0000000..e9e7129 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-40x40@3x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-512@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-512@2x.png new file mode 100644 index 0000000..45d9c6e Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-512@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@2x.png new file mode 100644 index 0000000..e9e7129 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@3x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@3x.png new file mode 100644 index 0000000..0eb464d Binary files /dev/null and 
b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-60x60@3x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@1x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@1x.png new file mode 100644 index 0000000..671a540 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@1x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@2x.png new file mode 100644 index 0000000..3958a90 Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-76x76@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-83.5x83.5@2x.png b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-83.5x83.5@2x.png new file mode 100644 index 0000000..c8290ee Binary files /dev/null and b/foundry/packages/desktop/src-tauri/icons/ios/AppIcon-83.5x83.5@2x.png differ diff --git a/foundry/packages/desktop/src-tauri/src/lib.rs b/foundry/packages/desktop/src-tauri/src/lib.rs new file mode 100644 index 0000000..efcbd75 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/src/lib.rs @@ -0,0 +1,154 @@ +use std::sync::Mutex; +use tauri::{AppHandle, LogicalPosition, Manager, WebviewUrl, WebviewWindowBuilder}; +use tauri_plugin_shell::process::CommandChild; +use tauri_plugin_shell::ShellExt; + +struct BackendState { + child: Mutex>, +} + +#[tauri::command] +fn get_backend_url() -> String { + "http://127.0.0.1:7741".to_string() +} + +#[tauri::command] +async fn backend_health() -> Result { + match reqwest::get("http://127.0.0.1:7741/v1/rivet/metadata").await { + Ok(resp) => Ok(resp.status().is_success()), + Err(_) => Ok(false), + } +} + +async fn wait_for_backend(timeout_secs: u64) -> Result<(), String> { + let start = std::time::Instant::now(); + let timeout = std::time::Duration::from_secs(timeout_secs); + + loop { + if start.elapsed() > timeout { + return Err(format!( + "Backend failed 
to start within {} seconds", + timeout_secs + )); + } + + match reqwest::get("http://127.0.0.1:7741/v1/rivet/metadata").await { + Ok(resp) if resp.status().is_success() => return Ok(()), + _ => {} + } + + tokio::time::sleep(std::time::Duration::from_millis(250)).await; + } +} + +fn spawn_backend(app: &AppHandle) -> Result<(), String> { + let sidecar = app + .shell() + .sidecar("sidecars/foundry-backend") + .map_err(|e| format!("Failed to create sidecar command: {}", e))? + .args(["start", "--host", "127.0.0.1", "--port", "7741"]); + + let (mut rx, child) = sidecar + .spawn() + .map_err(|e| format!("Failed to spawn backend sidecar: {}", e))?; + + // Store the child process handle for cleanup + let state = app.state::(); + *state.child.lock().unwrap() = Some(child); + + // Log sidecar stdout/stderr in a background task + tauri::async_runtime::spawn(async move { + use tauri_plugin_shell::process::CommandEvent; + while let Some(event) = rx.recv().await { + match event { + CommandEvent::Stdout(line) => { + eprintln!("[foundry-backend] {}", String::from_utf8_lossy(&line)); + } + CommandEvent::Stderr(line) => { + eprintln!("[foundry-backend] {}", String::from_utf8_lossy(&line)); + } + CommandEvent::Terminated(payload) => { + eprintln!( + "[foundry-backend] process exited with code {:?}", + payload.code + ); + break; + } + CommandEvent::Error(err) => { + eprintln!("[foundry-backend] error: {}", err); + break; + } + _ => {} + } + } + }); + + Ok(()) +} + +#[cfg_attr(mobile, tauri::mobile_entry_point)] +pub fn run() { + tauri::Builder::default() + .plugin(tauri_plugin_shell::init()) + .manage(BackendState { + child: Mutex::new(None), + }) + .invoke_handler(tauri::generate_handler![get_backend_url, backend_health]) + .setup(|app| { + // Create main window programmatically so we can set traffic light position + let url = if cfg!(debug_assertions) { + WebviewUrl::External("http://localhost:4173".parse().unwrap()) + } else { + WebviewUrl::default() + }; + + let mut builder = 
WebviewWindowBuilder::new(app, "main", url) + .title("Foundry") + .inner_size(1280.0, 800.0) + .min_inner_size(900.0, 600.0) + .resizable(true) + .theme(Some(tauri::Theme::Dark)) + .title_bar_style(tauri::TitleBarStyle::Overlay) + .hidden_title(true); + + #[cfg(target_os = "macos")] + { + builder = builder.traffic_light_position(LogicalPosition::new(14.0, 14.0)); + } + + builder.build()?; + + // In debug mode, assume the developer is running the backend externally + if cfg!(debug_assertions) { + eprintln!("[foundry-desktop] Dev mode: skipping sidecar spawn. Run the backend separately."); + return Ok(()); + } + + let handle = app.handle().clone(); + tauri::async_runtime::spawn(async move { + if let Err(e) = spawn_backend(&handle) { + eprintln!("[foundry-desktop] Failed to start backend: {}", e); + return; + } + + match wait_for_backend(30).await { + Ok(()) => eprintln!("[foundry-desktop] Backend is ready."), + Err(e) => eprintln!("[foundry-desktop] {}", e), + } + }); + + Ok(()) + }) + .on_window_event(|window, event| { + if let tauri::WindowEvent::Destroyed = event { + let state = window.state::(); + let child = state.child.lock().unwrap().take(); + if let Some(child) = child { + let _ = child.kill(); + eprintln!("[foundry-desktop] Backend sidecar killed."); + } + } + }) + .run(tauri::generate_context!()) + .expect("error while running Foundry"); +} diff --git a/foundry/packages/desktop/src-tauri/src/main.rs b/foundry/packages/desktop/src-tauri/src/main.rs new file mode 100644 index 0000000..bc7e592 --- /dev/null +++ b/foundry/packages/desktop/src-tauri/src/main.rs @@ -0,0 +1,5 @@ +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + +fn main() { + foundry::run(); +} diff --git a/foundry/packages/desktop/src-tauri/tauri.conf.json b/foundry/packages/desktop/src-tauri/tauri.conf.json new file mode 100644 index 0000000..53221cb --- /dev/null +++ b/foundry/packages/desktop/src-tauri/tauri.conf.json @@ -0,0 +1,31 @@ +{ + "$schema": 
"https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-cli/schema.json", + "productName": "Foundry", + "version": "0.1.0", + "identifier": "dev.sandboxagent.foundry", + "build": { + "beforeDevCommand": "FOUNDRY_FRONTEND_CLIENT_MODE=mock VITE_DESKTOP=1 pnpm --filter @sandbox-agent/foundry-frontend dev", + "devUrl": "http://localhost:4173", + "frontendDist": "../frontend-dist" + }, + "app": { + "windows": [], + "security": { + "csp": null + } + }, + "bundle": { + "active": true, + "targets": ["dmg", "app"], + "icon": ["icons/32x32.png", "icons/128x128.png", "icons/128x128@2x.png", "icons/icon.icns", "icons/icon.ico"], + "macOS": { + "signingIdentity": null + }, + "externalBin": ["sidecars/foundry-backend"] + }, + "plugins": { + "shell": { + "open": true + } + } +} diff --git a/foundry/packages/frontend/index.html b/foundry/packages/frontend/index.html new file mode 100644 index 0000000..4e72d23 --- /dev/null +++ b/foundry/packages/frontend/index.html @@ -0,0 +1,21 @@ + + + + + + + + + Foundry + + +
+ + + + diff --git a/foundry/packages/frontend/package.json b/foundry/packages/frontend/package.json new file mode 100644 index 0000000..793a12d --- /dev/null +++ b/foundry/packages/frontend/package.json @@ -0,0 +1,36 @@ +{ + "name": "@sandbox-agent/foundry-frontend", + "version": "0.1.0", + "private": true, + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "typecheck": "tsc --noEmit", + "test": "vitest run" + }, + "dependencies": { + "@sandbox-agent/foundry-client": "workspace:*", + "@sandbox-agent/foundry-shared": "workspace:*", + "@sandbox-agent/react": "workspace:*", + "@tanstack/react-query": "^5.85.5", + "@tanstack/react-router": "^1.132.23", + "@tanstack/react-virtual": "^3.13.22", + "baseui": "^16.1.1", + "lucide-react": "^0.542.0", + "react": "^19.1.1", + "react-dom": "^19.1.1", + "sandbox-agent": "workspace:*", + "styletron-engine-atomic": "^1.6.2", + "styletron-react": "^6.1.1" + }, + "devDependencies": { + "@react-grab/mcp": "^0.1.13", + "@types/react": "^19.1.12", + "@types/react-dom": "^19.1.9", + "@vitejs/plugin-react": "^5.0.3", + "react-grab": "^0.1.13", + "tsup": "^8.5.0", + "vite": "^7.1.3" + } +} diff --git a/foundry/packages/frontend/public/favicon.svg b/foundry/packages/frontend/public/favicon.svg new file mode 100644 index 0000000..ec605d6 --- /dev/null +++ b/foundry/packages/frontend/public/favicon.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/foundry/packages/frontend/src/app/router.tsx b/foundry/packages/frontend/src/app/router.tsx new file mode 100644 index 0000000..dd22724 --- /dev/null +++ b/foundry/packages/frontend/src/app/router.tsx @@ -0,0 +1,328 @@ +import { type ReactNode, useEffect } from "react"; +import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared"; +import { useSubscription } from "@sandbox-agent/foundry-client"; +import { Navigate, Outlet, createRootRoute, createRoute, createRouter } from "@tanstack/react-router"; +import { MockLayout } from "../components/mock-layout"; +import { 
+ MockAccountSettingsPage, + MockHostedCheckoutPage, + MockOrganizationBillingPage, + MockOrganizationSelectorPage, + MockOrganizationSettingsPage, + MockSignInPage, +} from "../components/mock-onboarding"; +import { defaultOrganizationId, isMockFrontendClient } from "../lib/env"; +import { subscriptionManager } from "../lib/subscription"; +import { activeMockOrganization, getMockOrganizationById, isAppSnapshotBootstrapping, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; + +const rootRoute = createRootRoute({ + component: RootLayout, +}); + +const indexRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/", + component: IndexRoute, +}); + +const signInRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/signin", + component: SignInRoute, +}); + +const accountRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/account", + component: AccountRoute, +}); + +const organizationsRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/organizations", + component: OrganizationsRoute, +}); + +const organizationSettingsRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/organizations/$organizationId/settings", + component: OrganizationSettingsRoute, +}); + +const organizationBillingRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/organizations/$organizationId/billing", + component: OrganizationBillingRoute, +}); + +const organizationCheckoutRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/organizations/$organizationId/checkout/$planId", + component: OrganizationCheckoutRoute, +}); + +const organizationRoute = createRoute({ + getParentRoute: () => rootRoute, + path: "/organizations/$organizationId", + component: OrganizationLayoutRoute, +}); + +const organizationIndexRoute = createRoute({ + getParentRoute: () => organizationRoute, + path: "/", + component: OrganizationRoute, +}); + +const taskRoute = createRoute({ + getParentRoute: () => organizationRoute, + 
path: "tasks/$taskId", + validateSearch: (search: Record) => ({ + sessionId: typeof search.sessionId === "string" && search.sessionId.trim().length > 0 ? search.sessionId : undefined, + }), + component: TaskRoute, +}); + +const repoRoute = createRoute({ + getParentRoute: () => organizationRoute, + path: "repos/$repoId", + component: RepoRoute, +}); + +const routeTree = rootRoute.addChildren([ + indexRoute, + signInRoute, + accountRoute, + organizationsRoute, + organizationSettingsRoute, + organizationBillingRoute, + organizationCheckoutRoute, + organizationRoute.addChildren([organizationIndexRoute, taskRoute, repoRoute]), +]); + +export const router = createRouter({ routeTree }); + +declare module "@tanstack/react-router" { + interface Register { + router: typeof router; + } +} + +function OrganizationLayoutRoute() { + return ; +} + +function AppLoadingScreen({ label }: { label: string }) { + return ( +
+ {label} +
+ ); +} + +function IndexRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + const activeOrganization = activeMockOrganization(snapshot); + if (activeOrganization) { + return ; + } + + return ; +} + +function SignInRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_in") { + return ; + } + + return ; +} + +function AccountRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + return ; +} + +function OrganizationsRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + return ; +} + +function OrganizationSettingsRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + const { organizationId } = organizationSettingsRoute.useParams(); + const organization = getMockOrganizationById(snapshot, organizationId); + if (!organization) { + return ; + } + + return ; +} + +function OrganizationBillingRoute() { + const snapshot = useMockAppSnapshot(); + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + const { organizationId } = organizationBillingRoute.useParams(); + const organization = getMockOrganizationById(snapshot, organizationId); + if (!organization) { + return ; + } + + return ; +} + +function OrganizationCheckoutRoute() { + const snapshot = useMockAppSnapshot(); + if 
(!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + if (snapshot.auth.status === "signed_out") { + return ; + } + + const { organizationId, planId } = organizationCheckoutRoute.useParams(); + const organization = getMockOrganizationById(snapshot, organizationId); + if (!organization) { + return ; + } + + return ; +} + +function OrganizationRoute() { + const { organizationId } = organizationRoute.useParams(); + return ( + + + + ); +} + +function OrganizationView({ + organizationId, + selectedTaskId, + selectedSessionId, +}: { + organizationId: string; + selectedTaskId: string | null; + selectedSessionId: string | null; +}) { + return ; +} + +function TaskRoute() { + const { organizationId, taskId } = taskRoute.useParams(); + const { sessionId } = taskRoute.useSearch(); + return ( + + + + ); +} + +function TaskView({ organizationId, taskId, sessionId }: { organizationId: string; taskId: string; sessionId: string | null }) { + return ; +} + +function RepoRoute() { + const { organizationId, repoId } = repoRoute.useParams(); + return ( + + + + ); +} + +function AppOrganizationGate({ organizationId, children }: { organizationId: string; children: ReactNode }) { + const client = useMockAppClient(); + const snapshot = useMockAppSnapshot(); + const organization = snapshot.organizations.find((candidate) => candidate.organizationId === organizationId) ?? null; + + useEffect(() => { + if (organization && snapshot.activeOrganizationId !== organization.id) { + void client.selectOrganization(organization.id); + } + }, [client, organization, snapshot.activeOrganizationId]); + + if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { + return ; + } + + if (snapshot.auth.status === "signed_out") { + return ; + } + + if (!organization) { + return isMockFrontendClient ? 
: ; + } + + return <>{children}; +} + +function RepoRouteInner({ organizationId, repoId }: { organizationId: string; repoId: string }) { + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const activeTaskId = organizationState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id; + if (!activeTaskId) { + return ; + } + return ( + + ); +} + +function RootLayout() { + return ( + <> + + + ); +} diff --git a/foundry/packages/frontend/src/app/theme.ts b/foundry/packages/frontend/src/app/theme.ts new file mode 100644 index 0000000..5b62453 --- /dev/null +++ b/foundry/packages/frontend/src/app/theme.ts @@ -0,0 +1,82 @@ +import { createContext, useContext } from "react"; +import { createDarkTheme, createLightTheme, type Theme } from "baseui"; +import { useStyletron } from "baseui"; +import { getFoundryTokens, type FoundryTokens } from "../styles/tokens"; + +const STORAGE_KEY = "sandbox-agent-foundry:color-mode"; + +export type ColorMode = "dark" | "light"; + +export const darkTheme: Theme = createDarkTheme({ + colors: { + primary: "#e4e4e7", // zinc-200 + accent: "#ff4f00", // orange accent (inspector) + backgroundPrimary: "#09090b", // darkest — chat center panel + backgroundSecondary: "#0f0f11", // slightly lighter — sidebars + backgroundTertiary: "#0c0c0e", // center + right panel headers + backgroundInversePrimary: "#fafafa", + contentPrimary: "#ffffff", // white (inspector --text) + contentSecondary: "#a1a1aa", // zinc-400 (inspector --muted) + contentTertiary: "#71717a", // zinc-500 + contentInversePrimary: "#000000", + borderOpaque: "rgba(255, 255, 255, 0.10)", // inspector --border + borderTransparent: "rgba(255, 255, 255, 0.07)", // inspector --border-2 + }, +}); + +export const lightTheme: Theme = createLightTheme({ + colors: { + primary: "#27272a", // zinc-800 + accent: "#ff4f00", // orange accent (inspector) + backgroundPrimary: "#ffffff", + backgroundSecondary: "#f4f4f5", // zinc-100 + 
backgroundTertiary: "#fafafa", // zinc-50 + backgroundInversePrimary: "#18181b", + contentPrimary: "#09090b", // zinc-950 + contentSecondary: "#52525b", // zinc-600 + contentTertiary: "#a1a1aa", // zinc-400 + contentInversePrimary: "#ffffff", + borderOpaque: "rgba(0, 0, 0, 0.10)", + borderTransparent: "rgba(0, 0, 0, 0.06)", + }, +}); + +/** Kept for backwards compat — defaults to dark */ +export const appTheme = darkTheme; + +export interface ColorModeContext { + colorMode: ColorMode; + setColorMode: (mode: ColorMode) => void; +} + +export const ColorModeCtx = createContext({ + colorMode: "dark", + setColorMode: () => {}, +}); + +export function useColorMode() { + return useContext(ColorModeCtx); +} + +export function useFoundryTokens(): FoundryTokens { + const [, theme] = useStyletron(); + return getFoundryTokens(theme); +} + +export function getStoredColorMode(): ColorMode { + try { + const stored = localStorage.getItem(STORAGE_KEY); + if (stored === "light" || stored === "dark") return stored; + } catch { + // ignore + } + return "dark"; +} + +export function storeColorMode(mode: ColorMode) { + try { + localStorage.setItem(STORAGE_KEY, mode); + } catch { + // ignore + } +} diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx new file mode 100644 index 0000000..947331e --- /dev/null +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -0,0 +1,593 @@ +import { memo, useEffect, useMemo, useState } from "react"; +import { useStyletron } from "baseui"; +import { useFoundryTokens } from "../app/theme"; +import { isMockFrontendClient } from "../lib/env"; +import { subscriptionManager } from "../lib/subscription"; +import type { + FoundryAppSnapshot, + FoundryOrganization, + TaskWorkspaceSnapshot, + WorkspaceSandboxSummary, + WorkspaceSessionSummary, + WorkspaceTaskStatus, +} from "@sandbox-agent/foundry-shared"; +import { useSubscription } from "@sandbox-agent/foundry-client"; +import 
type { DebugSubscriptionTopic } from "@sandbox-agent/foundry-client"; +import { describeTaskState } from "../features/tasks/status"; + +interface DevPanelProps { + organizationId: string; + snapshot: TaskWorkspaceSnapshot; + organization?: FoundryOrganization | null; + focusedTask?: DevPanelFocusedTask | null; +} + +export interface DevPanelFocusedTask { + id: string; + repoId: string; + title: string | null; + status: WorkspaceTaskStatus; + branch?: string | null; + activeSandboxId?: string | null; + activeSessionId?: string | null; + sandboxes?: WorkspaceSandboxSummary[]; + sessions?: WorkspaceSessionSummary[]; +} + +interface TopicInfo { + label: string; + key: string; + /** Parsed params portion of the cache key, or empty if none. */ + params: string; + listenerCount: number; + hasConnection: boolean; + status: "loading" | "connected" | "error"; + lastRefresh: number | null; +} + +function topicLabel(topic: DebugSubscriptionTopic): string { + switch (topic.topicKey) { + case "app": + return "App"; + case "organization": + return "Organization"; + case "task": + return "Task"; + case "session": + return "Session"; + case "sandboxProcesses": + return "Sandbox"; + } +} + +/** Extract the params portion of a cache key (everything after the first `:`) */ +function topicParams(topic: DebugSubscriptionTopic): string { + const idx = topic.cacheKey.indexOf(":"); + return idx >= 0 ? 
topic.cacheKey.slice(idx + 1) : ""; +} + +function timeAgo(ts: number | null): string { + if (!ts) return "never"; + const seconds = Math.floor((Date.now() - ts) / 1000); + if (seconds < 5) return "now"; + if (seconds < 60) return `${seconds}s ago`; + const minutes = Math.floor(seconds / 60); + if (minutes < 60) return `${minutes}m ago`; + return `${Math.floor(minutes / 60)}h ago`; +} + +function statusColor(status: string, t: ReturnType): string { + if (status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { + return t.statusWarning; + } + switch (status) { + case "connected": + case "running": + case "ready": + return t.statusSuccess; + case "loading": + return t.statusWarning; + case "archived": + return t.textMuted; + case "error": + case "failed": + return t.statusError; + default: + return t.textTertiary; + } +} + +function syncStatusColor(status: string, t: ReturnType): string { + switch (status) { + case "synced": + return t.statusSuccess; + case "syncing": + case "pending": + return t.statusWarning; + case "error": + return t.statusError; + default: + return t.textMuted; + } +} + +function installStatusColor(status: string, t: ReturnType): string { + switch (status) { + case "connected": + return t.statusSuccess; + case "install_required": + return t.statusWarning; + case "reconnect_required": + return t.statusError; + default: + return t.textMuted; + } +} + +/** Format elapsed thinking time as a compact string. 
*/ +function thinkingLabel(sinceMs: number | null, now: number): string | null { + if (!sinceMs) return null; + const elapsed = Math.floor((now - sinceMs) / 1000); + if (elapsed < 1) return "thinking"; + return `thinking ${elapsed}s`; +} + +export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organization, focusedTask }: DevPanelProps) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [now, setNow] = useState(Date.now()); + + // Tick every 2s to keep relative timestamps fresh + useEffect(() => { + const id = setInterval(() => setNow(Date.now()), 2000); + return () => clearInterval(id); + }, []); + + const topics = useMemo((): TopicInfo[] => { + return subscriptionManager.listDebugTopics().map((topic) => ({ + label: topicLabel(topic), + key: topic.cacheKey, + params: topicParams(topic), + listenerCount: topic.listenerCount, + hasConnection: topic.status === "connected", + status: topic.status, + lastRefresh: topic.lastRefreshAt, + })); + }, [now]); + + const appState = useSubscription(subscriptionManager, "app", {}); + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const appSnapshot: FoundryAppSnapshot | null = appState.data ?? null; + const liveGithub = organizationState.data?.github ?? organization?.github ?? null; + + const repos = snapshot.repos ?? []; + const tasks = snapshot.tasks ?? []; + const prCount = tasks.filter((task) => task.pullRequest != null).length; + const focusedTaskStatus = focusedTask?.status ?? null; + const focusedTaskState = describeTaskState(focusedTaskStatus); + const lastWebhookAt = liveGithub?.lastWebhookAt ?? null; + const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; + const totalOrgs = appSnapshot?.organizations.length ?? 0; + const authStatus = appSnapshot?.auth.status ?? 
"unknown"; + + const mono = css({ + fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace", + fontSize: "10px", + }); + + return ( +
+ {/* Header */} +
+ + Dev + {isMockFrontendClient && MOCK} + + Shift+D +
+ + {/* Body */} +
+ {/* Subscription Topics */} +
+ {topics.map((topic) => ( +
+ + + {topic.label} + + {topic.status} + {topic.params && ( + + {topic.params} + + )} + {timeAgo(topic.lastRefresh)} +
+ ))} + {topics.length === 0 && No active subscriptions} +
+ + {/* App State */} +
+
+
+ + Auth + {authStatus.replace(/_/g, " ")} +
+
+ + +
+
app topic: {appState.status}
+
+
+ + {/* Snapshot Summary */} +
+
+ + + +
+
+ +
+ {focusedTask ? ( +
+
+ + + {focusedTask.title || focusedTask.id.slice(0, 12)} + + + {focusedTaskStatus ?? focusedTask.status} + +
+
{focusedTaskState.detail}
+
task: {focusedTask.id}
+
repo: {focusedTask.repoId}
+
branch: {focusedTask.branch ?? "-"}
+
+ ) : ( + No task focused + )} +
+ + {/* Session — only when a task is focused */} + {focusedTask && ( +
+ {(focusedTask.sessions?.length ?? 0) > 0 ? ( + focusedTask.sessions!.map((session) => { + const isActive = session.id === focusedTask.activeSessionId; + const thinking = thinkingLabel(session.thinkingSinceMs, now); + return ( +
+
+ + + {session.sessionName || session.id.slice(0, 12)} + {isActive ? " *" : ""} + + {session.status} +
+
+ {session.agent} + {session.model} + {!session.created && not created} + {session.unread && unread} + {thinking && {thinking}} +
+ {session.errorMessage && ( +
{session.errorMessage}
+ )} + {session.sessionId &&
sid: {session.sessionId}
} +
+ ); + }) + ) : ( + No sessions + )} +
+ )} + + {/* Sandbox — only when a task is focused */} + {focusedTask && ( +
+ {(focusedTask.sandboxes?.length ?? 0) > 0 ? ( + focusedTask.sandboxes!.map((sandbox) => { + const isActive = sandbox.sandboxId === focusedTask.activeSandboxId; + return ( +
+
+ + + {sandbox.sandboxId.slice(0, 16)} + {isActive ? " *" : ""} + + {sandbox.sandboxProviderId} +
+ {sandbox.cwd &&
cwd: {sandbox.cwd}
} +
+ ); + }) + ) : ( + No sandboxes + )} +
+ )} + + {/* GitHub */} +
+ {liveGithub ? ( +
+
+ + App Install + + {liveGithub.installationStatus.replace(/_/g, " ")} + +
+
+ + Sync + {liveGithub.syncStatus} + {liveGithub.lastSyncAt != null && {timeAgo(liveGithub.lastSyncAt)}} +
+
+ + Webhook + {lastWebhookAt != null ? ( + + {liveGithub.lastWebhookEvent} · {timeAgo(lastWebhookAt)} + + ) : ( + never received + )} +
+
+ + + +
+ {liveGithub.connectedAccount &&
@{liveGithub.connectedAccount}
} + {liveGithub.lastSyncLabel &&
last sync: {liveGithub.lastSyncLabel}
} + {liveGithub.syncPhase && ( +
+ phase: {liveGithub.syncPhase.replace(/^syncing_/, "").replace(/_/g, " ")} ({liveGithub.processedRepositoryCount}/ + {liveGithub.totalRepositoryCount}) +
+ )} +
+ ) : ( + No organization data loaded + )} +
+ + {/* Organization */} +
+
{organizationId}
+ {organization && ( +
+ org: {organization.settings.displayName} ({organization.kind}) +
+ )} +
+
+
+ ); +}); + +function Section({ + label, + t, + css: cssFn, + children, +}: { + label: string; + t: ReturnType; + css: ReturnType[0]; + children: React.ReactNode; +}) { + return ( +
+
+ {label} +
+ {children} +
+ ); +} + +function Stat({ + label, + value, + t, + css: cssFn, +}: { + label: string; + value: number; + t: ReturnType; + css: ReturnType[0]; +}) { + return ( + + {value} + {label} + + ); +} + +export function useDevPanel() { + const [visible, setVisible] = useState(true); + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.shiftKey && e.key === "D" && !e.metaKey && !e.ctrlKey && !e.altKey) { + const tag = (e.target as HTMLElement)?.tagName; + if (tag === "INPUT" || tag === "TEXTAREA" || tag === "SELECT") return; + e.preventDefault(); + setVisible((prev) => !prev); + } + }; + window.addEventListener("keydown", handleKeyDown); + return () => window.removeEventListener("keydown", handleKeyDown); + }, []); + + return visible; +} diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx new file mode 100644 index 0000000..4089e01 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -0,0 +1,2237 @@ +import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState, type PointerEvent as ReactPointerEvent } from "react"; +import { useQuery } from "@tanstack/react-query"; +import { useNavigate } from "@tanstack/react-router"; +import { useStyletron } from "baseui"; +import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, + createErrorContext, + type FoundryOrganization, + type TaskWorkspaceSnapshot, + type WorkspaceModelGroup, + type WorkspaceSessionSummary, + type WorkspaceTaskDetail, + type WorkspaceTaskSummary, +} from "@sandbox-agent/foundry-shared"; +import { useSubscription } from "@sandbox-agent/foundry-client"; + +import { CircleAlert, PanelLeft, PanelRight } from "lucide-react"; +import { useFoundryTokens } from "../app/theme"; +import { logger } from "../logging.js"; + +import { DiffContent } from "./mock-layout/diff-content"; +import { MessageList } from "./mock-layout/message-list"; +import { 
PromptComposer } from "./mock-layout/prompt-composer"; +import { RightSidebar } from "./mock-layout/right-sidebar"; +import { Sidebar } from "./mock-layout/sidebar"; +import { SessionStrip } from "./mock-layout/session-strip"; +import { TerminalPane } from "./mock-layout/terminal-pane"; +import { TranscriptHeader } from "./mock-layout/transcript-header"; +import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell, SpinnerDot } from "./mock-layout/ui"; +import { DevPanel, useDevPanel } from "./dev-panel"; +import { + buildDisplayMessages, + diffPath, + diffTabId, + formatThinkingDuration, + isDiffTab, + buildHistoryEvents, + type Task, + type HistoryEvent, + type LineAttachment, + type Message, + type ModelId, +} from "./mock-layout/view-model"; +import { activeMockOrganization, activeMockUser, getMockOrganizationById, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; +import { backendClient } from "../lib/backend"; +import { subscriptionManager } from "../lib/subscription"; +import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; + +function firstAgentSessionId(task: Task): string | null { + return task.sessions[0]?.id ?? 
null; +} + +function sanitizeOpenDiffs(task: Task, paths: string[] | undefined): string[] { + if (!paths) { + return []; + } + + return paths.filter((path) => task.diffs[path] != null); +} + +function sanitizeLastAgentSessionId(task: Task, sessionId: string | null | undefined): string | null { + if (sessionId && task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; + } + + return firstAgentSessionId(task); +} + +function sanitizeActiveSessionId(task: Task, sessionId: string | null | undefined, openDiffs: string[], lastAgentSessionId: string | null): string | null { + if (sessionId) { + if (task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; + } + if (isDiffTab(sessionId) && openDiffs.includes(diffPath(sessionId))) { + return sessionId; + } + } + + return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentSessionId; +} + +type GithubStatusView = Pick< + FoundryOrganization["github"], + "connectedAccount" | "installationStatus" | "syncStatus" | "importedRepoCount" | "lastSyncLabel" +> & { + syncPhase?: string | null; + processedRepositoryCount?: number; + totalRepositoryCount?: number; +}; + +function githubInstallationWarningTitle(github: GithubStatusView): string { + return github.installationStatus === "install_required" ? "GitHub App not installed" : "GitHub App needs reconnection"; +} + +function githubInstallationWarningDetail(github: GithubStatusView): string { + const statusDetail = github.lastSyncLabel.trim(); + const requirementDetail = + github.installationStatus === "install_required" + ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this organization." + : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; + return statusDetail ? 
`${requirementDetail} ${statusDetail}.` : requirementDetail; +} + +function GithubInstallationWarning({ + github, + css, + t, +}: { + github: GithubStatusView; + css: ReturnType[0]; + t: ReturnType; +}) { + if (github.installationStatus === "connected") { + return null; + } + + return ( +
+ +
+
{githubInstallationWarningTitle(github)}
+
{githubInstallationWarningDetail(github)}
+
+
+ ); +} + +function toSessionModel( + summary: WorkspaceSessionSummary, + sessionDetail?: { draft: Task["sessions"][number]["draft"]; transcript: Task["sessions"][number]["transcript"] }, +): Task["sessions"][number] { + return { + id: summary.id, + sessionId: summary.sessionId, + sessionName: summary.sessionName, + agent: summary.agent, + model: summary.model, + status: summary.status, + thinkingSinceMs: summary.thinkingSinceMs, + unread: summary.unread, + created: summary.created, + errorMessage: summary.errorMessage ?? null, + draft: sessionDetail?.draft ?? { + text: "", + attachments: [], + updatedAtMs: null, + }, + transcript: sessionDetail?.transcript ?? [], + }; +} + +function toTaskModel( + summary: WorkspaceTaskSummary, + detail?: WorkspaceTaskDetail, + sessionCache?: Map, +): Task { + const sessions = detail?.sessionsSummary ?? summary.sessionsSummary; + return { + id: summary.id, + repoId: summary.repoId, + title: detail?.title ?? summary.title, + status: detail?.status ?? summary.status, + repoName: detail?.repoName ?? summary.repoName, + updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, + branch: detail?.branch ?? summary.branch, + pullRequest: detail?.pullRequest ?? summary.pullRequest, + activeSessionId: detail?.activeSessionId ?? summary.activeSessionId ?? null, + sessions: sessions.map((session) => toSessionModel(session, sessionCache?.get(session.id))), + fileChanges: detail?.fileChanges ?? [], + diffs: detail?.diffs ?? {}, + fileTree: detail?.fileTree ?? [], + minutesUsed: detail?.minutesUsed ?? 0, + sandboxes: detail?.sandboxes ?? [], + activeSandboxId: detail?.activeSandboxId ?? null, + primaryUserLogin: detail?.primaryUserLogin ?? summary.primaryUserLogin ?? null, + primaryUserAvatarUrl: detail?.primaryUserAvatarUrl ?? summary.primaryUserAvatarUrl ?? 
null, + }; +} + +function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): string | null { + if (!tab) { + return null; + } + if (tab.status === "pending_provision") { + return "Provisioning sandbox..."; + } + if (tab.status === "pending_session_create") { + return "Creating session..."; + } + if (tab.status === "error") { + return tab.errorMessage ?? "Session failed to start."; + } + return null; +} + +function groupRepositories( + repos: Array<{ id: string; label: string }>, + tasks: Task[], + openPullRequests?: Array<{ + repoId: string; + repoFullName: string; + number: number; + title: string; + state: string; + url: string; + headRefName: string; + authorLogin: string | null; + isDraft: boolean; + }>, +) { + return repos + .map((repo) => ({ + id: repo.id, + label: repo.label, + updatedAtMs: tasks.filter((task) => task.repoId === repo.id).reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0), + tasks: tasks.filter((task) => task.repoId === repo.id).sort((left, right) => right.updatedAtMs - left.updatedAtMs), + pullRequests: (openPullRequests ?? 
[]).filter((pr) => pr.repoId === repo.id), + })) + .sort((a, b) => { + // Repos with tasks first, then repos with PRs, then alphabetical + const aHasActivity = a.tasks.length > 0 || a.pullRequests.length > 0; + const bHasActivity = b.tasks.length > 0 || b.pullRequests.length > 0; + if (aHasActivity && !bHasActivity) return -1; + if (!aHasActivity && bHasActivity) return 1; + if (a.updatedAtMs !== b.updatedAtMs) return b.updatedAtMs - a.updatedAtMs; + return a.label.localeCompare(b.label); + }); +} + +interface WorkspaceActions { + createTask(input: { + repoId: string; + task: string; + title?: string; + branch?: string; + onBranch?: string; + model?: ModelId; + }): Promise<{ taskId: string; sessionId?: string }>; + markTaskUnread(input: { repoId: string; taskId: string }): Promise; + renameTask(input: { repoId: string; taskId: string; value: string }): Promise; + archiveTask(input: { repoId: string; taskId: string }): Promise; + publishPr(input: { repoId: string; taskId: string }): Promise; + revertFile(input: { repoId: string; taskId: string; path: string }): Promise; + updateDraft(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + sendMessage(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + stopAgent(input: { repoId: string; taskId: string; sessionId: string }): Promise; + selectSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + setSessionUnread(input: { repoId: string; taskId: string; sessionId: string; unread: boolean }): Promise; + renameSession(input: { repoId: string; taskId: string; sessionId: string; title: string }): Promise; + closeSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + addSession(input: { repoId: string; taskId: string; model?: string }): Promise<{ sessionId: string }>; + changeModel(input: { repoId: string; taskId: string; sessionId: string; 
model: ModelId }): Promise; + changeOwner(input: { repoId: string; taskId: string; targetUserId: string; targetUserName: string; targetUserEmail: string }): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(repoId: string): Promise; +} + +const TranscriptPanel = memo(function TranscriptPanel({ + taskWorkspaceClient, + task, + hasSandbox, + activeSessionId, + lastAgentSessionId, + openDiffs, + onSyncRouteSession, + onSetActiveSessionId, + onSetLastAgentSessionId, + onSetOpenDiffs, + sidebarCollapsed, + onToggleSidebar, + onSidebarPeekStart, + onSidebarPeekEnd, + rightSidebarCollapsed, + onToggleRightSidebar, + selectedSessionHydrating = false, + modelGroups, + onNavigateToUsage, +}: { + taskWorkspaceClient: WorkspaceActions; + task: Task; + hasSandbox: boolean; + activeSessionId: string | null; + lastAgentSessionId: string | null; + openDiffs: string[]; + onSyncRouteSession: (taskId: string, sessionId: string | null, replace?: boolean) => void; + onSetActiveSessionId: (sessionId: string | null) => void; + onSetLastAgentSessionId: (sessionId: string | null) => void; + onSetOpenDiffs: (paths: string[]) => void; + sidebarCollapsed?: boolean; + onToggleSidebar?: () => void; + onSidebarPeekStart?: () => void; + onSidebarPeekEnd?: () => void; + rightSidebarCollapsed?: boolean; + onToggleRightSidebar?: () => void; + selectedSessionHydrating?: boolean; + modelGroups: WorkspaceModelGroup[]; + onNavigateToUsage?: () => void; +}) { + const t = useFoundryTokens(); + const appSnapshot = useMockAppSnapshot(); + const appClient = useMockAppClient(); + const currentUser = activeMockUser(appSnapshot); + const defaultModel = currentUser?.defaultModel ?? 
DEFAULT_WORKSPACE_MODEL_ID; + const [editingField, setEditingField] = useState<"title" | null>(null); + const [editValue, setEditValue] = useState(""); + const [editingSessionId, setEditingSessionId] = useState(null); + const [editingSessionName, setEditingSessionName] = useState(""); + const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; sessionId: string } | null>(null); + const [copiedMessageId, setCopiedMessageId] = useState(null); + const [timerNowMs, setTimerNowMs] = useState(() => Date.now()); + const [localDraft, setLocalDraft] = useState(""); + const [localAttachments, setLocalAttachments] = useState([]); + const [pendingMessage, setPendingMessage] = useState<{ text: string; sessionId: string; sentAt: number } | null>(null); + const lastEditTimeRef = useRef(0); + const throttleTimerRef = useRef | null>(null); + const pendingDraftRef = useRef<{ text: string; attachments: LineAttachment[] } | null>(null); + const scrollRef = useRef(null); + const textareaRef = useRef(null); + const messageRefs = useRef(new Map()); + const activeDiff = activeSessionId && isDiffTab(activeSessionId) ? diffPath(activeSessionId) : null; + const activeAgentSession = activeDiff ? null : (task.sessions.find((candidate) => candidate.id === activeSessionId) ?? task.sessions[0] ?? null); + const promptSession = task.sessions.find((candidate) => candidate.id === lastAgentSessionId) ?? task.sessions[0] ?? 
null; + const isTerminal = task.status === "archived"; + const historyEvents = useMemo(() => buildHistoryEvents(task.sessions), [task.sessions]); + const activeMessages = useMemo(() => buildDisplayMessages(activeAgentSession), [activeAgentSession]); + const taskState = describeTaskState(task.status); + const taskProvisioning = isProvisioningTaskStatus(task.status); + const taskProvisioningMessage = taskState.detail; + const activeSessionMessage = sessionStateMessage(activeAgentSession); + const showPendingSessionState = + !activeDiff && + !!activeAgentSession && + (activeAgentSession.status === "pending_provision" || activeAgentSession.status === "pending_session_create" || activeAgentSession.status === "error") && + activeMessages.length === 0; + const serverDraft = promptSession?.draft.text ?? ""; + const serverAttachments = promptSession?.draft.attachments; + const serverAttachmentsJson = JSON.stringify(serverAttachments ?? []); + + // Sync server → local only when user hasn't typed recently (3s cooldown) + const DRAFT_SYNC_COOLDOWN_MS = 3_000; + useEffect(() => { + if (Date.now() - lastEditTimeRef.current > DRAFT_SYNC_COOLDOWN_MS) { + setLocalDraft(serverDraft); + setLocalAttachments(serverAttachments ?? []); + } + }, [serverDraft, serverAttachmentsJson]); + + // Reset local draft immediately on session/task switch + useEffect(() => { + lastEditTimeRef.current = 0; + setLocalDraft(promptSession?.draft.text ?? ""); + setLocalAttachments(promptSession?.draft.attachments ?? 
[]); + }, [promptSession?.id, task.id]); + + // Clear pending message once the real transcript contains a client message newer than when we sent + const pendingMessageClientCount = useRef(0); + useEffect(() => { + if (!pendingMessage) return; + + const targetSession = task.sessions.find((s) => s.id === pendingMessage.sessionId); + if (!targetSession) return; + + const clientEventCount = targetSession.transcript.filter((event) => event.sender === "client").length; + if (clientEventCount > pendingMessageClientCount.current) { + setPendingMessage(null); + } + }, [task.sessions, pendingMessage]); + + const draft = localDraft; + const attachments = localAttachments; + + useEffect(() => { + if (scrollRef.current) { + scrollRef.current.scrollTop = scrollRef.current.scrollHeight; + } + }, [activeMessages.length]); + + useEffect(() => { + textareaRef.current?.focus(); + }, [activeSessionId, task.id]); + + useEffect(() => { + setEditingSessionId(null); + setEditingSessionName(""); + }, [task.id]); + + useLayoutEffect(() => { + const textarea = textareaRef.current; + if (!textarea) { + return; + } + + textarea.style.height = `${PROMPT_TEXTAREA_MIN_HEIGHT}px`; + const nextHeight = Math.min(textarea.scrollHeight, PROMPT_TEXTAREA_MAX_HEIGHT); + textarea.style.height = `${Math.max(PROMPT_TEXTAREA_MIN_HEIGHT, nextHeight)}px`; + textarea.style.overflowY = textarea.scrollHeight > PROMPT_TEXTAREA_MAX_HEIGHT ? 
"auto" : "hidden"; + }, [draft, activeSessionId, task.id]); + + useEffect(() => { + if (!copiedMessageId) { + return; + } + + const timer = setTimeout(() => { + setCopiedMessageId(null); + }, 1_200); + + return () => clearTimeout(timer); + }, [copiedMessageId]); + + useEffect(() => { + if (!activeAgentSession || activeAgentSession.status !== "running" || activeAgentSession.thinkingSinceMs === null) { + return; + } + + setTimerNowMs(Date.now()); + const timer = window.setInterval(() => { + setTimerNowMs(Date.now()); + }, 1_000); + + return () => window.clearInterval(timer); + }, [activeAgentSession?.id, activeAgentSession?.status, activeAgentSession?.thinkingSinceMs]); + + useEffect(() => { + if (!activeAgentSession?.unread) { + return; + } + + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, + taskId: task.id, + sessionId: activeAgentSession.id, + unread: false, + }); + }, [activeAgentSession?.id, activeAgentSession?.unread, task.id]); + + const startEditingField = useCallback((field: "title", value: string) => { + setEditingField(field); + setEditValue(value); + }, []); + + const cancelEditingField = useCallback(() => { + setEditingField(null); + }, []); + + const commitEditingField = useCallback( + (field: "title") => { + const value = editValue.trim(); + if (!value) { + setEditingField(null); + return; + } + + void taskWorkspaceClient.renameTask({ repoId: task.repoId, taskId: task.id, value }); + setEditingField(null); + }, + [editValue, task.id], + ); + + const DRAFT_THROTTLE_MS = 500; + + const flushDraft = useCallback( + (text: string, nextAttachments: LineAttachment[], sessionId: string) => { + void taskWorkspaceClient.updateDraft({ + repoId: task.repoId, + taskId: task.id, + sessionId, + text, + attachments: nextAttachments, + }); + }, + [task.id], + ); + + // Clean up throttle timer on unmount + useEffect(() => { + return () => { + if (throttleTimerRef.current) { + clearTimeout(throttleTimerRef.current); + } + }; + }, []); + + const 
updateDraft = useCallback( + (nextText: string, nextAttachments: LineAttachment[]) => { + if (!promptSession) { + return; + } + + // Update local state immediately for responsive typing + lastEditTimeRef.current = Date.now(); + setLocalDraft(nextText); + setLocalAttachments(nextAttachments); + + // Throttle the network call + pendingDraftRef.current = { text: nextText, attachments: nextAttachments }; + if (!throttleTimerRef.current) { + throttleTimerRef.current = setTimeout(() => { + throttleTimerRef.current = null; + if (pendingDraftRef.current) { + flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptSession.id); + pendingDraftRef.current = null; + } + }, DRAFT_THROTTLE_MS); + } + }, + [promptSession, flushDraft], + ); + + const sendMessage = useCallback(() => { + const text = draft.trim(); + if (!text || !promptSession) { + return; + } + + // Clear draft and show optimistic message immediately (don't wait for server round-trip) + setLocalDraft(""); + setLocalAttachments([]); + lastEditTimeRef.current = Date.now(); + // Snapshot current client message count so we can detect when the server adds ours + pendingMessageClientCount.current = promptSession.transcript.filter((event) => event.sender === "client").length; + setPendingMessage({ text, sessionId: promptSession.id, sentAt: Date.now() }); + + onSetActiveSessionId(promptSession.id); + onSetLastAgentSessionId(promptSession.id); + void taskWorkspaceClient.sendMessage({ + repoId: task.repoId, + taskId: task.id, + sessionId: promptSession.id, + text, + attachments, + }); + }, [attachments, draft, task.id, onSetActiveSessionId, onSetLastAgentSessionId, promptSession]); + + const stopAgent = useCallback(() => { + if (!promptSession) { + return; + } + + void taskWorkspaceClient.stopAgent({ + repoId: task.repoId, + taskId: task.id, + sessionId: promptSession.id, + }); + }, [task.id, promptSession]); + + const switchSession = useCallback( + (sessionId: string) => { + 
onSetActiveSessionId(sessionId); + + if (!isDiffTab(sessionId)) { + onSetLastAgentSessionId(sessionId); + void taskWorkspaceClient.selectSession({ + repoId: task.repoId, + taskId: task.id, + sessionId, + }); + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (session?.unread) { + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, + taskId: task.id, + sessionId, + unread: false, + }); + } + onSyncRouteSession(task.id, sessionId); + } + }, + [task.id, task.repoId, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + ); + + const setSessionUnread = useCallback( + (sessionId: string, unread: boolean) => { + void taskWorkspaceClient.setSessionUnread({ repoId: task.repoId, taskId: task.id, sessionId, unread }); + }, + [task.id, task.repoId], + ); + + const startRenamingSession = useCallback( + (sessionId: string) => { + const targetSession = task.sessions.find((candidate) => candidate.id === sessionId); + if (!targetSession) { + throw new Error(`Unable to rename missing session ${sessionId}`); + } + + setEditingSessionId(sessionId); + setEditingSessionName(targetSession.sessionName); + }, + [task.sessions], + ); + + const cancelSessionRename = useCallback(() => { + setEditingSessionId(null); + setEditingSessionName(""); + }, []); + + const commitSessionRename = useCallback(() => { + if (!editingSessionId) { + return; + } + + const trimmedName = editingSessionName.trim(); + if (!trimmedName) { + cancelSessionRename(); + return; + } + + void taskWorkspaceClient.renameSession({ + repoId: task.repoId, + taskId: task.id, + sessionId: editingSessionId, + title: trimmedName, + }); + cancelSessionRename(); + }, [cancelSessionRename, editingSessionName, editingSessionId, task.id]); + + const closeSession = useCallback( + (sessionId: string) => { + const remainingSessions = task.sessions.filter((candidate) => candidate.id !== sessionId); + const nextSessionId = remainingSessions[0]?.id ?? 
null; + + if (activeSessionId === sessionId) { + onSetActiveSessionId(nextSessionId); + } + if (lastAgentSessionId === sessionId) { + onSetLastAgentSessionId(nextSessionId); + } + + onSyncRouteSession(task.id, nextSessionId); + void taskWorkspaceClient.closeSession({ repoId: task.repoId, taskId: task.id, sessionId }); + }, + [activeSessionId, task.id, task.repoId, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + ); + + const closeDiffTab = useCallback( + (path: string) => { + const nextOpenDiffs = openDiffs.filter((candidate) => candidate !== path); + onSetOpenDiffs(nextOpenDiffs); + if (activeSessionId === diffTabId(path)) { + onSetActiveSessionId( + nextOpenDiffs.length > 0 ? diffTabId(nextOpenDiffs[nextOpenDiffs.length - 1]!) : (lastAgentSessionId ?? firstAgentSessionId(task)), + ); + } + }, + [activeSessionId, task, lastAgentSessionId, onSetActiveSessionId, onSetOpenDiffs, openDiffs], + ); + + const addSession = useCallback(() => { + void (async () => { + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: task.repoId, taskId: task.id }); + onSetLastAgentSessionId(sessionId); + onSetActiveSessionId(sessionId); + onSyncRouteSession(task.id, sessionId); + })(); + }, [task.id, task.repoId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); + + const changeModel = useCallback( + (model: ModelId) => { + if (!promptSession) { + throw new Error(`Unable to change model for task ${task.id} without an active prompt session`); + } + + void taskWorkspaceClient.changeModel({ + repoId: task.repoId, + taskId: task.id, + sessionId: promptSession.id, + model, + }); + }, + [task.id, promptSession], + ); + + const addAttachment = useCallback( + (filePath: string, lineNumber: number, lineContent: string) => { + if (!promptSession) { + return; + } + + const nextAttachment = { id: `${filePath}:${lineNumber}`, filePath, lineNumber, lineContent }; + if (attachments.some((attachment) => 
attachment.filePath === filePath && attachment.lineNumber === lineNumber)) { + return; + } + + updateDraft(draft, [...attachments, nextAttachment]); + }, + [attachments, draft, promptSession, updateDraft], + ); + + const removeAttachment = useCallback( + (id: string) => { + updateDraft( + draft, + attachments.filter((attachment) => attachment.id !== id), + ); + }, + [attachments, draft, updateDraft], + ); + + const jumpToHistoryEvent = useCallback( + (event: HistoryEvent) => { + setPendingHistoryTarget({ messageId: event.messageId, sessionId: event.sessionId }); + + if (activeSessionId !== event.sessionId) { + switchSession(event.sessionId); + } + }, + [activeSessionId, switchSession], + ); + + const copyMessage = useCallback(async (message: Message) => { + try { + if (!window.navigator.clipboard) { + throw new Error("Clipboard API unavailable in mock layout"); + } + + await window.navigator.clipboard.writeText(message.text); + setCopiedMessageId(message.id); + } catch (error) { + logger.error( + { + messageId: message.id, + ...createErrorContext(error), + }, + "failed_to_copy_transcript_message", + ); + } + }, []); + + const isOptimisticThinking = pendingMessage !== null && activeAgentSession?.id === pendingMessage.sessionId; + const thinkingTimerLabel = + activeAgentSession?.status === "running" && activeAgentSession.thinkingSinceMs !== null + ? formatThinkingDuration(timerNowMs - activeAgentSession.thinkingSinceMs) + : isOptimisticThinking + ? formatThinkingDuration(timerNowMs - pendingMessage.sentAt) + : null; + + return ( + + { + if (activeAgentSession) { + setSessionUnread(activeAgentSession.id, unread); + } + }} + sidebarCollapsed={sidebarCollapsed} + onToggleSidebar={onToggleSidebar} + onSidebarPeekStart={onSidebarPeekStart} + onSidebarPeekEnd={onSidebarPeekEnd} + rightSidebarCollapsed={rightSidebarCollapsed} + onToggleRightSidebar={onToggleRightSidebar} + onNavigateToUsage={onNavigateToUsage} + /> +
+ + {activeDiff ? ( + file.path === activeDiff)} + diff={task.diffs[activeDiff]} + onAddAttachment={addAttachment} + /> + ) : task.sessions.length === 0 ? ( + +
+
+ {taskProvisioning ? ( + <> + +

{taskState.title}

+

{taskProvisioningMessage}

+ + ) : ( + <> +

Create the first session

+

Sessions are where you chat with the agent. Start one now to send the first prompt on this task.

+ + + )} +
+
+
+ ) : selectedSessionHydrating ? ( + +
+
+ +

Loading session

+

Fetching the latest transcript for this session.

+
+
+
+ ) : showPendingSessionState ? ( + +
+
+ {activeAgentSession?.status === "error" ? null : } +

+ {activeAgentSession?.status === "pending_provision" + ? "Provisioning sandbox" + : activeAgentSession?.status === "pending_session_create" + ? "Creating session" + : "Session unavailable"} +

+

{activeSessionMessage}

+ {activeAgentSession?.status === "error" ? ( + + ) : null} +
+
+
+ ) : ( + + setPendingHistoryTarget(null)} + copiedMessageId={copiedMessageId} + onCopyMessage={(message) => { + void copyMessage(message); + }} + thinkingTimerLabel={thinkingTimerLabel} + pendingMessage={ + pendingMessage && activeAgentSession?.id === pendingMessage.sessionId ? { text: pendingMessage.text, sentAt: pendingMessage.sentAt } : null + } + /> + + )} + {!isTerminal && promptSession && (promptSession.status === "ready" || promptSession.status === "running" || promptSession.status === "idle") ? ( + updateDraft(value, attachments)} + onSend={sendMessage} + onStop={stopAgent} + onRemoveAttachment={removeAttachment} + onChangeModel={changeModel} + onSetDefaultModel={(model) => { + void appClient.setDefaultModel(model); + }} + /> + ) : null} +
+
+ ); +}); + +const LEFT_SIDEBAR_DEFAULT_WIDTH = 340; +const RIGHT_SIDEBAR_DEFAULT_WIDTH = 380; +const SIDEBAR_MIN_WIDTH = 220; +const SIDEBAR_MAX_WIDTH = 600; +const RESIZE_HANDLE_WIDTH = 1; +const LEFT_WIDTH_STORAGE_KEY = "foundry:left-sidebar-width"; +const RIGHT_WIDTH_STORAGE_KEY = "foundry:right-sidebar-width"; + +function readStoredWidth(key: string, fallback: number): number { + if (typeof window === "undefined") return fallback; + const stored = window.localStorage.getItem(key); + const parsed = stored ? Number.parseInt(stored, 10) : Number.NaN; + return Number.isFinite(parsed) ? Math.min(Math.max(parsed, SIDEBAR_MIN_WIDTH), SIDEBAR_MAX_WIDTH) : fallback; +} + +const PanelResizeHandle = memo(function PanelResizeHandle({ onResizeStart, onResize }: { onResizeStart: () => void; onResize: (deltaX: number) => void }) { + const handlePointerDown = useCallback( + (event: ReactPointerEvent) => { + event.preventDefault(); + const startX = event.clientX; + onResizeStart(); + document.body.style.cursor = "col-resize"; + document.body.style.userSelect = "none"; + + const handlePointerMove = (moveEvent: PointerEvent) => { + onResize(moveEvent.clientX - startX); + }; + + const stopResize = () => { + document.body.style.cursor = ""; + document.body.style.userSelect = ""; + window.removeEventListener("pointermove", handlePointerMove); + window.removeEventListener("pointerup", stopResize); + }; + + window.addEventListener("pointermove", handlePointerMove); + window.addEventListener("pointerup", stopResize, { once: true }); + }, + [onResize, onResizeStart], + ); + + return ( +
+
+
+ ); +}); + +const RIGHT_RAIL_MIN_SECTION_HEIGHT = 180; +const RIGHT_RAIL_SPLITTER_HEIGHT = 10; +const DEFAULT_TERMINAL_HEIGHT = 320; +const TERMINAL_HEIGHT_STORAGE_KEY = "foundry:terminal-height"; + +const RightRail = memo(function RightRail({ + organizationId, + task, + activeSessionId, + onOpenDiff, + onArchive, + onRevertFile, + onPublishPr, + onChangeOwner, + members, + onToggleSidebar, +}: { + organizationId: string; + task: Task; + activeSessionId: string | null; + onOpenDiff: (path: string) => void; + onArchive: () => void; + onRevertFile: (path: string) => void; + onPublishPr: () => void; + onChangeOwner: (member: { id: string; name: string; email: string }) => void; + members: Array<{ id: string; name: string; email: string }>; + onToggleSidebar?: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const railRef = useRef(null); + const [terminalHeight, setTerminalHeight] = useState(() => { + if (typeof window === "undefined") { + return DEFAULT_TERMINAL_HEIGHT; + } + + const stored = window.localStorage.getItem(TERMINAL_HEIGHT_STORAGE_KEY); + const parsed = stored ? Number.parseInt(stored, 10) : Number.NaN; + return Number.isFinite(parsed) ? parsed : DEFAULT_TERMINAL_HEIGHT; + }); + + const clampTerminalHeight = useCallback((nextHeight: number) => { + const railHeight = railRef.current?.getBoundingClientRect().height ?? 
0; + const maxHeight = Math.max(RIGHT_RAIL_MIN_SECTION_HEIGHT, railHeight - RIGHT_RAIL_MIN_SECTION_HEIGHT - RIGHT_RAIL_SPLITTER_HEIGHT); + + return Math.min(Math.max(nextHeight, 43), maxHeight); + }, []); + + useEffect(() => { + if (typeof window === "undefined") { + return; + } + + window.localStorage.setItem(TERMINAL_HEIGHT_STORAGE_KEY, String(terminalHeight)); + }, [terminalHeight]); + + useEffect(() => { + const handleResize = () => { + setTerminalHeight((current) => clampTerminalHeight(current)); + }; + + window.addEventListener("resize", handleResize); + handleResize(); + return () => window.removeEventListener("resize", handleResize); + }, [clampTerminalHeight]); + + const startResize = useCallback( + (event: ReactPointerEvent) => { + event.preventDefault(); + + const startY = event.clientY; + const startHeight = terminalHeight; + document.body.style.cursor = "ns-resize"; + + const handlePointerMove = (moveEvent: PointerEvent) => { + const deltaY = moveEvent.clientY - startY; + setTerminalHeight(clampTerminalHeight(startHeight - deltaY)); + }; + + const stopResize = () => { + document.body.style.cursor = ""; + window.removeEventListener("pointermove", handlePointerMove); + window.removeEventListener("pointerup", stopResize); + }; + + window.addEventListener("pointermove", handlePointerMove); + window.addEventListener("pointerup", stopResize, { once: true }); + }, + [clampTerminalHeight, terminalHeight], + ); + + return ( +
+
+ +
+
+ { + const railHeight = railRef.current?.getBoundingClientRect().height ?? 0; + return railHeight > 0 && terminalHeight >= railHeight * 0.7; + })()} + onExpand={() => { + const railHeight = railRef.current?.getBoundingClientRect().height ?? 0; + const maxHeight = Math.max(RIGHT_RAIL_MIN_SECTION_HEIGHT, railHeight - RIGHT_RAIL_SPLITTER_HEIGHT - 42); + setTerminalHeight(maxHeight); + }} + onCollapse={() => { + setTerminalHeight(43); + }} + /> +
+
+ ); +}); + +interface MockLayoutProps { + organizationId: string; + selectedTaskId?: string | null; + selectedSessionId?: string | null; +} + +function MockOrganizationOrgBar() { + const navigate = useNavigate(); + const snapshot = useMockAppSnapshot(); + const organization = activeMockOrganization(snapshot); + const t = useFoundryTokens(); + + if (!organization) { + return null; + } + + const buttonStyle = { + border: `1px solid ${t.borderMedium}`, + borderRadius: "999px", + padding: "8px 12px", + background: t.interactiveSubtle, + color: t.textPrimary, + cursor: "pointer", + fontSize: "13px", + fontWeight: 600, + } satisfies React.CSSProperties; + + return ( +
+
+ {organization.settings.displayName} + {organization.settings.primaryDomain} +
+
+ + + +
+
+ ); +} + +export function MockLayout({ organizationId, selectedTaskId, selectedSessionId }: MockLayoutProps) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const navigate = useNavigate(); + const taskWorkspaceClient = useMemo( + () => ({ + createTask: (input) => backendClient.createWorkspaceTask(organizationId, input), + markTaskUnread: (input) => backendClient.markWorkspaceUnread(organizationId, input), + renameTask: (input) => backendClient.renameWorkspaceTask(organizationId, input), + archiveTask: async (input) => backendClient.runAction(organizationId, input.repoId, input.taskId, "archive"), + publishPr: (input) => backendClient.publishWorkspacePr(organizationId, input), + revertFile: (input) => backendClient.revertWorkspaceFile(organizationId, input), + updateDraft: (input) => backendClient.updateWorkspaceDraft(organizationId, input), + sendMessage: (input) => backendClient.sendWorkspaceMessage(organizationId, input), + stopAgent: (input) => backendClient.stopWorkspaceSession(organizationId, input), + selectSession: (input) => backendClient.selectWorkspaceSession(organizationId, input), + setSessionUnread: (input) => backendClient.setWorkspaceSessionUnread(organizationId, input), + renameSession: (input) => backendClient.renameWorkspaceSession(organizationId, input), + closeSession: (input) => backendClient.closeWorkspaceSession(organizationId, input), + addSession: (input) => backendClient.createWorkspaceSession(organizationId, input), + changeModel: (input) => backendClient.changeWorkspaceModel(organizationId, input), + changeOwner: (input) => backendClient.changeWorkspaceTaskOwner(organizationId, input), + adminReloadGithubOrganization: () => backendClient.adminReloadGithubOrganization(organizationId), + adminReloadGithubRepository: (repoId) => backendClient.adminReloadGithubRepository(organizationId, repoId), + }), + [organizationId], + ); + const organizationState = useSubscription(subscriptionManager, "organization", { 
organizationId }); + const organizationReposData = organizationState.data?.repos; + const taskSummariesData = organizationState.data?.taskSummaries; + const openPullRequestsData = organizationState.data?.openPullRequests; + const organizationRepos = organizationReposData ?? []; + const taskSummaries = taskSummariesData ?? []; + const selectedTaskSummary = useMemo( + () => taskSummaries.find((task) => task.id === selectedTaskId) ?? taskSummaries[0] ?? null, + [selectedTaskId, taskSummariesData], + ); + const taskState = useSubscription( + subscriptionManager, + "task", + selectedTaskSummary + ? { + organizationId, + repoId: selectedTaskSummary.repoId, + taskId: selectedTaskSummary.id, + } + : null, + ); + const sessionState = useSubscription( + subscriptionManager, + "session", + selectedTaskSummary && selectedSessionId + ? { + organizationId, + repoId: selectedTaskSummary.repoId, + taskId: selectedTaskSummary.id, + sessionId: selectedSessionId, + } + : null, + ); + const activeSandbox = useMemo(() => { + if (!taskState.data?.activeSandboxId) return null; + return taskState.data.sandboxes?.find((s) => s.sandboxId === taskState.data!.activeSandboxId) ?? null; + }, [taskState.data?.activeSandboxId, taskState.data?.sandboxes]); + const sandboxState = useSubscription( + subscriptionManager, + "sandboxProcesses", + activeSandbox + ? { + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, + sandboxId: activeSandbox.sandboxId, + } + : null, + ); + const hasSandbox = Boolean(activeSandbox) && sandboxState.status !== "error"; + const modelGroupsQuery = useQuery({ + queryKey: ["mock-layout", "workspace-model-groups", organizationId, activeSandbox?.sandboxProviderId ?? "", activeSandbox?.sandboxId ?? 
""], + enabled: Boolean(activeSandbox?.sandboxId), + staleTime: 30_000, + refetchOnWindowFocus: false, + queryFn: async () => { + if (!activeSandbox) { + throw new Error("Cannot load workspace model groups without an active sandbox."); + } + + return await backendClient.getSandboxWorkspaceModelGroups(organizationId, activeSandbox.sandboxProviderId, activeSandbox.sandboxId); + }, + }); + const modelGroups = modelGroupsQuery.data && modelGroupsQuery.data.length > 0 ? modelGroupsQuery.data : DEFAULT_WORKSPACE_MODEL_GROUPS; + const tasks = useMemo(() => { + const sessionCache = new Map(); + if (selectedTaskSummary && taskState.data) { + for (const session of taskState.data.sessionsSummary) { + const cached = + (selectedSessionId && session.id === selectedSessionId ? sessionState.data : undefined) ?? + subscriptionManager.getSnapshot("session", { + organizationId, + repoId: selectedTaskSummary.repoId, + taskId: selectedTaskSummary.id, + sessionId: session.id, + }); + if (cached) { + sessionCache.set(session.id, { + draft: cached.draft, + transcript: cached.transcript, + }); + } + } + } + + const hydratedTasks = taskSummaries.map((summary) => + summary.id === selectedTaskSummary?.id ? toTaskModel(summary, taskState.data, sessionCache) : toTaskModel(summary), + ); + return hydratedTasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs); + }, [selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummariesData, organizationId]); + const openPullRequests = openPullRequestsData ?? []; + const rawRepositories = useMemo(() => groupRepositories(organizationRepos, tasks, openPullRequests), [tasks, organizationReposData, openPullRequestsData]); + const appSnapshot = useMockAppSnapshot(); + const currentUser = activeMockUser(appSnapshot); + const activeOrg = activeMockOrganization(appSnapshot); + const liveGithub = organizationState.data?.github ?? activeOrg?.github ?? 
null; + const navigateToUsage = useCallback(() => { + if (activeOrg) { + void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } as never }); + } + }, [activeOrg, navigate]); + const [repositoryOrder, setRepositoryOrder] = useState(null); + const repositories = useMemo(() => { + if (!repositoryOrder) return rawRepositories; + const byId = new Map(rawRepositories.map((p) => [p.id, p])); + const ordered = repositoryOrder.map((id) => byId.get(id)).filter(Boolean) as typeof rawRepositories; + for (const p of rawRepositories) { + if (!repositoryOrder.includes(p.id)) ordered.push(p); + } + return ordered; + }, [rawRepositories, repositoryOrder]); + const [activeSessionIdByTask, setActiveSessionIdByTask] = useState>({}); + const [lastAgentSessionIdByTask, setLastAgentSessionIdByTask] = useState>({}); + const [openDiffsByTask, setOpenDiffsByTask] = useState>({}); + const [selectedNewTaskRepoId, setSelectedNewTaskRepoId] = useState(""); + const [leftWidth, setLeftWidth] = useState(() => readStoredWidth(LEFT_WIDTH_STORAGE_KEY, LEFT_SIDEBAR_DEFAULT_WIDTH)); + const [rightWidth, setRightWidth] = useState(() => readStoredWidth(RIGHT_WIDTH_STORAGE_KEY, RIGHT_SIDEBAR_DEFAULT_WIDTH)); + const leftWidthRef = useRef(leftWidth); + const rightWidthRef = useRef(rightWidth); + const autoCreatingSessionForTaskRef = useRef>(new Set()); + const [leftSidebarOpen, setLeftSidebarOpen] = useState(true); + const [rightSidebarOpen, setRightSidebarOpen] = useState(true); + const [leftSidebarPeeking, setLeftSidebarPeeking] = useState(false); + const showDevPanel = useDevPanel(); + const peekTimeoutRef = useRef | null>(null); + + const startPeek = useCallback(() => { + if (peekTimeoutRef.current) clearTimeout(peekTimeoutRef.current); + setLeftSidebarPeeking(true); + }, []); + + const endPeek = useCallback(() => { + peekTimeoutRef.current = setTimeout(() => setLeftSidebarPeeking(false), 200); + }, []); + + const reorderRepositories = 
useCallback( + (fromIndex: number, toIndex: number) => { + const ids = repositories.map((p) => p.id); + const [moved] = ids.splice(fromIndex, 1); + ids.splice(toIndex, 0, moved!); + setRepositoryOrder(ids); + }, + [repositories], + ); + + const [taskOrderByRepository, setTaskOrderByRepository] = useState>({}); + const reorderTasks = useCallback( + (repositoryId: string, fromIndex: number, toIndex: number) => { + const repository = repositories.find((p) => p.id === repositoryId); + if (!repository) return; + const currentOrder = taskOrderByRepository[repositoryId] ?? repository.tasks.map((t) => t.id); + const ids = [...currentOrder]; + const [moved] = ids.splice(fromIndex, 1); + ids.splice(toIndex, 0, moved!); + setTaskOrderByRepository((prev) => ({ ...prev, [repositoryId]: ids })); + }, + [repositories, taskOrderByRepository], + ); + + useEffect(() => { + leftWidthRef.current = leftWidth; + window.localStorage.setItem(LEFT_WIDTH_STORAGE_KEY, String(leftWidth)); + }, [leftWidth]); + + useEffect(() => { + rightWidthRef.current = rightWidth; + window.localStorage.setItem(RIGHT_WIDTH_STORAGE_KEY, String(rightWidth)); + }, [rightWidth]); + + const startLeftRef = useRef(leftWidth); + const startRightRef = useRef(rightWidth); + + const onLeftResize = useCallback((deltaX: number) => { + setLeftWidth(Math.min(Math.max(startLeftRef.current + deltaX, SIDEBAR_MIN_WIDTH), SIDEBAR_MAX_WIDTH)); + }, []); + + const onLeftResizeStart = useCallback(() => { + startLeftRef.current = leftWidthRef.current; + }, []); + + const onRightResize = useCallback((deltaX: number) => { + setRightWidth(Math.min(Math.max(startRightRef.current - deltaX, SIDEBAR_MIN_WIDTH), SIDEBAR_MAX_WIDTH)); + }, []); + + const onRightResizeStart = useCallback(() => { + startRightRef.current = rightWidthRef.current; + }, []); + + const activeTask = useMemo(() => { + if (selectedTaskId) { + return tasks.find((task) => task.id === selectedTaskId) ?? tasks[0] ?? null; + } + return tasks[0] ?? 
null; + }, [selectedTaskId, tasks]); + + useEffect(() => { + if (activeTask) { + return; + } + + const fallbackTaskId = tasks[0]?.id; + if (!fallbackTaskId) { + return; + } + + const fallbackTask = tasks.find((task) => task.id === fallbackTaskId) ?? null; + + void navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId: fallbackTaskId, + }, + search: { sessionId: fallbackTask?.sessions[0]?.id ?? undefined }, + replace: true, + }); + }, [activeTask, navigate, tasks, organizationId]); + + const openDiffs = activeTask ? sanitizeOpenDiffs(activeTask, openDiffsByTask[activeTask.id]) : []; + const lastAgentSessionId = activeTask ? sanitizeLastAgentSessionId(activeTask, lastAgentSessionIdByTask[activeTask.id]) : null; + const activeSessionId = activeTask + ? sanitizeActiveSessionId(activeTask, activeSessionIdByTask[activeTask.id] ?? activeTask.activeSessionId ?? null, openDiffs, lastAgentSessionId) + : null; + const selectedSessionHydrating = Boolean( + selectedSessionId && activeSessionId === selectedSessionId && sessionState.status === "loading" && !sessionState.data, + ); + + const syncRouteSession = useCallback( + (taskId: string, sessionId: string | null, replace = false) => { + void navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId, + }, + search: { sessionId: sessionId ?? undefined }, + ...(replace ? 
{ replace: true } : {}), + }); + }, + [navigate, organizationId], + ); + + useEffect(() => { + if (!activeTask) { + return; + } + + const resolvedRouteSessionId = sanitizeLastAgentSessionId(activeTask, selectedSessionId); + if (!resolvedRouteSessionId) { + return; + } + + if (selectedSessionId !== resolvedRouteSessionId) { + syncRouteSession(activeTask.id, resolvedRouteSessionId, true); + return; + } + + if (lastAgentSessionIdByTask[activeTask.id] === resolvedRouteSessionId) { + return; + } + + setLastAgentSessionIdByTask((current) => ({ + ...current, + [activeTask.id]: resolvedRouteSessionId, + })); + setActiveSessionIdByTask((current) => { + const currentActive = current[activeTask.id]; + if (currentActive && isDiffTab(currentActive)) { + return current; + } + + return { + ...current, + [activeTask.id]: resolvedRouteSessionId, + }; + }); + }, [activeTask, lastAgentSessionIdByTask, selectedSessionId, syncRouteSession]); + + useEffect(() => { + const organizationRepos = organizationReposData ?? []; + if (selectedNewTaskRepoId && organizationRepos.some((repo) => repo.id === selectedNewTaskRepoId)) { + return; + } + + const fallbackRepoId = + activeTask?.repoId && organizationRepos.some((repo) => repo.id === activeTask.repoId) ? activeTask.repoId : (organizationRepos[0]?.id ?? 
""); + if (fallbackRepoId !== selectedNewTaskRepoId) { + setSelectedNewTaskRepoId(fallbackRepoId); + } + }, [activeTask?.repoId, selectedNewTaskRepoId, organizationReposData]); + + useEffect(() => { + if (!activeTask) { + return; + } + if (activeTask.sessions.length > 0) { + autoCreatingSessionForTaskRef.current.delete(activeTask.id); + return; + } + if (selectedSessionId) { + return; + } + if (autoCreatingSessionForTaskRef.current.has(activeTask.id)) { + return; + } + + autoCreatingSessionForTaskRef.current.add(activeTask.id); + void (async () => { + try { + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: activeTask.repoId, taskId: activeTask.id }); + syncRouteSession(activeTask.id, sessionId, true); + } catch (error) { + logger.error( + { + taskId: activeTask.id, + ...createErrorContext(error), + }, + "failed_to_auto_create_workspace_session", + ); + // Keep the guard in the set on error to prevent retry storms. + // The guard is cleared when sessions appear (line above) or the task changes. + } + })(); + }, [activeTask, selectedSessionId, syncRouteSession, taskWorkspaceClient]); + + const createTask = useCallback( + (overrideRepoId?: string, options?: { title?: string; task?: string; branch?: string; onBranch?: string }) => { + void (async () => { + const repoId = overrideRepoId || selectedNewTaskRepoId; + if (!repoId) { + throw new Error("Cannot create a task without an available repo"); + } + + const { taskId, sessionId } = await taskWorkspaceClient.createTask({ + repoId, + task: options?.task ?? "New task", + model: currentUser?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + title: options?.title ?? "New task", + ...(options?.branch ? { branch: options.branch } : {}), + ...(options?.onBranch ? { onBranch: options.onBranch } : {}), + }); + await navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId, + }, + search: { sessionId: sessionId ?? 
undefined }, + }); + })(); + }, + [currentUser?.defaultModel, navigate, selectedNewTaskRepoId, taskWorkspaceClient, organizationId], + ); + + const openDiffTab = useCallback( + (path: string) => { + if (!activeTask) { + throw new Error("Cannot open a diff tab without an active task"); + } + setOpenDiffsByTask((current) => { + const existing = sanitizeOpenDiffs(activeTask, current[activeTask.id]); + if (existing.includes(path)) { + return current; + } + + return { + ...current, + [activeTask.id]: [...existing, path], + }; + }); + setActiveSessionIdByTask((current) => ({ + ...current, + [activeTask.id]: diffTabId(path), + })); + }, + [activeTask], + ); + + const selectTask = useCallback( + (id: string) => { + const task = tasks.find((candidate) => candidate.id === id) ?? null; + void navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId: id, + }, + search: { sessionId: task?.sessions[0]?.id ?? undefined }, + }); + }, + [navigate, tasks, organizationId], + ); + + const markTaskUnread = useCallback( + (id: string) => { + const task = tasks.find((candidate) => candidate.id === id); + if (!task) { + return; + } + void taskWorkspaceClient.markTaskUnread({ repoId: task.repoId, taskId: id }); + }, + [tasks], + ); + + const renameTask = useCallback( + (id: string) => { + const currentTask = tasks.find((task) => task.id === id); + if (!currentTask) { + throw new Error(`Unable to rename missing task ${id}`); + } + + const nextTitle = window.prompt("Rename task", currentTask.title); + if (nextTitle === null) { + return; + } + + const trimmedTitle = nextTitle.trim(); + if (!trimmedTitle) { + return; + } + + void taskWorkspaceClient.renameTask({ repoId: currentTask.repoId, taskId: id, value: trimmedTitle }); + }, + [tasks], + ); + + const changeOwner = useCallback( + (member: { id: string; name: string; email: string }) => { + if (!activeTask) { + throw new Error("Cannot change owner without an active task"); + } + void 
taskWorkspaceClient.changeOwner({ + repoId: activeTask.repoId, + taskId: activeTask.id, + targetUserId: member.id, + targetUserName: member.name, + targetUserEmail: member.email, + }); + }, + [activeTask], + ); + + const archiveTask = useCallback(() => { + if (!activeTask) { + throw new Error("Cannot archive without an active task"); + } + void taskWorkspaceClient.archiveTask({ repoId: activeTask.repoId, taskId: activeTask.id }); + }, [activeTask]); + + const publishPr = useCallback(() => { + if (!activeTask) { + throw new Error("Cannot publish PR without an active task"); + } + void taskWorkspaceClient.publishPr({ repoId: activeTask.repoId, taskId: activeTask.id }); + }, [activeTask]); + + const revertFile = useCallback( + (path: string) => { + if (!activeTask) { + throw new Error("Cannot revert a file without an active task"); + } + setOpenDiffsByTask((current) => ({ + ...current, + [activeTask.id]: sanitizeOpenDiffs(activeTask, current[activeTask.id]).filter((candidate) => candidate !== path), + })); + setActiveSessionIdByTask((current) => ({ + ...current, + [activeTask.id]: + current[activeTask.id] === diffTabId(path) + ? sanitizeLastAgentSessionId(activeTask, lastAgentSessionIdByTask[activeTask.id]) + : (current[activeTask.id] ?? null), + })); + + void taskWorkspaceClient.revertFile({ + repoId: activeTask.repoId, + taskId: activeTask.id, + path, + }); + }, + [activeTask, lastAgentSessionIdByTask], + ); + + const isDesktop = !!import.meta.env.VITE_DESKTOP; + const onDragMouseDown = useCallback((event: ReactPointerEvent) => { + if (event.button !== 0) return; + // Tauri v2 IPC: invoke start_dragging on the webview window + const ipc = (window as unknown as Record).__TAURI_INTERNALS__ as + | { + invoke: (cmd: string, args?: unknown) => Promise; + } + | undefined; + if (ipc?.invoke) { + ipc.invoke("plugin:window|start_dragging").catch(() => {}); + } + }, []); + const dragRegion = isDesktop ? ( +
+ {/* Background drag target – sits behind interactive elements */} +
+
+ ) : null; + + const collapsedToggleClass = css({ + width: "26px", + height: "26px", + borderRadius: "6px", + display: "flex", + alignItems: "center", + justifyContent: "center", + cursor: "pointer", + color: t.textTertiary, + position: "relative", + zIndex: 9999, + flexShrink: 0, + ":hover": { color: t.textSecondary, backgroundColor: t.interactiveHover }, + }); + + const sidebarTransition = "width 200ms ease"; + const contentFrameStyle: React.CSSProperties = { + flex: 1, + minWidth: 0, + display: "flex", + flexDirection: "row", + overflow: "hidden", + marginBottom: "8px", + marginRight: "8px", + marginLeft: leftSidebarOpen ? 0 : "8px", + }; + + if (!activeTask) { + return ( + <> + {dragRegion} + +
+
+ void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} + onToggleSidebar={() => setLeftSidebarOpen(false)} + /> +
+
+
+ {leftSidebarOpen ? : null} + + {!leftSidebarOpen || !rightSidebarOpen ? ( +
+ {leftSidebarOpen ? null : ( +
setLeftSidebarOpen(true)}> + +
+ )} +
+ {rightSidebarOpen ? null : ( +
setRightSidebarOpen(true)}> + +
+ )} +
+ ) : null} + +
+
+ {liveGithub?.syncStatus === "syncing" || liveGithub?.syncStatus === "pending" ? ( + <> +
+

Syncing with GitHub

+

+ {liveGithub.lastSyncLabel || `Importing repos from @${liveGithub.connectedAccount || "GitHub"}...`} + {(liveGithub.totalRepositoryCount ?? 0) > 0 && ( + <> + {" "} + {liveGithub.syncPhase === "syncing_repositories" + ? `${liveGithub.importedRepoCount} of ${liveGithub.totalRepositoryCount} repos imported so far.` + : `${liveGithub.processedRepositoryCount} of ${liveGithub.totalRepositoryCount} repos processed in ${liveGithub.syncPhase?.replace(/^syncing_/, "").replace(/_/g, " ") ?? "sync"}.`} + + )} +

+ + ) : liveGithub?.syncStatus === "error" ? ( + <> +

GitHub sync failed

+

There was a problem syncing repos from GitHub. Check the dev panel for details.

+ + ) : ( + <> +

Create your first task

+

+ {organizationRepos.length > 0 + ? "Start from the sidebar to create a task on the first available repo." + : "No repos are available in this organization yet."} +

+ + + )} +
+
+ + + {rightSidebarOpen ? : null} +
+
+ +
+
+
+ + {liveGithub && } + {showDevPanel && ( + + )} + + ); + } + + return ( + <> + {dragRegion} + +
+
+ void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} + onToggleSidebar={() => setLeftSidebarOpen(false)} + /> +
+
+ {!leftSidebarOpen && leftSidebarPeeking ? ( + <> +
setLeftSidebarPeeking(false)} + onMouseEnter={endPeek} + /> +
+ { + selectTask(id); + setLeftSidebarPeeking(false); + }} + onCreate={createTask} + onSelectNewTaskRepo={setSelectedNewTaskRepoId} + onMarkUnread={markTaskUnread} + onRenameTask={renameTask} + onReorderRepositories={reorderRepositories} + taskOrderByRepository={taskOrderByRepository} + onReorderTasks={reorderTasks} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} + onToggleSidebar={() => { + setLeftSidebarPeeking(false); + setLeftSidebarOpen(true); + }} + /> +
+ + ) : null} +
+ {leftSidebarOpen ? : null} +
+ { + setActiveSessionIdByTask((current) => ({ ...current, [activeTask.id]: sessionId })); + }} + onSetLastAgentSessionId={(sessionId) => { + setLastAgentSessionIdByTask((current) => ({ ...current, [activeTask.id]: sessionId })); + }} + onSetOpenDiffs={(paths) => { + setOpenDiffsByTask((current) => ({ ...current, [activeTask.id]: paths })); + }} + sidebarCollapsed={!leftSidebarOpen} + onToggleSidebar={() => { + setLeftSidebarPeeking(false); + setLeftSidebarOpen(true); + }} + onSidebarPeekStart={startPeek} + onSidebarPeekEnd={endPeek} + rightSidebarCollapsed={!rightSidebarOpen} + onToggleRightSidebar={() => setRightSidebarOpen(true)} + selectedSessionHydrating={selectedSessionHydrating} + onNavigateToUsage={navigateToUsage} + /> +
+ {rightSidebarOpen ? : null} +
+
+ setRightSidebarOpen(false)} + /> +
+
+
+ {liveGithub && } + {showDevPanel && ( + ({ + id: tab.id, + sessionId: tab.sessionId ?? null, + sessionName: tab.sessionName ?? tab.id, + agent: tab.agent, + model: tab.model, + status: tab.status, + thinkingSinceMs: tab.thinkingSinceMs ?? null, + unread: tab.unread ?? false, + created: tab.created ?? false, + })) ?? [], + }} + /> + )} + + + ); +} diff --git a/foundry/packages/frontend/src/components/mock-layout/diff-content.tsx b/foundry/packages/frontend/src/components/mock-layout/diff-content.tsx new file mode 100644 index 0000000..8665e97 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/diff-content.tsx @@ -0,0 +1,77 @@ +import { memo, useMemo } from "react"; +import { FileCode, Plus } from "lucide-react"; + +import { ScrollBody } from "./ui"; +import { parseDiffLines, type FileChange } from "./view-model"; + +export const DiffContent = memo(function DiffContent({ + filePath, + file, + diff, + onAddAttachment, +}: { + filePath: string; + file?: FileChange; + diff?: string; + onAddAttachment?: (filePath: string, lineNumber: number, lineContent: string) => void; +}) { + const diffLines = useMemo(() => (diff ? parseDiffLines(diff) : []), [diff]); + + return ( + <> +
+ +
{filePath}
+ {file ? ( +
+ +{file.added} + −{file.removed} +
+ ) : null} +
+ + {diff ? ( +
+ {diffLines.map((line) => { + const isHunk = line.kind === "hunk"; + return ( +
onAddAttachment(filePath, line.lineNumber, line.text) : undefined} + > +
+ {!isHunk && onAddAttachment ? ( + + ) : null} + {isHunk ? "" : line.lineNumber} +
+
+ {line.text} +
+
+ ); + })} +
+ ) : ( +
+
No diff data available for this file
+
+ )} +
+ + ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/history-minimap.tsx b/foundry/packages/frontend/src/components/mock-layout/history-minimap.tsx new file mode 100644 index 0000000..ae2443d --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/history-minimap.tsx @@ -0,0 +1,128 @@ +import { memo, useEffect, useRef, useState } from "react"; +import { useStyletron } from "baseui"; +import { LabelXSmall } from "baseui/typography"; +import { History } from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { formatMessageTimestamp, type HistoryEvent } from "./view-model"; + +export const HistoryMinimap = memo(function HistoryMinimap({ events, onSelect }: { events: HistoryEvent[]; onSelect: (event: HistoryEvent) => void }) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [open, setOpen] = useState(false); + const containerRef = useRef(null); + + useEffect(() => { + if (!open) return; + const handleClick = (e: MouseEvent) => { + if (containerRef.current && !containerRef.current.contains(e.target as Node)) { + setOpen(false); + } + }; + document.addEventListener("mousedown", handleClick); + return () => document.removeEventListener("mousedown", handleClick); + }, [open]); + + if (events.length === 0) { + return null; + } + + return ( +
+
setOpen((prev) => !prev)} + onKeyDown={(e) => { + if (e.key === "Enter" || e.key === " ") setOpen((prev) => !prev); + }} + className={css({ + width: "26px", + height: "26px", + borderRadius: "6px", + display: "flex", + alignItems: "center", + justifyContent: "center", + cursor: "pointer", + color: open ? t.textSecondary : t.textTertiary, + backgroundColor: open ? t.interactiveHover : "transparent", + transition: "background 200ms ease, color 200ms ease", + ":hover": { color: t.textSecondary, backgroundColor: t.interactiveHover }, + })} + > + +
+ + {open ? ( +
+
+ + Task events + + {events.length} +
+
+ {events.map((event) => ( + + ))} +
+
+ ) : null} +
+ ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx new file mode 100644 index 0000000..499e6cd --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx @@ -0,0 +1,376 @@ +import { AgentTranscript as AgentTranscript_, type AgentTranscriptClassNames, type TranscriptEntry } from "@sandbox-agent/react"; +import { memo, useEffect, useMemo, type MutableRefObject, type RefObject } from "react"; + +// Cast needed: tsup-generated .d.ts returns react_jsx_runtime.JSX.Element which +// doesn't unify with the consumer's JSX.Element under Bundler moduleResolution. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const AgentTranscript = AgentTranscript_ as any as React.FC<{ + entries: TranscriptEntry[]; + classNames?: Partial; + scrollRef?: RefObject; + scrollToEntryId?: string | null; + virtualize?: boolean; + isThinking?: boolean; + renderMessageText?: (entry: TranscriptEntry) => React.ReactNode; + renderThinkingState?: () => React.ReactNode; +}>; +import { useStyletron } from "baseui"; +import { LabelSmall, LabelXSmall } from "baseui/typography"; +import { Copy } from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { HistoryMinimap } from "./history-minimap"; +import { SpinnerDot } from "./ui"; +import { buildDisplayMessages, formatMessageDuration, formatMessageTimestamp, type AgentSession, type HistoryEvent, type Message } from "./view-model"; + +const TranscriptMessageBody = memo(function TranscriptMessageBody({ + message, + messageRefs, + copiedMessageId, + onCopyMessage, + isTarget, + onTargetRendered, +}: { + message: Message; + messageRefs: MutableRefObject>; + copiedMessageId: string | null; + onCopyMessage: (message: Message) => void; + isTarget?: boolean; + onTargetRendered?: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const isUser = 
message.sender === "client"; + const isCopied = copiedMessageId === message.id; + const messageTimestamp = formatMessageTimestamp(message.createdAtMs); + const displayFooter = isUser ? messageTimestamp : message.durationMs ? `${messageTimestamp} • Took ${formatMessageDuration(message.durationMs)}` : null; + + useEffect(() => { + if (!isTarget) { + return; + } + + const targetNode = messageRefs.current.get(message.id); + if (!targetNode) { + return; + } + + targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); + onTargetRendered?.(); + }, [isTarget, message.id, messageRefs, onTargetRendered]); + + return ( +
{ + if (node) { + messageRefs.current.set(message.id, node); + } else { + messageRefs.current.delete(message.id); + } + }} + className={css({ + display: "flex", + flexDirection: "column", + alignItems: isUser ? "flex-end" : "flex-start", + gap: "6px", + })} + > +
+
+ {message.text} +
+
+
+ {displayFooter ? ( + + {displayFooter} + + ) : null} + +
+
+ ); +}); + +export const MessageList = memo(function MessageList({ + session, + scrollRef, + messageRefs, + historyEvents, + onSelectHistoryEvent, + targetMessageId, + onTargetMessageResolved, + copiedMessageId, + onCopyMessage, + thinkingTimerLabel, + pendingMessage, +}: { + session: AgentSession | null | undefined; + scrollRef: RefObject; + messageRefs: MutableRefObject>; + historyEvents: HistoryEvent[]; + onSelectHistoryEvent: (event: HistoryEvent) => void; + targetMessageId?: string | null; + onTargetMessageResolved?: () => void; + copiedMessageId: string | null; + onCopyMessage: (message: Message) => void; + thinkingTimerLabel: string | null; + pendingMessage: { text: string; sentAt: number } | null; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const PENDING_MESSAGE_ID = "__pending__"; + const messages = useMemo(() => buildDisplayMessages(session), [session]); + const messagesById = useMemo(() => new Map(messages.map((message) => [message.id, message])), [messages]); + const messageIndexById = useMemo(() => new Map(messages.map((message, index) => [message.id, index])), [messages]); + const transcriptEntries = useMemo(() => { + const entries: TranscriptEntry[] = messages.map((message) => ({ + id: message.id, + eventId: message.id, + kind: "message", + time: new Date(message.createdAtMs).toISOString(), + role: message.sender === "client" ? 
"user" : "assistant", + text: message.text, + })); + if (pendingMessage) { + entries.push({ + id: PENDING_MESSAGE_ID, + eventId: PENDING_MESSAGE_ID, + kind: "message", + time: new Date(pendingMessage.sentAt).toISOString(), + role: "user", + text: pendingMessage.text, + }); + } + return entries; + }, [messages, pendingMessage]); + + const messageContentClass = css({ + maxWidth: "100%", + display: "flex", + flexDirection: "column", + }); + + const transcriptClassNames: Partial = { + root: css({ + display: "flex", + flexDirection: "column", + gap: "12px", + }), + message: css({ + display: "flex", + }), + messageContent: messageContentClass, + messageText: css({ + width: "100%", + }), + thinkingRow: css({ + display: "flex", + alignItems: "center", + gap: "8px", + padding: "4px 0", + }), + thinkingIndicator: css({ + display: "flex", + alignItems: "center", + gap: "8px", + color: t.accent, + fontSize: "11px", + fontFamily: '"IBM Plex Mono", monospace', + letterSpacing: "0.01em", + }), + }; + const scrollContainerClass = css({ + padding: "16px 52px 16px 20px", + display: "flex", + flexDirection: "column", + flex: 1, + minHeight: 0, + overflowY: "auto", + }); + + useEffect(() => { + if (!targetMessageId) { + return; + } + + const targetNode = messageRefs.current.get(targetMessageId); + if (targetNode) { + targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); + onTargetMessageResolved?.(); + return; + } + + const targetIndex = messageIndexById.get(targetMessageId); + if (targetIndex == null) { + return; + } + + scrollRef.current?.scrollTo({ + top: Math.max(0, targetIndex * 88), + behavior: "smooth", + }); + }, [messageIndexById, messageRefs, onTargetMessageResolved, scrollRef, targetMessageId]); + + return ( + <> + + {historyEvents.length > 0 ? : null} +
+ {session && transcriptEntries.length === 0 ? ( +
+ + {!session?.created ? "Choose an agent and model, then send your first message" : "No messages yet in this session"} + +
+ ) : ( + { + if (entry.id === PENDING_MESSAGE_ID && pendingMessage) { + const pendingMsg: Message = { + id: PENDING_MESSAGE_ID, + sender: "client", + text: pendingMessage.text, + createdAtMs: pendingMessage.sentAt, + event: { + id: PENDING_MESSAGE_ID, + eventIndex: -1, + sessionId: "", + connectionId: "", + sender: "client", + createdAt: pendingMessage.sentAt, + payload: {}, + }, + }; + return ( +
+ +
+ ); + } + const message = messagesById.get(entry.id); + if (!message) { + return null; + } + + return ( + + ); + }} + isThinking={Boolean((session && session.status === "running" && transcriptEntries.length > 0) || pendingMessage)} + renderThinkingState={() => ( +
+ + + Agent is thinking + {thinkingTimerLabel ? ( + + {thinkingTimerLabel} + + ) : null} + +
+ )} + /> + )} +
+ + ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx new file mode 100644 index 0000000..6ec6ea6 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx @@ -0,0 +1,178 @@ +import { memo, useState } from "react"; +import { useStyletron } from "baseui"; +import { StatefulPopover, PLACEMENT } from "baseui/popover"; +import { ChevronUp, Star } from "lucide-react"; +import { workspaceModelLabel, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; + +import { useFoundryTokens } from "../../app/theme"; +import { AgentIcon } from "./ui"; +import { type ModelId } from "./view-model"; + +const ModelPickerContent = memo(function ModelPickerContent({ + groups, + value, + defaultModel, + onChange, + onSetDefault, + close, +}: { + groups: WorkspaceModelGroup[]; + value: ModelId; + defaultModel: ModelId; + onChange: (id: ModelId) => void; + onSetDefault: (id: ModelId) => void; + close: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [hoveredId, setHoveredId] = useState(null); + + return ( +
+ {groups.map((group) => ( +
+
+ {group.provider} +
+ {group.models.map((model) => { + const isActive = model.id === value; + const isDefault = model.id === defaultModel; + const isHovered = model.id === hoveredId; + const agent = group.agentKind; + + return ( +
setHoveredId(model.id)} + onMouseLeave={() => setHoveredId(null)} + onClick={() => { + onChange(model.id); + close(); + }} + className={css({ + display: "flex", + alignItems: "center", + gap: "8px", + padding: "6px 12px", + cursor: "pointer", + fontSize: "12px", + fontWeight: isActive ? 600 : 400, + color: isActive ? t.textPrimary : t.textSecondary, + borderRadius: "6px", + marginLeft: "4px", + marginRight: "4px", + ":hover": { backgroundColor: t.borderSubtle }, + })} + > + + {model.label} + {isDefault ? : null} + {!isDefault && isHovered ? ( + { + event.stopPropagation(); + onSetDefault(model.id); + }} + /> + ) : null} +
+ ); + })} +
+ ))} +
+ ); +}); + +export const ModelPicker = memo(function ModelPicker({ + groups, + value, + defaultModel, + onChange, + onSetDefault, +}: { + groups: WorkspaceModelGroup[]; + value: ModelId; + defaultModel: ModelId; + onChange: (id: ModelId) => void; + onSetDefault: (id: ModelId) => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [isOpen, setIsOpen] = useState(false); + const [isHovered, setIsHovered] = useState(false); + + return ( + setIsOpen(true)} + onClose={() => setIsOpen(false)} + overrides={{ + Body: { + style: { + backgroundColor: "rgba(32, 32, 32, 0.98)", + backdropFilter: "blur(12px)", + borderTopLeftRadius: "10px", + borderTopRightRadius: "10px", + borderBottomLeftRadius: "10px", + borderBottomRightRadius: "10px", + border: `1px solid ${t.borderDefault}`, + boxShadow: `0 8px 32px rgba(0, 0, 0, 0.5), 0 0 0 1px ${t.interactiveSubtle}`, + zIndex: 100, + }, + }, + Inner: { + style: { + backgroundColor: "transparent", + padding: "0", + }, + }, + }} + content={({ close }) => ( + + )} + > +
+ +
+
+ ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx new file mode 100644 index 0000000..b7e27be --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx @@ -0,0 +1,184 @@ +import { memo, type Ref } from "react"; +import { useStyletron } from "baseui"; +import { ChatComposer, type ChatComposerClassNames } from "@sandbox-agent/react"; +import { FileCode, SendHorizonal, Square, X } from "lucide-react"; +import { type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; + +import { useFoundryTokens } from "../../app/theme"; +import { ModelPicker } from "./model-picker"; +import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT } from "./ui"; +import { fileName, type LineAttachment, type ModelId } from "./view-model"; + +export const PromptComposer = memo(function PromptComposer({ + draft, + textareaRef, + placeholder, + attachments, + modelGroups, + defaultModel, + model, + isRunning, + onDraftChange, + onSend, + onStop, + onRemoveAttachment, + onChangeModel, + onSetDefaultModel, +}: { + draft: string; + textareaRef: Ref; + placeholder: string; + attachments: LineAttachment[]; + modelGroups: WorkspaceModelGroup[]; + defaultModel: ModelId; + model: ModelId; + isRunning: boolean; + onDraftChange: (value: string) => void; + onSend: () => void; + onStop: () => void; + onRemoveAttachment: (id: string) => void; + onChangeModel: (model: ModelId) => void; + onSetDefaultModel: (model: ModelId) => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const composerClassNames: Partial = { + form: css({ + position: "relative", + backgroundColor: t.interactiveHover, + border: `1px solid ${t.borderDefault}`, + borderRadius: "12px", + minHeight: `${PROMPT_TEXTAREA_MIN_HEIGHT + 36}px`, + transition: "border-color 200ms ease", + ":focus-within": { borderColor: t.borderMedium }, + display: "flex", 
+ flexDirection: "column", + }), + input: css({ + display: "block", + width: "100%", + minHeight: `${PROMPT_TEXTAREA_MIN_HEIGHT + 20}px`, + padding: "14px 58px 8px 14px", + background: "transparent", + border: "none", + borderRadius: "12px 12px 0 0", + color: t.textPrimary, + fontSize: "13px", + fontFamily: "inherit", + resize: "none", + outline: "none", + lineHeight: "1.4", + maxHeight: `${PROMPT_TEXTAREA_MAX_HEIGHT + 40}px`, + boxSizing: "border-box", + overflowY: "hidden", + "::placeholder": { color: t.textSecondary }, + }), + submit: css({ + appearance: "none", + WebkitAppearance: "none", + boxSizing: "border-box", + width: "32px", + height: "32px", + padding: "0", + margin: "0", + border: "none", + borderRadius: "10px", + cursor: "pointer", + position: "absolute", + right: "12px", + bottom: "12px", + display: "flex", + alignItems: "center", + justifyContent: "center", + lineHeight: 0, + fontSize: 0, + color: t.textPrimary, + transition: "background 200ms ease", + backgroundColor: isRunning ? t.interactiveHover : t.borderMedium, + ":hover": { + backgroundColor: isRunning ? t.borderMedium : "rgba(255, 255, 255, 0.20)", + }, + ":disabled": { + cursor: "not-allowed", + opacity: 0.45, + }, + }), + submitContent: css({ + display: "flex", + alignItems: "center", + justifyContent: "center", + width: "100%", + height: "100%", + lineHeight: 0, + color: isRunning ? t.textPrimary : t.textPrimary, + }), + }; + + return ( +
+ {attachments.length > 0 ? ( +
+ {attachments.map((attachment) => ( +
+ + + {fileName(attachment.filePath)}:{attachment.lineNumber} + + onRemoveAttachment(attachment.id)} /> +
+ ))} +
+ ) : null} + { + if (event.key === "Enter" && !event.shiftKey) { + event.preventDefault(); + if (isRunning) { + onStop(); + } else { + onSend(); + } + } + }} + placeholder={placeholder} + inputRef={textareaRef} + rows={2} + allowEmptySubmit={isRunning} + submitLabel={isRunning ? "Stop" : "Send"} + classNames={composerClassNames} + renderSubmitContent={() => (isRunning ? : )} + renderFooter={() => ( +
+ +
+ )} + /> +
+ ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx new file mode 100644 index 0000000..3565b44 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx @@ -0,0 +1,759 @@ +import { memo, useCallback, useMemo, useRef, useState, type MouseEvent } from "react"; +import { useStyletron } from "baseui"; +import { LabelSmall, LabelXSmall } from "baseui/typography"; +import { + Archive, + ArrowUpFromLine, + ChevronDown, + ChevronRight, + FileCode, + FilePlus, + FileX, + FolderOpen, + ExternalLink, + GitBranch, + GitPullRequest, + PanelRight, + User, +} from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { createErrorContext } from "@sandbox-agent/foundry-shared"; +import { logger } from "../../logging.js"; +import { type ContextMenuItem, ContextMenuOverlay, PanelHeaderBar, SPanel, ScrollBody, useContextMenu } from "./ui"; +import { type FileTreeNode, type Task, diffTabId } from "./view-model"; + +const FileTree = memo(function FileTree({ + nodes, + depth, + onSelectFile, + onFileContextMenu, + changedPaths, +}: { + nodes: FileTreeNode[]; + depth: number; + onSelectFile: (path: string) => void; + onFileContextMenu: (event: MouseEvent, path: string) => void; + changedPaths: Set; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [collapsed, setCollapsed] = useState>(new Set()); + + return ( + <> + {nodes.map((node) => { + const isCollapsed = collapsed.has(node.path); + const isChanged = changedPaths.has(node.path); + return ( +
+
{ + if (node.isDir) { + setCollapsed((current) => { + const next = new Set(current); + if (next.has(node.path)) { + next.delete(node.path); + } else { + next.add(node.path); + } + return next; + }); + return; + } + + onSelectFile(node.path); + }} + onContextMenu={node.isDir ? undefined : (event) => onFileContextMenu(event, node.path)} + className={css({ + display: "flex", + alignItems: "center", + gap: "4px", + paddingTop: "3px", + paddingRight: "10px", + paddingBottom: "3px", + paddingLeft: `${10 + depth * 16}px`, + cursor: "pointer", + fontSize: "12px", + fontFamily: '"IBM Plex Mono", monospace', + color: isChanged ? t.textPrimary : t.textTertiary, + ":hover": { backgroundColor: t.interactiveHover }, + })} + > + {node.isDir ? ( + <> + + + + ) : ( + + )} + {node.name} +
+ {node.isDir && !isCollapsed && node.children ? ( + + ) : null} +
+ ); + })} + + ); +}); + +export const RightSidebar = memo(function RightSidebar({ + task, + activeSessionId, + onOpenDiff, + onArchive, + onRevertFile, + onPublishPr, + onChangeOwner, + members, + onToggleSidebar, +}: { + task: Task; + activeSessionId: string | null; + onOpenDiff: (path: string) => void; + onArchive: () => void; + onRevertFile: (path: string) => void; + onPublishPr: () => void; + onChangeOwner: (member: { id: string; name: string; email: string }) => void; + members: Array<{ id: string; name: string; email: string }>; + onToggleSidebar?: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const [rightTab, setRightTab] = useState<"overview" | "changes" | "files">("overview"); + const contextMenu = useContextMenu(); + const changedPaths = useMemo(() => new Set(task.fileChanges.map((file) => file.path)), [task.fileChanges]); + const isTerminal = task.status === "archived"; + const [compact, setCompact] = useState(false); + const headerRef = useCallback((node: HTMLDivElement | null) => { + if (!node) return; + const observer = new ResizeObserver((entries) => { + for (const entry of entries) { + setCompact(entry.contentRect.width < 400); + } + }); + observer.observe(node); + }, []); + const [ownerDropdownOpen, setOwnerDropdownOpen] = useState(false); + const ownerDropdownRef = useRef(null); + const pullRequestUrl = task.pullRequest?.url ?? 
null; + + const copyFilePath = useCallback(async (path: string) => { + try { + if (!window.navigator.clipboard) { + throw new Error("Clipboard API unavailable in mock layout"); + } + + await window.navigator.clipboard.writeText(path); + } catch (error) { + logger.error( + { + path, + ...createErrorContext(error), + }, + "failed_to_copy_file_path", + ); + } + }, []); + + const openFileMenu = useCallback( + (event: MouseEvent, path: string) => { + const items: ContextMenuItem[] = []; + + if (changedPaths.has(path)) { + items.push({ label: "Revert", onClick: () => onRevertFile(path) }); + } + + items.push({ label: "Copy Path", onClick: () => void copyFilePath(path) }); + contextMenu.open(event, items); + }, + [changedPaths, contextMenu, copyFilePath, onRevertFile], + ); + + return ( + + +
+ {!isTerminal ? ( +
+ + + +
+ ) : null} + {onToggleSidebar ? ( +
{ + if (event.key === "Enter" || event.key === " ") onToggleSidebar(); + }} + className={css({ + width: "26px", + height: "26px", + borderRadius: "6px", + color: t.textTertiary, + cursor: "pointer", + display: "flex", + alignItems: "center", + justifyContent: "center", + flexShrink: 0, + ":hover": { color: t.textSecondary, backgroundColor: t.interactiveHover }, + })} + > + +
+ ) : null} +
+
+ +
+
+ + + +
+ + + {rightTab === "overview" ? ( +
+
+ + Owner + +
+
setOwnerDropdownOpen((prev) => !prev)} + onKeyDown={(event) => { + if (event.key === "Enter" || event.key === " ") setOwnerDropdownOpen((prev) => !prev); + }} + className={css({ + display: "flex", + alignItems: "center", + gap: "10px", + paddingTop: "4px", + paddingRight: "8px", + paddingBottom: "4px", + paddingLeft: "4px", + borderRadius: "6px", + cursor: "pointer", + ":hover": { backgroundColor: t.interactiveHover }, + })} + > + {task.primaryUserLogin ? ( + <> + {task.primaryUserAvatarUrl ? ( + {task.primaryUserLogin} + ) : ( +
+ +
+ )} + + {task.primaryUserLogin} + + + ) : ( + <> +
+ +
+ + No owner assigned + + + )} + +
+ {ownerDropdownOpen ? ( + <> +
setOwnerDropdownOpen(false)} + className={css({ position: "fixed", top: 0, left: 0, right: 0, bottom: 0, zIndex: 99 })} + /> +
+ {members.map((member) => ( +
{ + onChangeOwner(member); + setOwnerDropdownOpen(false); + }} + onKeyDown={(event) => { + if (event.key === "Enter" || event.key === " ") { + onChangeOwner(member); + setOwnerDropdownOpen(false); + } + }} + className={css({ + display: "flex", + alignItems: "center", + gap: "8px", + paddingTop: "6px", + paddingRight: "12px", + paddingBottom: "6px", + paddingLeft: "12px", + cursor: "pointer", + fontSize: "12px", + color: t.textPrimary, + ":hover": { backgroundColor: t.interactiveHover }, + })} + > +
+ +
+ {member.name} +
+ ))} + {members.length === 0 ? ( +
+ No members +
+ ) : null} +
+ + ) : null} +
+
+
+ + Branch + +
+ + + {task.branch ?? "No branch"} + +
+
+
+ + Repository + + {task.repoName} +
+ {task.pullRequest ? ( +
+ + Pull Request + +
+ + + #{task.pullRequest.number} {task.pullRequest.title ?? ""} + +
+
+ ) : null} + {task.sandboxes?.find((s) => s.sandboxId === task.activeSandboxId)?.url ? ( + + ) : null} +
+ ) : rightTab === "changes" ? ( +
+ {task.fileChanges.length === 0 ? ( +
+ No changes yet +
+ ) : null} + {task.fileChanges.map((file) => { + const isActive = activeSessionId === diffTabId(file.path); + const TypeIcon = file.type === "A" ? FilePlus : file.type === "D" ? FileX : FileCode; + const iconColor = file.type === "A" ? t.statusSuccess : file.type === "D" ? t.statusError : t.textTertiary; + return ( +
onOpenDiff(file.path)} + onContextMenu={(event) => openFileMenu(event, file.path)} + className={css({ + display: "flex", + alignItems: "center", + gap: "8px", + padding: "6px 10px", + borderRadius: "6px", + backgroundColor: isActive ? t.interactiveHover : "transparent", + cursor: "pointer", + ":hover": { backgroundColor: t.interactiveHover }, + })} + > + +
+ {file.path} +
+
+ +{file.added} + -{file.removed} + {file.type} +
+
+ ); + })} +
+ ) : ( +
+ {task.fileTree.length > 0 ? ( + + ) : ( +
+ No files yet +
+ )} +
+ )} + +
+ {contextMenu.menu ? : null} + + ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/session-strip.tsx b/foundry/packages/frontend/src/components/mock-layout/session-strip.tsx new file mode 100644 index 0000000..105fbef --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/session-strip.tsx @@ -0,0 +1,238 @@ +import { memo } from "react"; +import { useStyletron } from "baseui"; +import { LabelXSmall } from "baseui/typography"; +import { FileCode, Plus, X } from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { ContextMenuOverlay, SessionAvatar, useContextMenu } from "./ui"; +import { diffTabId, fileName, type Task } from "./view-model"; + +export const SessionStrip = memo(function SessionStrip({ + task, + activeSessionId, + openDiffs, + editingSessionId, + editingSessionName, + onEditingSessionNameChange, + onSwitchSession, + onStartRenamingSession, + onCommitSessionRename, + onCancelSessionRename, + onSetSessionUnread, + onCloseSession, + onCloseDiffTab, + onAddSession, + sidebarCollapsed, +}: { + task: Task; + activeSessionId: string | null; + openDiffs: string[]; + editingSessionId: string | null; + editingSessionName: string; + onEditingSessionNameChange: (value: string) => void; + onSwitchSession: (sessionId: string) => void; + onStartRenamingSession: (sessionId: string) => void; + onCommitSessionRename: () => void; + onCancelSessionRename: () => void; + onSetSessionUnread: (sessionId: string, unread: boolean) => void; + onCloseSession: (sessionId: string) => void; + onCloseDiffTab: (path: string) => void; + onAddSession: () => void; + sidebarCollapsed?: boolean; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const isDesktop = !!import.meta.env.VITE_DESKTOP; + const contextMenu = useContextMenu(); + + return ( + <> + +
+ {task.sessions.map((tab) => { + const isActive = tab.id === activeSessionId; + return ( +
onSwitchSession(tab.id)} + onDoubleClick={() => onStartRenamingSession(tab.id)} + onMouseDown={(event) => { + if (event.button === 1 && task.sessions.length > 1) { + event.preventDefault(); + onCloseSession(tab.id); + } + }} + onContextMenu={(event) => + contextMenu.open(event, [ + { label: "Rename session", onClick: () => onStartRenamingSession(tab.id) }, + { + label: tab.unread ? "Mark as read" : "Mark as unread", + onClick: () => onSetSessionUnread(tab.id, !tab.unread), + }, + ...(task.sessions.length > 1 ? [{ label: "Close session", onClick: () => onCloseSession(tab.id) }] : []), + ]) + } + data-session + className={css({ + display: "flex", + alignItems: "center", + gap: "6px", + padding: "4px 12px", + marginTop: "6px", + marginBottom: "6px", + borderRadius: "8px", + backgroundColor: isActive ? t.interactiveHover : "transparent", + cursor: "pointer", + transition: "color 200ms ease, background-color 200ms ease", + flexShrink: 0, + ":hover": { color: t.textPrimary, backgroundColor: isActive ? t.interactiveHover : t.interactiveSubtle }, + })} + > +
+ +
+ {editingSessionId === tab.id ? ( + onEditingSessionNameChange(event.target.value)} + onBlur={onCommitSessionRename} + onClick={(event) => event.stopPropagation()} + onDoubleClick={(event) => event.stopPropagation()} + onKeyDown={(event) => { + if (event.key === "Enter") { + onCommitSessionRename(); + } else if (event.key === "Escape") { + onCancelSessionRename(); + } + }} + className={css({ + appearance: "none", + WebkitAppearance: "none", + background: "none", + border: "none", + padding: "0", + margin: "0", + outline: "none", + minWidth: "72px", + maxWidth: "180px", + fontSize: "11px", + fontWeight: 600, + color: t.textPrimary, + borderBottom: `1px solid ${t.borderFocus}`, + })} + /> + ) : ( + + {tab.sessionName} + + )} + {task.sessions.length > 1 ? ( + { + event.stopPropagation(); + onCloseSession(tab.id); + }} + /> + ) : null} +
+ ); + })} + {openDiffs.map((path) => { + const sessionId = diffTabId(path); + const isActive = sessionId === activeSessionId; + return ( +
onSwitchSession(sessionId)} + onMouseDown={(event) => { + if (event.button === 1) { + event.preventDefault(); + onCloseDiffTab(path); + } + }} + data-session + className={css({ + display: "flex", + alignItems: "center", + gap: "6px", + padding: "4px 12px", + marginTop: "6px", + marginBottom: "6px", + borderRadius: "8px", + backgroundColor: isActive ? t.interactiveHover : "transparent", + cursor: "pointer", + transition: "color 200ms ease, background-color 200ms ease", + flexShrink: 0, + ":hover": { color: t.textPrimary, backgroundColor: isActive ? t.interactiveHover : t.interactiveSubtle }, + })} + > + + + {fileName(path)} + + { + event.stopPropagation(); + onCloseDiffTab(path); + }} + /> +
+ ); + })} +
+ +
+
+ {contextMenu.menu ? : null} + + ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx new file mode 100644 index 0000000..6ebb026 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -0,0 +1,1190 @@ +import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState } from "react"; +import { createPortal } from "react-dom"; +import { useNavigate } from "@tanstack/react-router"; +import { useVirtualizer } from "@tanstack/react-virtual"; +import { useStyletron } from "baseui"; +import { LabelSmall, LabelXSmall } from "baseui/typography"; +import { Select, type Value } from "baseui/select"; +import { + ChevronDown, + ChevronRight, + ChevronUp, + CloudUpload, + CreditCard, + GitPullRequestDraft, + ListChecks, + LogOut, + MoreHorizontal, + PanelLeft, + Plus, + Settings, + User, +} from "lucide-react"; + +import { formatRelativeAge, type Task, type RepositorySection } from "./view-model"; +import { ContextMenuOverlay, TaskIndicator, PanelHeaderBar, SPanel, ScrollBody, useContextMenu } from "./ui"; +import { activeMockOrganization, eligibleOrganizations, useMockAppClient, useMockAppSnapshot } from "../../lib/mock-app"; +import { useFoundryTokens } from "../../app/theme"; +import type { FoundryTokens } from "../../styles/tokens"; + +const REPOSITORY_COLORS = ["#6366f1", "#f59e0b", "#10b981", "#ef4444", "#8b5cf6", "#ec4899", "#06b6d4", "#f97316"]; + +/** Strip the org prefix (e.g. "rivet-dev/") when all repos share the same org. 
*/ +function stripCommonOrgPrefix(label: string, repos: Array<{ label: string }>): string { + const slashIdx = label.indexOf("/"); + if (slashIdx < 0) return label; + const prefix = label.slice(0, slashIdx + 1); + if (repos.every((r) => r.label.startsWith(prefix))) { + return label.slice(slashIdx + 1); + } + return label; +} + +function repositoryInitial(label: string): string { + const parts = label.split("/"); + const name = parts[parts.length - 1] ?? label; + return name.charAt(0).toUpperCase(); +} + +function repositoryIconColor(label: string): string { + let hash = 0; + for (let i = 0; i < label.length; i++) { + hash = (hash * 31 + label.charCodeAt(i)) | 0; + } + return REPOSITORY_COLORS[Math.abs(hash) % REPOSITORY_COLORS.length]!; +} + +export const Sidebar = memo(function Sidebar({ + repositories, + newTaskRepos, + selectedNewTaskRepoId, + activeId, + onSelect, + onCreate, + onSelectNewTaskRepo, + onMarkUnread, + onRenameTask, + onReorderRepositories, + taskOrderByRepository, + onReorderTasks, + onReloadOrganization, + onReloadRepository, + onToggleSidebar, +}: { + repositories: RepositorySection[]; + newTaskRepos: Array<{ id: string; label: string }>; + selectedNewTaskRepoId: string; + activeId: string; + onSelect: (id: string) => void; + onCreate: (repoId?: string) => void; + onSelectNewTaskRepo: (repoId: string) => void; + onMarkUnread: (id: string) => void; + onRenameTask: (id: string) => void; + onReorderRepositories: (fromIndex: number, toIndex: number) => void; + taskOrderByRepository: Record; + onReorderTasks: (repositoryId: string, fromIndex: number, toIndex: number) => void; + onReloadOrganization: () => void; + onReloadRepository: (repoId: string) => void; + onToggleSidebar?: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const contextMenu = useContextMenu(); + const [collapsedRepositories, setCollapsedRepositories] = useState>({}); + const [hoveredRepositoryId, setHoveredRepositoryId] = useState(null); + const 
[headerMenuOpen, setHeaderMenuOpen] = useState(false); + const headerMenuRef = useRef(null); + const scrollRef = useRef(null); + + // Mouse-based drag and drop state + type DragState = + | { type: "repository"; fromIdx: number; overIdx: number | null } + | { type: "task"; repositoryId: string; fromIdx: number; overIdx: number | null } + | null; + const [drag, setDrag] = useState(null); + const dragRef = useRef(null); + const startYRef = useRef(0); + const didDragRef = useRef(false); + + // Attach global mousemove/mouseup when dragging + useEffect(() => { + if (!drag) return; + const onMove = (e: MouseEvent) => { + // Detect which element is under the cursor using data attributes + const el = document.elementFromPoint(e.clientX, e.clientY); + if (!el) return; + const repositoryEl = (el as HTMLElement).closest?.("[data-repository-idx]") as HTMLElement | null; + const taskEl = (el as HTMLElement).closest?.("[data-task-idx]") as HTMLElement | null; + + if (drag.type === "repository" && repositoryEl) { + const overIdx = Number(repositoryEl.dataset.repositoryIdx); + if (overIdx !== drag.overIdx) { + setDrag({ ...drag, overIdx }); + dragRef.current = { ...drag, overIdx }; + } + } else if (drag.type === "task" && taskEl) { + const overRepositoryId = taskEl.dataset.taskRepositoryId ?? 
""; + const overIdx = Number(taskEl.dataset.taskIdx); + if (overRepositoryId === drag.repositoryId && overIdx !== drag.overIdx) { + setDrag({ ...drag, overIdx }); + dragRef.current = { ...drag, overIdx }; + } + } + // Mark that we actually moved (to distinguish from clicks) + if (Math.abs(e.clientY - startYRef.current) > 4) { + didDragRef.current = true; + } + }; + const onUp = () => { + const d = dragRef.current; + if (d && didDragRef.current && d.overIdx !== null && d.fromIdx !== d.overIdx) { + if (d.type === "repository") { + onReorderRepositories(d.fromIdx, d.overIdx); + } else { + onReorderTasks(d.repositoryId, d.fromIdx, d.overIdx); + } + } + dragRef.current = null; + didDragRef.current = false; + setDrag(null); + }; + document.addEventListener("mousemove", onMove); + document.addEventListener("mouseup", onUp); + return () => { + document.removeEventListener("mousemove", onMove); + document.removeEventListener("mouseup", onUp); + }; + }, [drag, onReorderRepositories, onReorderTasks]); + + useEffect(() => { + if (!headerMenuOpen) { + return; + } + const onMouseDown = (event: MouseEvent) => { + if (headerMenuRef.current?.contains(event.target as Node)) { + return; + } + setHeaderMenuOpen(false); + }; + document.addEventListener("mousedown", onMouseDown); + return () => document.removeEventListener("mousedown", onMouseDown); + }, [headerMenuOpen]); + + const [createSelectOpen, setCreateSelectOpen] = useState(false); + const selectOptions = useMemo(() => newTaskRepos.map((repo) => ({ id: repo.id, label: stripCommonOrgPrefix(repo.label, newTaskRepos) })), [newTaskRepos]); + type FlatItem = + | { key: string; type: "repository-header"; repository: RepositorySection; repositoryIndex: number } + | { key: string; type: "task"; repository: RepositorySection; repositoryIndex: number; task: Task; taskIndex: number } + | { key: string; type: "task-drop-zone"; repository: RepositorySection; repositoryIndex: number; taskCount: number } + | { key: string; type: 
"repository-drop-zone"; repositoryCount: number }; + const flatItems = useMemo(() => { + const items: FlatItem[] = []; + repositories.forEach((repository, repositoryIndex) => { + items.push({ key: `repository:${repository.id}`, type: "repository-header", repository, repositoryIndex }); + if (!collapsedRepositories[repository.id]) { + const orderedTaskIds = taskOrderByRepository[repository.id]; + const orderedTasks = orderedTaskIds + ? (() => { + const byId = new Map(repository.tasks.map((t) => [t.id, t])); + const sorted = orderedTaskIds.map((id) => byId.get(id)).filter(Boolean) as typeof repository.tasks; + for (const t of repository.tasks) { + if (!orderedTaskIds.includes(t.id)) sorted.push(t); + } + return sorted; + })() + : repository.tasks; + orderedTasks.forEach((task, taskIndex) => { + items.push({ key: `task:${task.id}`, type: "task" as const, repository, repositoryIndex, task, taskIndex }); + }); + items.push({ key: `task-drop:${repository.id}`, type: "task-drop-zone", repository, repositoryIndex, taskCount: orderedTasks.length }); + } + }); + items.push({ key: "repository-drop-zone", type: "repository-drop-zone", repositoryCount: repositories.length }); + return items; + }, [collapsedRepositories, repositories, taskOrderByRepository]); + const virtualizer = useVirtualizer({ + count: flatItems.length, + getItemKey: (index) => flatItems[index]?.key ?? index, + getScrollElement: () => scrollRef.current, + estimateSize: () => 40, + overscan: 12, + measureElement: (element) => element.getBoundingClientRect().height, + }); + + return ( + + + {import.meta.env.VITE_DESKTOP ? ( +
+ {onToggleSidebar ? ( +
{ + if (event.key === "Enter" || event.key === " ") onToggleSidebar(); + }} + className={css({ + width: "26px", + height: "26px", + borderRadius: "6px", + color: t.textTertiary, + cursor: "pointer", + display: "flex", + alignItems: "center", + justifyContent: "center", + flexShrink: 0, + ":hover": { color: t.textSecondary, backgroundColor: t.interactiveHover }, + })} + > + +
+ ) : null} +
+ ) : null} + + + + Tasks + + {!import.meta.env.VITE_DESKTOP && onToggleSidebar ? ( +
{ + if (event.key === "Enter" || event.key === " ") onToggleSidebar(); + }} + className={css({ + width: "26px", + height: "26px", + borderRadius: "6px", + color: t.textTertiary, + cursor: "pointer", + display: "flex", + alignItems: "center", + justifyContent: "center", + flexShrink: 0, + ":hover": { color: t.textSecondary, backgroundColor: t.interactiveHover }, + })} + > + +
+ ) : null} + {createSelectOpen ? ( +
+ e.stopPropagation()} + onBlur={(e) => { + const val = e.currentTarget.value.trim(); + if (val) { + setCustomTabNames((prev) => ({ ...prev, [tab.id]: val })); + } + setEditingTabId(null); + }} + onKeyDown={(e) => { + if (e.key === "Enter") { + e.currentTarget.blur(); + } else if (e.key === "Escape") { + setEditingTabId(null); + } + }} + className={css({ + flex: 1, + minWidth: 0, + background: "transparent", + border: "none", + outline: "none", + color: "inherit", + font: "inherit", + fontSize: "12px", + padding: 0, + margin: 0, + })} + /> + ) : ( + { + e.stopPropagation(); + setEditingTabId(tab.id); + }} + className={css({ flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })} + > + {customTabNames[tab.id] ?? tab.title} + + ))} + {listWidth >= 80 && ( +
{ + e.stopPropagation(); + closeTerminalTab(tab.id); + }} + onKeyDown={(event) => { + if (event.key === "Enter" || event.key === " ") closeTerminalTab(tab.id); + }} + className={css({ + width: "18px", + height: "18px", + borderRadius: "4px", + display: "flex", + alignItems: "center", + justifyContent: "center", + color: t.textMuted, + flexShrink: 0, + opacity: isHovered ? 1 : 0, + pointerEvents: isHovered ? "auto" : "none", + transition: "opacity 150ms ease, background 200ms ease, color 200ms ease", + ":hover": { + backgroundColor: "rgba(255, 255, 255, 0.20)", + color: t.textSecondary, + }, + })} + > + +
+ )} +
+ ); + })} + + {/* Bottom drop zone for dragging to end of list */} +
+
+
+ )} + + ); +} diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx new file mode 100644 index 0000000..16f87e6 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -0,0 +1,185 @@ +import { memo, useMemo } from "react"; +import { useStyletron } from "baseui"; +import { LabelSmall } from "baseui/typography"; +import { Clock, PanelLeft, PanelRight } from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { deriveHeaderStatus } from "../../features/tasks/status"; +import { HeaderStatusPill, PanelHeaderBar } from "./ui"; +import { type AgentSession, type Task } from "./view-model"; + +export const TranscriptHeader = memo(function TranscriptHeader({ + task, + hasSandbox, + activeSession, + editingField, + editValue, + onEditValueChange, + onStartEditingField, + onCommitEditingField, + onCancelEditingField, + onSetActiveSessionUnread, + sidebarCollapsed, + onToggleSidebar, + onSidebarPeekStart, + onSidebarPeekEnd, + rightSidebarCollapsed, + onToggleRightSidebar, + onNavigateToUsage, +}: { + task: Task; + hasSandbox: boolean; + activeSession: AgentSession | null | undefined; + editingField: "title" | null; + editValue: string; + onEditValueChange: (value: string) => void; + onStartEditingField: (field: "title", value: string) => void; + onCommitEditingField: (field: "title") => void; + onCancelEditingField: () => void; + onSetActiveSessionUnread: (unread: boolean) => void; + sidebarCollapsed?: boolean; + onToggleSidebar?: () => void; + onSidebarPeekStart?: () => void; + onSidebarPeekEnd?: () => void; + rightSidebarCollapsed?: boolean; + onToggleRightSidebar?: () => void; + onNavigateToUsage?: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + const isDesktop = !!import.meta.env.VITE_DESKTOP; + const needsTrafficLightInset = isDesktop && sidebarCollapsed; + 
const headerStatus = useMemo( + () => deriveHeaderStatus(task.status, activeSession?.status ?? null, activeSession?.errorMessage ?? null, hasSandbox), + [task.status, activeSession?.status, activeSession?.errorMessage, hasSandbox], + ); + + return ( + + {sidebarCollapsed && onToggleSidebar ? ( +
+ +
+ ) : null} + {editingField === "title" ? ( + onEditValueChange(event.target.value)} + onBlur={() => onCommitEditingField("title")} + onKeyDown={(event) => { + if (event.key === "Enter") { + onCommitEditingField("title"); + } else if (event.key === "Escape") { + onCancelEditingField(); + } + }} + className={css({ + appearance: "none", + WebkitAppearance: "none", + background: "none", + border: "none", + padding: "0", + margin: "0", + outline: "none", + fontWeight: 500, + fontSize: "14px", + color: t.textPrimary, + borderBottom: `1px solid ${t.borderFocus}`, + minWidth: "80px", + maxWidth: "300px", + })} + /> + ) : ( + onStartEditingField("title", task.title)} + > + {task.title} + + )} + {task.branch ? ( + + {task.branch} + + ) : null} + +
+
{ + if (e.key === "Enter" || e.key === " ") onNavigateToUsage?.(); + }} + className={css({ + display: "inline-flex", + alignItems: "center", + gap: "5px", + padding: "4px 12px", + borderRadius: "6px", + backgroundColor: "transparent", + fontSize: "11px", + fontWeight: 500, + lineHeight: 1, + color: t.textTertiary, + whiteSpace: "nowrap", + cursor: "pointer", + transition: "background 200ms ease, color 200ms ease", + ":hover": { backgroundColor: t.interactiveHover, color: t.textSecondary }, + })} + > + + {task.minutesUsed ?? 0} min used +
+ {rightSidebarCollapsed && onToggleRightSidebar ? ( +
+ +
+ ) : null} + + ); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx new file mode 100644 index 0000000..b86ca18 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -0,0 +1,315 @@ +import { memo, useCallback, useEffect, useState, type MouseEvent } from "react"; +import { styled, useStyletron } from "baseui"; +import { GitPullRequest, GitPullRequestDraft } from "lucide-react"; + +import { useFoundryTokens } from "../../app/theme"; +import { getFoundryTokens } from "../../styles/tokens"; +import type { AgentKind, AgentSession } from "./view-model"; + +export interface ContextMenuItem { + label: string; + onClick: () => void; +} + +export function useContextMenu() { + const [menu, setMenu] = useState<{ x: number; y: number; items: ContextMenuItem[] } | null>(null); + + useEffect(() => { + if (!menu) { + return; + } + + const close = () => setMenu(null); + window.addEventListener("click", close); + window.addEventListener("contextmenu", close); + return () => { + window.removeEventListener("click", close); + window.removeEventListener("contextmenu", close); + }; + }, [menu]); + + const open = useCallback((event: MouseEvent, items: ContextMenuItem[]) => { + event.preventDefault(); + event.stopPropagation(); + setMenu({ x: event.clientX, y: event.clientY, items }); + }, []); + + return { menu, open, close: useCallback(() => setMenu(null), []) }; +} + +export const ContextMenuOverlay = memo(function ContextMenuOverlay({ + menu, + onClose, +}: { + menu: { x: number; y: number; items: ContextMenuItem[] }; + onClose: () => void; +}) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + + return ( +
+ {menu.items.map((item, index) => ( +
{ + item.onClick(); + onClose(); + }} + className={css({ + padding: "8px 14px", + fontSize: "12px", + color: t.textPrimary, + cursor: "pointer", + ":hover": { backgroundColor: t.interactiveHover }, + })} + > + {item.label} +
+ ))} +
+ ); +}); + +export const SpinnerDot = memo(function SpinnerDot({ size = 10 }: { size?: number }) { + const t = useFoundryTokens(); + + return ( +
+ ); +}); + +export const UnreadDot = memo(function UnreadDot() { + const t = useFoundryTokens(); + + return ( +
+ ); +}); + +export const TaskIndicator = memo(function TaskIndicator({ + isRunning, + isProvisioning, + hasUnread, + isDraft, +}: { + isRunning: boolean; + isProvisioning: boolean; + hasUnread: boolean; + isDraft: boolean; +}) { + const t = useFoundryTokens(); + + if (isRunning) return ; + if (isProvisioning) return ; + if (hasUnread) return ; + if (isDraft) return ; + return ; +}); + +const ClaudeIcon = memo(function ClaudeIcon({ size = 14 }: { size?: number }) { + return ( + + + + ); +}); + +const OpenAIIcon = memo(function OpenAIIcon({ size = 14 }: { size?: number }) { + const t = useFoundryTokens(); + + return ( + + + + ); +}); + +const CursorIcon = memo(function CursorIcon({ size = 14 }: { size?: number }) { + const t = useFoundryTokens(); + + return ( + + + + + ); +}); + +export const AgentIcon = memo(function AgentIcon({ agent, size = 14 }: { agent: AgentKind; size?: number }) { + switch (agent) { + case "Claude": + return ; + case "Codex": + return ; + case "Cursor": + return ; + default: + return ; + } +}); + +export type HeaderStatusVariant = "error" | "warning" | "success" | "neutral"; + +export interface HeaderStatusInfo { + variant: HeaderStatusVariant; + label: string; + spinning: boolean; + tooltip?: string; +} + +export const HeaderStatusPill = memo(function HeaderStatusPill({ status }: { status: HeaderStatusInfo }) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + + const colorMap: Record = { + error: { bg: `${t.statusError}18`, text: t.statusError, dot: t.statusError }, + warning: { bg: `${t.statusWarning}18`, text: t.statusWarning, dot: t.statusWarning }, + success: { bg: `${t.statusSuccess}18`, text: t.statusSuccess, dot: t.statusSuccess }, + neutral: { bg: t.interactiveSubtle, text: t.textTertiary, dot: t.textTertiary }, + }; + const colors = colorMap[status.variant]; + + return ( +
+ {status.spinning ? ( +
+ ) : ( +
+ )} + {status.label} +
+ ); +}); + +export const SessionAvatar = memo(function SessionAvatar({ session }: { session: AgentSession }) { + if (session.status === "running" || session.status === "pending_provision" || session.status === "pending_session_create") return ; + if (session.unread) return ; + return ; +}); + +export const Shell = styled("div", ({ $theme }) => { + const t = getFoundryTokens($theme); + return { + display: "flex", + height: "100dvh", + backgroundColor: t.surfaceSecondary, + overflow: "hidden", + }; +}); + +export const SPanel = styled("section", ({ $theme }) => { + const t = getFoundryTokens($theme); + return { + minHeight: 0, + flex: 1, + display: "flex", + flexDirection: "column" as const, + backgroundColor: t.surfaceSecondary, + overflow: "hidden", + }; +}); + +export const ScrollBody = styled("div", () => ({ + minHeight: 0, + flex: 1, + position: "relative" as const, + overflowY: "auto" as const, + display: "flex", + flexDirection: "column" as const, +})); + +export const HEADER_HEIGHT = "42px"; +export const PROMPT_TEXTAREA_MIN_HEIGHT = 56; +export const PROMPT_TEXTAREA_MAX_HEIGHT = 100; + +export const PanelHeaderBar = styled("div", ({ $theme }) => { + const t = getFoundryTokens($theme); + return { + display: "flex", + alignItems: "center", + minHeight: HEADER_HEIGHT, + maxHeight: HEADER_HEIGHT, + paddingTop: "0", + paddingRight: "14px", + paddingBottom: "0", + paddingLeft: "14px", + borderBottom: `1px solid ${t.borderDefault}`, + backgroundColor: t.surfaceTertiary, + gap: "8px", + flexShrink: 0, + position: "relative" as const, + zIndex: 9999, + }; +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts new file mode 100644 index 0000000..bc6ab87 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -0,0 +1,206 @@ +import { describe, expect, it } from "vitest"; +import type { WorkspaceSession } from 
"@sandbox-agent/foundry-shared"; +import { buildDisplayMessages } from "./view-model"; + +function makeSession(transcript: WorkspaceSession["transcript"]): WorkspaceSession { + return { + id: "session-1", + sessionId: "session-1", + sessionName: "Session 1", + agent: "Codex", + model: "gpt-5.3-codex", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { + text: "", + attachments: [], + updatedAtMs: null, + }, + transcript, + }; +} + +describe("buildDisplayMessages", () => { + it("collapses chunked agent output into a single display message", () => { + const messages = buildDisplayMessages( + makeSession([ + { + id: "evt-setup", + eventIndex: 0, + sessionId: "session-1", + createdAt: 0, + connectionId: "conn-1", + sender: "client", + payload: { + method: "session/new", + params: { + cwd: "/repo", + }, + }, + }, + { + id: "evt-client", + eventIndex: 1, + sessionId: "session-1", + createdAt: 1, + connectionId: "conn-1", + sender: "client", + payload: { + method: "session/prompt", + params: { + prompt: [{ type: "text", text: "hello" }], + }, + }, + }, + { + id: "evt-config", + eventIndex: 1, + sessionId: "session-1", + createdAt: 1, + connectionId: "conn-1", + sender: "agent", + payload: { + result: { + configOptions: [], + }, + }, + }, + { + id: "evt-chunk-1", + eventIndex: 2, + sessionId: "session-1", + createdAt: 2, + connectionId: "conn-1", + sender: "agent", + payload: { + method: "session/update", + params: { + update: { + sessionUpdate: "agent_message_chunk", + content: { + type: "text", + text: "hel", + }, + }, + }, + }, + }, + { + id: "evt-chunk-2", + eventIndex: 3, + sessionId: "session-1", + createdAt: 3, + connectionId: "conn-1", + sender: "agent", + payload: { + method: "session/update", + params: { + update: { + sessionUpdate: "agent_message_chunk", + content: { + type: "text", + text: "lo", + }, + }, + }, + }, + }, + { + id: "evt-stop", + eventIndex: 4, + sessionId: "session-1", + createdAt: 4, + connectionId: "conn-1", 
+ sender: "agent", + payload: { + result: { + stopReason: "end_turn", + }, + }, + }, + ]), + ); + + expect(messages).toEqual([ + expect.objectContaining({ + id: "evt-client", + sender: "client", + text: "hello", + }), + expect.objectContaining({ + id: "evt-chunk-1", + sender: "agent", + text: "hello", + }), + ]); + }); + + it("hides non-message session update envelopes", () => { + const messages = buildDisplayMessages( + makeSession([ + { + id: "evt-client", + eventIndex: 1, + sessionId: "session-1", + createdAt: 1, + connectionId: "conn-1", + sender: "client", + payload: { + method: "session/prompt", + params: { + prompt: [{ type: "text", text: "hello" }], + }, + }, + }, + { + id: "evt-update", + eventIndex: 2, + sessionId: "session-1", + createdAt: 2, + connectionId: "conn-1", + sender: "agent", + payload: { + method: "session/update", + params: { + update: { + sessionUpdate: "agent_thought", + content: { + type: "text", + text: "thinking", + }, + }, + }, + }, + }, + { + id: "evt-result", + eventIndex: 3, + sessionId: "session-1", + createdAt: 3, + connectionId: "conn-1", + sender: "agent", + payload: { + result: { + text: "done", + }, + }, + }, + ]), + ); + + expect(messages).toEqual([ + expect.objectContaining({ + id: "evt-client", + sender: "client", + text: "hello", + }), + expect.objectContaining({ + id: "evt-result", + sender: "agent", + text: "done", + }), + ]); + }); +}); diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts new file mode 100644 index 0000000..9232293 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -0,0 +1,325 @@ +import { + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; +import type { + WorkspaceAgentKind as AgentKind, + WorkspaceSession as 
AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileChange as FileChange, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceLineAttachment as LineAttachment, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection as RepositorySection, + WorkspaceTranscriptEvent as TranscriptEvent, +} from "@sandbox-agent/foundry-shared"; +import { extractEventText } from "../../features/sessions/model"; + +export type { RepositorySection }; + +export const MODEL_GROUPS: ModelGroup[] = SharedModelGroups; + +export function formatRelativeAge(updatedAtMs: number, nowMs = Date.now()): string { + const deltaSeconds = Math.max(0, Math.floor((nowMs - updatedAtMs) / 1000)); + if (deltaSeconds < 60) return `${deltaSeconds}s`; + const minutes = Math.floor(deltaSeconds / 60); + if (minutes < 60) return `${minutes}m`; + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h`; + const days = Math.floor(hours / 24); + return `${days}d`; +} + +export function formatMessageTimestamp(createdAtMs: number, nowMs = Date.now()): string { + const createdAt = new Date(createdAtMs); + const now = new Date(nowMs); + const sameDay = createdAt.toDateString() === now.toDateString(); + + const timeLabel = createdAt.toLocaleTimeString([], { + hour: "numeric", + minute: "2-digit", + }); + + if (sameDay) { + return timeLabel; + } + + const deltaDays = Math.floor((nowMs - createdAtMs) / (24 * 60 * 60 * 1000)); + if (deltaDays < 7) { + const weekdayLabel = createdAt.toLocaleDateString([], { weekday: "short" }); + return `${weekdayLabel} ${timeLabel}`; + } + + return createdAt.toLocaleDateString([], { + month: "short", + day: "numeric", + }); +} + +export function formatThinkingDuration(durationMs: number): string { + const totalSeconds = Math.max(0, Math.floor(durationMs / 1000)); + const minutes = Math.floor(totalSeconds / 
60); + const seconds = totalSeconds % 60; + return `${minutes}:${String(seconds).padStart(2, "0")}`; +} + +export function formatMessageDuration(durationMs: number): string { + const totalSeconds = Math.max(1, Math.round(durationMs / 1000)); + if (totalSeconds < 60) { + return `${totalSeconds}s`; + } + + const minutes = Math.floor(totalSeconds / 60); + const seconds = totalSeconds % 60; + return `${minutes}m ${String(seconds).padStart(2, "0")}s`; +} + +export function modelLabel(id: ModelId): string { + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); +} + +export function providerAgent(provider: string): AgentKind { + return sharedWorkspaceProviderAgent(provider); +} + +const DIFF_PREFIX = "diff:"; + +export function isDiffTab(id: string): boolean { + return id.startsWith(DIFF_PREFIX); +} + +export function diffPath(id: string): string { + return id.slice(DIFF_PREFIX.length); +} + +export function diffTabId(path: string): string { + return `${DIFF_PREFIX}${path}`; +} + +export function fileName(path: string): string { + return path.split("/").pop() ?? path; +} + +function eventOrder(id: string): number { + const match = id.match(/\d+/); + return match ? Number(match[0]) : 0; +} + +function historyPreview(event: TranscriptEvent): string { + const content = extractEventText(event.payload).trim() || "Untitled event"; + return content.length > 42 ? 
`${content.slice(0, 39)}...` : content; +} + +function historyDetail(event: TranscriptEvent): string { + const content = extractEventText(event.payload).trim(); + return content || "Untitled event"; +} + +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript + .filter((event) => event.sender === "client") + .map((event) => ({ + id: `history-${session.id}-${event.id}`, + messageId: event.id, + preview: historyPreview(event), + sessionName: session.sessionName, + sessionId: session.id, + createdAtMs: event.createdAt, + detail: historyDetail(event), + })), + ) + .sort((left, right) => eventOrder(left.messageId) - eventOrder(right.messageId)); +} + +export interface Message { + id: string; + sender: "client" | "agent"; + text: string; + createdAtMs: number; + durationMs?: number; + event: TranscriptEvent; +} + +function isAgentChunkEvent(event: TranscriptEvent): string | null { + const payload = event.payload; + if (!payload || typeof payload !== "object") { + return null; + } + + const params = (payload as { params?: unknown }).params; + if (!params || typeof params !== "object") { + return null; + } + + const update = (params as { update?: unknown }).update; + if (!update || typeof update !== "object") { + return null; + } + + if ((update as { sessionUpdate?: unknown }).sessionUpdate !== "agent_message_chunk") { + return null; + } + + const content = (update as { content?: unknown }).content; + if (!content || typeof content !== "object") { + return null; + } + + const text = (content as { text?: unknown }).text; + return typeof text === "string" ? 
text : null; +} + +function isClientPromptEvent(event: TranscriptEvent): boolean { + const payload = event.payload; + if (!payload || typeof payload !== "object") { + return false; + } + + return (payload as { method?: unknown }).method === "session/prompt"; +} + +function shouldDisplayEvent(event: TranscriptEvent): boolean { + const payload = event.payload; + if (event.sender === "client") { + return isClientPromptEvent(event) && Boolean(extractEventText(payload).trim()); + } + + if (!payload || typeof payload !== "object") { + return Boolean(extractEventText(payload).trim()); + } + + if ((payload as { error?: unknown }).error) { + return true; + } + + if (isAgentChunkEvent(event) !== null) { + return false; + } + + if ((payload as { method?: unknown }).method === "session/update") { + return false; + } + + const result = (payload as { result?: unknown }).result; + if (result && typeof result === "object") { + if (typeof (result as { stopReason?: unknown }).stopReason === "string") { + return false; + } + if (typeof (result as { text?: unknown }).text !== "string") { + return false; + } + } + + const params = (payload as { params?: unknown }).params; + if (params && typeof params === "object") { + const update = (params as { update?: unknown }).update; + if (update && typeof update === "object") { + const sessionUpdate = (update as { sessionUpdate?: unknown }).sessionUpdate; + if ( + sessionUpdate === "usage_update" || + sessionUpdate === "available_commands_update" || + sessionUpdate === "config_options_update" || + sessionUpdate === "available_modes_update" || + sessionUpdate === "available_models_update" + ) { + return false; + } + } + } + + return Boolean(extractEventText(payload).trim()); +} + +export function buildDisplayMessages(session: AgentSession | null | undefined): Message[] { + if (!session) { + return []; + } + + const messages: Message[] = []; + let pendingAgentMessage: Message | null = null; + + const flushPendingAgentMessage = () => { + if 
(pendingAgentMessage && pendingAgentMessage.text.length > 0) { + messages.push(pendingAgentMessage); + } + pendingAgentMessage = null; + }; + + for (const event of session.transcript) { + const chunkText = isAgentChunkEvent(event); + if (chunkText !== null) { + if (!pendingAgentMessage) { + pendingAgentMessage = { + id: event.id, + sender: "agent", + text: chunkText, + createdAtMs: event.createdAt, + event, + }; + } else { + pendingAgentMessage.text += chunkText; + } + continue; + } + + flushPendingAgentMessage(); + + if (!shouldDisplayEvent(event)) { + continue; + } + + messages.push({ + id: event.id, + sender: event.sender, + text: extractEventText(event.payload), + createdAtMs: event.createdAt, + durationMs: + event.payload && typeof event.payload === "object" + ? typeof (event.payload as { result?: { durationMs?: unknown } }).result?.durationMs === "number" + ? ((event.payload as { result?: { durationMs?: number } }).result?.durationMs ?? undefined) + : undefined + : undefined, + event, + }); + } + + flushPendingAgentMessage(); + return messages; +} + +export function parseDiffLines(diff: string): ParsedDiffLine[] { + return diff.split("\n").map((text, index) => { + if (text.startsWith("@@")) { + return { kind: "hunk", lineNumber: index + 1, text }; + } + if (text.startsWith("+")) { + return { kind: "add", lineNumber: index + 1, text }; + } + if (text.startsWith("-")) { + return { kind: "remove", lineNumber: index + 1, text }; + } + return { kind: "context", lineNumber: index + 1, text }; + }); +} + +export type { + AgentKind, + AgentSession, + DiffLineKind, + FileChange, + FileTreeNode, + Task, + HistoryEvent, + LineAttachment, + ModelGroup, + ModelId, + ParsedDiffLine, + TranscriptEvent, +}; diff --git a/foundry/packages/frontend/src/components/mock-onboarding.tsx b/foundry/packages/frontend/src/components/mock-onboarding.tsx new file mode 100644 index 0000000..4528695 --- /dev/null +++ b/foundry/packages/frontend/src/components/mock-onboarding.tsx @@ -0,0 
+1,1274 @@ +import { useCallback, useEffect, useMemo, useState } from "react"; +import { type FoundryBillingPlanId, type FoundryOrganization, type FoundryOrganizationMember, type FoundryUser } from "@sandbox-agent/foundry-shared"; +import { useNavigate } from "@tanstack/react-router"; +import { ArrowLeft, Clock, CreditCard, FileText, Github, LogOut, Moon, Settings, Sun, Users } from "lucide-react"; +import { activeMockUser, eligibleOrganizations, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; +import { isMockFrontendClient } from "../lib/env"; +import { useColorMode, useFoundryTokens } from "../app/theme"; +import type { FoundryTokens } from "../styles/tokens"; +import { appSurfaceStyle, primaryButtonStyle, secondaryButtonStyle, subtleButtonStyle, cardStyle, badgeStyle, inputStyle } from "../styles/shared-styles"; + +const dateFormatter = new Intl.DateTimeFormat("en-US", { + month: "short", + day: "numeric", + year: "numeric", +}); + +const planCatalog: Record< + FoundryBillingPlanId, + { + label: string; + price: string; + pricePerMonth: number; + seats: string; + taskHours: number; + summary: string; + } +> = { + free: { + label: "Free", + price: "$0", + pricePerMonth: 0, + seats: "1 seat included", + taskHours: 8, + summary: "Get started with up to 8 task hours per month.", + }, + team: { + label: "Pro", + price: "$25/mo", + pricePerMonth: 25, + seats: "per seat", + taskHours: 200, + summary: "200 task hours per seat, with the ability to purchase additional hours.", + }, +}; + +const taskHourPackages = [ + { hours: 50, price: 6 }, + { hours: 100, price: 12 }, + { hours: 200, price: 24 }, + { hours: 400, price: 48 }, + { hours: 600, price: 72 }, + { hours: 1000, price: 120 }, +]; + +function DesktopDragRegion() { + const isDesktop = !!import.meta.env.VITE_DESKTOP; + const onDragMouseDown = useCallback((event: React.PointerEvent) => { + if (event.button !== 0) return; + const ipc = (window as unknown as Record).__TAURI_INTERNALS__ as + | { + 
invoke: (cmd: string, args?: unknown) => Promise; + } + | undefined; + if (ipc?.invoke) { + ipc.invoke("plugin:window|start_dragging").catch(() => {}); + } + }, []); + + if (!isDesktop) return null; + + return ( +
+
+
+ ); +} + +function formatDate(value: string | null): string { + if (!value) { + return "N/A"; + } + return dateFormatter.format(new Date(value)); +} + +function organizationPath(organization: FoundryOrganization): string { + return `/organizations/${organization.organizationId}`; +} + +function settingsPath(organization: FoundryOrganization): string { + return `/organizations/${organization.id}/settings`; +} + +function billingPath(organization: FoundryOrganization): string { + return `/organizations/${organization.id}/billing`; +} + +function checkoutPath(organization: FoundryOrganization, planId: FoundryBillingPlanId): string { + return `/organizations/${organization.id}/checkout/${planId}`; +} + +function statusBadge(t: FoundryTokens, organization: FoundryOrganization) { + if (organization.kind === "personal") { + return Personal organization; + } + return GitHub organization; +} + +function githubBadge(t: FoundryTokens, organization: FoundryOrganization) { + if (organization.github.installationStatus === "connected") { + return GitHub connected; + } + if (organization.github.installationStatus === "reconnect_required") { + return Reconnect required; + } + return Install GitHub App; +} + +function StatCard({ label, value, caption }: { label: string; value: string; caption: string }) { + const t = useFoundryTokens(); + return ( +
+
{label}
+
{value}
+
{caption}
+
+ ); +} + +function MemberRow({ member }: { member: FoundryOrganizationMember }) { + const t = useFoundryTokens(); + return ( +
+
+
{member.name}
+
{member.email}
+
+
{member.role}
+
+ + {member.state} + +
+
+ ); +} + +export function MockSignInPage() { + const client = useMockAppClient(); + const navigate = useNavigate(); + const t = useFoundryTokens(); + + return ( +
+ +
+ {/* Foundry icon */} + + + + + +

+ Sign in to Sandbox Agent Foundry +

+ +

+ Connect your GitHub account to get started. +

+ + {/* GitHub sign-in button */} + + + {/* Footer */} + + Learn more + +
+
+ ); +} + +export function MockOrganizationSelectorPage() { + const client = useMockAppClient(); + const snapshot = useMockAppSnapshot(); + const organizations: FoundryOrganization[] = eligibleOrganizations(snapshot); + const navigate = useNavigate(); + const t = useFoundryTokens(); + + return ( +
+ +
+ {/* Header */} +
+ + + + +

Select a organization

+

Choose where you want to work.

+
+ + {/* Organization list */} +
+ {organizations.map((organization, index) => ( + + ))} +
+ + {/* Footer */} +
+ +
+
+
+ ); +} + +type SettingsSection = "settings" | "members" | "billing" | "docs"; + +function SettingsNavItem({ icon, label, active, onClick }: { icon: React.ReactNode; label: string; active: boolean; onClick: () => void }) { + const t = useFoundryTokens(); + return ( + + ); +} + +function SettingsContentSection({ title, description, children }: { title: string; description?: string; children: React.ReactNode }) { + const t = useFoundryTokens(); + return ( +
+

{title}

+ {description ?

{description}

: null} +
{children}
+
+ ); +} + +function SettingsRow({ label, description, action }: { label: string; description?: string; action?: React.ReactNode }) { + const t = useFoundryTokens(); + return ( +
+
+
{label}
+ {description ?
{description}
: null} +
+ {action ?? null} +
+ ); +} + +function SettingsLayout({ + organization, + activeSection, + onSectionChange, + children, +}: { + organization: FoundryOrganization; + activeSection: SettingsSection; + onSectionChange?: (section: SettingsSection) => void; + children: React.ReactNode; +}) { + const client = useMockAppClient(); + const snapshot = useMockAppSnapshot(); + const user = activeMockUser(snapshot); + const navigate = useNavigate(); + const t = useFoundryTokens(); + + const navSections: Array<{ section: SettingsSection; icon: React.ReactNode; label: string }> = [ + { section: "settings", icon: , label: "Settings" }, + { section: "members", icon: , label: "Members" }, + { section: "billing", icon: , label: "Billing & Invoices" }, + { section: "docs", icon: , label: "Docs" }, + ]; + + return ( +
+ +
+ {/* Left nav */} +
+ {/* Back to organization */} + + + {/* User header */} +
+ {user?.name ?? "User"} + + {planCatalog[organization.billing.planId]?.label ?? "Free"} Plan · {user?.email ?? ""} + +
+ + {navSections.map((item) => ( + { + if (item.section === "billing") { + void navigate({ to: billingPath(organization) }); + } else if (onSectionChange) { + onSectionChange(item.section); + } else { + void navigate({ to: settingsPath(organization) }); + } + }} + /> + ))} +
+ + {/* Content */} +
+
{children}
+
+
+
+ ); +} + +export function MockOrganizationSettingsPage({ organization }: { organization: FoundryOrganization }) { + const client = useMockAppClient(); + const navigate = useNavigate(); + const t = useFoundryTokens(); + const [section, setSection] = useState("settings"); + const [displayName, setDisplayName] = useState(organization.settings.displayName); + const [slug, setSlug] = useState(organization.settings.slug); + const [primaryDomain, setPrimaryDomain] = useState(organization.settings.primaryDomain); + + useEffect(() => { + setDisplayName(organization.settings.displayName); + setSlug(organization.settings.slug); + setPrimaryDomain(organization.settings.primaryDomain); + }, [organization.id, organization.settings.displayName, organization.settings.slug, organization.settings.primaryDomain]); + + return ( + + {section === "settings" ? ( +
+
+

Settings

+
+ + + +
+ + +
+
+ +
+
+ + + + + +
+ + +
+
+ + + window.open("https://sandbox-agent.dev", "_blank", "noopener,noreferrer")} style={secondaryButtonStyle(t)}> + Configure + + } + /> + + + + + Delete + + } + /> + +
+ ) : null} + + {section === "members" ? ( +
+
+

Members

+

+ {organization.members.length} member{organization.members.length !== 1 ? "s" : ""} +

+
+
+ {organization.members.map((member) => ( + + ))} +
+ + {/* Upgrade CTA for free plan */} + {!organization.billing.stripeCustomerId.trim() ? ( +
+
Invite your team
+
+ Upgrade to Pro to add team members and unlock collaboration features: +
+
+ {[ + "Hand off tasks to teammates for review or continuation", + "Shared organization with unified billing across your org", + "200 task hours per seat, with bulk hour purchases available", + "Collaborative task history and audit trail", + ].map((feature) => ( +
+ + + {feature} +
+ ))} +
+ +
+ ) : null} +
+ ) : null} + + {section === "docs" ? ( +
+
+

Docs

+

Documentation and resources.

+
+ window.open("https://sandbox-agent.dev", "_blank", "noopener,noreferrer")} style={secondaryButtonStyle(t)}> + Open docs + + } + /> +
+ ) : null} +
+ ); +} + +export function MockOrganizationBillingPage({ organization }: { organization: FoundryOrganization }) { + const client = useMockAppClient(); + const navigate = useNavigate(); + const t = useFoundryTokens(); + const hasStripeCustomer = organization.billing.stripeCustomerId.trim().length > 0; + const effectivePlanId: FoundryBillingPlanId = hasStripeCustomer ? organization.billing.planId : "free"; + const currentPlan = planCatalog[effectivePlanId]!; + // Mock usage data + const taskHoursUsed = effectivePlanId === "free" ? 5.2 : 147.3; + const taskHoursIncluded = currentPlan.taskHours; + const taskHoursRemaining = Math.max(0, taskHoursIncluded - taskHoursUsed); + const usagePercent = Math.min(100, (taskHoursUsed / taskHoursIncluded) * 100); + const isOverage = taskHoursUsed > taskHoursIncluded; + const isFree = effectivePlanId === "free"; + + return ( + +
+
+

Billing & Invoices

+

Manage your plan, task hours, and invoices.

+
+ + {/* Overview stats */} +
+ + + +
+ + {/* Task hours usage bar */} +
+
+
+ + Task Hours +
+ + {taskHoursUsed.toFixed(1)} / {taskHoursIncluded}h used + +
+
+
90 ? "#ef4444" : usagePercent > 70 ? "#f59e0b" : "#22c55e", + transition: "width 500ms ease", + }} + /> +
+
+ Metered by the minute + $0.12 / task hour overage +
+
+ + {/* Upgrade to Pro (only shown on Free plan) */} + {isFree ? ( +
+
+
+
Upgrade to Pro
+
+ Get 200 task hours per month, plus the ability to purchase additional hours in bulk. Currently limited to {currentPlan.taskHours} hours on the + Free plan. +
+
+ +
+
+ ) : null} + + {/* Buy more task hours (only shown on Pro plan) */} + {!isFree ? ( + +
+ {taskHourPackages.map((pkg) => ( +
{ + (event.currentTarget as HTMLDivElement).style.borderColor = t.borderMedium; + }} + onMouseLeave={(event) => { + (event.currentTarget as HTMLDivElement).style.borderColor = t.borderSubtle; + }} + > +
{pkg.hours}h
+
${((pkg.price / pkg.hours) * 60).toFixed(1)}¢/min
+ +
+ ))} +
+
+ ) : null} + + {/* Payment method */} + {hasStripeCustomer ? ( + +
+ + {organization.billing.status === "scheduled_cancel" ? ( + + ) : ( + + )} +
+
+ ) : null} + + {/* Invoices */} + + {organization.billing.invoices.length === 0 ? ( +
No invoices yet.
+ ) : ( +
+ {organization.billing.invoices.map((invoice) => ( +
+
+
{invoice.label}
+
{invoice.issuedAt}
+
+
${invoice.amountUsd}
+
+ + {invoice.status} + +
+
+ ))} +
+ )} +
+
+ + ); +} + +export function MockHostedCheckoutPage({ organization, planId }: { organization: FoundryOrganization; planId: FoundryBillingPlanId }) { + const client = useMockAppClient(); + const navigate = useNavigate(); + const t = useFoundryTokens(); + const plan = planCatalog[planId]!; + + return ( + +
+
+

Checkout {plan.label}

+

Complete payment to activate the {plan.label} plan.

+
+ + +
+ + + + +
+
+ + + + +
+ + +
+
+
+
+ ); +} + +function CheckoutLine({ label, value }: { label: string; value: string }) { + const t = useFoundryTokens(); + return ( +
+
{label}
+
{value}
+
+ ); +} + +export function MockAccountSettingsPage() { + const client = useMockAppClient(); + const snapshot = useMockAppSnapshot(); + const user = activeMockUser(snapshot); + const navigate = useNavigate(); + const t = useFoundryTokens(); + const [name, setName] = useState(user?.name ?? ""); + const [email, setEmail] = useState(user?.email ?? ""); + + useEffect(() => { + setName(user?.name ?? ""); + setEmail(user?.email ?? ""); + }, [user?.name, user?.email]); + + return ( +
+ +
+ {/* Left nav */} +
+ + +
+ {user?.name ?? "User"} + {user?.email ?? ""} +
+ + } label="General" active onClick={() => {}} /> +
+ + {/* Content */} +
+
+
+
+

Account

+

Manage your personal account settings.

+
+ + + + + +
+ +
+
+ + + + + + +
+ +
+
+ + + + Delete + + } + /> + +
+
+
+
+
+ ); +} + +function AppearanceSection() { + const { colorMode, setColorMode } = useColorMode(); + const t = useFoundryTokens(); + const isDark = colorMode === "dark"; + + return ( + + setColorMode(isDark ? "light" : "dark")} + style={{ + position: "relative", + width: "36px", + height: "20px", + borderRadius: "10px", + border: "1px solid rgba(128, 128, 128, 0.3)", + background: isDark ? t.borderDefault : t.accent, + cursor: "pointer", + padding: 0, + transition: "background 0.2s", + flexShrink: 0, + }} + > +
+ {isDark ? : } +
+ + } + /> +
+ ); +} diff --git a/foundry/packages/frontend/src/components/organization-dashboard.tsx b/foundry/packages/frontend/src/components/organization-dashboard.tsx new file mode 100644 index 0000000..4f54ac3 --- /dev/null +++ b/foundry/packages/frontend/src/components/organization-dashboard.tsx @@ -0,0 +1,1666 @@ +import { useEffect, useMemo, useState, type ReactNode } from "react"; +import type { RepoBranchRecord, RepoOverview, TaskWorkspaceSnapshot, WorkspaceTaskStatus } from "@sandbox-agent/foundry-shared"; +import { currentFoundryOrganization, useSubscription } from "@sandbox-agent/foundry-client"; +import { useMutation, useQuery } from "@tanstack/react-query"; +import { Link, useNavigate } from "@tanstack/react-router"; +import { Button } from "baseui/button"; +import { Input } from "baseui/input"; +import { Modal, ModalBody, ModalFooter, ModalHeader } from "baseui/modal"; +import { Select, type OnChangeParams, type Option, type Value } from "baseui/select"; +import { Skeleton } from "baseui/skeleton"; +import { Tag } from "baseui/tag"; +import { Textarea } from "baseui/textarea"; +import { StyledDivider } from "baseui/divider"; +import { styled, useStyletron } from "baseui"; +import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; +import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal } from "lucide-react"; +import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; +import { HeaderStatusPill } from "./mock-layout/ui"; +import { buildTranscript, resolveSessionSelection } from "../features/sessions/model"; +import { backendClient } from "../lib/backend"; +import { subscriptionManager } from "../lib/subscription"; +import { DevPanel, useDevPanel } from "./dev-panel"; + +interface OrganizationDashboardProps { + organizationId: string; + selectedTaskId?: string; + selectedRepoId?: string; +} + +type RepoOverviewFilter = "active" | "archived" | 
"unmapped" | "all"; +type StatusTagKind = "neutral" | "positive" | "warning" | "negative"; +type SelectItem = Readonly<{ id: string; label: string; disabled?: boolean }>; + +const AppShell = styled("main", ({ $theme }) => ({ + minHeight: "100dvh", + backgroundColor: $theme.colors.backgroundPrimary, +})); + +const DashboardGrid = styled("div", ({ $theme }) => ({ + display: "grid", + gap: "1px", + minHeight: "100dvh", + backgroundColor: $theme.colors.borderOpaque, + gridTemplateColumns: "minmax(0, 1fr)", + "@media screen and (min-width: 960px)": { + gridTemplateColumns: "260px minmax(0, 1fr)", + }, + "@media screen and (min-width: 1480px)": { + gridTemplateColumns: "260px minmax(0, 1fr) 280px", + }, +})); + +const Panel = styled("section", ({ $theme }) => ({ + minHeight: 0, + display: "flex", + flexDirection: "column", + backgroundColor: $theme.colors.backgroundSecondary, + overflow: "hidden", +})); + +const PanelHeader = styled("div", ({ $theme }) => ({ + padding: "10px 12px", + borderBottom: `1px solid ${$theme.colors.borderOpaque}`, + display: "flex", + flexDirection: "column", + gap: "8px", +})); + +const ScrollBody = styled("div", ({ $theme }) => ({ + minHeight: 0, + flex: 1, + overflowY: "auto", + padding: "10px 12px", + display: "flex", + flexDirection: "column", + gap: "8px", +})); + +const DetailRail = styled("aside", ({ $theme }) => ({ + minHeight: 0, + display: "none", + backgroundColor: $theme.colors.backgroundSecondary, + overflow: "hidden", + "@media screen and (min-width: 1480px)": { + display: "flex", + flexDirection: "column", + }, +})); + +const FILTER_OPTIONS: SelectItem[] = [ + { id: "active", label: "Active + Unmapped" }, + { id: "archived", label: "Archived Tasks" }, + { id: "unmapped", label: "Unmapped Only" }, + { id: "all", label: "All Branches" }, +]; + +function statusKind(status: WorkspaceTaskStatus): StatusTagKind { + if (status === "running") return "positive"; + if (status === "error") return "negative"; + if 
(String(status).startsWith("init_")) return "warning"; + return "neutral"; +} + +function formatTime(value: number): string { + return new Date(value).toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" }); +} + +function formatRelativeAge(value: number): string { + const deltaSeconds = Math.max(0, Math.floor((Date.now() - value) / 1000)); + if (deltaSeconds < 60) return `${deltaSeconds}s`; + const minutes = Math.floor(deltaSeconds / 60); + if (minutes < 60) return `${minutes}m`; + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h`; + const days = Math.floor(hours / 24); + return `${days}d`; +} + +function branchTestIdToken(value: string): string { + const token = value + .trim() + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-+|-+$/g, ""); + return token || "branch"; +} + +function repoSummary(overview: RepoOverview | undefined): { + total: number; + mapped: number; + unmapped: number; + openPrs: number; +} { + if (!overview) { + return { + total: 0, + mapped: 0, + unmapped: 0, + openPrs: 0, + }; + } + + let mapped = 0; + let openPrs = 0; + + for (const row of overview.branches) { + if (row.taskId) { + mapped += 1; + } + if (row.pullRequest && row.pullRequest.state !== "MERGED" && row.pullRequest.state !== "CLOSED") { + openPrs += 1; + } + } + + return { + total: overview.branches.length, + mapped, + unmapped: Math.max(0, overview.branches.length - mapped), + openPrs, + }; +} + +function branchKind(row: RepoBranchRecord): StatusTagKind { + if (row.pullRequest?.isDraft || row.pullRequest?.state === "OPEN") { + return "warning"; + } + if (row.pullRequest?.state === "MERGED") { + return "positive"; + } + return "neutral"; +} + +function branchPullRequestLabel(branch: RepoBranchRecord): string { + if (!branch.pullRequest) { + return "no pr"; + } + if (branch.pullRequest.isDraft) { + return "draft"; + } + return branch.pullRequest.state.toLowerCase(); +} + +function matchesOverviewFilter(branch: RepoBranchRecord, filter: 
RepoOverviewFilter): boolean { + if (filter === "archived") { + return branch.taskStatus === "archived"; + } + if (filter === "unmapped") { + return branch.taskId === null; + } + if (filter === "active") { + return branch.taskStatus !== "archived"; + } + return true; +} + +function selectValue(option: Option | null | undefined): Value { + return option ? [option] : []; +} + +function optionId(value: Value): string | null { + const id = value[0]?.id; + if (typeof id === "string") return id; + if (typeof id === "number") return String(id); + return null; +} + +function createOption(item: SelectItem): Option { + return { + id: item.id, + label: item.label, + disabled: item.disabled, + }; +} + +function inputTestIdOverrides(testId?: string) { + return testId + ? { + Input: { + props: { + "data-testid": testId, + }, + }, + } + : undefined; +} + +function textareaTestIdOverrides(testId?: string) { + return testId + ? { + Input: { + props: { + "data-testid": testId, + }, + }, + } + : undefined; +} + +function selectTestIdOverrides(testId?: string) { + return testId + ? { + ControlContainer: { + props: { + "data-testid": testId, + }, + }, + } + : undefined; +} + +function EmptyState({ children, testId }: { children: string; testId?: string }) { + return ( +
+ + {children} + +
+ ); +} + +function StatusPill({ children, kind }: { children: ReactNode; kind: StatusTagKind }) { + return ( + + {children} + + ); +} + +function MetaRow({ label, value, mono = false }: { label: string; value: string; mono?: boolean }) { + return ( +
+ {label} + {mono ? ( + + {value} + + ) : ( + + {value} + + )} +
+ ); +} + +export function OrganizationDashboard({ organizationId, selectedTaskId, selectedRepoId }: OrganizationDashboardProps) { + const [css, theme] = useStyletron(); + const navigate = useNavigate(); + const showDevPanel = useDevPanel(); + const repoOverviewMode = typeof selectedRepoId === "string" && selectedRepoId.length > 0; + + const [draft, setDraft] = useState(""); + const [activeSessionId, setActiveSessionId] = useState(null); + const [createRepoId, setCreateRepoId] = useState(""); + const [newTask, setNewTask] = useState(""); + const [newTitle, setNewTitle] = useState(""); + const [newBranchName, setNewBranchName] = useState(""); + const [createOnBranch, setCreateOnBranch] = useState(null); + const [createTaskOpen, setCreateTaskOpen] = useState(false); + const [selectedOverviewBranch, setSelectedOverviewBranch] = useState(null); + const [overviewFilter, setOverviewFilter] = useState("active"); + const [createError, setCreateError] = useState(null); + + const appState = useSubscription(subscriptionManager, "app", {}); + const activeOrg = appState.data ? currentFoundryOrganization(appState.data) : null; + + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const reposData = organizationState.data?.repos; + const rowsData = organizationState.data?.taskSummaries; + const repos = reposData ?? []; + const rows = rowsData ?? []; + const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rowsData, selectedTaskId]); + const taskState = useSubscription( + subscriptionManager, + "task", + !repoOverviewMode && selectedSummary + ? { + organizationId, + repoId: selectedSummary.repoId, + taskId: selectedSummary.id, + } + : null, + ); + const activeRepoId = selectedRepoId ?? 
createRepoId; + + const repoOverviewQuery = useQuery({ + queryKey: ["organization", organizationId, "repo-overview", activeRepoId], + enabled: Boolean(repoOverviewMode && activeRepoId), + queryFn: async () => { + if (!activeRepoId) { + throw new Error("No repo selected"); + } + return backendClient.getRepoOverview(organizationId, activeRepoId); + }, + }); + + useEffect(() => { + const repos = reposData ?? []; + if (repoOverviewMode && selectedRepoId) { + setCreateRepoId(selectedRepoId); + return; + } + if (!createRepoId && repos.length > 0) { + setCreateRepoId(repos[0]!.id); + } + }, [createRepoId, repoOverviewMode, reposData, selectedRepoId]); + + const repoGroups = useMemo(() => { + const repos = reposData ?? []; + const rows = rowsData ?? []; + const byRepo = new Map(); + for (const row of rows) { + const bucket = byRepo.get(row.repoId); + if (bucket) { + bucket.push(row); + } else { + byRepo.set(row.repoId, [row]); + } + } + + return repos + .map((repo) => { + const tasks = [...(byRepo.get(repo.id) ?? [])].sort((a, b) => b.updatedAtMs - a.updatedAtMs); + const latestTaskAt = tasks[0]?.updatedAtMs ?? 0; + return { + repoId: repo.id, + repoLabel: repo.label, + latestActivityAt: Math.max(repo.latestActivityMs, latestTaskAt), + tasks, + }; + }) + .sort((a, b) => { + if (a.latestActivityAt !== b.latestActivityAt) { + return b.latestActivityAt - a.latestActivityAt; + } + return a.repoLabel.localeCompare(b.repoLabel); + }); + }, [reposData, rowsData]); + + const selectedForSession = repoOverviewMode ? null : (taskState.data ?? null); + + const activeSandbox = useMemo(() => { + if (!selectedForSession) return null; + const byActive = selectedForSession.activeSandboxId + ? (selectedForSession.sandboxes.find((sandbox) => sandbox.sandboxId === selectedForSession.activeSandboxId) ?? null) + : null; + return byActive ?? selectedForSession.sandboxes[0] ?? null; + }, [selectedForSession]); + + useEffect(() => { + const rows = rowsData ?? 
[]; + if (!repoOverviewMode && !selectedTaskId && rows.length > 0) { + void navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId: rows[0]!.id, + }, + search: { sessionId: undefined }, + replace: true, + }); + } + }, [navigate, repoOverviewMode, rowsData, selectedTaskId, organizationId]); + + useEffect(() => { + setActiveSessionId(null); + setDraft(""); + }, [selectedForSession?.id]); + + const sessionRowsData = selectedForSession?.sessionsSummary; + const sessionRows = sessionRowsData ?? []; + const taskStatus = selectedForSession?.status ?? null; + const taskStatusState = describeTaskState(taskStatus); + const taskStateSummary = `${taskStatusState.title}. ${taskStatusState.detail}`; + const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskStatus && taskStatus !== "running" && taskStatus !== "idle"); + const sessionSelection = useMemo( + () => + resolveSessionSelection({ + explicitSessionId: activeSessionId, + taskSessionId: selectedForSession?.activeSessionId ?? null, + sessions: sessionRows.map((session) => ({ + id: session.id, + agent: session.agent, + agentSessionId: session.sessionId ?? "", + lastConnectionId: "", + createdAt: 0, + status: session.status, + })), + }), + [activeSessionId, selectedForSession?.activeSessionId, sessionRowsData], + ); + const resolvedSessionId = sessionSelection.sessionId; + const staleSessionId = sessionSelection.staleSessionId; + const sessionState = useSubscription( + subscriptionManager, + "session", + selectedForSession && resolvedSessionId + ? { + organizationId, + repoId: selectedForSession.repoId, + taskId: selectedForSession.id, + sessionId: resolvedSessionId, + } + : null, + ); + const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? 
null, [resolvedSessionId, sessionRowsData]); + const isPendingProvision = selectedSessionSummary?.status === "pending_provision"; + const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create"; + const isSessionError = selectedSessionSummary?.status === "error"; + const canStartSession = Boolean(selectedForSession && activeSandbox?.sandboxId); + const devPanelFocusedTask = useMemo(() => { + if (repoOverviewMode) { + return null; + } + + const task = selectedForSession ?? selectedSummary; + if (!task) { + return null; + } + + return { + id: task.id, + repoId: task.repoId, + title: task.title, + status: task.status, + branch: task.branch ?? null, + activeSandboxId: selectedForSession?.activeSandboxId ?? null, + activeSessionId: selectedForSession?.activeSessionId ?? null, + sandboxes: selectedForSession?.sandboxes ?? [], + sessions: selectedForSession?.sessionsSummary ?? [], + }; + }, [repoOverviewMode, selectedForSession, selectedSummary]); + const devPanelSnapshot = useMemo( + (): TaskWorkspaceSnapshot => ({ + organizationId, + repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), + repositories: [], + tasks: rows.map((task) => ({ + id: task.id, + repoId: task.repoId, + title: task.title, + status: task.status, + repoName: task.repoName, + updatedAtMs: task.updatedAtMs, + branch: task.branch ?? null, + pullRequest: task.pullRequest, + sessions: task.sessionsSummary.map((session) => ({ + ...session, + draft: { + text: "", + attachments: [], + updatedAtMs: null, + }, + transcript: [], + })), + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + activeSandboxId: selectedForSession?.id === task.id ? 
selectedForSession.activeSandboxId : null, + })), + }), + [reposData, rowsData, selectedForSession, organizationId], + ); + + const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { + if (!selectedForSession || !activeSandbox?.sandboxId) { + throw new Error("No sandbox is available for this task"); + } + const preferredAgent = selectedSessionSummary?.agent === "Claude" ? "claude" : selectedSessionSummary?.agent === "Codex" ? "codex" : undefined; + return backendClient.createSandboxSession({ + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, + sandboxId: activeSandbox.sandboxId, + prompt: selectedForSession.task, + cwd: activeSandbox.cwd ?? undefined, + agent: preferredAgent, + }); + }; + + const createSession = useMutation({ + mutationFn: async () => startSessionFromTask(), + onSuccess: (session) => { + setActiveSessionId(session.id); + }, + }); + + const ensureSessionForPrompt = async (): Promise => { + if (resolvedSessionId) { + return resolvedSessionId; + } + const created = await startSessionFromTask(); + setActiveSessionId(created.id); + return created.id; + }; + + const sendPrompt = useMutation({ + mutationFn: async (prompt: string) => { + if (!selectedForSession || !activeSandbox?.sandboxId) { + throw new Error("No sandbox is available for this task"); + } + const sessionId = await ensureSessionForPrompt(); + await backendClient.sendSandboxPrompt({ + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, + sandboxId: activeSandbox.sandboxId, + sessionId, + prompt, + }); + }, + onSuccess: () => { + setDraft(""); + }, + }); + + const transcript = buildTranscript(sessionState.data?.transcript ?? 
[]); + const canCreateTask = createRepoId.trim().length > 0 && newTask.trim().length > 0; + + const createTask = useMutation({ + mutationFn: async () => { + const repoId = createRepoId.trim(); + const task = newTask.trim(); + if (!repoId || !task) { + throw new Error("Repository and task are required"); + } + + const draftTitle = newTitle.trim(); + const draftBranchName = newBranchName.trim(); + + return backendClient.createTask({ + organizationId, + repoId, + task, + explicitTitle: draftTitle || undefined, + explicitBranchName: createOnBranch ? undefined : draftBranchName || undefined, + onBranch: createOnBranch ?? undefined, + }); + }, + onSuccess: async (task) => { + setCreateError(null); + setNewTask(""); + setNewTitle(""); + setNewBranchName(""); + setCreateOnBranch(null); + setCreateTaskOpen(false); + await navigate({ + to: "/organizations/$organizationId/tasks/$taskId", + params: { + organizationId, + taskId: task.taskId, + }, + search: { sessionId: undefined }, + }); + }, + onError: (error) => { + setCreateError(error instanceof Error ? error.message : String(error)); + }, + }); + + const openCreateFromBranch = (repoId: string, branchName: string): void => { + setCreateRepoId(repoId); + setCreateOnBranch(branchName); + setNewBranchName(""); + setCreateError(null); + if (!newTask.trim()) { + setNewTask(`Continue work on ${branchName}`); + } + setCreateTaskOpen(true); + }; + + const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [reposData]); + const selectedRepoOption = repoOptions.find((option) => option.id === createRepoId) ?? null; + const selectedFilterOption = useMemo( + () => createOption(FILTER_OPTIONS.find((option) => option.id === overviewFilter) ?? 
FILTER_OPTIONS[0]!), + [overviewFilter], + ); + const sessionOptions = useMemo( + () => sessionRows.map((session) => createOption({ id: session.id, label: `${session.sessionName} (${session.status})` })), + [sessionRowsData], + ); + const selectedSessionOption = sessionOptions.find((option) => option.id === resolvedSessionId) ?? null; + + const overview = repoOverviewQuery.data; + const overviewStats = repoSummary(overview); + const filteredOverviewBranches = useMemo(() => { + if (!overview?.branches?.length) { + return []; + } + return overview.branches.filter((branch) => matchesOverviewFilter(branch, overviewFilter)); + }, [overview, overviewFilter]); + const selectedBranchOverview = useMemo(() => { + if (!filteredOverviewBranches.length) { + return null; + } + if (!selectedOverviewBranch) { + return filteredOverviewBranches[0] ?? null; + } + return filteredOverviewBranches.find((row) => row.branchName === selectedOverviewBranch) ?? filteredOverviewBranches[0] ?? null; + }, [filteredOverviewBranches, selectedOverviewBranch]); + + useEffect(() => { + if (!filteredOverviewBranches.length) { + setSelectedOverviewBranch(null); + return; + } + if (!selectedOverviewBranch || !filteredOverviewBranches.some((row) => row.branchName === selectedOverviewBranch)) { + setSelectedOverviewBranch(filteredOverviewBranches[0]?.branchName ?? null); + } + }, [filteredOverviewBranches, selectedOverviewBranch]); + + const modalOverrides = useMemo( + () => ({ + Dialog: { + style: { + borderRadius: "0", + backgroundColor: theme.colors.backgroundSecondary, + border: `1px solid ${theme.colors.borderOpaque}`, + boxShadow: "0 18px 40px rgba(0, 0, 0, 0.45)", + }, + }, + Close: { + style: { + borderRadius: "0", + }, + }, + }), + [theme.colors.backgroundSecondary, theme.colors.borderOpaque], + ); + + return ( + + + + +
+
+ Organization +
+ + + {organizationId} + +
+
+ + +
+ +
+ Tasks +
+
+ + + {organizationState.status === "loading" ? ( + <> + + + ) : null} + + {organizationState.status !== "loading" && repoGroups.length === 0 ? ( + No repos or tasks yet. Create the repository in GitHub, then sync repos from organization settings. + ) : null} + + {repoGroups.map((group) => ( +
+ + {group.repoLabel} + + +
+ {group.tasks + .filter((task) => task.status !== "archived" || task.id === selectedSummary?.id) + .map((task) => { + const isActive = !repoOverviewMode && task.id === selectedSummary?.id; + return ( + + + {task.title ?? "Determining title..."} + +
+ + {task.branch ?? "Determining branch..."} + + {task.status} +
+ + ); + })} + + +
+
+ ))} +
+
+ + + {repoOverviewMode ? ( + <> + +
+
+ + + Repo Overview + +
+ +
+
+ { + const next = optionId(params.value); + if (next) { + setActiveSessionId(next); + } + }} + overrides={selectTestIdOverrides("task-session-select")} + /> +
+ ) : null} +
+ +
+ {resolvedSessionId && sessionState.status === "loading" ? : null} + + {selectedSessionSummary && (isPendingProvision || isPendingSessionCreate) ? ( +
+ + {shouldUseTaskStateEmptyState ? taskStatusState.title : isPendingProvision ? "Provisioning sandbox..." : "Creating session..."} + + + + {shouldUseTaskStateEmptyState + ? taskStateSummary + : isPendingProvision + ? "The task is still provisioning." + : "The session is being created."} + +
+ ) : null} + + {transcript.length === 0 && !(resolvedSessionId && sessionState.status === "loading") ? ( + + {shouldUseTaskStateEmptyState + ? taskStateSummary + : isPendingProvision + ? "Provisioning sandbox..." + : isPendingSessionCreate + ? "Creating session..." + : isSessionError + ? (selectedSessionSummary?.errorMessage ?? "Session failed to start.") + : !activeSandbox?.sandboxId + ? "This task is still provisioning its sandbox." + : staleSessionId + ? `Session ${staleSessionId} is unavailable. Start a new session to continue.` + : resolvedSessionId + ? "No transcript events yet. Send a prompt to start this session." + : "No active session for this task."} + + ) : null} + +
+ {transcript.map((entry) => ( +
+
+ {entry.sender} + {formatTime(entry.createdAt)} +
+
+                                {entry.text}
+                              
+
+ ))} +
+
+
+ +
+