diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..79e7f52 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,152 @@ +name: release + +on: + release: + types: [published] + workflow_dispatch: + inputs: + version: + description: "Version (e.g. 0.1.0 or v0.1.0)" + required: true + type: string + +defaults: + run: + shell: bash -e {0} + +env: + CARGO_INCREMENTAL: 0 + +jobs: + setup: + name: "Setup" + runs-on: ubuntu-24.04 + outputs: + version: ${{ steps.vars.outputs.version }} + latest: ${{ steps.latest.outputs.latest }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Resolve version + id: vars + run: | + if [ "${{ github.event_name }}" = "release" ]; then + VERSION="${{ github.event.release.tag_name }}" + else + VERSION="${{ inputs.version }}" + fi + + VERSION="${VERSION#v}" + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + + - name: Determine latest + id: latest + run: | + node scripts/release/main.js --version "${{ steps.vars.outputs.version }}" --print-latest --output "$GITHUB_OUTPUT" + + binaries: + name: "Build & Upload Binaries" + needs: [setup] + strategy: + matrix: + include: + - platform: linux + target: x86_64-unknown-linux-musl + binary_ext: "" + arch: x86_64 + - platform: windows + target: x86_64-pc-windows-gnu + binary_ext: ".exe" + arch: x86_64 + - platform: macos + target: x86_64-apple-darwin + binary_ext: "" + arch: x86_64 + - platform: macos + target: aarch64-apple-darwin + binary_ext: "" + arch: aarch64 + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build binary + run: | + docker/release/build.sh ${{ matrix.target }} + ls -la dist/ + + - name: Install AWS CLI + run: | + sudo apt-get update + sudo apt-get install -y unzip curl + + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" + unzip awscliv2.zip 
+ sudo ./aws/install --update + + - name: Upload binaries + env: + AWS_ACCESS_KEY_ID: ${{ secrets.R2_RELEASES_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_RELEASES_SECRET_ACCESS_KEY }} + run: | + VERSION="${{ needs.setup.outputs.version }}" + BINARY_NAME="sandbox-daemon-${{ matrix.target }}${{ matrix.binary_ext }}" + + aws s3 cp \ + "dist/${BINARY_NAME}" \ + "s3://rivet-releases/sandbox-daemon/${VERSION}/${BINARY_NAME}" \ + --region auto \ + --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \ + --checksum-algorithm CRC32 + + if [ "${{ needs.setup.outputs.latest }}" = "true" ]; then + aws s3 cp \ + "dist/${BINARY_NAME}" \ + "s3://rivet-releases/sandbox-daemon/latest/${BINARY_NAME}" \ + --region auto \ + --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \ + --checksum-algorithm CRC32 + fi + + artifacts: + name: "TypeScript + Install Script" + needs: [setup] + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install AWS CLI + run: | + sudo apt-get update + sudo apt-get install -y unzip curl + + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" + unzip awscliv2.zip + sudo ./aws/install --update + + - name: Upload TypeScript artifacts and install script + env: + R2_RELEASES_ACCESS_KEY_ID: ${{ secrets.R2_RELEASES_ACCESS_KEY_ID }} + R2_RELEASES_SECRET_ACCESS_KEY: ${{ secrets.R2_RELEASES_SECRET_ACCESS_KEY }} + run: | + VERSION="${{ needs.setup.outputs.version }}" + if [ "${{ needs.setup.outputs.latest }}" = "true" ]; then + LATEST_FLAG="--latest" + else + LATEST_FLAG="--no-latest" + fi + + node scripts/release/main.js --version "$VERSION" $LATEST_FLAG --upload-typescript --upload-install diff --git a/CLAUDE.md b/CLAUDE.md index 35b0e23..abc7b8f 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,4 +1,4 @@ -# Claude Code Instructions +# Instructions ## Agent Schemas @@ 
-12,10 +12,11 @@ Universal schema guidance: ## Spec Tracking -- Track required tests in `spec/required-tests.md` as you write code. -- Capture unresolved questions/ambiguities in `spec/im-not-sure.md`. - Update `todo.md` as work progresses; add new tasks as they arise. - Keep CLI subcommands in sync with every HTTP endpoint. +- Update `CLAUDE.md` to keep CLI endpoints in sync with HTTP API changes. +- When changing the HTTP API, update the TypeScript SDK and CLI together. +- Do not make breaking changes to API endpoints. ## Git Commits diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..3a4500e --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,3 @@ +[workspace] +resolver = "2" +members = ["engine/packages/*"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..6163891 --- /dev/null +++ b/README.md @@ -0,0 +1,94 @@ +# Sandbox Daemon + +Run inside sandboxes to provide support + +- **Any coding agent**: Universal API to interact with all agents with full feature coverage +- **Server Mode**: Run as HTTP server from any sandbox provider or as TypeScript & Python SDK +- **Universal session schema**: Universal schema to store agent transcripts +- **Supports your sandbox provider**: Daytona, E2B, Vercel Sandboxes, [add your own](TODO) +- **Lightweight, portable Rust binary**: Install anywhere with 1 curl command +- **Compatible with Vercel AI SDK**: TODO + +## Quickstart + +Start with the SDK: + +``` +TODO +``` + +To run this in server mode, install with: + +``` +TODO +``` + +And run with: + +``` +TODO +``` + +See the example for your provider of choice: + +- TODO +- [Add your own](TODO) + +## Security + +TODO: Tokens +TODO: Using a gateawy +TODO: BYO tokens with extractor + +## Demo Frontend + +TODO: Screenshot + +This project provides a demo frontend for testing the connection. 
Run it with: + +``` +TODO +``` + +## Agent Compatibility Matrix + +TODO + +## Reference + +### TypeScript SDK + +TODO + +### HTTP API + +TODO + +### CLI + +TODO + +## FAQ + +TODO + +- Why not use PTY? This is the recommended option for XXXX +- Why not use ? +- Does it support ? +- Can I use this with my personal OpenAI & Claude tokens? + +## Project Scope + +This project aims to solve 3 problems with agents: + +- **Universal Agent API**: Claude Code, Codex, Amp, and OpenCode all have put a lot of work into the agent scaffold. Each have respective pros and cons and need to be easy to be swapped between. +- **Agent Transcript**: Maintaining agent transcripts is difficult since the agent manages its own sessions. This provides a simpler way to read and retrieve agent transcripts in your system. +- **Agents In Sandboxes**: There are many complications with running agents inside of sandbox providers. This lets you run a simple curl command to spawn an HTTP server for using any agent from within the sandbox. + +Features out of scope: + +- **Storage of sessions on disk**: Sessions are already stored by the respective coding agents on disk. It's assumed that the consumer is streaming data from this machine to an external storage, such as Postgres, ClickHouse, or Rivet. +- **Direct LLM wrappers**: Use the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction) if you want to implement your own agent from scratch +- **Git Repo Management**: Just use git commands or the features provided by your sandbox provider of choice. +- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide skills that lets you integrate this project with sandbox providers. 
+ diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 0000000..71521bd --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,41 @@ +## soon + +- discuss actor arch in readme + give example +- skillfile + - specifically include the release checklist +- image/etc input + +## later + +- auto-updating for long running job +- persistence +- system information/cpu/etc +- git utils +- api features + - list agent modes available + - list models available + - handle planning mode +- api key gateway +- configuring mcp/skills/etc +- process management inside container +- otel +- better authentication systems +- s3-based file system +- ai sdk compatibility for their ecosystem (useChat, etc) +- resumable messages +- todo lists +- all other features +- misc + - bootstrap tool that extracts tokens from the current system +- skill +- pre-package these as bun binaries instead of npm installations +- build & release pipeline with musl +- agent feature matrix for api features +- tunnels +- mcp integration (can connect to given endpoints) +- provide a pty to access the agent data +- other agent features like file system +- python sdk +- comparison to agentapi: + - it does not use the pty since we need to get more information from the agent directly +- transfer sessions between agents diff --git a/docker/release/build.sh b/docker/release/build.sh new file mode 100755 index 0000000..f8f0301 --- /dev/null +++ b/docker/release/build.sh @@ -0,0 +1,54 @@ +#!/bin/bash +set -euo pipefail + +TARGET=${1:-x86_64-unknown-linux-musl} + +case $TARGET in + x86_64-unknown-linux-musl) + echo "Building for Linux x86_64 musl" + DOCKERFILE="linux-x86_64.Dockerfile" + TARGET_STAGE="builder" + BINARY="sandbox-daemon-$TARGET" + ;; + x86_64-pc-windows-gnu) + echo "Building for Windows x86_64" + DOCKERFILE="windows.Dockerfile" + TARGET_STAGE="" + BINARY="sandbox-daemon-$TARGET.exe" + ;; + x86_64-apple-darwin) + echo "Building for macOS x86_64" + DOCKERFILE="macos-x86_64.Dockerfile" + 
TARGET_STAGE="x86_64-builder" + BINARY="sandbox-daemon-$TARGET" + ;; + aarch64-apple-darwin) + echo "Building for macOS aarch64" + DOCKERFILE="macos-aarch64.Dockerfile" + TARGET_STAGE="aarch64-builder" + BINARY="sandbox-daemon-$TARGET" + ;; + *) + echo "Unsupported target: $TARGET" + exit 1 + ;; + esac + +# Export so the docker CLI child processes actually see it; the release +# Dockerfiles rely on BuildKit (`# syntax=` directive and --mount=type=cache). +export DOCKER_BUILDKIT=1 +if [ -n "$TARGET_STAGE" ]; then + docker build --target "$TARGET_STAGE" -f "docker/release/$DOCKERFILE" -t "sandbox-daemon-builder-$TARGET" . +else + docker build -f "docker/release/$DOCKERFILE" -t "sandbox-daemon-builder-$TARGET" . +fi + +CONTAINER_ID=$(docker create "sandbox-daemon-builder-$TARGET") +mkdir -p dist + +docker cp "$CONTAINER_ID:/artifacts/$BINARY" "dist/" +docker rm "$CONTAINER_ID" + +if [[ "$BINARY" != *.exe ]]; then + chmod +x "dist/$BINARY" +fi + +echo "Binary saved to: dist/$BINARY" diff --git a/docker/release/linux-x86_64.Dockerfile b/docker/release/linux-x86_64.Dockerfile new file mode 100644 index 0000000..1609e66 --- /dev/null +++ b/docker/release/linux-x86_64.Dockerfile @@ -0,0 +1,27 @@ +# syntax=docker/dockerfile:1.10.0 +FROM rust:1.91.0 AS builder + +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update -y && \ + apt-get install -y \ + musl-tools \ + pkg-config \ + ca-certificates \ + git && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +RUN rustup target add x86_64-unknown-linux-musl + +WORKDIR /build +COPY . . 
+ +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + RUSTFLAGS="-C target-feature=+crt-static" \ + cargo build -p sandbox-daemon-core --release --target x86_64-unknown-linux-musl && \ + mkdir -p /artifacts && \ + cp target/x86_64-unknown-linux-musl/release/sandbox-daemon /artifacts/sandbox-daemon-x86_64-unknown-linux-musl + +CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/macos-aarch64.Dockerfile b/docker/release/macos-aarch64.Dockerfile new file mode 100644 index 0000000..d0d6c99 --- /dev/null +++ b/docker/release/macos-aarch64.Dockerfile @@ -0,0 +1,62 @@ +# syntax=docker/dockerfile:1.10.0 +FROM rust:1.91.0 AS base + +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -y \ + clang \ + cmake \ + patch \ + libxml2-dev \ + wget \ + xz-utils \ + curl \ + git && \ + rm -rf /var/lib/apt/lists/* + +# Install osxcross +RUN git config --global --add safe.directory '*' && \ + git clone https://github.com/tpoechtrager/osxcross /root/osxcross && \ + cd /root/osxcross && \ + wget -nc https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.3.sdk.tar.xz && \ + mv MacOSX11.3.sdk.tar.xz tarballs/ && \ + UNATTENDED=yes OSX_VERSION_MIN=10.7 ./build.sh + +ENV PATH="/root/osxcross/target/bin:$PATH" + +ENV OSXCROSS_SDK=MacOSX11.3.sdk \ + SDKROOT=/root/osxcross/target/SDK/MacOSX11.3.sdk \ + BINDGEN_EXTRA_CLANG_ARGS_aarch64_apple_darwin="--sysroot=/root/osxcross/target/SDK/MacOSX11.3.sdk -isystem /root/osxcross/target/SDK/MacOSX11.3.sdk/usr/include" \ + CFLAGS_aarch64_apple_darwin="-B/root/osxcross/target/bin" \ + CXXFLAGS_aarch64_apple_darwin="-B/root/osxcross/target/bin" \ + CARGO_TARGET_AARCH64_APPLE_DARWIN_LINKER=aarch64-apple-darwin20.4-clang \ + CC_aarch64_apple_darwin=aarch64-apple-darwin20.4-clang \ + CXX_aarch64_apple_darwin=aarch64-apple-darwin20.4-clang++ \ + AR_aarch64_apple_darwin=aarch64-apple-darwin20.4-ar \ + 
RANLIB_aarch64_apple_darwin=aarch64-apple-darwin20.4-ranlib \ + MACOSX_DEPLOYMENT_TARGET=10.14 \ + CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true + +WORKDIR /build + +FROM base AS aarch64-builder + +RUN rustup target add aarch64-apple-darwin + +RUN mkdir -p /root/.cargo && \ + echo '\ +[target.aarch64-apple-darwin]\n\ +linker = "aarch64-apple-darwin20.4-clang"\n\ +ar = "aarch64-apple-darwin20.4-ar"\n\ +' > /root/.cargo/config.toml + +COPY . . + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-daemon-core --release --target aarch64-apple-darwin && \ + mkdir -p /artifacts && \ + cp target/aarch64-apple-darwin/release/sandbox-daemon /artifacts/sandbox-daemon-aarch64-apple-darwin + +CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/macos-x86_64.Dockerfile b/docker/release/macos-x86_64.Dockerfile new file mode 100644 index 0000000..d477e15 --- /dev/null +++ b/docker/release/macos-x86_64.Dockerfile @@ -0,0 +1,62 @@ +# syntax=docker/dockerfile:1.10.0 +FROM rust:1.91.0 AS base + +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -y \ + clang \ + cmake \ + patch \ + libxml2-dev \ + wget \ + xz-utils \ + curl \ + git && \ + rm -rf /var/lib/apt/lists/* + +# Install osxcross +RUN git config --global --add safe.directory '*' && \ + git clone https://github.com/tpoechtrager/osxcross /root/osxcross && \ + cd /root/osxcross && \ + wget -nc https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.3.sdk.tar.xz && \ + mv MacOSX11.3.sdk.tar.xz tarballs/ && \ + UNATTENDED=yes OSX_VERSION_MIN=10.7 ./build.sh + +ENV PATH="/root/osxcross/target/bin:$PATH" + +# NOTE: cc-rs/bindgen read these per-target vars with the lowercase triple +# (x86_64_apple_darwin), matching the aarch64 Dockerfile's convention. +ENV OSXCROSS_SDK=MacOSX11.3.sdk \ + SDKROOT=/root/osxcross/target/SDK/MacOSX11.3.sdk \ + BINDGEN_EXTRA_CLANG_ARGS_x86_64_apple_darwin="--sysroot=/root/osxcross/target/SDK/MacOSX11.3.sdk -isystem /root/osxcross/target/SDK/MacOSX11.3.sdk/usr/include" 
\ + CFLAGS_x86_64_apple_darwin="-B/root/osxcross/target/bin" \ + CXXFLAGS_x86_64_apple_darwin="-B/root/osxcross/target/bin" \ + CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER=x86_64-apple-darwin20.4-clang \ + CC_x86_64_apple_darwin=x86_64-apple-darwin20.4-clang \ + CXX_x86_64_apple_darwin=x86_64-apple-darwin20.4-clang++ \ + AR_x86_64_apple_darwin=x86_64-apple-darwin20.4-ar \ + RANLIB_x86_64_apple_darwin=x86_64-apple-darwin20.4-ranlib \ + MACOSX_DEPLOYMENT_TARGET=10.14 \ + CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true + +WORKDIR /build + +FROM base AS x86_64-builder + +RUN rustup target add x86_64-apple-darwin + +RUN mkdir -p /root/.cargo && \ + echo '\ +[target.x86_64-apple-darwin]\n\ +linker = "x86_64-apple-darwin20.4-clang"\n\ +ar = "x86_64-apple-darwin20.4-ar"\n\ +' > /root/.cargo/config.toml + +COPY . . + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-daemon-core --release --target x86_64-apple-darwin && \ + mkdir -p /artifacts && \ + cp target/x86_64-apple-darwin/release/sandbox-daemon /artifacts/sandbox-daemon-x86_64-apple-darwin + +CMD ["ls", "-la", "/artifacts"] diff --git a/docker/release/windows.Dockerfile b/docker/release/windows.Dockerfile new file mode 100644 index 0000000..6d00e7b --- /dev/null +++ b/docker/release/windows.Dockerfile @@ -0,0 +1,49 @@ +# syntax=docker/dockerfile:1.10.0 +FROM rust:1.91.0 + +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update && apt-get install -y \ + llvm-14-dev \ + libclang-14-dev \ + clang-14 \ + gcc-mingw-w64-x86-64 \ + g++-mingw-w64-x86-64 \ + binutils-mingw-w64-x86-64 \ + ca-certificates \ + curl \ + git && \ + rm -rf /var/lib/apt/lists/* + +# Switch MinGW-w64 to the POSIX threading model toolchain +RUN update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix && \ + update-alternatives --set x86_64-w64-mingw32-g++ 
/usr/bin/x86_64-w64-mingw32-g++-posix + +RUN rustup target add x86_64-pc-windows-gnu + +RUN mkdir -p /root/.cargo && \ + echo '\ +[target.x86_64-pc-windows-gnu]\n\ +linker = "x86_64-w64-mingw32-gcc"\n\ +' > /root/.cargo/config.toml + +ENV CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER=x86_64-w64-mingw32-gcc \ + CC_x86_64_pc_windows_gnu=x86_64-w64-mingw32-gcc \ + CXX_x86_64_pc_windows_gnu=x86_64-w64-mingw32-g++ \ + CC_x86_64-pc-windows-gnu=x86_64-w64-mingw32-gcc \ + CXX_x86_64-pc-windows-gnu=x86_64-w64-mingw32-g++ \ + LIBCLANG_PATH=/usr/lib/llvm-14/lib \ + CLANG_PATH=/usr/bin/clang-14 \ + CARGO_INCREMENTAL=0 \ + CARGO_NET_GIT_FETCH_WITH_CLI=true + +WORKDIR /build +COPY . . + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/build/target \ + cargo build -p sandbox-daemon-core --release --target x86_64-pc-windows-gnu && \ + mkdir -p /artifacts && \ + cp target/x86_64-pc-windows-gnu/release/sandbox-daemon.exe /artifacts/sandbox-daemon-x86_64-pc-windows-gnu.exe + +CMD ["ls", "-la", "/artifacts"] diff --git a/engine/packages/agent-credentials/Cargo.toml b/engine/packages/agent-credentials/Cargo.toml new file mode 100644 index 0000000..05a0889 --- /dev/null +++ b/engine/packages/agent-credentials/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "sandbox-daemon-agent-credentials" +version = "0.1.0" +edition = "2021" + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +dirs = "5.0" +time = { version = "0.3", features = ["parsing", "formatting"] } diff --git a/engine/packages/agent-credentials/src/lib.rs b/engine/packages/agent-credentials/src/lib.rs new file mode 100644 index 0000000..64d3d4b --- /dev/null +++ b/engine/packages/agent-credentials/src/lib.rs @@ -0,0 +1,371 @@ +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use time::OffsetDateTime; + 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct ProviderCredentials { + pub api_key: String, + pub source: String, + pub auth_type: AuthType, + pub provider: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum AuthType { + ApiKey, + Oauth, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct ExtractedCredentials { + pub anthropic: Option, + pub openai: Option, + pub other: HashMap, +} + +#[derive(Debug, Clone, Default)] +pub struct CredentialExtractionOptions { + pub home_dir: Option, + pub include_oauth: bool, +} + +impl CredentialExtractionOptions { + pub fn new() -> Self { + Self { + home_dir: None, + include_oauth: true, + } + } +} + +pub fn extract_claude_credentials(options: &CredentialExtractionOptions) -> Option { + let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); + let include_oauth = options.include_oauth; + + let config_paths = [ + home_dir.join(".claude.json.api"), + home_dir.join(".claude.json"), + home_dir.join(".claude.json.nathan"), + ]; + + let key_paths = [ + vec!["primaryApiKey"], + vec!["apiKey"], + vec!["anthropicApiKey"], + vec!["customApiKey"], + ]; + + for path in config_paths { + let Some(data) = read_json_file(&path) else { continue }; // missing/unreadable config: try the next candidate instead of aborting + for key_path in &key_paths { + if let Some(key) = read_string_field(&data, key_path) { + if key.starts_with("sk-ant-") { + return Some(ProviderCredentials { + api_key: key, + source: "claude-code".to_string(), + auth_type: AuthType::ApiKey, + provider: "anthropic".to_string(), + }); + } + } + } + } + + if include_oauth { + let oauth_paths = [ + home_dir.join(".claude").join(".credentials.json"), + home_dir.join(".claude-oauth-credentials.json"), + ]; + for path in oauth_paths { + let data = match read_json_file(&path) { + Some(value) => value, + None => continue, + }; + let access = read_string_field(&data, &["claudeAiOauth", "accessToken"]); + if let Some(token) = access { + if let 
Some(expires_at) = + read_string_field(&data, &["claudeAiOauth", "expiresAt"]) + { + if is_expired_rfc3339(&expires_at) { + continue; + } + } + return Some(ProviderCredentials { + api_key: token, + source: "claude-code".to_string(), + auth_type: AuthType::Oauth, + provider: "anthropic".to_string(), + }); + } + } + } + + None +} + +pub fn extract_codex_credentials(options: &CredentialExtractionOptions) -> Option { + let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); + let include_oauth = options.include_oauth; + let path = home_dir.join(".codex").join("auth.json"); + let data = read_json_file(&path)?; + + if let Some(key) = data.get("OPENAI_API_KEY").and_then(Value::as_str) { + if !key.is_empty() { + return Some(ProviderCredentials { + api_key: key.to_string(), + source: "codex".to_string(), + auth_type: AuthType::ApiKey, + provider: "openai".to_string(), + }); + } + } + + if include_oauth { + if let Some(token) = read_string_field(&data, &["tokens", "access_token"]) { + return Some(ProviderCredentials { + api_key: token, + source: "codex".to_string(), + auth_type: AuthType::Oauth, + provider: "openai".to_string(), + }); + } + } + + None +} + +pub fn extract_opencode_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials { + let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); + let include_oauth = options.include_oauth; + let path = home_dir + .join(".local") + .join("share") + .join("opencode") + .join("auth.json"); + + let mut result = ExtractedCredentials::default(); + let data = match read_json_file(&path) { + Some(value) => value, + None => return result, + }; + + let obj = match data.as_object() { + Some(obj) => obj, + None => return result, + }; + + for (provider_name, value) in obj { + let config = match value.as_object() { + Some(config) => config, + None => continue, + }; + + let auth_type = config + .get("type") + .and_then(Value::as_str) + .unwrap_or(""); + + let credentials = if auth_type 
== "api" { + config.get("key").and_then(Value::as_str).map(|key| ProviderCredentials { + api_key: key.to_string(), + source: "opencode".to_string(), + auth_type: AuthType::ApiKey, + provider: provider_name.to_string(), + }) + } else if auth_type == "oauth" && include_oauth { + let expires = config.get("expires").and_then(Value::as_i64); + if let Some(expires) = expires { + if expires < current_epoch_millis() { + None + } else { + config + .get("access") + .and_then(Value::as_str) + .map(|token| ProviderCredentials { + api_key: token.to_string(), + source: "opencode".to_string(), + auth_type: AuthType::Oauth, + provider: provider_name.to_string(), + }) + } + } else { + config + .get("access") + .and_then(Value::as_str) + .map(|token| ProviderCredentials { + api_key: token.to_string(), + source: "opencode".to_string(), + auth_type: AuthType::Oauth, + provider: provider_name.to_string(), + }) + } + } else { + None + }; + + if let Some(credentials) = credentials { + if provider_name == "anthropic" { + result.anthropic = Some(credentials.clone()); + } else if provider_name == "openai" { + result.openai = Some(credentials.clone()); + } else { + result.other.insert(provider_name.to_string(), credentials.clone()); + } + } + } + + result +} + +pub fn extract_amp_credentials(options: &CredentialExtractionOptions) -> Option { + let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); + let path = home_dir.join(".amp").join("config.json"); + let data = read_json_file(&path)?; + + let key_paths: Vec> = vec![ + vec!["anthropicApiKey"], + vec!["anthropic_api_key"], + vec!["apiKey"], + vec!["api_key"], + vec!["accessToken"], + vec!["access_token"], + vec!["token"], + vec!["auth", "anthropicApiKey"], + vec!["auth", "apiKey"], + vec!["auth", "token"], + vec!["anthropic", "apiKey"], + vec!["anthropic", "token"], + ]; + + for key_path in key_paths { + if let Some(key) = read_string_field(&data, &key_path) { + if !key.is_empty() { + return Some(ProviderCredentials { + 
api_key: key, + source: "amp".to_string(), + auth_type: AuthType::ApiKey, + provider: "anthropic".to_string(), + }); + } + } + } + + None +} + +pub fn extract_all_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials { + let mut result = ExtractedCredentials::default(); + + if let Ok(value) = std::env::var("ANTHROPIC_API_KEY") { + result.anthropic = Some(ProviderCredentials { + api_key: value, + source: "environment".to_string(), + auth_type: AuthType::ApiKey, + provider: "anthropic".to_string(), + }); + } else if let Ok(value) = std::env::var("CLAUDE_API_KEY") { + result.anthropic = Some(ProviderCredentials { + api_key: value, + source: "environment".to_string(), + auth_type: AuthType::ApiKey, + provider: "anthropic".to_string(), + }); + } + + if let Ok(value) = std::env::var("OPENAI_API_KEY") { + result.openai = Some(ProviderCredentials { + api_key: value, + source: "environment".to_string(), + auth_type: AuthType::ApiKey, + provider: "openai".to_string(), + }); + } else if let Ok(value) = std::env::var("CODEX_API_KEY") { + result.openai = Some(ProviderCredentials { + api_key: value, + source: "environment".to_string(), + auth_type: AuthType::ApiKey, + provider: "openai".to_string(), + }); + } + + if result.anthropic.is_none() { + result.anthropic = extract_amp_credentials(options); + } + + if result.anthropic.is_none() { + result.anthropic = extract_claude_credentials(options); + } + + if result.openai.is_none() { + result.openai = extract_codex_credentials(options); + } + + let opencode_credentials = extract_opencode_credentials(options); + if result.anthropic.is_none() { + result.anthropic = opencode_credentials.anthropic.clone(); + } + if result.openai.is_none() { + result.openai = opencode_credentials.openai.clone(); + } + + for (key, value) in opencode_credentials.other { + result.other.entry(key).or_insert(value); + } + + result +} + +pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option { + 
extract_all_credentials(options) + .anthropic + .map(|cred| cred.api_key) +} + +pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option { + extract_all_credentials(options) + .openai + .map(|cred| cred.api_key) +} + +pub fn set_credentials_as_env_vars(credentials: &ExtractedCredentials) { + if let Some(cred) = &credentials.anthropic { + std::env::set_var("ANTHROPIC_API_KEY", &cred.api_key); + } + if let Some(cred) = &credentials.openai { + std::env::set_var("OPENAI_API_KEY", &cred.api_key); + } +} + +fn read_json_file(path: &Path) -> Option { + let contents = fs::read_to_string(path).ok()?; + serde_json::from_str(&contents).ok() +} + +fn read_string_field(value: &Value, path: &[&str]) -> Option { + let mut current = value; + for key in path { + current = current.get(*key)?; + } + current.as_str().map(|s| s.to_string()) +} + +fn default_home_dir() -> PathBuf { + dirs::home_dir().unwrap_or_else(|| PathBuf::from(".")) +} + +fn current_epoch_millis() -> i64 { + let now = OffsetDateTime::now_utc(); + (now.unix_timestamp() * 1000) + (now.millisecond() as i64) +} + +fn is_expired_rfc3339(value: &str) -> bool { + match OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339) { + Ok(expiry) => expiry < OffsetDateTime::now_utc(), + Err(_) => false, + } +} diff --git a/engine/packages/openapi-gen/src/main.rs b/engine/packages/openapi-gen/src/main.rs new file mode 100644 index 0000000..b03ed94 --- /dev/null +++ b/engine/packages/openapi-gen/src/main.rs @@ -0,0 +1,40 @@ +use std::env; +use std::fs; +use std::path::PathBuf; + +fn main() { + let mut out: Option = None; + let mut stdout = false; + let mut args = env::args().skip(1).peekable(); + while let Some(arg) = args.next() { + if arg == "--stdout" { + stdout = true; + continue; + } + if arg == "--out" { + if let Some(value) = args.next() { + out = Some(PathBuf::from(value)); + } + continue; + } + if let Some(value) = arg.strip_prefix("--out=") { + out = Some(PathBuf::from(value)); + 
continue; + } + if out.is_none() { + out = Some(PathBuf::from(arg)); + } + } + + let schema = sandbox_daemon_openapi_gen::OPENAPI_JSON; + if stdout { + println!("{schema}"); + return; + } + + let out = out.unwrap_or_else(|| PathBuf::from("openapi.json")); + if let Err(err) = fs::write(&out, schema) { + eprintln!("failed to write {}: {err}", out.display()); + std::process::exit(1); + } +} diff --git a/engine/packages/sandbox-daemon/src/agents.rs b/engine/packages/sandbox-daemon/src/agents.rs deleted file mode 100644 index 1696980..0000000 --- a/engine/packages/sandbox-daemon/src/agents.rs +++ /dev/null @@ -1,683 +0,0 @@ -use std::collections::HashMap; -use std::fmt; -use std::fs; -use std::io::{self, Read}; -use std::path::{Path, PathBuf}; -use std::process::{Command, ExitStatus}; - -use flate2::read::GzDecoder; -use reqwest::blocking::Client; -use serde::{Deserialize, Serialize}; -use thiserror::Error; -use url::Url; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum AgentId { - Claude, - Codex, - Opencode, - Amp, -} - -impl AgentId { - pub fn as_str(self) -> &'static str { - match self { - AgentId::Claude => "claude", - AgentId::Codex => "codex", - AgentId::Opencode => "opencode", - AgentId::Amp => "amp", - } - } - - pub fn binary_name(self) -> &'static str { - match self { - AgentId::Claude => "claude", - AgentId::Codex => "codex", - AgentId::Opencode => "opencode", - AgentId::Amp => "amp", - } - } -} - -impl fmt::Display for AgentId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Platform { - LinuxX64, - LinuxX64Musl, - LinuxArm64, - MacosArm64, - MacosX64, -} - -impl Platform { - pub fn detect() -> Result { - let os = std::env::consts::OS; - let arch = std::env::consts::ARCH; - let is_musl = cfg!(target_env = "musl"); - - match (os, arch, is_musl) { - ("linux", "x86_64", true) => 
Ok(Self::LinuxX64Musl), - ("linux", "x86_64", false) => Ok(Self::LinuxX64), - ("linux", "aarch64", _) => Ok(Self::LinuxArm64), - ("macos", "aarch64", _) => Ok(Self::MacosArm64), - ("macos", "x86_64", _) => Ok(Self::MacosX64), - _ => Err(AgentError::UnsupportedPlatform { - os: os.to_string(), - arch: arch.to_string(), - }), - } - } -} - -#[derive(Debug, Clone)] -pub struct AgentManager { - install_dir: PathBuf, - platform: Platform, -} - -impl AgentManager { - pub fn new(install_dir: impl Into) -> Result { - Ok(Self { - install_dir: install_dir.into(), - platform: Platform::detect()?, - }) - } - - pub fn with_platform( - install_dir: impl Into, - platform: Platform, - ) -> Self { - Self { - install_dir: install_dir.into(), - platform, - } - } - - pub fn install(&self, agent: AgentId, options: InstallOptions) -> Result { - let install_path = self.binary_path(agent); - if install_path.exists() && !options.reinstall { - return Ok(InstallResult { - path: install_path, - version: self.version(agent).unwrap_or(None), - }); - } - - fs::create_dir_all(&self.install_dir)?; - - match agent { - AgentId::Claude => install_claude(&install_path, self.platform, options.version.as_deref())?, - AgentId::Codex => install_codex(&install_path, self.platform, options.version.as_deref())?, - AgentId::Opencode => install_opencode(&install_path, self.platform, options.version.as_deref())?, - AgentId::Amp => install_amp(&install_path, self.platform, options.version.as_deref())?, - } - - Ok(InstallResult { - path: install_path, - version: self.version(agent).unwrap_or(None), - }) - } - - pub fn is_installed(&self, agent: AgentId) -> bool { - self.binary_path(agent).exists() || find_in_path(agent.binary_name()).is_some() - } - - pub fn binary_path(&self, agent: AgentId) -> PathBuf { - self.install_dir.join(agent.binary_name()) - } - - pub fn version(&self, agent: AgentId) -> Result, AgentError> { - let path = self.resolve_binary(agent)?; - let attempts = [vec!["--version"], vec!["version"], 
vec!["-V"]]; - for args in attempts { - let output = Command::new(&path).args(args).output(); - if let Ok(output) = output { - if output.status.success() { - if let Some(version) = parse_version_output(&output) { - return Ok(Some(version)); - } - } - } - } - Ok(None) - } - - pub fn spawn(&self, agent: AgentId, options: SpawnOptions) -> Result { - let path = self.resolve_binary(agent)?; - let working_dir = options - .working_dir - .clone() - .unwrap_or_else(|| std::env::current_dir().unwrap_or_default()); - let mut command = Command::new(&path); - command.current_dir(&working_dir); - - match agent { - AgentId::Claude => { - command - .arg("--print") - .arg("--output-format") - .arg("stream-json") - .arg("--verbose") - .arg("--dangerously-skip-permissions"); - if let Some(model) = options.model.as_deref() { - command.arg("--model").arg(model); - } - if let Some(session_id) = options.session_id.as_deref() { - command.arg("--resume").arg(session_id); - } - if let Some(permission_mode) = options.permission_mode.as_deref() { - if permission_mode == "plan" { - command.arg("--permission-mode").arg("plan"); - } - } - command.arg(&options.prompt); - } - AgentId::Codex => { - command - .arg("exec") - .arg("--json") - .arg("--dangerously-bypass-approvals-and-sandbox"); - if let Some(model) = options.model.as_deref() { - command.arg("-m").arg(model); - } - command.arg(&options.prompt); - } - AgentId::Opencode => { - command - .arg("run") - .arg("--format") - .arg("json"); - if let Some(model) = options.model.as_deref() { - command.arg("-m").arg(model); - } - if let Some(agent_mode) = options.agent_mode.as_deref() { - command.arg("--agent").arg(agent_mode); - } - if let Some(variant) = options.variant.as_deref() { - command.arg("--variant").arg(variant); - } - if let Some(session_id) = options.session_id.as_deref() { - command.arg("-s").arg(session_id); - } - command.arg(&options.prompt); - } - AgentId::Amp => { - let output = spawn_amp(&path, &working_dir, &options)?; - return 
Ok(SpawnResult { - status: output.status, - stdout: String::from_utf8_lossy(&output.stdout).to_string(), - stderr: String::from_utf8_lossy(&output.stderr).to_string(), - }); - } - } - - for (key, value) in options.env { - command.env(key, value); - } - - let output = command.output().map_err(AgentError::Io)?; - Ok(SpawnResult { - status: output.status, - stdout: String::from_utf8_lossy(&output.stdout).to_string(), - stderr: String::from_utf8_lossy(&output.stderr).to_string(), - }) - } - - fn resolve_binary(&self, agent: AgentId) -> Result { - let path = self.binary_path(agent); - if path.exists() { - return Ok(path); - } - if let Some(path) = find_in_path(agent.binary_name()) { - return Ok(path); - } - Err(AgentError::BinaryNotFound { agent }) - } -} - -#[derive(Debug, Clone)] -pub struct InstallOptions { - pub reinstall: bool, - pub version: Option, -} - -impl Default for InstallOptions { - fn default() -> Self { - Self { - reinstall: false, - version: None, - } - } -} - -#[derive(Debug, Clone)] -pub struct InstallResult { - pub path: PathBuf, - pub version: Option, -} - -#[derive(Debug, Clone)] -pub struct SpawnOptions { - pub prompt: String, - pub model: Option, - pub variant: Option, - pub agent_mode: Option, - pub permission_mode: Option, - pub session_id: Option, - pub working_dir: Option, - pub env: HashMap, -} - -impl SpawnOptions { - pub fn new(prompt: impl Into) -> Self { - Self { - prompt: prompt.into(), - model: None, - variant: None, - agent_mode: None, - permission_mode: None, - session_id: None, - working_dir: None, - env: HashMap::new(), - } - } -} - -#[derive(Debug, Clone)] -pub struct SpawnResult { - pub status: ExitStatus, - pub stdout: String, - pub stderr: String, -} - -#[derive(Debug, Error)] -pub enum AgentError { - #[error("unsupported platform {os}/{arch}")] - UnsupportedPlatform { os: String, arch: String }, - #[error("unsupported agent {agent}")] - UnsupportedAgent { agent: String }, - #[error("binary not found for {agent}")] - 
BinaryNotFound { agent: AgentId }, - #[error("download failed: {url}")] - DownloadFailed { url: Url }, - #[error("http error: {0}")] - Http(#[from] reqwest::Error), - #[error("url parse error: {0}")] - UrlParse(#[from] url::ParseError), - #[error("io error: {0}")] - Io(#[from] io::Error), - #[error("extract failed: {0}")] - ExtractFailed(String), -} - -fn parse_version_output(output: &std::process::Output) -> Option { - let stdout = String::from_utf8_lossy(&output.stdout); - let stderr = String::from_utf8_lossy(&output.stderr); - let combined = format!("{}\n{}", stdout, stderr); - combined - .lines() - .map(str::trim) - .find(|line| !line.is_empty()) - .map(|line| line.to_string()) -} - -fn spawn_amp( - path: &Path, - working_dir: &Path, - options: &SpawnOptions, -) -> Result { - let flags = detect_amp_flags(path, working_dir).unwrap_or_default(); - let mut args: Vec<&str> = Vec::new(); - if flags.execute { - args.push("--execute"); - } else if flags.print { - args.push("--print"); - } - if flags.output_format { - args.push("--output-format"); - args.push("stream-json"); - } - if flags.dangerously_skip_permissions { - args.push("--dangerously-skip-permissions"); - } - - let mut command = Command::new(path); - command.current_dir(working_dir); - if let Some(model) = options.model.as_deref() { - command.arg("--model").arg(model); - } - if let Some(session_id) = options.session_id.as_deref() { - command.arg("--continue").arg(session_id); - } - command.args(&args).arg(&options.prompt); - for (key, value) in &options.env { - command.env(key, value); - } - let output = command.output().map_err(AgentError::Io)?; - if output.status.success() { - return Ok(output); - } - - let stderr = String::from_utf8_lossy(&output.stderr); - if stderr.contains("unknown option") - || stderr.contains("unknown flag") - || stderr.contains("User message must be provided") - { - return spawn_amp_fallback(path, working_dir, options); - } - - Ok(output) -} - -#[derive(Debug, Default, Clone, 
Copy)] -struct AmpFlags { - execute: bool, - print: bool, - output_format: bool, - dangerously_skip_permissions: bool, -} - -fn detect_amp_flags(path: &Path, working_dir: &Path) -> Option { - let output = Command::new(path) - .current_dir(working_dir) - .arg("--help") - .output() - .ok()?; - let text = format!( - "{}\n{}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); - Some(AmpFlags { - execute: text.contains("--execute"), - print: text.contains("--print"), - output_format: text.contains("--output-format"), - dangerously_skip_permissions: text.contains("--dangerously-skip-permissions"), - }) -} - -fn spawn_amp_fallback( - path: &Path, - working_dir: &Path, - options: &SpawnOptions, -) -> Result { - let attempts = vec![ - vec!["--execute"], - vec!["--print", "--output-format", "stream-json"], - vec!["--output-format", "stream-json"], - vec!["--dangerously-skip-permissions"], - vec![], - ]; - - for args in attempts { - let mut command = Command::new(path); - command.current_dir(working_dir); - if let Some(model) = options.model.as_deref() { - command.arg("--model").arg(model); - } - if let Some(session_id) = options.session_id.as_deref() { - command.arg("--continue").arg(session_id); - } - if !args.is_empty() { - command.args(&args); - } - command.arg(&options.prompt); - for (key, value) in &options.env { - command.env(key, value); - } - let output = command.output().map_err(AgentError::Io)?; - if output.status.success() { - return Ok(output); - } - } - - let mut command = Command::new(path); - command.current_dir(working_dir); - if let Some(model) = options.model.as_deref() { - command.arg("--model").arg(model); - } - if let Some(session_id) = options.session_id.as_deref() { - command.arg("--continue").arg(session_id); - } - command.arg(&options.prompt); - for (key, value) in &options.env { - command.env(key, value); - } - Ok(command.output().map_err(AgentError::Io)?) 
-} - -fn find_in_path(binary_name: &str) -> Option { - let path_var = std::env::var_os("PATH")?; - for path in std::env::split_paths(&path_var) { - let candidate = path.join(binary_name); - if candidate.exists() { - return Some(candidate); - } - } - None -} - -fn download_bytes(url: &Url) -> Result, AgentError> { - let client = Client::builder().build()?; - let mut response = client.get(url.clone()).send()?; - if !response.status().is_success() { - return Err(AgentError::DownloadFailed { url: url.clone() }); - } - let mut bytes = Vec::new(); - response.read_to_end(&mut bytes)?; - Ok(bytes) -} - -fn install_claude(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> { - let version = match version { - Some(version) => version.to_string(), - None => { - let url = Url::parse( - "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/latest", - )?; - let text = String::from_utf8(download_bytes(&url)?).map_err(|err| AgentError::ExtractFailed(err.to_string()))?; - text.trim().to_string() - } - }; - - let platform_segment = match platform { - Platform::LinuxX64 => "linux-x64", - Platform::LinuxX64Musl => "linux-x64-musl", - Platform::LinuxArm64 => "linux-arm64", - Platform::MacosArm64 => "darwin-arm64", - Platform::MacosX64 => "darwin-x64", - }; - - let url = Url::parse(&format!( - "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/{version}/{platform_segment}/claude" - ))?; - let bytes = download_bytes(&url)?; - write_executable(path, &bytes)?; - Ok(()) -} - -fn install_amp(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> { - let version = match version { - Some(version) => version.to_string(), - None => { - let url = Url::parse("https://storage.googleapis.com/amp-public-assets-prod-0/cli/cli-version.txt")?; - let text = String::from_utf8(download_bytes(&url)?).map_err(|err| 
AgentError::ExtractFailed(err.to_string()))?; - text.trim().to_string() - } - }; - - let platform_segment = match platform { - Platform::LinuxX64 | Platform::LinuxX64Musl => "linux-x64", - Platform::LinuxArm64 => "linux-arm64", - Platform::MacosArm64 => "darwin-arm64", - Platform::MacosX64 => "darwin-x64", - }; - - let url = Url::parse(&format!( - "https://storage.googleapis.com/amp-public-assets-prod-0/cli/{version}/amp-{platform_segment}" - ))?; - let bytes = download_bytes(&url)?; - write_executable(path, &bytes)?; - Ok(()) -} - -fn install_codex(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> { - let target = match platform { - Platform::LinuxX64 | Platform::LinuxX64Musl => "x86_64-unknown-linux-musl", - Platform::LinuxArm64 => "aarch64-unknown-linux-musl", - Platform::MacosArm64 => "aarch64-apple-darwin", - Platform::MacosX64 => "x86_64-apple-darwin", - }; - - let url = match version { - Some(version) => Url::parse(&format!( - "https://github.com/openai/codex/releases/download/{version}/codex-{target}.tar.gz" - ))?, - None => Url::parse(&format!( - "https://github.com/openai/codex/releases/latest/download/codex-{target}.tar.gz" - ))?, - }; - - let bytes = download_bytes(&url)?; - let temp_dir = tempfile::tempdir()?; - let cursor = io::Cursor::new(bytes); - let mut archive = tar::Archive::new(GzDecoder::new(cursor)); - archive.unpack(temp_dir.path())?; - - let expected = format!("codex-{target}"); - let binary = find_file_recursive(temp_dir.path(), &expected)? 
- .ok_or_else(|| AgentError::ExtractFailed(format!("missing {expected}")))?; - move_executable(&binary, path)?; - Ok(()) -} - -fn install_opencode(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> { - match platform { - Platform::MacosArm64 => { - let url = match version { - Some(version) => Url::parse(&format!( - "https://github.com/anomalyco/opencode/releases/download/{version}/opencode-darwin-arm64.zip" - ))?, - None => Url::parse( - "https://github.com/anomalyco/opencode/releases/latest/download/opencode-darwin-arm64.zip", - )?, - }; - install_zip_binary(path, &url, "opencode") - } - Platform::MacosX64 => { - let url = match version { - Some(version) => Url::parse(&format!( - "https://github.com/anomalyco/opencode/releases/download/{version}/opencode-darwin-x64.zip" - ))?, - None => Url::parse( - "https://github.com/anomalyco/opencode/releases/latest/download/opencode-darwin-x64.zip", - )?, - }; - install_zip_binary(path, &url, "opencode") - } - _ => { - let platform_segment = match platform { - Platform::LinuxX64 => "linux-x64", - Platform::LinuxX64Musl => "linux-x64-musl", - Platform::LinuxArm64 => "linux-arm64", - Platform::MacosArm64 | Platform::MacosX64 => unreachable!(), - }; - let url = match version { - Some(version) => Url::parse(&format!( - "https://github.com/anomalyco/opencode/releases/download/{version}/opencode-{platform_segment}.tar.gz" - ))?, - None => Url::parse(&format!( - "https://github.com/anomalyco/opencode/releases/latest/download/opencode-{platform_segment}.tar.gz" - ))?, - }; - - let bytes = download_bytes(&url)?; - let temp_dir = tempfile::tempdir()?; - let cursor = io::Cursor::new(bytes); - let mut archive = tar::Archive::new(GzDecoder::new(cursor)); - archive.unpack(temp_dir.path())?; - let binary = find_file_recursive(temp_dir.path(), "opencode")? 
- .ok_or_else(|| AgentError::ExtractFailed("missing opencode".to_string()))?; - move_executable(&binary, path)?; - Ok(()) - } - } -} - -fn install_zip_binary(path: &Path, url: &Url, binary_name: &str) -> Result<(), AgentError> { - let bytes = download_bytes(url)?; - let reader = io::Cursor::new(bytes); - let mut archive = zip::ZipArchive::new(reader).map_err(|err| AgentError::ExtractFailed(err.to_string()))?; - let temp_dir = tempfile::tempdir()?; - for i in 0..archive.len() { - let mut file = archive - .by_index(i) - .map_err(|err| AgentError::ExtractFailed(err.to_string()))?; - if !file.name().ends_with(binary_name) { - continue; - } - let out_path = temp_dir.path().join(binary_name); - let mut out_file = fs::File::create(&out_path)?; - io::copy(&mut file, &mut out_file)?; - move_executable(&out_path, path)?; - return Ok(()); - } - Err(AgentError::ExtractFailed(format!("missing {binary_name}"))) -} - -fn write_executable(path: &Path, bytes: &[u8]) -> Result<(), AgentError> { - if let Some(parent) = path.parent() { - fs::create_dir_all(parent)?; - } - fs::write(path, bytes)?; - set_executable(path)?; - Ok(()) -} - -fn move_executable(source: &Path, dest: &Path) -> Result<(), AgentError> { - if let Some(parent) = dest.parent() { - fs::create_dir_all(parent)?; - } - if dest.exists() { - fs::remove_file(dest)?; - } - fs::copy(source, dest)?; - set_executable(dest)?; - Ok(()) -} - -#[cfg(unix)] -fn set_executable(path: &Path) -> Result<(), AgentError> { - use std::os::unix::fs::PermissionsExt; - let mut perms = fs::metadata(path)?.permissions(); - perms.set_mode(0o755); - fs::set_permissions(path, perms)?; - Ok(()) -} - -#[cfg(not(unix))] -fn set_executable(_path: &Path) -> Result<(), AgentError> { - Ok(()) -} - -fn find_file_recursive(dir: &Path, filename: &str) -> Result, AgentError> { - for entry in fs::read_dir(dir)? { - let entry = entry?; - let path = entry.path(); - if path.is_dir() { - if let Some(found) = find_file_recursive(&path, filename)? 
{ - return Ok(Some(found)); - } - } else if let Some(name) = path.file_name().and_then(|s| s.to_str()) { - if name == filename { - return Ok(Some(path)); - } - } - } - Ok(None) -} diff --git a/engine/packages/sandbox-daemon/src/credentials.rs b/engine/packages/sandbox-daemon/src/credentials.rs index a4f36a2..3a6e9e1 100644 --- a/engine/packages/sandbox-daemon/src/credentials.rs +++ b/engine/packages/sandbox-daemon/src/credentials.rs @@ -1,335 +1 @@ -use std::collections::HashMap; -use std::fs; -use std::path::{Path, PathBuf}; - -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use time::OffsetDateTime; - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -pub struct ProviderCredentials { - pub api_key: String, - pub source: String, - pub auth_type: AuthType, - pub provider: String, -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -pub enum AuthType { - ApiKey, - Oauth, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -pub struct ExtractedCredentials { - pub anthropic: Option, - pub openai: Option, - pub other: HashMap, -} - -#[derive(Debug, Clone, Default)] -pub struct CredentialExtractionOptions { - pub home_dir: Option, - pub include_oauth: bool, -} - -impl CredentialExtractionOptions { - pub fn new() -> Self { - Self { - home_dir: None, - include_oauth: true, - } - } -} - -pub fn extract_claude_credentials( - options: &CredentialExtractionOptions, -) -> Option { - let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); - let include_oauth = options.include_oauth; - - let config_paths = [ - home_dir.join(".claude.json.api"), - home_dir.join(".claude.json"), - home_dir.join(".claude.json.nathan"), - ]; - - let key_paths = [ - vec!["primaryApiKey"], - vec!["apiKey"], - vec!["anthropicApiKey"], - vec!["customApiKey"], - ]; - - for path in config_paths { - let data = read_json_file(&path)?; - for key_path in &key_paths { - if let Some(key) = 
read_string_field(&data, key_path) { - if key.starts_with("sk-ant-") { - return Some(ProviderCredentials { - api_key: key, - source: "claude-code".to_string(), - auth_type: AuthType::ApiKey, - provider: "anthropic".to_string(), - }); - } - } - } - } - - if include_oauth { - let oauth_paths = [ - home_dir.join(".claude").join(".credentials.json"), - home_dir.join(".claude-oauth-credentials.json"), - ]; - for path in oauth_paths { - let data = match read_json_file(&path) { - Some(value) => value, - None => continue, - }; - let access = read_string_field(&data, &["claudeAiOauth", "accessToken"]); - if let Some(token) = access { - if let Some(expires_at) = read_string_field(&data, &["claudeAiOauth", "expiresAt"]) { - if is_expired_rfc3339(&expires_at) { - continue; - } - } - return Some(ProviderCredentials { - api_key: token, - source: "claude-code".to_string(), - auth_type: AuthType::Oauth, - provider: "anthropic".to_string(), - }); - } - } - } - - None -} - -pub fn extract_codex_credentials( - options: &CredentialExtractionOptions, -) -> Option { - let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir); - let include_oauth = options.include_oauth; - let path = home_dir.join(".codex").join("auth.json"); - let data = read_json_file(&path)?; - - if let Some(key) = data.get("OPENAI_API_KEY").and_then(Value::as_str) { - if !key.is_empty() { - return Some(ProviderCredentials { - api_key: key.to_string(), - source: "codex".to_string(), - auth_type: AuthType::ApiKey, - provider: "openai".to_string(), - }); - } - } - - if include_oauth { - if let Some(token) = read_string_field(&data, &["tokens", "access_token"]) { - return Some(ProviderCredentials { - api_key: token, - source: "codex".to_string(), - auth_type: AuthType::Oauth, - provider: "openai".to_string(), - }); - } - } - - None -} - -pub fn extract_opencode_credentials( - options: &CredentialExtractionOptions, -) -> ExtractedCredentials { - let home_dir = 
options.home_dir.clone().unwrap_or_else(default_home_dir); - let include_oauth = options.include_oauth; - let path = home_dir - .join(".local") - .join("share") - .join("opencode") - .join("auth.json"); - - let mut result = ExtractedCredentials::default(); - let data = match read_json_file(&path) { - Some(value) => value, - None => return result, - }; - - let obj = match data.as_object() { - Some(obj) => obj, - None => return result, - }; - - for (provider_name, value) in obj { - let config = match value.as_object() { - Some(config) => config, - None => continue, - }; - - let auth_type = config - .get("type") - .and_then(Value::as_str) - .unwrap_or(""); - - let credentials = if auth_type == "api" { - config.get("key").and_then(Value::as_str).map(|key| ProviderCredentials { - api_key: key.to_string(), - source: "opencode".to_string(), - auth_type: AuthType::ApiKey, - provider: provider_name.to_string(), - }) - } else if auth_type == "oauth" && include_oauth { - let expires = config.get("expires").and_then(Value::as_i64); - if let Some(expires) = expires { - if expires < current_epoch_millis() { - None - } else { - config - .get("access") - .and_then(Value::as_str) - .map(|token| ProviderCredentials { - api_key: token.to_string(), - source: "opencode".to_string(), - auth_type: AuthType::Oauth, - provider: provider_name.to_string(), - }) - } - } else { - config - .get("access") - .and_then(Value::as_str) - .map(|token| ProviderCredentials { - api_key: token.to_string(), - source: "opencode".to_string(), - auth_type: AuthType::Oauth, - provider: provider_name.to_string(), - }) - } - } else { - None - }; - - if let Some(credentials) = credentials { - if provider_name == "anthropic" { - result.anthropic = Some(credentials.clone()); - } else if provider_name == "openai" { - result.openai = Some(credentials.clone()); - } else { - result.other.insert(provider_name.to_string(), credentials.clone()); - } - } - } - - result -} - -pub fn extract_all_credentials(options: 
&CredentialExtractionOptions) -> ExtractedCredentials { - let mut result = ExtractedCredentials::default(); - - if let Ok(value) = std::env::var("ANTHROPIC_API_KEY") { - result.anthropic = Some(ProviderCredentials { - api_key: value, - source: "environment".to_string(), - auth_type: AuthType::ApiKey, - provider: "anthropic".to_string(), - }); - } else if let Ok(value) = std::env::var("CLAUDE_API_KEY") { - result.anthropic = Some(ProviderCredentials { - api_key: value, - source: "environment".to_string(), - auth_type: AuthType::ApiKey, - provider: "anthropic".to_string(), - }); - } - - if let Ok(value) = std::env::var("OPENAI_API_KEY") { - result.openai = Some(ProviderCredentials { - api_key: value, - source: "environment".to_string(), - auth_type: AuthType::ApiKey, - provider: "openai".to_string(), - }); - } else if let Ok(value) = std::env::var("CODEX_API_KEY") { - result.openai = Some(ProviderCredentials { - api_key: value, - source: "environment".to_string(), - auth_type: AuthType::ApiKey, - provider: "openai".to_string(), - }); - } - - if result.anthropic.is_none() { - result.anthropic = extract_claude_credentials(options); - } - - if result.openai.is_none() { - result.openai = extract_codex_credentials(options); - } - - let opencode_credentials = extract_opencode_credentials(options); - if result.anthropic.is_none() { - result.anthropic = opencode_credentials.anthropic.clone(); - } - if result.openai.is_none() { - result.openai = opencode_credentials.openai.clone(); - } - - for (key, value) in opencode_credentials.other { - result.other.entry(key).or_insert(value); - } - - result -} - -pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option { - extract_all_credentials(options) - .anthropic - .map(|cred| cred.api_key) -} - -pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option { - extract_all_credentials(options) - .openai - .map(|cred| cred.api_key) -} - -pub fn set_credentials_as_env_vars(credentials: 
&ExtractedCredentials) { - if let Some(cred) = &credentials.anthropic { - std::env::set_var("ANTHROPIC_API_KEY", &cred.api_key); - } - if let Some(cred) = &credentials.openai { - std::env::set_var("OPENAI_API_KEY", &cred.api_key); - } -} - -fn read_json_file(path: &Path) -> Option { - let contents = fs::read_to_string(path).ok()?; - serde_json::from_str(&contents).ok() -} - -fn read_string_field(value: &Value, path: &[&str]) -> Option { - let mut current = value; - for key in path { - current = current.get(*key)?; - } - current.as_str().map(|s| s.to_string()) -} - -fn default_home_dir() -> PathBuf { - dirs::home_dir().unwrap_or_else(|| PathBuf::from(".")) -} - -fn current_epoch_millis() -> i64 { - let now = OffsetDateTime::now_utc(); - (now.unix_timestamp() * 1000) + (now.millisecond() as i64) -} - -fn is_expired_rfc3339(value: &str) -> bool { - match OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339) { - Ok(expiry) => expiry < OffsetDateTime::now_utc(), - Err(_) => false, - } -} +pub use sandbox_daemon_agent_credentials::*; diff --git a/engine/packages/sandbox-daemon/src/lib.rs b/engine/packages/sandbox-daemon/src/lib.rs index 94429d4..2fbc535 100644 --- a/engine/packages/sandbox-daemon/src/lib.rs +++ b/engine/packages/sandbox-daemon/src/lib.rs @@ -1,5 +1,4 @@ //! Sandbox daemon core utilities. 
-pub mod agents; pub mod credentials; pub mod router; diff --git a/engine/packages/sandbox-daemon/tests/agents.rs b/engine/packages/sandbox-daemon/tests/agents.rs index f9c5382..b5de6f7 100644 --- a/engine/packages/sandbox-daemon/tests/agents.rs +++ b/engine/packages/sandbox-daemon/tests/agents.rs @@ -1,7 +1,11 @@ use std::collections::HashMap; -use sandbox_daemon_core::agents::{AgentId, AgentManager, InstallOptions, SpawnOptions}; -use sandbox_daemon_core::credentials::{extract_all_credentials, CredentialExtractionOptions}; +use sandbox_daemon_agent_management::agents::{ + AgentError, AgentId, AgentManager, InstallOptions, SpawnOptions, +}; +use sandbox_daemon_agent_management::credentials::{ + extract_all_credentials, CredentialExtractionOptions, +}; fn build_env() -> HashMap { let options = CredentialExtractionOptions::new(); @@ -21,6 +25,10 @@ fn amp_configured() -> bool { home.join(".amp").join("config.json").exists() } +fn prompt_ok(label: &str) -> String { + format!("Respond with exactly the text {label} and nothing else.") +} + #[test] fn test_agents_install_version_spawn() -> Result<(), Box> { let temp_dir = tempfile::tempdir()?; @@ -32,11 +40,19 @@ fn test_agents_install_version_spawn() -> Result<(), Box> for agent in agents { let install = manager.install(agent, InstallOptions::default())?; assert!(install.path.exists(), "expected install for {agent}"); + assert!(manager.is_installed(agent), "expected is_installed for {agent}"); + manager.install( + agent, + InstallOptions { + reinstall: true, + version: None, + }, + )?; let version = manager.version(agent)?; assert!(version.is_some(), "expected version for {agent}"); if agent != AgentId::Amp || amp_configured() { - let mut spawn = SpawnOptions::new("Respond with exactly the text OK and nothing else."); + let mut spawn = SpawnOptions::new(prompt_ok("OK")); spawn.env = env.clone(); let result = manager.spawn(agent, spawn)?; assert!( @@ -44,8 +60,53 @@ fn test_agents_install_version_spawn() -> Result<(), 
Box> "spawn failed for {agent}: {}", result.stderr ); - let output = format!("{}{}", result.stdout, result.stderr); + assert!( + !result.events.is_empty(), + "expected events for {agent} but got none" + ); + assert!( + result.session_id.is_some(), + "expected session id for {agent}" + ); + let combined = format!("{}{}", result.stdout, result.stderr); + let output = result.result.clone().unwrap_or(combined); assert!(output.contains("OK"), "expected OK for {agent}, got: {output}"); + + if agent == AgentId::Claude || agent == AgentId::Opencode || (agent == AgentId::Amp && amp_configured()) { + let mut resume = SpawnOptions::new(prompt_ok("OK2")); + resume.env = env.clone(); + resume.session_id = result.session_id.clone(); + let resumed = manager.spawn(agent, resume)?; + assert!( + resumed.status.success(), + "resume spawn failed for {agent}: {}", + resumed.stderr + ); + let combined = format!("{}{}", resumed.stdout, resumed.stderr); + let output = resumed.result.clone().unwrap_or(combined); + assert!(output.contains("OK2"), "expected OK2 for {agent}, got: {output}"); + } else if agent == AgentId::Codex { + let mut resume = SpawnOptions::new(prompt_ok("OK2")); + resume.env = env.clone(); + resume.session_id = result.session_id.clone(); + let err = manager.spawn(agent, resume).expect_err("expected resume error for codex"); + assert!(matches!(err, AgentError::ResumeUnsupported { .. 
})); + } + + if agent == AgentId::Claude || agent == AgentId::Codex { + let mut plan = SpawnOptions::new(prompt_ok("OK3")); + plan.env = env.clone(); + plan.permission_mode = Some("plan".to_string()); + let planned = manager.spawn(agent, plan)?; + assert!( + planned.status.success(), + "plan spawn failed for {agent}: {}", + planned.stderr + ); + let combined = format!("{}{}", planned.stdout, planned.stderr); + let output = planned.result.clone().unwrap_or(combined); + assert!(output.contains("OK3"), "expected OK3 for {agent}, got: {output}"); + } } } diff --git a/engine/packages/universal-agent-schema/Cargo.toml b/engine/packages/universal-agent-schema/Cargo.toml index e47b4b0..dde308a 100644 --- a/engine/packages/universal-agent-schema/Cargo.toml +++ b/engine/packages/universal-agent-schema/Cargo.toml @@ -7,4 +7,6 @@ edition = "2021" sandbox-daemon-agent-schema = { path = "../agent-schema" } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" +schemars = "0.8" thiserror = "1.0" +utoipa = { version = "4.2", features = ["axum_extras"] } diff --git a/engine/packages/universal-agent-schema/src/lib.rs b/engine/packages/universal-agent-schema/src/lib.rs index c438f49..28c3632 100644 --- a/engine/packages/universal-agent-schema/src/lib.rs +++ b/engine/packages/universal-agent-schema/src/lib.rs @@ -1,10 +1,12 @@ use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; +use schemars::JsonSchema; use thiserror::Error; +use utoipa::ToSchema; pub use sandbox_daemon_agent_schema::{amp, claude, codex, opencode}; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct UniversalEvent { pub id: u64, @@ -16,7 +18,7 @@ pub struct UniversalEvent { pub data: UniversalEventData, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(untagged)] pub enum UniversalEventData { 
Message { message: UniversalMessage }, @@ -33,7 +35,7 @@ pub enum UniversalEventData { Unknown { raw: Value }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct Started { #[serde(default, skip_serializing_if = "Option::is_none")] @@ -42,7 +44,7 @@ pub struct Started { pub details: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct CrashInfo { pub message: String, @@ -52,7 +54,7 @@ pub struct CrashInfo { pub details: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] pub struct UniversalMessageParsed { pub role: String, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -62,7 +64,7 @@ pub struct UniversalMessageParsed { pub parts: Vec, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(untagged)] pub enum UniversalMessage { Parsed(UniversalMessageParsed), @@ -73,7 +75,7 @@ pub enum UniversalMessage { }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(tag = "type", rename_all = "snake_case")] pub enum UniversalMessagePart { Text { text: String }, @@ -134,7 +136,7 @@ pub enum UniversalMessagePart { Unknown { raw: Value }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(tag = "type", rename_all = "snake_case")] pub enum AttachmentSource { Path { path: String }, @@ -146,7 +148,7 @@ pub enum AttachmentSource { }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct QuestionRequest { pub id: 
String, @@ -156,7 +158,7 @@ pub struct QuestionRequest { pub tool: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct QuestionInfo { pub question: String, @@ -169,7 +171,7 @@ pub struct QuestionInfo { pub custom: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct QuestionOption { pub label: String, @@ -177,14 +179,14 @@ pub struct QuestionOption { pub description: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct QuestionToolRef { pub message_id: String, pub call_id: String, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct PermissionRequest { pub id: String, @@ -198,7 +200,7 @@ pub struct PermissionRequest { pub tool: Option, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct PermissionToolRef { pub message_id: String, @@ -261,17 +263,47 @@ fn message_from_parts(role: &str, parts: Vec) -> Universal }) } -fn message_parts_to_text(parts: &[UniversalMessagePart]) -> Option { +fn text_only_from_parts(parts: &[UniversalMessagePart]) -> Result { let mut text = String::new(); for part in parts { - if let UniversalMessagePart::Text { text: part_text } = part { - if !text.is_empty() { - text.push_str("\n"); + match part { + UniversalMessagePart::Text { text: part_text } => { + if !text.is_empty() { + text.push_str("\n"); + } + text.push_str(part_text); + } + UniversalMessagePart::ToolCall { .. 
} => { + return Err(ConversionError::Unsupported("tool call part")) + } + UniversalMessagePart::ToolResult { .. } => { + return Err(ConversionError::Unsupported("tool result part")) + } + UniversalMessagePart::FunctionCall { .. } => { + return Err(ConversionError::Unsupported("function call part")) + } + UniversalMessagePart::FunctionResult { .. } => { + return Err(ConversionError::Unsupported("function result part")) + } + UniversalMessagePart::File { .. } => { + return Err(ConversionError::Unsupported("file part")) + } + UniversalMessagePart::Image { .. } => { + return Err(ConversionError::Unsupported("image part")) + } + UniversalMessagePart::Error { .. } => { + return Err(ConversionError::Unsupported("error part")) + } + UniversalMessagePart::Unknown { .. } => { + return Err(ConversionError::Unsupported("unknown part")) } - text.push_str(part_text); } } - if text.is_empty() { None } else { Some(text) } + if text.is_empty() { + Err(ConversionError::MissingField("text part")) + } else { + Ok(text) + } } fn extract_message_from_value(value: &Value) -> Option { @@ -293,27 +325,37 @@ pub mod convert_opencode { pub fn event_to_universal(event: &opencode::Event) -> EventConversion { match event { opencode::Event::MessageUpdated(updated) => { - let (message, session_id) = message_from_opencode(&updated.properties.info); + let opencode::EventMessageUpdated { properties, type_: _ } = updated; + let opencode::EventMessageUpdatedProperties { info } = properties; + let (message, session_id) = message_from_opencode(info); EventConversion::new(UniversalEventData::Message { message }) .with_session(session_id) } opencode::Event::MessagePartUpdated(updated) => { - let (message, session_id) = part_to_message(&updated.properties.part); + let opencode::EventMessagePartUpdated { properties, type_: _ } = updated; + let opencode::EventMessagePartUpdatedProperties { part, delta } = properties; + let (message, session_id) = part_to_message(part, delta.as_ref()); 
EventConversion::new(UniversalEventData::Message { message }) .with_session(session_id) } opencode::Event::QuestionAsked(asked) => { - let question = question_request_from_opencode(&asked.properties); + let opencode::EventQuestionAsked { properties, type_: _ } = asked; + let question = question_request_from_opencode(properties); + let session_id = question.session_id.clone(); EventConversion::new(UniversalEventData::QuestionAsked { question_asked: question }) - .with_session(Some(String::from(asked.properties.session_id.clone()))) + .with_session(Some(session_id)) } opencode::Event::PermissionAsked(asked) => { - let permission = permission_request_from_opencode(&asked.properties); + let opencode::EventPermissionAsked { properties, type_: _ } = asked; + let permission = permission_request_from_opencode(properties); + let session_id = permission.session_id.clone(); EventConversion::new(UniversalEventData::PermissionAsked { permission_asked: permission }) - .with_session(Some(String::from(asked.properties.session_id.clone()))) + .with_session(Some(session_id)) } opencode::Event::SessionCreated(created) => { - let details = serde_json::to_value(created).ok(); + let opencode::EventSessionCreated { properties, type_: _ } = created; + let opencode::EventSessionCreatedProperties { info } = properties; + let details = serde_json::to_value(info).ok(); let started = Started { message: Some("session.created".to_string()), details, @@ -321,15 +363,20 @@ pub mod convert_opencode { EventConversion::new(UniversalEventData::Started { started }) } opencode::Event::SessionError(error) => { - let message = extract_message_from_value(&serde_json::to_value(&error.properties).unwrap_or(Value::Null)) + let opencode::EventSessionError { properties, type_: _ } = error; + let opencode::EventSessionErrorProperties { + error: _error, + session_id, + } = properties; + let message = extract_message_from_value(&serde_json::to_value(properties).unwrap_or(Value::Null)) .unwrap_or_else(|| "opencode 
session error".to_string()); let crash = CrashInfo { message, kind: Some("session.error".to_string()), - details: serde_json::to_value(&error.properties).ok(), + details: serde_json::to_value(properties).ok(), }; EventConversion::new(UniversalEventData::Error { error: crash }) - .with_session(error.properties.session_id.clone()) + .with_session(session_id.clone()) } _ => EventConversion::new(UniversalEventData::Unknown { raw: serde_json::to_value(event).unwrap_or(Value::Null), @@ -370,17 +417,18 @@ pub mod convert_opencode { for part in &parsed.parts { match part { UniversalMessagePart::Text { text } => { - parts.push(opencode::TextPartInput { - id: None, - ignored: None, - metadata: Map::new(), - synthetic: None, - text: text.clone(), - time: None, - type_: "text".to_string(), - }); + parts.push(text_part_input_from_text(text)); + } + UniversalMessagePart::ToolCall { .. } + | UniversalMessagePart::ToolResult { .. } + | UniversalMessagePart::FunctionCall { .. } + | UniversalMessagePart::FunctionResult { .. } + | UniversalMessagePart::File { .. } + | UniversalMessagePart::Image { .. } + | UniversalMessagePart::Error { .. } + | UniversalMessagePart::Unknown { .. } => { + return Err(ConversionError::Unsupported("non-text part")) } - _ => return Err(ConversionError::Unsupported("non-text part")), } } if parts.is_empty() { @@ -389,166 +437,765 @@ pub mod convert_opencode { Ok(parts) } + #[derive(Debug, Clone, Serialize, Deserialize)] + #[serde(untagged)] + pub enum OpencodePartInput { + Text(opencode::TextPartInput), + File(opencode::FilePartInput), + } + + pub fn universal_message_to_part_inputs( + message: &UniversalMessage, + ) -> Result, ConversionError> { + let parsed = match message { + UniversalMessage::Parsed(parsed) => parsed, + UniversalMessage::Unparsed { .. 
} => { + return Err(ConversionError::Unsupported("unparsed message")) + } + }; + universal_parts_to_part_inputs(&parsed.parts) + } + + pub fn universal_parts_to_part_inputs( + parts: &[UniversalMessagePart], + ) -> Result, ConversionError> { + let mut inputs = Vec::new(); + for part in parts { + inputs.push(universal_part_to_opencode_input(part)?); + } + if inputs.is_empty() { + return Err(ConversionError::MissingField("parts")); + } + Ok(inputs) + } + + pub fn universal_part_to_opencode_input( + part: &UniversalMessagePart, + ) -> Result { + match part { + UniversalMessagePart::Text { text } => Ok(OpencodePartInput::Text( + text_part_input_from_text(text), + )), + UniversalMessagePart::File { + source, + mime_type, + filename, + .. + } => Ok(OpencodePartInput::File(file_part_input_from_universal( + source, + mime_type.as_deref(), + filename.as_ref(), + )?)), + UniversalMessagePart::Image { + source, mime_type, .. + } => Ok(OpencodePartInput::File(file_part_input_from_universal( + source, + mime_type.as_deref(), + None, + )?)), + UniversalMessagePart::ToolCall { .. } + | UniversalMessagePart::ToolResult { .. } + | UniversalMessagePart::FunctionCall { .. } + | UniversalMessagePart::FunctionResult { .. } + | UniversalMessagePart::Error { .. } + | UniversalMessagePart::Unknown { .. 
} => { + Err(ConversionError::Unsupported("unsupported part")) + } + } + } + + fn text_part_input_from_text(text: &str) -> opencode::TextPartInput { + opencode::TextPartInput { + id: None, + ignored: None, + metadata: Map::new(), + synthetic: None, + text: text.to_string(), + time: None, + type_: "text".to_string(), + } + } + pub fn text_part_input_to_universal(part: &opencode::TextPartInput) -> UniversalMessage { - let mut metadata = part.metadata.clone(); - if let Some(id) = &part.id { + let opencode::TextPartInput { + id, + ignored, + metadata, + synthetic, + text, + time, + type_, + } = part; + let mut metadata = metadata.clone(); + if let Some(id) = id { metadata.insert("partId".to_string(), Value::String(id.clone())); } + if let Some(ignored) = ignored { + metadata.insert("ignored".to_string(), Value::Bool(*ignored)); + } + if let Some(synthetic) = synthetic { + metadata.insert("synthetic".to_string(), Value::Bool(*synthetic)); + } + if let Some(time) = time { + metadata.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + } + metadata.insert("type".to_string(), Value::String(type_.clone())); UniversalMessage::Parsed(UniversalMessageParsed { role: "user".to_string(), id: None, metadata, - parts: vec![UniversalMessagePart::Text { - text: part.text.clone(), - }], + parts: vec![UniversalMessagePart::Text { text: text.clone() }], }) } + fn file_part_input_from_universal( + source: &AttachmentSource, + mime_type: Option<&str>, + filename: Option<&String>, + ) -> Result { + let mime = mime_type.ok_or(ConversionError::MissingField("mime_type"))?; + let url = attachment_source_to_opencode_url(source, mime)?; + Ok(opencode::FilePartInput { + filename: filename.cloned(), + id: None, + mime: mime.to_string(), + source: None, + type_: "file".to_string(), + url, + }) + } + + fn attachment_source_to_opencode_url( + source: &AttachmentSource, + mime_type: &str, + ) -> Result { + match source { + AttachmentSource::Url { url } => 
Ok(url.clone()), + AttachmentSource::Path { path } => Ok(format!("file://{}", path)), + AttachmentSource::Data { data, encoding } => { + let encoding = encoding.as_deref().unwrap_or("base64"); + if encoding != "base64" { + return Err(ConversionError::Unsupported("opencode data encoding")); + } + Ok(format!("data:{};base64,{}", mime_type, data)) + } + } + } + fn message_from_opencode(message: &opencode::Message) -> (UniversalMessage, Option) { match message { opencode::Message::UserMessage(user) => { + let opencode::UserMessage { + agent, + id, + model, + role, + session_id, + summary, + system, + time, + tools, + variant, + } = user; let mut metadata = Map::new(); - metadata.insert("agent".to_string(), Value::String(user.agent.clone())); + metadata.insert("agent".to_string(), Value::String(agent.clone())); + metadata.insert( + "model".to_string(), + serde_json::to_value(model).unwrap_or(Value::Null), + ); + metadata.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + metadata.insert( + "tools".to_string(), + serde_json::to_value(tools).unwrap_or(Value::Null), + ); + if let Some(summary) = summary { + metadata.insert( + "summary".to_string(), + serde_json::to_value(summary).unwrap_or(Value::Null), + ); + } + if let Some(system) = system { + metadata.insert("system".to_string(), Value::String(system.clone())); + } + if let Some(variant) = variant { + metadata.insert("variant".to_string(), Value::String(variant.clone())); + } let parsed = UniversalMessageParsed { - role: user.role.clone(), - id: Some(user.id.clone()), + role: role.clone(), + id: Some(id.clone()), metadata, parts: Vec::new(), }; ( UniversalMessage::Parsed(parsed), - Some(user.session_id.clone()), + Some(session_id.clone()), ) } opencode::Message::AssistantMessage(assistant) => { + let opencode::AssistantMessage { + agent, + cost, + error, + finish, + id, + mode, + model_id, + parent_id, + path, + provider_id, + role, + session_id, + summary, + time, + tokens, + } = 
assistant; let mut metadata = Map::new(); - metadata.insert("agent".to_string(), Value::String(assistant.agent.clone())); + metadata.insert("agent".to_string(), Value::String(agent.clone())); + metadata.insert( + "cost".to_string(), + serde_json::to_value(cost).unwrap_or(Value::Null), + ); + metadata.insert("mode".to_string(), Value::String(mode.clone())); + metadata.insert("modelId".to_string(), Value::String(model_id.clone())); + metadata.insert("providerId".to_string(), Value::String(provider_id.clone())); + metadata.insert("parentId".to_string(), Value::String(parent_id.clone())); + metadata.insert( + "path".to_string(), + serde_json::to_value(path).unwrap_or(Value::Null), + ); + metadata.insert( + "tokens".to_string(), + serde_json::to_value(tokens).unwrap_or(Value::Null), + ); + metadata.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + if let Some(error) = error { + metadata.insert( + "error".to_string(), + serde_json::to_value(error).unwrap_or(Value::Null), + ); + } + if let Some(finish) = finish { + metadata.insert("finish".to_string(), Value::String(finish.clone())); + } + if let Some(summary) = summary { + metadata.insert( + "summary".to_string(), + serde_json::to_value(summary).unwrap_or(Value::Null), + ); + } let parsed = UniversalMessageParsed { - role: assistant.role.clone(), - id: Some(assistant.id.clone()), + role: role.clone(), + id: Some(id.clone()), metadata, parts: Vec::new(), }; ( UniversalMessage::Parsed(parsed), - Some(assistant.session_id.clone()), + Some(session_id.clone()), ) } } } - fn part_to_message(part: &opencode::Part) -> (UniversalMessage, Option) { + fn part_to_message(part: &opencode::Part, delta: Option<&String>) -> (UniversalMessage, Option) { match part { opencode::Part::Variant0(text_part) => { - let mut metadata = Map::new(); - metadata.insert("messageId".to_string(), Value::String(text_part.message_id.clone())); - metadata.insert("partId".to_string(), 
Value::String(text_part.id.clone())); + let opencode::TextPart { + id, + ignored, + message_id, + metadata, + session_id, + synthetic, + text, + time, + type_, + } = text_part; + let mut part_metadata = base_part_metadata(message_id, id, delta); + part_metadata.insert("type".to_string(), Value::String(type_.clone())); + if let Some(ignored) = ignored { + part_metadata.insert("ignored".to_string(), Value::Bool(*ignored)); + } + if let Some(synthetic) = synthetic { + part_metadata.insert("synthetic".to_string(), Value::Bool(*synthetic)); + } + if let Some(time) = time { + part_metadata.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + } + if !metadata.is_empty() { + part_metadata.insert( + "partMetadata".to_string(), + Value::Object(metadata.clone()), + ); + } let parsed = UniversalMessageParsed { role: "assistant".to_string(), - id: Some(text_part.message_id.clone()), - metadata, - parts: vec![UniversalMessagePart::Text { - text: text_part.text.clone(), - }], + id: Some(message_id.clone()), + metadata: part_metadata, + parts: vec![UniversalMessagePart::Text { text: text.clone() }], }; - ( - UniversalMessage::Parsed(parsed), - Some(text_part.session_id.clone()), + (UniversalMessage::Parsed(parsed), Some(session_id.clone())) + } + opencode::Part::Variant1 { + agent: _agent, + command: _command, + description: _description, + id, + message_id, + model: _model, + prompt: _prompt, + session_id, + type_: _type, + } => unknown_part_message(message_id, id, session_id, serde_json::to_value(part).unwrap_or(Value::Null), delta), + opencode::Part::Variant2(reasoning_part) => { + let opencode::ReasoningPart { + id, + message_id, + metadata: _metadata, + session_id, + text: _text, + time: _time, + type_: _type, + } = reasoning_part; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(reasoning_part).unwrap_or(Value::Null), + delta, ) } + opencode::Part::Variant3(file_part) => { + let opencode::FilePart { + 
filename: _filename, + id, + message_id, + mime: _mime, + session_id, + source: _source, + type_: _type, + url: _url, + } = file_part; + let part_metadata = base_part_metadata(message_id, id, delta); + let part = file_part_to_universal_part(file_part); + let parsed = UniversalMessageParsed { + role: "assistant".to_string(), + id: Some(message_id.clone()), + metadata: part_metadata, + parts: vec![part], + }; + (UniversalMessage::Parsed(parsed), Some(session_id.clone())) + } opencode::Part::Variant4(tool_part) => { - let mut metadata = Map::new(); - metadata.insert("messageId".to_string(), Value::String(tool_part.message_id.clone())); - metadata.insert("partId".to_string(), Value::String(tool_part.id.clone())); - let parts = tool_state_to_parts(&tool_part); + let opencode::ToolPart { + call_id, + id, + message_id, + metadata, + session_id, + state, + tool, + type_, + } = tool_part; + let mut part_metadata = base_part_metadata(message_id, id, delta); + part_metadata.insert("type".to_string(), Value::String(type_.clone())); + part_metadata.insert("callId".to_string(), Value::String(call_id.clone())); + part_metadata.insert("tool".to_string(), Value::String(tool.clone())); + if !metadata.is_empty() { + part_metadata.insert( + "partMetadata".to_string(), + Value::Object(metadata.clone()), + ); + } + let (mut parts, state_meta) = tool_state_to_parts(call_id, tool, state); + if let Some(state_meta) = state_meta { + part_metadata.insert("toolState".to_string(), state_meta); + } let parsed = UniversalMessageParsed { role: "assistant".to_string(), - id: Some(tool_part.message_id.clone()), - metadata, - parts, + id: Some(message_id.clone()), + metadata: part_metadata, + parts: parts.drain(..).collect(), }; - ( - UniversalMessage::Parsed(parsed), - Some(tool_part.session_id.clone()), + (UniversalMessage::Parsed(parsed), Some(session_id.clone())) + } + opencode::Part::Variant5(step_start) => { + let opencode::StepStartPart { + id, + message_id, + session_id, + snapshot: 
_snapshot, + type_: _type, + } = step_start; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(step_start).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant6(step_finish) => { + let opencode::StepFinishPart { + cost: _cost, + id, + message_id, + reason: _reason, + session_id, + snapshot: _snapshot, + tokens: _tokens, + type_: _type, + } = step_finish; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(step_finish).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant7(snapshot_part) => { + let opencode::SnapshotPart { + id, + message_id, + session_id, + snapshot: _snapshot, + type_: _type, + } = snapshot_part; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(snapshot_part).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant8(patch_part) => { + let opencode::PatchPart { + files: _files, + hash: _hash, + id, + message_id, + session_id, + type_: _type, + } = patch_part; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(patch_part).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant9(agent_part) => { + let opencode::AgentPart { + id, + message_id, + name: _name, + session_id, + source: _source, + type_: _type, + } = agent_part; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(agent_part).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant10(retry_part) => { + let opencode::RetryPart { + attempt: _attempt, + error: _error, + id, + message_id, + session_id, + time: _time, + type_: _type, + } = retry_part; + unknown_part_message( + message_id, + id, + session_id, + serde_json::to_value(retry_part).unwrap_or(Value::Null), + delta, + ) + } + opencode::Part::Variant11(compaction_part) => { + let opencode::CompactionPart { + auto: _auto, + id, + message_id, + session_id, + type_: _type, + } = compaction_part; + unknown_part_message( + message_id, + 
id, + session_id, + serde_json::to_value(compaction_part).unwrap_or(Value::Null), + delta, ) } - _ => ( - UniversalMessage::Unparsed { - raw: serde_json::to_value(part).unwrap_or(Value::Null), - error: Some("unsupported opencode part".to_string()), - }, - None, - ), } } - fn tool_state_to_parts(tool_part: &opencode::ToolPart) -> Vec { - match &tool_part.state { - opencode::ToolState::Pending(state) => vec![UniversalMessagePart::ToolCall { - id: Some(tool_part.call_id.clone()), - name: tool_part.tool.clone(), - input: Value::Object(state.input.clone()), - }], - opencode::ToolState::Running(state) => vec![UniversalMessagePart::ToolCall { - id: Some(tool_part.call_id.clone()), - name: tool_part.tool.clone(), - input: Value::Object(state.input.clone()), - }], - opencode::ToolState::Completed(state) => vec![UniversalMessagePart::ToolResult { - id: Some(tool_part.call_id.clone()), - name: Some(tool_part.tool.clone()), - output: Value::String(state.output.clone()), - is_error: Some(false), - }], - opencode::ToolState::Error(state) => vec![UniversalMessagePart::ToolResult { - id: Some(tool_part.call_id.clone()), - name: Some(tool_part.tool.clone()), - output: Value::String(state.error.clone()), - is_error: Some(true), - }], + fn base_part_metadata(message_id: &str, part_id: &str, delta: Option<&String>) -> Map { + let mut metadata = Map::new(); + metadata.insert("messageId".to_string(), Value::String(message_id.to_string())); + metadata.insert("partId".to_string(), Value::String(part_id.to_string())); + if let Some(delta) = delta { + metadata.insert("delta".to_string(), Value::String(delta.clone())); + } + metadata + } + + fn unknown_part_message( + message_id: &str, + part_id: &str, + session_id: &str, + raw: Value, + delta: Option<&String>, + ) -> (UniversalMessage, Option) { + let metadata = base_part_metadata(message_id, part_id, delta); + let parsed = UniversalMessageParsed { + role: "assistant".to_string(), + id: Some(message_id.to_string()), + metadata, + parts: 
vec![UniversalMessagePart::Unknown { raw }], + }; + (UniversalMessage::Parsed(parsed), Some(session_id.to_string())) + } + + fn file_part_to_universal_part(file_part: &opencode::FilePart) -> UniversalMessagePart { + let opencode::FilePart { + filename, + id: _id, + message_id: _message_id, + mime, + session_id: _session_id, + source: _source, + type_: _type, + url, + } = file_part; + let raw = serde_json::to_value(file_part).unwrap_or(Value::Null); + let source = AttachmentSource::Url { url: url.clone() }; + if mime.starts_with("image/") { + UniversalMessagePart::Image { + source, + mime_type: Some(mime.clone()), + alt: filename.clone(), + raw: Some(raw), + } + } else { + UniversalMessagePart::File { + source, + mime_type: Some(mime.clone()), + filename: filename.clone(), + raw: Some(raw), + } + } + } + + fn tool_state_to_parts( + call_id: &str, + tool: &str, + state: &opencode::ToolState, + ) -> (Vec, Option) { + match state { + opencode::ToolState::Pending(state) => { + let opencode::ToolStatePending { input, raw, status } = state; + let mut meta = Map::new(); + meta.insert("status".to_string(), Value::String(status.clone())); + meta.insert("raw".to_string(), Value::String(raw.clone())); + meta.insert("input".to_string(), Value::Object(input.clone())); + ( + vec![UniversalMessagePart::ToolCall { + id: Some(call_id.to_string()), + name: tool.to_string(), + input: Value::Object(input.clone()), + }], + Some(Value::Object(meta)), + ) + } + opencode::ToolState::Running(state) => { + let opencode::ToolStateRunning { + input, + metadata, + status, + time, + title, + } = state; + let mut meta = Map::new(); + meta.insert("status".to_string(), Value::String(status.clone())); + meta.insert("input".to_string(), Value::Object(input.clone())); + meta.insert("metadata".to_string(), Value::Object(metadata.clone())); + meta.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + if let Some(title) = title { + meta.insert("title".to_string(), 
Value::String(title.clone())); + } + ( + vec![UniversalMessagePart::ToolCall { + id: Some(call_id.to_string()), + name: tool.to_string(), + input: Value::Object(input.clone()), + }], + Some(Value::Object(meta)), + ) + } + opencode::ToolState::Completed(state) => { + let opencode::ToolStateCompleted { + attachments, + input, + metadata, + output, + status, + time, + title, + } = state; + let mut meta = Map::new(); + meta.insert("status".to_string(), Value::String(status.clone())); + meta.insert("input".to_string(), Value::Object(input.clone())); + meta.insert("metadata".to_string(), Value::Object(metadata.clone())); + meta.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + meta.insert("title".to_string(), Value::String(title.clone())); + if !attachments.is_empty() { + meta.insert( + "attachments".to_string(), + serde_json::to_value(attachments).unwrap_or(Value::Null), + ); + } + let mut parts = vec![UniversalMessagePart::ToolResult { + id: Some(call_id.to_string()), + name: Some(tool.to_string()), + output: Value::String(output.clone()), + is_error: Some(false), + }]; + for attachment in attachments { + parts.push(file_part_to_universal_part(attachment)); + } + (parts, Some(Value::Object(meta))) + } + opencode::ToolState::Error(state) => { + let opencode::ToolStateError { + error, + input, + metadata, + status, + time, + } = state; + let mut meta = Map::new(); + meta.insert("status".to_string(), Value::String(status.clone())); + meta.insert("error".to_string(), Value::String(error.clone())); + meta.insert("input".to_string(), Value::Object(input.clone())); + meta.insert("metadata".to_string(), Value::Object(metadata.clone())); + meta.insert( + "time".to_string(), + serde_json::to_value(time).unwrap_or(Value::Null), + ); + ( + vec![UniversalMessagePart::ToolResult { + id: Some(call_id.to_string()), + name: Some(tool.to_string()), + output: Value::String(error.clone()), + is_error: Some(true), + }], + Some(Value::Object(meta)), + 
) + } } } fn question_request_from_opencode(request: &opencode::QuestionRequest) -> QuestionRequest { + let opencode::QuestionRequest { + id, + questions, + session_id, + tool, + } = request; QuestionRequest { - id: String::from(request.id.clone()), - session_id: String::from(request.session_id.clone()), - questions: request - .questions + id: id.clone().into(), + session_id: session_id.clone().into(), + questions: questions .iter() - .map(|question| QuestionInfo { - question: question.question.clone(), - header: Some(question.header.clone()), - options: question - .options - .iter() - .map(|opt| QuestionOption { - label: opt.label.clone(), - description: Some(opt.description.clone()), - }) - .collect(), - multi_select: question.multiple, - custom: question.custom, + .map(|question| { + let opencode::QuestionInfo { + custom, + header, + multiple, + options, + question, + } = question; + QuestionInfo { + question: question.clone(), + header: Some(header.clone()), + options: options + .iter() + .map(|opt| { + let opencode::QuestionOption { description, label } = opt; + QuestionOption { + label: label.clone(), + description: Some(description.clone()), + } + }) + .collect(), + multi_select: *multiple, + custom: *custom, + } }) .collect(), - tool: request.tool.as_ref().map(|tool| QuestionToolRef { - message_id: tool.message_id.clone(), - call_id: tool.call_id.clone(), + tool: tool.as_ref().map(|tool| { + let opencode::QuestionRequestTool { message_id, call_id } = tool; + QuestionToolRef { + message_id: message_id.clone(), + call_id: call_id.clone(), + } }), } } fn permission_request_from_opencode(request: &opencode::PermissionRequest) -> PermissionRequest { + let opencode::PermissionRequest { + always, + id, + metadata, + patterns, + permission, + session_id, + tool, + } = request; PermissionRequest { - id: String::from(request.id.clone()), - session_id: String::from(request.session_id.clone()), - permission: request.permission.clone(), - patterns: 
request.patterns.clone(), - metadata: request.metadata.clone(), - always: request.always.clone(), - tool: request.tool.as_ref().map(|tool| PermissionToolRef { - message_id: tool.message_id.clone(), - call_id: tool.call_id.clone(), + id: id.clone().into(), + session_id: session_id.clone().into(), + permission: permission.clone(), + patterns: patterns.clone(), + metadata: metadata.clone(), + always: always.clone(), + tool: tool.as_ref().map(|tool| { + let opencode::PermissionRequestTool { message_id, call_id } = tool; + PermissionToolRef { + message_id: message_id.clone(), + call_id: call_id.clone(), + } }), } } @@ -617,20 +1264,26 @@ pub mod convert_codex { use super::*; pub fn event_to_universal(event: &codex::ThreadEvent) -> EventConversion { - match event.type_ { + let codex::ThreadEvent { + error, + item, + thread_id, + type_, + } = event; + match type_ { codex::ThreadEventType::ThreadCreated | codex::ThreadEventType::ThreadUpdated => { let started = Started { - message: Some(event.type_.to_string()), + message: Some(type_.to_string()), details: serde_json::to_value(event).ok(), }; EventConversion::new(UniversalEventData::Started { started }) - .with_session(event.thread_id.clone()) + .with_session(thread_id.clone()) } codex::ThreadEventType::ItemCreated | codex::ThreadEventType::ItemUpdated => { - if let Some(item) = &event.item { + if let Some(item) = item.as_ref() { let message = thread_item_to_message(item); EventConversion::new(UniversalEventData::Message { message }) - .with_session(event.thread_id.clone()) + .with_session(thread_id.clone()) } else { EventConversion::new(UniversalEventData::Unknown { raw: serde_json::to_value(event).unwrap_or(Value::Null), @@ -638,15 +1291,15 @@ pub mod convert_codex { } } codex::ThreadEventType::Error => { - let message = extract_message_from_value(&Value::Object(event.error.clone())) + let message = extract_message_from_value(&Value::Object(error.clone())) .unwrap_or_else(|| "codex error".to_string()); let crash = 
CrashInfo { message, kind: Some("error".to_string()), - details: Some(Value::Object(event.error.clone())), + details: Some(Value::Object(error.clone())), }; EventConversion::new(UniversalEventData::Error { error: crash }) - .with_session(event.thread_id.clone()) + .with_session(thread_id.clone()) } } } @@ -661,8 +1314,7 @@ pub mod convert_codex { } }; let id = parsed.id.clone().ok_or(ConversionError::MissingField("message.id"))?; - let content = message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part"))?; + let content = text_only_from_parts(&parsed.parts)?; let role = match parsed.role.as_str() { "user" => Some(codex::ThreadItemRole::User), "assistant" => Some(codex::ThreadItemRole::Assistant), @@ -688,12 +1340,13 @@ pub mod convert_codex { } pub fn message_to_universal(message: &codex::Message) -> UniversalMessage { + let codex::Message { role, content } = message; UniversalMessage::Parsed(UniversalMessageParsed { - role: message.role.to_string(), + role: role.to_string(), id: None, metadata: Map::new(), parts: vec![UniversalMessagePart::Text { - text: message.content.clone(), + text: content.clone(), }], }) } @@ -707,8 +1360,7 @@ pub mod convert_codex { return Err(ConversionError::Unsupported("unparsed message")) } }; - let content = message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part"))?; + let content = text_only_from_parts(&parsed.parts)?; Ok(codex::Message { role: match parsed.role.as_str() { "user" => codex::MessageRole::User, @@ -720,26 +1372,176 @@ pub mod convert_codex { }) } + pub fn inputs_to_universal_message(inputs: &[codex::Input], role: &str) -> UniversalMessage { + let parts = inputs.iter().map(input_to_universal_part).collect(); + UniversalMessage::Parsed(UniversalMessageParsed { + role: role.to_string(), + id: None, + metadata: Map::new(), + parts, + }) + } + + pub fn input_to_universal_part(input: &codex::Input) -> UniversalMessagePart { + let codex::Input { + content, + 
mime_type, + path, + type_, + } = input; + let raw = serde_json::to_value(input).unwrap_or(Value::Null); + match type_ { + codex::InputType::Text => match content { + Some(content) => UniversalMessagePart::Text { + text: content.clone(), + }, + None => UniversalMessagePart::Unknown { raw }, + }, + codex::InputType::File => { + let source = if let Some(path) = path { + AttachmentSource::Path { path: path.clone() } + } else if let Some(content) = content { + AttachmentSource::Data { + data: content.clone(), + encoding: None, + } + } else { + return UniversalMessagePart::Unknown { raw }; + }; + UniversalMessagePart::File { + source, + mime_type: mime_type.clone(), + filename: None, + raw: Some(raw), + } + } + codex::InputType::Image => { + let source = if let Some(path) = path { + AttachmentSource::Path { path: path.clone() } + } else if let Some(content) = content { + AttachmentSource::Data { + data: content.clone(), + encoding: None, + } + } else { + return UniversalMessagePart::Unknown { raw }; + }; + UniversalMessagePart::Image { + source, + mime_type: mime_type.clone(), + alt: None, + raw: Some(raw), + } + } + } + } + + pub fn universal_message_to_inputs( + message: &UniversalMessage, + ) -> Result, ConversionError> { + let parsed = match message { + UniversalMessage::Parsed(parsed) => parsed, + UniversalMessage::Unparsed { .. } => { + return Err(ConversionError::Unsupported("unparsed message")) + } + }; + universal_parts_to_inputs(&parsed.parts) + } + + pub fn universal_parts_to_inputs( + parts: &[UniversalMessagePart], + ) -> Result, ConversionError> { + let mut inputs = Vec::new(); + for part in parts { + match part { + UniversalMessagePart::Text { text } => inputs.push(codex::Input { + content: Some(text.clone()), + mime_type: None, + path: None, + type_: codex::InputType::Text, + }), + UniversalMessagePart::File { + source, + mime_type, + .. 
+ } => inputs.push(input_from_attachment(source, mime_type.as_ref(), codex::InputType::File)?), + UniversalMessagePart::Image { + source, mime_type, .. + } => inputs.push(input_from_attachment( + source, + mime_type.as_ref(), + codex::InputType::Image, + )?), + UniversalMessagePart::ToolCall { .. } + | UniversalMessagePart::ToolResult { .. } + | UniversalMessagePart::FunctionCall { .. } + | UniversalMessagePart::FunctionResult { .. } + | UniversalMessagePart::Error { .. } + | UniversalMessagePart::Unknown { .. } => { + return Err(ConversionError::Unsupported("unsupported part")) + } + } + } + if inputs.is_empty() { + return Err(ConversionError::MissingField("parts")); + } + Ok(inputs) + } + + fn input_from_attachment( + source: &AttachmentSource, + mime_type: Option<&String>, + input_type: codex::InputType, + ) -> Result { + match source { + AttachmentSource::Path { path } => Ok(codex::Input { + content: None, + mime_type: mime_type.cloned(), + path: Some(path.clone()), + type_: input_type, + }), + AttachmentSource::Data { data, encoding } => { + if let Some(encoding) = encoding.as_deref() { + if encoding != "base64" { + return Err(ConversionError::Unsupported("codex data encoding")); + } + } + Ok(codex::Input { + content: Some(data.clone()), + mime_type: mime_type.cloned(), + path: None, + type_: input_type, + }) + } + AttachmentSource::Url { .. 
} => Err(ConversionError::Unsupported("codex input url")), + } + } + fn thread_item_to_message(item: &codex::ThreadItem) -> UniversalMessage { + let codex::ThreadItem { + content, + id, + role, + status, + type_, + } = item; let mut metadata = Map::new(); - metadata.insert("itemType".to_string(), Value::String(item.type_.to_string())); - let role = item - .role + metadata.insert("itemType".to_string(), Value::String(type_.to_string())); + if let Some(status) = status { + metadata.insert("status".to_string(), Value::String(status.to_string())); + } + let role = role .as_ref() .map(|role| role.to_string()) .unwrap_or_else(|| "assistant".to_string()); - let parts = match item.type_ { - codex::ThreadItemType::Message => message_parts_from_codex_content(&item.content), - codex::ThreadItemType::FunctionCall => { - vec![function_call_part_from_codex(item)] - } - codex::ThreadItemType::FunctionResult => { - vec![function_result_part_from_codex(item)] - } + let parts = match type_ { + codex::ThreadItemType::Message => message_parts_from_codex_content(content), + codex::ThreadItemType::FunctionCall => vec![function_call_part_from_codex(id, content)], + codex::ThreadItemType::FunctionResult => vec![function_result_part_from_codex(id, content)], }; UniversalMessage::Parsed(UniversalMessageParsed { role, - id: Some(item.id.clone()), + id: Some(id.clone()), metadata, parts, }) @@ -761,27 +1563,33 @@ pub mod convert_codex { } } - fn function_call_part_from_codex(item: &codex::ThreadItem) -> UniversalMessagePart { - let raw = thread_item_content_to_value(&item.content); + fn function_call_part_from_codex( + item_id: &str, + content: &Option, + ) -> UniversalMessagePart { + let raw = thread_item_content_to_value(content); let name = extract_object_field(&raw, "name"); let arguments = extract_object_value(&raw, "arguments").unwrap_or_else(|| raw.clone()); UniversalMessagePart::FunctionCall { - id: Some(item.id.clone()), + id: Some(item_id.to_string()), name, arguments, raw: 
Some(raw), } } - fn function_result_part_from_codex(item: &codex::ThreadItem) -> UniversalMessagePart { - let raw = thread_item_content_to_value(&item.content); + fn function_result_part_from_codex( + item_id: &str, + content: &Option, + ) -> UniversalMessagePart { + let raw = thread_item_content_to_value(content); let name = extract_object_field(&raw, "name"); let result = extract_object_value(&raw, "result") .or_else(|| extract_object_value(&raw, "output")) .or_else(|| extract_object_value(&raw, "content")) .unwrap_or_else(|| raw.clone()); UniversalMessagePart::FunctionResult { - id: Some(item.id.clone()), + id: Some(item_id.to_string()), name, result, is_error: None, @@ -820,25 +1628,33 @@ pub mod convert_amp { use super::*; pub fn event_to_universal(event: &::StreamJsonMessage) -> EventConversion { - match event.type_ { + let amp::StreamJsonMessage { + content, + error, + id, + tool_call, + type_, + } = event; + match type_ { amp::StreamJsonMessageType::Message => { - let text = event.content.clone().unwrap_or_default(); + let text = content.clone().unwrap_or_default(); let mut message = message_from_text("assistant", text); if let UniversalMessage::Parsed(parsed) = &mut message { - parsed.id = event.id.clone(); + parsed.id = id.clone(); } EventConversion::new(UniversalEventData::Message { message }) } amp::StreamJsonMessageType::ToolCall => { - let tool_call = event.tool_call.as_ref(); + let tool_call = tool_call.as_ref(); let part = if let Some(tool_call) = tool_call { - let input = match &tool_call.arguments { + let amp::ToolCall { arguments, id, name } = tool_call; + let input = match arguments { amp::ToolCallArguments::Variant0(text) => Value::String(text.clone()), amp::ToolCallArguments::Variant1(map) => Value::Object(map.clone()), }; UniversalMessagePart::ToolCall { - id: Some(tool_call.id.clone()), - name: tool_call.name.clone(), + id: Some(id.clone()), + name: name.clone(), input, } } else { @@ -846,18 +1662,17 @@ pub mod convert_amp { }; let mut 
message = message_from_parts("assistant", vec![part]); if let UniversalMessage::Parsed(parsed) = &mut message { - parsed.id = event.id.clone(); + parsed.id = id.clone(); } EventConversion::new(UniversalEventData::Message { message }) } amp::StreamJsonMessageType::ToolResult => { - let output = event - .content + let output = content .clone() .map(Value::String) .unwrap_or(Value::Null); let part = UniversalMessagePart::ToolResult { - id: event.id.clone(), + id: id.clone(), name: None, output, is_error: None, @@ -866,7 +1681,7 @@ pub mod convert_amp { EventConversion::new(UniversalEventData::Message { message }) } amp::StreamJsonMessageType::Error => { - let message = event.error.clone().unwrap_or_else(|| "amp error".to_string()); + let message = error.clone().unwrap_or_else(|| "amp error".to_string()); let crash = CrashInfo { message, kind: Some("amp".to_string()), @@ -889,8 +1704,7 @@ pub mod convert_amp { return Err(ConversionError::Unsupported("unparsed message")) } }; - let content = message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part"))?; + let content = text_only_from_parts(&parsed.parts)?; Ok(amp::StreamJsonMessage { content: Some(content), error: None, @@ -904,22 +1718,28 @@ pub mod convert_amp { } pub fn message_to_universal(message: &::Message) -> UniversalMessage { + let amp::Message { + role, + content, + tool_calls, + } = message; let mut parts = vec![UniversalMessagePart::Text { - text: message.content.clone(), + text: content.clone(), }]; - for call in &message.tool_calls { - let input = match &call.arguments { + for call in tool_calls { + let amp::ToolCall { arguments, id, name } = call; + let input = match arguments { amp::ToolCallArguments::Variant0(text) => Value::String(text.clone()), amp::ToolCallArguments::Variant1(map) => Value::Object(map.clone()), }; parts.push(UniversalMessagePart::ToolCall { - id: Some(call.id.clone()), - name: call.name.clone(), + id: Some(id.clone()), + name: name.clone(), input, }); } 
UniversalMessage::Parsed(UniversalMessageParsed { - role: message.role.to_string(), + role: role.to_string(), id: None, metadata: Map::new(), parts, @@ -935,8 +1755,7 @@ pub mod convert_amp { return Err(ConversionError::Unsupported("unparsed message")) } }; - let content = message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part"))?; + let content = text_only_from_parts(&parsed.parts)?; Ok(amp::Message { role: match parsed.role.as_str() { "user" => amp::MessageRole::User, @@ -976,8 +1795,7 @@ pub mod convert_claude { return Err(ConversionError::Unsupported("unparsed message")) } }; - let text = message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part"))?; + let text = text_only_from_parts(&parsed.parts)?; Ok(Value::Object(Map::from_iter([ ("type".to_string(), Value::String("assistant".to_string())), ( @@ -1010,8 +1828,7 @@ pub mod convert_claude { return Err(ConversionError::Unsupported("unparsed message")) } }; - message_parts_to_text(&parsed.parts) - .ok_or(ConversionError::MissingField("text part")) + text_only_from_parts(&parsed.parts) } fn assistant_event_to_universal(event: &Value) -> EventConversion { diff --git a/frontend/AGENTS.md b/frontend/AGENTS.md new file mode 120000 index 0000000..681311e --- /dev/null +++ b/frontend/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/frontend/CLAUDE.md b/frontend/CLAUDE.md new file mode 100644 index 0000000..511b8e5 --- /dev/null +++ b/frontend/CLAUDE.md @@ -0,0 +1,150 @@ +## Frontend Style Guide + +Examples should follow these design conventions: + +**Color Palette (Dark Theme)** +- Primary accent: `#ff4f00` (orange) for interactive elements and highlights +- Background: `#000000` (main), `#1c1c1e` (cards/containers) +- Borders: `#2c2c2e` +- Input backgrounds: `#2c2c2e` with border `#3a3a3c` +- Text: `#ffffff` (primary), `#8e8e93` (secondary/muted) +- Success: `#30d158` (green) +- Warning: `#ff4f00` (orange) +- Danger: `#ff3b30` 
(red) +- Purple: `#bf5af2` (for special states like rollback) + +**Typography** +- UI: System fonts (`-apple-system, BlinkMacSystemFont, 'Segoe UI', 'Inter', Roboto, sans-serif`) +- Code: `ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace` +- Sizes: 14-16px body, 12-13px labels, large numbers 48-72px + +**Sizing & Spacing** +- Border radius: 8px (cards/containers/buttons), 6px (inputs/badges) +- Section padding: 20-24px +- Gap between items: 12px +- Transitions: 200ms ease for all interactive states + +**Button Styles** +- Padding: 12px 20px +- Border: none +- Border radius: 8px +- Font size: 14px, weight 600 +- Hover: none (no hover state) +- Disabled: 50% opacity, `cursor: not-allowed` + +**CSS Approach** +- Plain CSS in ` + + +
+ + + diff --git a/frontend/packages/web/package.json b/frontend/packages/web/package.json new file mode 100644 index 0000000..2b0c4d0 --- /dev/null +++ b/frontend/packages/web/package.json @@ -0,0 +1,24 @@ +{ + "name": "@sandbox-daemon/web", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "devDependencies": { + "@sandbox-daemon/typescript-sdk": "workspace:*", + "@types/react": "^18.3.3", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.1", + "typescript": "^5.7.3", + "vite": "^5.4.7" + }, + "dependencies": { + "lucide-react": "^0.469.0", + "react": "^18.3.1", + "react-dom": "^18.3.1" + } +} diff --git a/frontend/packages/web/src/App.tsx b/frontend/packages/web/src/App.tsx new file mode 100644 index 0000000..ac13616 --- /dev/null +++ b/frontend/packages/web/src/App.tsx @@ -0,0 +1,1098 @@ +import { + AlertCircle, + CheckCircle2, + Clipboard, + Cloud, + Download, + HelpCircle, + PauseCircle, + PlayCircle, + PlugZap, + RefreshCw, + Send, + Shield, + TerminalSquare +} from "lucide-react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; + +const API_PREFIX = "/v1"; + +type AgentInfo = { + id: string; + installed: boolean; + version?: string; + path?: string; +}; + +type AgentMode = { + id: string; + name: string; + description?: string; +}; + +type UniversalEvent = { + id: number; + timestamp: string; + sessionId: string; + agent: string; + agentSessionId?: string; + data: UniversalEventData; +}; + +type UniversalEventData = + | { message: UniversalMessage } + | { started: StartedInfo } + | { error: CrashInfo } + | { questionAsked: QuestionRequest } + | { permissionAsked: PermissionRequest }; + +type UniversalMessage = { + role?: string; + content?: string; + type?: string; + raw?: unknown; +}; + +type StartedInfo = { + message?: string; + pid?: number; + [key: string]: unknown; +}; + +type CrashInfo = { + message?: string; 
+ code?: string; + detail?: string; + [key: string]: unknown; +}; + +type QuestionOption = { + label: string; + description?: string; +}; + +type QuestionItem = { + header?: string; + question: string; + options: QuestionOption[]; + multiSelect?: boolean; +}; + +type QuestionRequest = { + id: string; + sessionID?: string; + questions: QuestionItem[]; + tool?: { messageID?: string; callID?: string }; +}; + +type PermissionRequest = { + id: string; + sessionID?: string; + permission: string; + patterns?: string[]; + metadata?: Record; + always?: string[]; + tool?: { messageID?: string; callID?: string }; +}; + +type RequestLog = { + id: number; + method: string; + url: string; + body?: string; + status?: number; + time: string; + curl: string; + error?: string; +}; + +const defaultAgents = ["claude", "codex", "opencode", "amp"]; + +const buildUrl = (endpoint: string, path: string, query?: Record) => { + const base = endpoint.replace(/\/$/, ""); + const fullPath = path.startsWith("/") ? path : `/${path}`; + const url = new URL(`${base}${fullPath}`); + if (query) { + Object.entries(query).forEach(([key, value]) => { + if (value !== "") { + url.searchParams.set(key, value); + } + }); + } + return url.toString(); +}; + +const safeJson = (text: string) => { + if (!text) { + return null; + } + try { + return JSON.parse(text); + } catch { + return text; + } +}; + +const formatJson = (value: unknown) => { + if (value === null || value === undefined) { + return ""; + } + if (typeof value === "string") { + return value; + } + try { + return JSON.stringify(value, null, 2); + } catch { + return String(value); + } +}; + +const escapeSingleQuotes = (value: string) => value.replace(/'/g, `'\\''`); + +const buildCurl = (method: string, url: string, body?: string, token?: string) => { + const headers: string[] = []; + if (token) { + headers.push(`-H 'x-sandbox-token: ${escapeSingleQuotes(token)}'`); + } + if (body) { + headers.push(`-H 'Content-Type: application/json'`); + } + const 
data = body ? `-d '${escapeSingleQuotes(body)}'` : ""; + return `curl -X ${method} ${headers.join(" ")} ${data} '${escapeSingleQuotes(url)}'` + .replace(/\s+/g, " ") + .trim(); +}; + +const getEventType = (event: UniversalEvent) => { + if ("message" in event.data) return "message"; + if ("started" in event.data) return "started"; + if ("error" in event.data) return "error"; + if ("questionAsked" in event.data) return "question"; + if ("permissionAsked" in event.data) return "permission"; + return "event"; +}; + +const formatTime = (value: string) => { + if (!value) return ""; + const date = new Date(value); + if (Number.isNaN(date.getTime())) return value; + return date.toLocaleTimeString(); +}; + +export default function App() { + const [endpoint, setEndpoint] = useState("http://localhost:8787"); + const [token, setToken] = useState(""); + const [connected, setConnected] = useState(false); + const [connecting, setConnecting] = useState(false); + const [connectError, setConnectError] = useState(null); + + const [agents, setAgents] = useState([]); + const [modesByAgent, setModesByAgent] = useState>({}); + + const [agentId, setAgentId] = useState("claude"); + const [agentMode, setAgentMode] = useState("build"); + const [permissionMode, setPermissionMode] = useState("default"); + const [model, setModel] = useState(""); + const [variant, setVariant] = useState(""); + const [agentVersion, setAgentVersion] = useState(""); + const [sessionId, setSessionId] = useState("demo-session"); + const [sessionInfo, setSessionInfo] = useState<{ healthy: boolean; agentSessionId?: string } | null>(null); + const [sessionError, setSessionError] = useState(null); + + const [message, setMessage] = useState(""); + const [events, setEvents] = useState([]); + const [offset, setOffset] = useState(0); + const offsetRef = useRef(0); + + const [polling, setPolling] = useState(false); + const pollTimerRef = useRef(null); + const [streamMode, setStreamMode] = useState<"poll" | "sse">("poll"); + 
const eventSourceRef = useRef(null); + const [eventError, setEventError] = useState(null); + + const [questionSelections, setQuestionSelections] = useState>({}); + const [questionStatus, setQuestionStatus] = useState>({}); + const [permissionStatus, setPermissionStatus] = useState>({}); + + const [requestLog, setRequestLog] = useState([]); + const logIdRef = useRef(1); + const [copiedLogId, setCopiedLogId] = useState(null); + + const logRequest = useCallback((entry: RequestLog) => { + setRequestLog((prev) => { + const next = [entry, ...prev]; + return next.slice(0, 200); + }); + }, []); + + const apiFetch = useCallback( + async ( + path: string, + options?: { + method?: string; + body?: unknown; + query?: Record; + } + ) => { + const method = options?.method ?? "GET"; + const url = buildUrl(endpoint, path, options?.query); + const bodyText = options?.body ? JSON.stringify(options.body) : undefined; + const headers: Record = {}; + if (bodyText) { + headers["Content-Type"] = "application/json"; + } + if (token) { + headers["x-sandbox-token"] = token; + } + const curl = buildCurl(method, url, bodyText, token); + const logId = logIdRef.current++; + const entry: RequestLog = { + id: logId, + method, + url, + body: bodyText, + time: new Date().toLocaleTimeString(), + curl + }; + let logged = false; + + try { + const response = await fetch(url, { + method, + headers, + body: bodyText + }); + const text = await response.text(); + const data = safeJson(text); + logRequest({ ...entry, status: response.status }); + logged = true; + if (!response.ok) { + const errorMessage = + (typeof data === "object" && data && "detail" in data && data.detail) || + (typeof data === "object" && data && "title" in data && data.title) || + (typeof data === "string" ? data : `Request failed with ${response.status}`); + throw new Error(String(errorMessage)); + } + return data; + } catch (error) { + const message = error instanceof Error ? 
error.message : "Request failed"; + if (!logged) { + logRequest({ ...entry, status: 0, error: message }); + } + throw error; + } + }, + [endpoint, token, logRequest] + ); + + const connect = async () => { + setConnecting(true); + setConnectError(null); + try { + const data = await apiFetch(`${API_PREFIX}/agents`); + const list = (data as { agents?: AgentInfo[] })?.agents ?? []; + setAgents(list); + if (list.length > 0) { + setAgentId(list[0]?.id ?? "claude"); + } + setConnected(true); + } catch (error) { + const message = error instanceof Error ? error.message : "Unable to connect"; + setConnectError(message); + setConnected(false); + } finally { + setConnecting(false); + } + }; + + const disconnect = () => { + setConnected(false); + setSessionInfo(null); + setSessionError(null); + setEvents([]); + setOffset(0); + offsetRef.current = 0; + setEventError(null); + stopPolling(); + stopSse(); + }; + + const refreshAgents = async () => { + try { + const data = await apiFetch(`${API_PREFIX}/agents`); + setAgents((data as { agents?: AgentInfo[] })?.agents ?? []); + } catch (error) { + setConnectError(error instanceof Error ? error.message : "Unable to refresh agents"); + } + }; + + const installAgent = async (targetId: string, reinstall: boolean) => { + try { + await apiFetch(`${API_PREFIX}/agents/${targetId}/install`, { + method: "POST", + body: { reinstall } + }); + await refreshAgents(); + } catch (error) { + setConnectError(error instanceof Error ? error.message : "Install failed"); + } + }; + + const loadModes = async (targetId: string) => { + try { + const data = await apiFetch(`${API_PREFIX}/agents/${targetId}/modes`); + const modes = (data as { modes?: AgentMode[] })?.modes ?? []; + setModesByAgent((prev) => ({ ...prev, [targetId]: modes })); + } catch (error) { + setConnectError(error instanceof Error ? 
error.message : "Unable to load modes"); + } + }; + + const createSession = async () => { + setSessionError(null); + try { + const body: Record = { agent: agentId }; + if (agentMode) body.agentMode = agentMode; + if (permissionMode) body.permissionMode = permissionMode; + if (model) body.model = model; + if (variant) body.variant = variant; + if (agentVersion) body.agentVersion = agentVersion; + const data = await apiFetch(`${API_PREFIX}/sessions/${sessionId}`, { + method: "POST", + body + }); + const response = data as { healthy?: boolean; agentSessionId?: string }; + setSessionInfo({ healthy: Boolean(response.healthy), agentSessionId: response.agentSessionId }); + setEvents([]); + setOffset(0); + offsetRef.current = 0; + setEventError(null); + } catch (error) { + setSessionError(error instanceof Error ? error.message : "Unable to create session"); + setSessionInfo(null); + } + }; + + const sendMessage = async () => { + if (!message.trim()) return; + try { + await apiFetch(`${API_PREFIX}/sessions/${sessionId}/messages`, { + method: "POST", + body: { message } + }); + setMessage(""); + } catch (error) { + setEventError(error instanceof Error ? error.message : "Unable to send message"); + } + }; + + const appendEvents = useCallback((incoming: UniversalEvent[]) => { + if (!incoming.length) return; + setEvents((prev) => [...prev, ...incoming]); + const lastId = incoming[incoming.length - 1]?.id ?? offsetRef.current; + offsetRef.current = lastId; + setOffset(lastId); + }, []); + + const fetchEvents = useCallback(async () => { + if (!sessionId) return; + try { + const data = await apiFetch(`${API_PREFIX}/sessions/${sessionId}/events`, { + query: { + offset: String(offsetRef.current), + limit: "200" + } + }); + const response = data as { events?: UniversalEvent[]; hasMore?: boolean }; + const newEvents = response.events ?? []; + appendEvents(newEvents); + setEventError(null); + } catch (error) { + setEventError(error instanceof Error ? 
error.message : "Unable to fetch events"); + } + }, [apiFetch, appendEvents, sessionId]); + + const startPolling = () => { + stopSse(); + if (pollTimerRef.current) return; + setPolling(true); + fetchEvents(); + pollTimerRef.current = window.setInterval(fetchEvents, 2500); + }; + + const stopPolling = () => { + if (pollTimerRef.current) { + window.clearInterval(pollTimerRef.current); + pollTimerRef.current = null; + } + setPolling(false); + }; + + const startSse = () => { + stopPolling(); + if (eventSourceRef.current) return; + if (token) { + setEventError("SSE streams cannot send auth headers. Use polling or run daemon with --no-token."); + return; + } + const url = buildUrl(endpoint, `${API_PREFIX}/sessions/${sessionId}/events/sse`, { + offset: String(offsetRef.current) + }); + const source = new EventSource(url); + eventSourceRef.current = source; + source.onmessage = (event) => { + try { + const parsed = safeJson(event.data); + if (Array.isArray(parsed)) { + appendEvents(parsed as UniversalEvent[]); + } else if (parsed && typeof parsed === "object") { + appendEvents([parsed as UniversalEvent]); + } + } catch (error) { + setEventError(error instanceof Error ? error.message : "SSE parse error"); + } + }; + source.onerror = () => { + setEventError("SSE connection error. 
Falling back to polling."); + stopSse(); + }; + }; + + const stopSse = () => { + if (eventSourceRef.current) { + eventSourceRef.current.close(); + eventSourceRef.current = null; + } + }; + + const resetEvents = () => { + setEvents([]); + setOffset(0); + offsetRef.current = 0; + }; + + const handleCopy = async (entry: RequestLog) => { + try { + await navigator.clipboard.writeText(entry.curl); + setCopiedLogId(entry.id); + window.setTimeout(() => setCopiedLogId(null), 1500); + } catch { + setCopiedLogId(null); + } + }; + + const toggleQuestionOption = ( + requestId: string, + questionIndex: number, + optionLabel: string, + multiSelect: boolean + ) => { + setQuestionSelections((prev) => { + const next = { ...prev }; + const currentAnswers = next[requestId] ? [...next[requestId]] : []; + const selections = currentAnswers[questionIndex] ? [...currentAnswers[questionIndex]] : []; + if (multiSelect) { + if (selections.includes(optionLabel)) { + currentAnswers[questionIndex] = selections.filter((label) => label !== optionLabel); + } else { + currentAnswers[questionIndex] = [...selections, optionLabel]; + } + } else { + currentAnswers[questionIndex] = [optionLabel]; + } + next[requestId] = currentAnswers; + return next; + }); + }; + + const answerQuestion = async (request: QuestionRequest) => { + const answers = questionSelections[request.id] ?? []; + try { + await apiFetch(`${API_PREFIX}/sessions/${sessionId}/questions/${request.id}/reply`, { + method: "POST", + body: { answers } + }); + setQuestionStatus((prev) => ({ ...prev, [request.id]: "replied" })); + } catch (error) { + setEventError(error instanceof Error ? 
error.message : "Unable to reply"); + } + }; + + const rejectQuestion = async (requestId: string) => { + try { + await apiFetch(`${API_PREFIX}/sessions/${sessionId}/questions/${requestId}/reject`, { + method: "POST", + body: {} + }); + setQuestionStatus((prev) => ({ ...prev, [requestId]: "rejected" })); + } catch (error) { + setEventError(error instanceof Error ? error.message : "Unable to reject"); + } + }; + + const replyPermission = async (requestId: string, reply: "once" | "always" | "reject") => { + try { + await apiFetch(`${API_PREFIX}/sessions/${sessionId}/permissions/${requestId}/reply`, { + method: "POST", + body: { reply } + }); + setPermissionStatus((prev) => ({ ...prev, [requestId]: "replied" })); + } catch (error) { + setEventError(error instanceof Error ? error.message : "Unable to reply"); + } + }; + + const questionRequests = useMemo(() => { + return events + .filter((event) => "questionAsked" in event.data) + .map((event) => (event.data as { questionAsked: QuestionRequest }).questionAsked) + .filter((request) => !questionStatus[request.id]); + }, [events, questionStatus]); + + const permissionRequests = useMemo(() => { + return events + .filter((event) => "permissionAsked" in event.data) + .map((event) => (event.data as { permissionAsked: PermissionRequest }).permissionAsked) + .filter((request) => !permissionStatus[request.id]); + }, [events, permissionStatus]); + + const transcriptEvents = useMemo(() => { + return events.filter( + (event): event is UniversalEvent & { data: { message: UniversalMessage } } => "message" in event.data + ); + }, [events]); + + useEffect(() => { + return () => { + stopPolling(); + stopSse(); + }; + }, []); + + useEffect(() => { + if (!connected) return; + refreshAgents(); + }, [connected]); + + const availableAgents = agents.length ? agents.map((agent) => agent.id) : defaultAgents; + const activeModes = modesByAgent[agentId] ?? []; + + return ( +
+
+
+ + Sandbox Daemon Console +
+
+ + + {connected ? "Connected" : "Disconnected"} + + {connected && ( + + )} +
+
+ + {!connected ? ( +
+
+
Bring the agent fleet online.
+
+ Point this console at a running sandbox-daemon, then manage sessions, messages, and approvals in + one place. +
+
+ sandbox-daemon --host 0.0.0.0 --port 8787 --token <token> --cors-allowed-origin + http://localhost:5173 --cors-allowed-methods GET,POST --cors-allowed-headers Authorization,x-sandbox-token +
+
+ CORS required for browser access + Token optional with --no-token + HTTP API under /v1 +
+
+ If you see a network or CORS error, make sure CORS flags are enabled in the daemon CLI. +
+
+
+
+ + + Connect + +
+
+ + + {connectError && ( +
+ Connection failed: {connectError} +
If this is a CORS error, enable CORS flags on the daemon.
+
+ )} + +
+
+
+ ) : ( +
+
+
+ + + Agents + + +
+
+ {agents.length === 0 &&
No agents reported yet. Refresh when ready.
} +
+ {(agents.length ? agents : defaultAgents.map((id) => ({ id, installed: false }))).map((agent) => ( +
+
+ {agent.id} + + {agent.installed ? "Installed" : "Missing"} + +
+
+ {agent.version ? `Version ${agent.version}` : "Version unknown"} +
+ {agent.path &&
{agent.path}
} +
+ + + +
+ {modesByAgent[agent.id] && modesByAgent[agent.id].length > 0 && ( +
+ {modesByAgent[agent.id].map((mode) => ( +
+ {mode.name} - {mode.description ?? mode.id} +
+ ))} +
+ )} +
+ ))} +
+
+
+ +
+
+ + + Session Setup + +
+
+ + + + +
+ + +
+ + {sessionInfo && ( +
+ {sessionInfo.healthy ? "Session ready." : "Session unhealthy."} + {sessionInfo.agentSessionId && ( +
Agent session id: {sessionInfo.agentSessionId}
+ )} +
+ )} + {sessionError &&
{sessionError}
} + +
+ Agent mode controls behavior. Permission mode controls what the agent can do. +
+
+
+ +
+
+ + + Message + + POST /sessions/:id/messages +
+
+