chore: sync workspace changes

This commit is contained in:
Nathan Flurry 2026-01-25 01:57:16 -08:00
parent 30d3aca1ee
commit f92ecd9b9a
38 changed files with 4829 additions and 1219 deletions

152
.github/workflows/release.yaml vendored Normal file
View file

@ -0,0 +1,152 @@
# Release workflow: cross-compiles the sandbox-daemon binary for every
# supported platform inside Docker, then uploads binaries, TypeScript
# artifacts, and the install script to the Cloudflare R2 releases bucket.
name: release

on:
  # Runs automatically when a GitHub release is published...
  release:
    types: [published]
  # ...or manually with an explicit version.
  workflow_dispatch:
    inputs:
      version:
        description: "Version (e.g. 0.1.0 or v0.1.0)"
        required: true
        type: string

defaults:
  run:
    # -e: abort each step on the first failing command.
    shell: bash -e {0}

env:
  CARGO_INCREMENTAL: 0

jobs:
  # Resolves the version string and whether this release should also be
  # published under the "latest" alias.
  setup:
    name: "Setup"
    runs-on: ubuntu-24.04
    outputs:
      version: ${{ steps.vars.outputs.version }}
      latest: ${{ steps.latest.outputs.latest }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Resolve version
        id: vars
        run: |
          if [ "${{ github.event_name }}" = "release" ]; then
            VERSION="${{ github.event.release.tag_name }}"
          else
            VERSION="${{ inputs.version }}"
          fi
          # Normalize "v1.2.3" -> "1.2.3".
          VERSION="${VERSION#v}"
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
      - name: Determine latest
        id: latest
        run: |
          node scripts/release/main.js --version "${{ steps.vars.outputs.version }}" --print-latest --output "$GITHUB_OUTPUT"

  # Builds each target triple in Docker (cross-compiling from Linux) and
  # uploads the resulting binary to R2.
  binaries:
    name: "Build & Upload Binaries"
    needs: [setup]
    strategy:
      matrix:
        include:
          - platform: linux
            target: x86_64-unknown-linux-musl
            binary_ext: ""
            arch: x86_64
          - platform: windows
            target: x86_64-pc-windows-gnu
            binary_ext: ".exe"
            arch: x86_64
          - platform: macos
            target: x86_64-apple-darwin
            binary_ext: ""
            arch: x86_64
          - platform: macos
            target: aarch64-apple-darwin
            binary_ext: ""
            arch: aarch64
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build binary
        run: |
          docker/release/build.sh ${{ matrix.target }}
          ls -la dist/
      - name: Install AWS CLI
        run: |
          sudo apt-get update
          sudo apt-get install -y unzip curl
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip awscliv2.zip
          sudo ./aws/install --update
      - name: Upload binaries
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.R2_RELEASES_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_RELEASES_SECRET_ACCESS_KEY }}
        run: |
          VERSION="${{ needs.setup.outputs.version }}"
          BINARY_NAME="sandbox-daemon-${{ matrix.target }}${{ matrix.binary_ext }}"
          aws s3 cp \
            "dist/${BINARY_NAME}" \
            "s3://rivet-releases/sandbox-daemon/${VERSION}/${BINARY_NAME}" \
            --region auto \
            --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \
            --checksum-algorithm CRC32
          # Mirror under latest/ only when this version is the newest.
          if [ "${{ needs.setup.outputs.latest }}" = "true" ]; then
            aws s3 cp \
              "dist/${BINARY_NAME}" \
              "s3://rivet-releases/sandbox-daemon/latest/${BINARY_NAME}" \
              --region auto \
              --endpoint-url https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com \
              --checksum-algorithm CRC32
          fi

  # Publishes the TypeScript SDK artifacts and the install script.
  artifacts:
    name: "TypeScript + Install Script"
    needs: [setup]
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      - name: Install AWS CLI
        run: |
          sudo apt-get update
          sudo apt-get install -y unzip curl
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip awscliv2.zip
          sudo ./aws/install --update
      - name: Upload TypeScript artifacts and install script
        env:
          R2_RELEASES_ACCESS_KEY_ID: ${{ secrets.R2_RELEASES_ACCESS_KEY_ID }}
          R2_RELEASES_SECRET_ACCESS_KEY: ${{ secrets.R2_RELEASES_SECRET_ACCESS_KEY }}
        run: |
          VERSION="${{ needs.setup.outputs.version }}"
          if [ "${{ needs.setup.outputs.latest }}" = "true" ]; then
            LATEST_FLAG="--latest"
          else
            LATEST_FLAG="--no-latest"
          fi
          node scripts/release/main.js --version "$VERSION" $LATEST_FLAG --upload-typescript --upload-install

View file

@ -1,4 +1,4 @@
# Claude Code Instructions # Instructions
## Agent Schemas ## Agent Schemas
@ -12,10 +12,11 @@ Universal schema guidance:
## Spec Tracking ## Spec Tracking
- Track required tests in `spec/required-tests.md` as you write code.
- Capture unresolved questions/ambiguities in `spec/im-not-sure.md`.
- Update `todo.md` as work progresses; add new tasks as they arise. - Update `todo.md` as work progresses; add new tasks as they arise.
- Keep CLI subcommands in sync with every HTTP endpoint. - Keep CLI subcommands in sync with every HTTP endpoint.
- Update `CLAUDE.md` to keep CLI endpoints in sync with HTTP API changes.
- When changing the HTTP API, update the TypeScript SDK and CLI together.
- Do not make breaking changes to API endpoints.
## Git Commits ## Git Commits

3
Cargo.toml Normal file
View file

@ -0,0 +1,3 @@
# Cargo workspace root; member crates live under engine/packages/.
[workspace]
resolver = "2"
members = ["engine/packages/*"]

94
README.md Normal file
View file

@ -0,0 +1,94 @@
# Sandbox Daemon
Runs inside sandboxes to provide universal coding-agent support:
- **Any coding agent**: Universal API to interact with all agents with full feature coverage
- **Server Mode**: Run as HTTP server from any sandbox provider or as TypeScript & Python SDK
- **Universal session schema**: Universal schema to store agent transcripts
- **Supports your sandbox provider**: Daytona, E2B, Vercel Sandboxes, [add your own](TODO)
- **Lightweight, portable Rust binary**: Install anywhere with 1 curl command
- **Compatible with Vercel AI SDK**: TODO
## Quickstart
Start with the SDK:
```
TODO
```
To run this in server mode, install with:
```
TODO
```
And run with:
```
TODO
```
See the example for your provider of choice:
- TODO
- [Add your own](TODO)
## Security
TODO: Tokens
TODO: Using a gateway
TODO: BYO tokens with extractor
## Demo Frontend
TODO: Screenshot
This project provides a demo frontend for testing the connection. Run it with:
```
TODO
```
## Agent Compatibility Matrix
TODO
## Reference
### TypeScript SDK
TODO
### HTTP API
TODO
### CLI
TODO
## FAQ
TODO
- Why not use PTY? This is the recommended option for XXXX
- Why not use <feature that already exists on sandbox API>?
- Does it support <platform>?
- Can I use this with my personal OpenAPI & Claude tokens?
## Project Scope
This project aims to solve 3 problems with agents:
- **Universal Agent API**: Claude Code, Codex, Amp, and OpenCode have all put a lot of work into the agent scaffold. Each has respective pros and cons, and they need to be easy to swap between.
- **Agent Transcript**: Maintaining agent transcripts is difficult since the agent manages its own sessions. This provides a simpler way to read and retrieve agent transcripts in your system.
- **Agents In Sandboxes**: There are many complications with running agents inside of sandbox providers. This lets you run a simple curl command to spawn an HTTP server for using any agent from within the sandbox.
Features out of scope:
- **Storage of sessions on disk**: Sessions are already stored by the respective coding agents on disk. It's assumed that the consumer is streaming data from this machine to external storage, such as Postgres, ClickHouse, or Rivet.
- **Direct LLM wrappers**: Use the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction) if you want to implement your own agent from scratch
- **Git Repo Management**: Just use git commands or the features provided by your sandbox provider of choice.
- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide skills that lets you integrate this project with sandbox providers.

41
ROADMAP.md Normal file
View file

@ -0,0 +1,41 @@
## soon
- discuss actor arch in readme + give example
- skillfile
- specifically include the release checklist
- image/etc input
## later
- auto-updating for long running job
- persistence
- system information/cpu/etc
- git utils
- api features
- list agent modes available
- list models available
- handle planning mode
- api key gateway
- configuring mcp/skills/etc
- process management inside container
- otel
- better authentication systems
- s3-based file system
- ai sdk compatibility for their ecosystem (useChat, etc)
- resumable messages
- todo lists
- all other features
- misc
- bootstrap tool that extracts tokens from the current system
- skill
- pre-package these as bun binaries instead of npm installations
- build & release pipeline with musl
- agent feature matrix for api features
- tunnels
- mcp integration (can connect to given endpoints)
- provide a pty to access the agent data
- other agent features like file system
- python sdk
- comparison to agentapi:
- it does not use the pty since we need to get more information from the agent directly
- transfer sessions between agents

54
docker/release/build.sh Executable file
View file

@ -0,0 +1,54 @@
#!/bin/bash
# Builds a release binary for a single Rust target triple inside Docker and
# copies the artifact into dist/.
#
# Usage: docker/release/build.sh [target-triple]
# Default target: x86_64-unknown-linux-musl
set -euo pipefail

TARGET=${1:-x86_64-unknown-linux-musl}

# Map the target triple to its Dockerfile, build stage, and artifact name.
case $TARGET in
  x86_64-unknown-linux-musl)
    echo "Building for Linux x86_64 musl"
    DOCKERFILE="linux-x86_64.Dockerfile"
    TARGET_STAGE="builder"
    BINARY="sandbox-daemon-$TARGET"
    ;;
  x86_64-pc-windows-gnu)
    echo "Building for Windows x86_64"
    DOCKERFILE="windows.Dockerfile"
    TARGET_STAGE=""
    BINARY="sandbox-daemon-$TARGET.exe"
    ;;
  x86_64-apple-darwin)
    echo "Building for macOS x86_64"
    DOCKERFILE="macos-x86_64.Dockerfile"
    TARGET_STAGE="x86_64-builder"
    BINARY="sandbox-daemon-$TARGET"
    ;;
  aarch64-apple-darwin)
    echo "Building for macOS aarch64"
    DOCKERFILE="macos-aarch64.Dockerfile"
    TARGET_STAGE="aarch64-builder"
    BINARY="sandbox-daemon-$TARGET"
    ;;
  *)
    echo "Unsupported target: $TARGET"
    exit 1
    ;;
esac

# Must be exported so the docker CLI sees it; the previous plain assignment
# only set a shell-local variable and had no effect on the build.
export DOCKER_BUILDKIT=1

# Build only the requested stage when the Dockerfile is multi-stage.
if [ -n "$TARGET_STAGE" ]; then
  docker build --target "$TARGET_STAGE" -f "docker/release/$DOCKERFILE" -t "sandbox-daemon-builder-$TARGET" .
else
  docker build -f "docker/release/$DOCKERFILE" -t "sandbox-daemon-builder-$TARGET" .
fi

# Copy the artifact out of a throwaway container created from the image.
CONTAINER_ID=$(docker create "sandbox-daemon-builder-$TARGET")
mkdir -p dist
docker cp "$CONTAINER_ID:/artifacts/$BINARY" "dist/"
docker rm "$CONTAINER_ID"

# Windows binaries do not need the executable bit.
if [[ "$BINARY" != *.exe ]]; then
  chmod +x "dist/$BINARY"
fi

echo "Binary saved to: dist/$BINARY"

View file

@ -0,0 +1,27 @@
# syntax=docker/dockerfile:1.10.0
# Builds a fully static Linux x86_64 release binary against musl libc.
FROM rust:1.91.0 AS builder
ENV DEBIAN_FRONTEND=noninteractive
# musl-tools supplies musl-gcc, required to link the musl target.
RUN apt-get update -y && \
apt-get install -y \
musl-tools \
pkg-config \
ca-certificates \
git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN rustup target add x86_64-unknown-linux-musl
WORKDIR /build
COPY . .
# Cache mounts persist the cargo registry and target dir across builds; the
# binary is copied out of the cache mount into /artifacts so it survives in
# the image layer. +crt-static forces a fully static link.
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
RUSTFLAGS="-C target-feature=+crt-static" \
cargo build -p sandbox-daemon-core --release --target x86_64-unknown-linux-musl && \
mkdir -p /artifacts && \
cp target/x86_64-unknown-linux-musl/release/sandbox-daemon /artifacts/sandbox-daemon-x86_64-unknown-linux-musl
# Default command just lists the produced artifacts for inspection.
CMD ["ls", "-la", "/artifacts"]

View file

@ -0,0 +1,62 @@
# syntax=docker/dockerfile:1.10.0
# Cross-compiles the macOS aarch64 (Apple Silicon) binary from Linux using
# the osxcross clang-based toolchain.
FROM rust:1.91.0 AS base
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
clang \
cmake \
patch \
libxml2-dev \
wget \
xz-utils \
curl \
git && \
rm -rf /var/lib/apt/lists/*
# Install osxcross with the macOS 11.3 SDK.
RUN git config --global --add safe.directory '*' && \
git clone https://github.com/tpoechtrager/osxcross /root/osxcross && \
cd /root/osxcross && \
wget -nc https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.3.sdk.tar.xz && \
mv MacOSX11.3.sdk.tar.xz tarballs/ && \
UNATTENDED=yes OSX_VERSION_MIN=10.7 ./build.sh
ENV PATH="/root/osxcross/target/bin:$PATH"
# Per-target toolchain configuration for cargo, cc-rs, and bindgen.
# cc-rs/bindgen read the lowercase `<VAR>_<target_with_underscores>` form;
# cargo's CARGO_TARGET_* variable is legitimately uppercase.
ENV OSXCROSS_SDK=MacOSX11.3.sdk \
SDKROOT=/root/osxcross/target/SDK/MacOSX11.3.sdk \
BINDGEN_EXTRA_CLANG_ARGS_aarch64_apple_darwin="--sysroot=/root/osxcross/target/SDK/MacOSX11.3.sdk -isystem /root/osxcross/target/SDK/MacOSX11.3.sdk/usr/include" \
CFLAGS_aarch64_apple_darwin="-B/root/osxcross/target/bin" \
CXXFLAGS_aarch64_apple_darwin="-B/root/osxcross/target/bin" \
CARGO_TARGET_AARCH64_APPLE_DARWIN_LINKER=aarch64-apple-darwin20.4-clang \
CC_aarch64_apple_darwin=aarch64-apple-darwin20.4-clang \
CXX_aarch64_apple_darwin=aarch64-apple-darwin20.4-clang++ \
AR_aarch64_apple_darwin=aarch64-apple-darwin20.4-ar \
RANLIB_aarch64_apple_darwin=aarch64-apple-darwin20.4-ranlib \
MACOSX_DEPLOYMENT_TARGET=10.14 \
CARGO_INCREMENTAL=0 \
CARGO_NET_GIT_FETCH_WITH_CLI=true
WORKDIR /build
# Build stage for the aarch64 target (selected via build.sh --target).
FROM base AS aarch64-builder
RUN rustup target add aarch64-apple-darwin
RUN mkdir -p /root/.cargo && \
echo '\
[target.aarch64-apple-darwin]\n\
linker = "aarch64-apple-darwin20.4-clang"\n\
ar = "aarch64-apple-darwin20.4-ar"\n\
' > /root/.cargo/config.toml
COPY . .
# Build and stash the artifact outside the cache mount so it persists.
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-daemon-core --release --target aarch64-apple-darwin && \
mkdir -p /artifacts && \
cp target/aarch64-apple-darwin/release/sandbox-daemon /artifacts/sandbox-daemon-aarch64-apple-darwin
CMD ["ls", "-la", "/artifacts"]

View file

@ -0,0 +1,62 @@
# syntax=docker/dockerfile:1.10.0
# Cross-compiles the macOS x86_64 binary from Linux using the osxcross
# clang-based toolchain.
FROM rust:1.91.0 AS base
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
clang \
cmake \
patch \
libxml2-dev \
wget \
xz-utils \
curl \
git && \
rm -rf /var/lib/apt/lists/*
# Install osxcross with the macOS 11.3 SDK.
RUN git config --global --add safe.directory '*' && \
git clone https://github.com/tpoechtrager/osxcross /root/osxcross && \
cd /root/osxcross && \
wget -nc https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.3.sdk.tar.xz && \
mv MacOSX11.3.sdk.tar.xz tarballs/ && \
UNATTENDED=yes OSX_VERSION_MIN=10.7 ./build.sh
ENV PATH="/root/osxcross/target/bin:$PATH"
# Per-target toolchain configuration. cc-rs and bindgen look up the
# `<VAR>_<target_with_underscores>` form with the target kept lowercase
# (e.g. CFLAGS_x86_64_apple_darwin); the previous uppercase `X86_64`
# spellings were never read by either tool. Cargo's CARGO_TARGET_* variable
# is the only one that is legitimately uppercase. This now mirrors the
# aarch64 Dockerfile.
ENV OSXCROSS_SDK=MacOSX11.3.sdk \
SDKROOT=/root/osxcross/target/SDK/MacOSX11.3.sdk \
BINDGEN_EXTRA_CLANG_ARGS_x86_64_apple_darwin="--sysroot=/root/osxcross/target/SDK/MacOSX11.3.sdk -isystem /root/osxcross/target/SDK/MacOSX11.3.sdk/usr/include" \
CFLAGS_x86_64_apple_darwin="-B/root/osxcross/target/bin" \
CXXFLAGS_x86_64_apple_darwin="-B/root/osxcross/target/bin" \
CARGO_TARGET_X86_64_APPLE_DARWIN_LINKER=x86_64-apple-darwin20.4-clang \
CC_x86_64_apple_darwin=x86_64-apple-darwin20.4-clang \
CXX_x86_64_apple_darwin=x86_64-apple-darwin20.4-clang++ \
AR_x86_64_apple_darwin=x86_64-apple-darwin20.4-ar \
RANLIB_x86_64_apple_darwin=x86_64-apple-darwin20.4-ranlib \
MACOSX_DEPLOYMENT_TARGET=10.14 \
CARGO_INCREMENTAL=0 \
CARGO_NET_GIT_FETCH_WITH_CLI=true
WORKDIR /build
# Build stage for the x86_64 target (selected via build.sh --target).
FROM base AS x86_64-builder
RUN rustup target add x86_64-apple-darwin
RUN mkdir -p /root/.cargo && \
echo '\
[target.x86_64-apple-darwin]\n\
linker = "x86_64-apple-darwin20.4-clang"\n\
ar = "x86_64-apple-darwin20.4-ar"\n\
' > /root/.cargo/config.toml
COPY . .
# Build and stash the artifact outside the cache mount so it persists.
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-daemon-core --release --target x86_64-apple-darwin && \
mkdir -p /artifacts && \
cp target/x86_64-apple-darwin/release/sandbox-daemon /artifacts/sandbox-daemon-x86_64-apple-darwin
CMD ["ls", "-la", "/artifacts"]

View file

@ -0,0 +1,49 @@
# syntax=docker/dockerfile:1.10.0
# Cross-compiles the Windows x86_64 binary from Linux using MinGW-w64.
FROM rust:1.91.0
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y \
llvm-14-dev \
libclang-14-dev \
clang-14 \
gcc-mingw-w64-x86-64 \
g++-mingw-w64-x86-64 \
binutils-mingw-w64-x86-64 \
ca-certificates \
curl \
git && \
rm -rf /var/lib/apt/lists/*
# Switch MinGW-w64 to the POSIX threading model toolchain
RUN update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix && \
update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix
RUN rustup target add x86_64-pc-windows-gnu
RUN mkdir -p /root/.cargo && \
echo '\
[target.x86_64-pc-windows-gnu]\n\
linker = "x86_64-w64-mingw32-gcc"\n\
' > /root/.cargo/config.toml
# Toolchain configuration for cargo, cc-rs, and bindgen (libclang).
# NOTE(review): the hyphenated keys (CC_x86_64-pc-windows-gnu, CXX_...)
# duplicate the underscore forms above; env names with hyphens cannot be
# set from most shells — confirm they are actually needed.
ENV CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER=x86_64-w64-mingw32-gcc \
CC_x86_64_pc_windows_gnu=x86_64-w64-mingw32-gcc \
CXX_x86_64_pc_windows_gnu=x86_64-w64-mingw32-g++ \
CC_x86_64-pc-windows-gnu=x86_64-w64-mingw32-gcc \
CXX_x86_64-pc-windows-gnu=x86_64-w64-mingw32-g++ \
LIBCLANG_PATH=/usr/lib/llvm-14/lib \
CLANG_PATH=/usr/bin/clang-14 \
CARGO_INCREMENTAL=0 \
CARGO_NET_GIT_FETCH_WITH_CLI=true
WORKDIR /build
COPY . .
# Build and stash the artifact outside the cache mount so it persists.
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/build/target \
cargo build -p sandbox-daemon-core --release --target x86_64-pc-windows-gnu && \
mkdir -p /artifacts && \
cp target/x86_64-pc-windows-gnu/release/sandbox-daemon.exe /artifacts/sandbox-daemon-x86_64-pc-windows-gnu.exe
CMD ["ls", "-la", "/artifacts"]

View file

@ -0,0 +1,10 @@
# Library crate that extracts API credentials from locally installed coding
# agents (Claude Code, Codex, OpenCode, Amp) and the environment.
[package]
name = "sandbox-daemon-agent-credentials"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Home-directory discovery.
dirs = "5.0"
# RFC 3339 parsing for OAuth token expiry checks.
time = { version = "0.3", features = ["parsing", "formatting"] }

View file

@ -0,0 +1,371 @@
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
/// One credential extracted from a local agent installation or the
/// environment.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProviderCredentials {
/// The API key or OAuth access token value.
pub api_key: String,
/// Where the credential came from (e.g. "claude-code", "codex",
/// "opencode", "amp", "environment").
pub source: String,
/// Whether `api_key` is a long-lived API key or an OAuth access token.
pub auth_type: AuthType,
/// Upstream LLM provider name (e.g. "anthropic", "openai").
pub provider: String,
}
/// Kind of credential material held in `ProviderCredentials::api_key`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AuthType {
ApiKey,
Oauth,
}
/// Aggregate of every credential found. Anthropic and OpenAI have
/// first-class slots; any other provider goes into `other`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ExtractedCredentials {
pub anthropic: Option<ProviderCredentials>,
pub openai: Option<ProviderCredentials>,
pub other: HashMap<String, ProviderCredentials>,
}
/// Options controlling credential extraction.
#[derive(Debug, Clone, Default)]
pub struct CredentialExtractionOptions {
/// Override for the home directory to scan; `None` = the user's home.
pub home_dir: Option<PathBuf>,
/// When false, OAuth access tokens are skipped entirely.
pub include_oauth: bool,
}
impl CredentialExtractionOptions {
/// Standard options: real home directory, OAuth tokens allowed.
/// NOTE(review): differs from the derived `Default::default()`, which
/// leaves `include_oauth` as false — confirm this asymmetry is intended.
pub fn new() -> Self {
Self {
home_dir: None,
include_oauth: true,
}
}
}
/// Extracts Anthropic credentials written by Claude Code.
///
/// Checks the known config files for a `sk-ant-`-prefixed API key first,
/// then (when `include_oauth` is set) falls back to OAuth access tokens,
/// skipping tokens whose `expiresAt` timestamp has already passed.
/// Returns `None` when nothing usable is found.
pub fn extract_claude_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
let include_oauth = options.include_oauth;
let config_paths = [
home_dir.join(".claude.json.api"),
home_dir.join(".claude.json"),
home_dir.join(".claude.json.nathan"),
];
let key_paths = [
vec!["primaryApiKey"],
vec!["apiKey"],
vec!["anthropicApiKey"],
vec!["customApiKey"],
];
for path in config_paths {
// A missing or unparsable file must not abort the search: fall through
// to the next candidate path. (The previous `read_json_file(&path)?`
// returned `None` for the whole function as soon as the first config
// file was absent, skipping later paths and the OAuth fallback.)
let data = match read_json_file(&path) {
Some(value) => value,
None => continue,
};
for key_path in &key_paths {
if let Some(key) = read_string_field(&data, key_path) {
// Only accept well-formed Anthropic API keys.
if key.starts_with("sk-ant-") {
return Some(ProviderCredentials {
api_key: key,
source: "claude-code".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
}
}
}
if include_oauth {
let oauth_paths = [
home_dir.join(".claude").join(".credentials.json"),
home_dir.join(".claude-oauth-credentials.json"),
];
for path in oauth_paths {
let data = match read_json_file(&path) {
Some(value) => value,
None => continue,
};
let access = read_string_field(&data, &["claudeAiOauth", "accessToken"]);
if let Some(token) = access {
// Skip tokens with a known expiry in the past; tokens without
// an `expiresAt` field are assumed still valid.
if let Some(expires_at) =
read_string_field(&data, &["claudeAiOauth", "expiresAt"])
{
if is_expired_rfc3339(&expires_at) {
continue;
}
}
return Some(ProviderCredentials {
api_key: token,
source: "claude-code".to_string(),
auth_type: AuthType::Oauth,
provider: "anthropic".to_string(),
});
}
}
}
None
}
/// Extracts OpenAI credentials stored by Codex in `~/.codex/auth.json`.
///
/// A plain `OPENAI_API_KEY` entry takes precedence; when absent and
/// `include_oauth` is set, falls back to the OAuth access token under
/// `tokens.access_token`. Returns `None` when the file is missing,
/// unparsable, or holds neither.
pub fn extract_codex_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
let home = options.home_dir.clone().unwrap_or_else(default_home_dir);
let auth = read_json_file(&home.join(".codex").join("auth.json"))?;
// Prefer an explicit, non-empty API key.
let api_key = auth
.get("OPENAI_API_KEY")
.and_then(Value::as_str)
.filter(|key| !key.is_empty());
if let Some(key) = api_key {
return Some(ProviderCredentials {
api_key: key.to_string(),
source: "codex".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
// Otherwise fall back to the OAuth access token, if allowed.
if !options.include_oauth {
return None;
}
read_string_field(&auth, &["tokens", "access_token"]).map(|token| ProviderCredentials {
api_key: token,
source: "codex".to_string(),
auth_type: AuthType::Oauth,
provider: "openai".to_string(),
})
}
/// Reads OpenCode's `auth.json` and converts each provider entry into a
/// `ProviderCredentials`, routing "anthropic"/"openai" into their dedicated
/// slots and every other provider into `other`. Expired OAuth entries (per
/// their millisecond `expires` field) are skipped; entries without an
/// expiry are treated as still valid.
pub fn extract_opencode_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
let home = options.home_dir.clone().unwrap_or_else(default_home_dir);
let auth_path = home
.join(".local")
.join("share")
.join("opencode")
.join("auth.json");
let mut extracted = ExtractedCredentials::default();
let data = match read_json_file(&auth_path) {
Some(value) => value,
None => return extracted,
};
let entries = match data.as_object() {
Some(map) => map,
None => return extracted,
};
for (provider, entry) in entries {
let config = match entry.as_object() {
Some(map) => map,
None => continue,
};
let kind = config.get("type").and_then(Value::as_str).unwrap_or("");
let credential = match kind {
"api" => config
.get("key")
.and_then(Value::as_str)
.map(|key| ProviderCredentials {
api_key: key.to_string(),
source: "opencode".to_string(),
auth_type: AuthType::ApiKey,
provider: provider.to_string(),
}),
"oauth" if options.include_oauth => {
// Skip only when an expiry is present and already in the past.
let expired = config
.get("expires")
.and_then(Value::as_i64)
.map(|expires| expires < current_epoch_millis())
.unwrap_or(false);
if expired {
None
} else {
config
.get("access")
.and_then(Value::as_str)
.map(|token| ProviderCredentials {
api_key: token.to_string(),
source: "opencode".to_string(),
auth_type: AuthType::Oauth,
provider: provider.to_string(),
})
}
}
_ => None,
};
if let Some(credential) = credential {
match provider.as_str() {
"anthropic" => extracted.anthropic = Some(credential),
"openai" => extracted.openai = Some(credential),
_ => {
extracted.other.insert(provider.to_string(), credential);
}
}
}
}
extracted
}
pub fn extract_amp_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
let path = home_dir.join(".amp").join("config.json");
let data = read_json_file(&path)?;
let key_paths: Vec<Vec<&str>> = vec![
vec!["anthropicApiKey"],
vec!["anthropic_api_key"],
vec!["apiKey"],
vec!["api_key"],
vec!["accessToken"],
vec!["access_token"],
vec!["token"],
vec!["auth", "anthropicApiKey"],
vec!["auth", "apiKey"],
vec!["auth", "token"],
vec!["anthropic", "apiKey"],
vec!["anthropic", "token"],
];
for key_path in key_paths {
if let Some(key) = read_string_field(&data, &key_path) {
if !key.is_empty() {
return Some(ProviderCredentials {
api_key: key,
source: "amp".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
}
}
None
}
pub fn extract_all_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
let mut result = ExtractedCredentials::default();
if let Ok(value) = std::env::var("ANTHROPIC_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
} else if let Ok(value) = std::env::var("CLAUDE_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
if let Ok(value) = std::env::var("OPENAI_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
} else if let Ok(value) = std::env::var("CODEX_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
if result.anthropic.is_none() {
result.anthropic = extract_amp_credentials(options);
}
if result.anthropic.is_none() {
result.anthropic = extract_claude_credentials(options);
}
if result.openai.is_none() {
result.openai = extract_codex_credentials(options);
}
let opencode_credentials = extract_opencode_credentials(options);
if result.anthropic.is_none() {
result.anthropic = opencode_credentials.anthropic.clone();
}
if result.openai.is_none() {
result.openai = opencode_credentials.openai.clone();
}
for (key, value) in opencode_credentials.other {
result.other.entry(key).or_insert(value);
}
result
}
/// Convenience accessor: the Anthropic API key from the highest-priority
/// source, if any.
pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option<String> {
let credentials = extract_all_credentials(options);
credentials.anthropic.map(|credential| credential.api_key)
}
/// Convenience accessor: the OpenAI API key from the highest-priority
/// source, if any.
pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option<String> {
let credentials = extract_all_credentials(options);
credentials.openai.map(|credential| credential.api_key)
}
/// Exports the extracted credentials into this process's environment so
/// child agent processes inherit them. Only the two first-class providers
/// are exported; `other` entries are ignored.
///
/// NOTE(review): `std::env::set_var` is not thread-safe on POSIX when other
/// threads read the environment concurrently — confirm callers invoke this
/// before spawning threads.
pub fn set_credentials_as_env_vars(credentials: &ExtractedCredentials) {
if let Some(cred) = &credentials.anthropic {
std::env::set_var("ANTHROPIC_API_KEY", &cred.api_key);
}
if let Some(cred) = &credentials.openai {
std::env::set_var("OPENAI_API_KEY", &cred.api_key);
}
}
fn read_json_file(path: &Path) -> Option<Value> {
let contents = fs::read_to_string(path).ok()?;
serde_json::from_str(&contents).ok()
}
fn read_string_field(value: &Value, path: &[&str]) -> Option<String> {
let mut current = value;
for key in path {
current = current.get(*key)?;
}
current.as_str().map(|s| s.to_string())
}
fn default_home_dir() -> PathBuf {
dirs::home_dir().unwrap_or_else(|| PathBuf::from("."))
}
/// Current Unix time in milliseconds (UTC); compared against OpenCode's
/// millisecond `expires` field.
fn current_epoch_millis() -> i64 {
let now = OffsetDateTime::now_utc();
(now.unix_timestamp() * 1000) + (now.millisecond() as i64)
}
/// True when `value` is an RFC 3339 timestamp strictly in the past.
/// Unparsable values are treated as NOT expired (best effort: a malformed
/// expiry should not discard an otherwise usable token).
fn is_expired_rfc3339(value: &str) -> bool {
match OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339) {
Ok(expiry) => expiry < OffsetDateTime::now_utc(),
Err(_) => false,
}
}

View file

@ -0,0 +1,40 @@
use std::env;
use std::fs;
use std::path::PathBuf;
/// CLI entry point: writes the pre-generated OpenAPI JSON schema either to
/// stdout (`--stdout`) or to a file given via `--out PATH`, `--out=PATH`,
/// or a bare positional path (default: `openapi.json`).
fn main() {
let mut out: Option<PathBuf> = None;
let mut stdout = false;
let mut args = env::args().skip(1).peekable();
while let Some(arg) = args.next() {
if arg == "--stdout" {
stdout = true;
continue;
}
// `--out PATH`: consume the following argument as the path. A trailing
// `--out` with no value is silently ignored.
if arg == "--out" {
if let Some(value) = args.next() {
out = Some(PathBuf::from(value));
}
continue;
}
// `--out=PATH` form.
if let Some(value) = arg.strip_prefix("--out=") {
out = Some(PathBuf::from(value));
continue;
}
// First remaining argument is treated as the output path; later ones
// are ignored. NOTE(review): unrecognized flags also land here —
// confirm that is intended.
if out.is_none() {
out = Some(PathBuf::from(arg));
}
}
let schema = sandbox_daemon_openapi_gen::OPENAPI_JSON;
// --stdout wins over any output path.
if stdout {
println!("{schema}");
return;
}
let out = out.unwrap_or_else(|| PathBuf::from("openapi.json"));
if let Err(err) = fs::write(&out, schema) {
eprintln!("failed to write {}: {err}", out.display());
std::process::exit(1);
}
}

View file

@ -1,683 +0,0 @@
use std::collections::HashMap;
use std::fmt;
use std::fs;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::process::{Command, ExitStatus};
use flate2::read::GzDecoder;
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
/// Identifier for each supported coding agent.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AgentId {
Claude,
Codex,
Opencode,
Amp,
}
impl AgentId {
/// Canonical lowercase name, matching the serde representation.
pub fn as_str(self) -> &'static str {
match self {
AgentId::Claude => "claude",
AgentId::Codex => "codex",
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
}
}
/// Name of the executable on disk (currently identical to `as_str`).
pub fn binary_name(self) -> &'static str {
match self {
AgentId::Claude => "claude",
AgentId::Codex => "codex",
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
}
}
}
impl fmt::Display for AgentId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.as_str())
}
}
/// Host platform, used to select the right download artifact per agent.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Platform {
LinuxX64,
LinuxX64Musl,
LinuxArm64,
MacosArm64,
MacosX64,
}
impl Platform {
/// Detects the current platform from `std::env::consts` plus the
/// compile-time libc; errors on unsupported OS/arch combinations.
pub fn detect() -> Result<Self, AgentError> {
let os = std::env::consts::OS;
let arch = std::env::consts::ARCH;
// Compile-time flag: true when this binary was built against musl.
let is_musl = cfg!(target_env = "musl");
match (os, arch, is_musl) {
("linux", "x86_64", true) => Ok(Self::LinuxX64Musl),
("linux", "x86_64", false) => Ok(Self::LinuxX64),
("linux", "aarch64", _) => Ok(Self::LinuxArm64),
("macos", "aarch64", _) => Ok(Self::MacosArm64),
("macos", "x86_64", _) => Ok(Self::MacosX64),
_ => Err(AgentError::UnsupportedPlatform {
os: os.to_string(),
arch: arch.to_string(),
}),
}
}
}
/// Installs, locates, and runs agent binaries under a common install dir.
#[derive(Debug, Clone)]
pub struct AgentManager {
// Directory where managed agent binaries are installed.
install_dir: PathBuf,
// Host platform used to pick download artifacts.
platform: Platform,
}
impl AgentManager {
/// Creates a manager for `install_dir`, detecting the host platform.
pub fn new(install_dir: impl Into<PathBuf>) -> Result<Self, AgentError> {
Ok(Self {
install_dir: install_dir.into(),
platform: Platform::detect()?,
})
}
/// Creates a manager with an explicitly chosen platform (no detection).
pub fn with_platform(
install_dir: impl Into<PathBuf>,
platform: Platform,
) -> Self {
Self {
install_dir: install_dir.into(),
platform,
}
}
/// Installs `agent` into the install dir, skipping the download when the
/// binary already exists unless `options.reinstall` is set. Version
/// lookup errors are flattened to `None` rather than propagated.
pub fn install(&self, agent: AgentId, options: InstallOptions) -> Result<InstallResult, AgentError> {
let install_path = self.binary_path(agent);
if install_path.exists() && !options.reinstall {
return Ok(InstallResult {
path: install_path,
version: self.version(agent).unwrap_or(None),
});
}
fs::create_dir_all(&self.install_dir)?;
match agent {
AgentId::Claude => install_claude(&install_path, self.platform, options.version.as_deref())?,
AgentId::Codex => install_codex(&install_path, self.platform, options.version.as_deref())?,
AgentId::Opencode => install_opencode(&install_path, self.platform, options.version.as_deref())?,
AgentId::Amp => install_amp(&install_path, self.platform, options.version.as_deref())?,
}
Ok(InstallResult {
path: install_path,
version: self.version(agent).unwrap_or(None),
})
}
/// True when the agent is installed locally or resolvable via PATH.
pub fn is_installed(&self, agent: AgentId) -> bool {
self.binary_path(agent).exists() || find_in_path(agent.binary_name()).is_some()
}
/// Path where the managed copy of `agent` would live (may not exist).
pub fn binary_path(&self, agent: AgentId) -> PathBuf {
self.install_dir.join(agent.binary_name())
}
/// Best-effort version lookup: tries `--version`, `version`, then `-V`
/// and returns the first parsable output, or `Ok(None)`.
pub fn version(&self, agent: AgentId) -> Result<Option<String>, AgentError> {
let path = self.resolve_binary(agent)?;
let attempts = [vec!["--version"], vec!["version"], vec!["-V"]];
for args in attempts {
let output = Command::new(&path).args(args).output();
if let Ok(output) = output {
if output.status.success() {
if let Some(version) = parse_version_output(&output) {
return Ok(Some(version));
}
}
}
}
Ok(None)
}
/// Runs `agent` once with `options.prompt`, blocking until it exits, and
/// captures stdout/stderr. Each agent has its own CLI flag mapping.
pub fn spawn(&self, agent: AgentId, options: SpawnOptions) -> Result<SpawnResult, AgentError> {
let path = self.resolve_binary(agent)?;
let working_dir = options
.working_dir
.clone()
.unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
let mut command = Command::new(&path);
command.current_dir(&working_dir);
match agent {
AgentId::Claude => {
command
.arg("--print")
.arg("--output-format")
.arg("stream-json")
.arg("--verbose")
.arg("--dangerously-skip-permissions");
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--resume").arg(session_id);
}
// Only "plan" is forwarded; other permission modes are ignored.
if let Some(permission_mode) = options.permission_mode.as_deref() {
if permission_mode == "plan" {
command.arg("--permission-mode").arg("plan");
}
}
command.arg(&options.prompt);
}
AgentId::Codex => {
command
.arg("exec")
.arg("--json")
.arg("--dangerously-bypass-approvals-and-sandbox");
if let Some(model) = options.model.as_deref() {
command.arg("-m").arg(model);
}
command.arg(&options.prompt);
}
AgentId::Opencode => {
command
.arg("run")
.arg("--format")
.arg("json");
if let Some(model) = options.model.as_deref() {
command.arg("-m").arg(model);
}
if let Some(agent_mode) = options.agent_mode.as_deref() {
command.arg("--agent").arg(agent_mode);
}
if let Some(variant) = options.variant.as_deref() {
command.arg("--variant").arg(variant);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("-s").arg(session_id);
}
command.arg(&options.prompt);
}
AgentId::Amp => {
// Amp is delegated to a helper and returns early; note that the
// `options.env` loop below is NOT applied on this path.
let output = spawn_amp(&path, &working_dir, &options)?;
return Ok(SpawnResult {
status: output.status,
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
});
}
}
for (key, value) in options.env {
command.env(key, value);
}
let output = command.output().map_err(AgentError::Io)?;
Ok(SpawnResult {
status: output.status,
stdout: String::from_utf8_lossy(&output.stdout).to_string(),
stderr: String::from_utf8_lossy(&output.stderr).to_string(),
})
}
/// Resolves the agent binary: managed copy first, then PATH lookup.
fn resolve_binary(&self, agent: AgentId) -> Result<PathBuf, AgentError> {
let path = self.binary_path(agent);
if path.exists() {
return Ok(path);
}
if let Some(path) = find_in_path(agent.binary_name()) {
return Ok(path);
}
Err(AgentError::BinaryNotFound { agent })
}
}
/// Options controlling how an agent CLI binary is installed.
#[derive(Debug, Clone)]
pub struct InstallOptions {
/// Install even when a binary is already present.
pub reinstall: bool,
/// Exact version to install; `None` installs the latest release.
pub version: Option<String>,
}
impl Default for InstallOptions {
fn default() -> Self {
Self {
reinstall: false,
version: None,
}
}
}
/// Outcome of a successful install.
#[derive(Debug, Clone)]
pub struct InstallResult {
/// Filesystem location of the installed binary.
pub path: PathBuf,
/// Version that was installed, when known.
pub version: Option<String>,
}
/// Options for a single agent invocation.
#[derive(Debug, Clone)]
pub struct SpawnOptions {
/// Prompt text passed to the agent as its final argument.
pub prompt: String,
/// Model identifier forwarded via the agent's model flag.
pub model: Option<String>,
/// Variant forwarded via `--variant` (Opencode only in this file).
pub variant: Option<String>,
/// Agent mode forwarded via `--agent` (Opencode only in this file).
pub agent_mode: Option<String>,
/// Permission mode; only `"plan"` is acted on (Claude) in this file.
pub permission_mode: Option<String>,
/// Session to resume/continue, when supported by the agent.
pub session_id: Option<String>,
/// Working directory; defaults to the current dir when `None`.
pub working_dir: Option<PathBuf>,
/// Extra environment variables set on the child process.
pub env: HashMap<String, String>,
}
impl SpawnOptions {
pub fn new(prompt: impl Into<String>) -> Self {
Self {
prompt: prompt.into(),
model: None,
variant: None,
agent_mode: None,
permission_mode: None,
session_id: None,
working_dir: None,
env: HashMap::new(),
}
}
}
/// Captured output of a completed agent process.
#[derive(Debug, Clone)]
pub struct SpawnResult {
/// Process exit status.
pub status: ExitStatus,
/// Stdout decoded lossily as UTF-8.
pub stdout: String,
/// Stderr decoded lossily as UTF-8.
pub stderr: String,
}
/// Errors produced by agent install/resolve/spawn operations.
#[derive(Debug, Error)]
pub enum AgentError {
/// The current OS/arch combination has no release artifacts.
#[error("unsupported platform {os}/{arch}")]
UnsupportedPlatform { os: String, arch: String },
/// The named agent is not handled by this manager.
#[error("unsupported agent {agent}")]
UnsupportedAgent { agent: String },
/// No binary at the managed path and none found on PATH.
#[error("binary not found for {agent}")]
BinaryNotFound { agent: AgentId },
/// Download returned a non-success HTTP status.
#[error("download failed: {url}")]
DownloadFailed { url: Url },
#[error("http error: {0}")]
Http(#[from] reqwest::Error),
#[error("url parse error: {0}")]
UrlParse(#[from] url::ParseError),
#[error("io error: {0}")]
Io(#[from] io::Error),
/// Archive was downloaded but the expected binary could not be extracted.
#[error("extract failed: {0}")]
ExtractFailed(String),
}
/// Extracts a version string from a `--version`-style invocation: the first
/// non-empty (after trimming) line of stdout, falling back to stderr.
fn parse_version_output(output: &std::process::Output) -> Option<String> {
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    stdout
        .lines()
        .chain(stderr.lines())
        .map(str::trim)
        .find(|line| !line.is_empty())
        .map(str::to_owned)
}
/// Runs the Amp CLI once, passing only the flags its `--help` output
/// advertises; retries via `spawn_amp_fallback` when the run fails with
/// stderr that suggests a flag/usage mismatch.
fn spawn_amp(
path: &Path,
working_dir: &Path,
options: &SpawnOptions,
) -> Result<std::process::Output, AgentError> {
// If flag probing fails, assume no optional flags are supported.
let flags = detect_amp_flags(path, working_dir).unwrap_or_default();
let mut args: Vec<&str> = Vec::new();
// `--execute` is preferred over `--print` when both are advertised.
if flags.execute {
args.push("--execute");
} else if flags.print {
args.push("--print");
}
if flags.output_format {
args.push("--output-format");
args.push("stream-json");
}
if flags.dangerously_skip_permissions {
args.push("--dangerously-skip-permissions");
}
let mut command = Command::new(path);
command.current_dir(working_dir);
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--continue").arg(session_id);
}
// Probed flags first, then the prompt as the final positional argument.
command.args(&args).arg(&options.prompt);
for (key, value) in &options.env {
command.env(key, value);
}
let output = command.output().map_err(AgentError::Io)?;
if output.status.success() {
return Ok(output);
}
// Only these stderr markers trigger the fallback; any other failure is
// returned as-is (with its non-success status) for the caller to report.
let stderr = String::from_utf8_lossy(&output.stderr);
if stderr.contains("unknown option")
|| stderr.contains("unknown flag")
|| stderr.contains("User message must be provided")
{
return spawn_amp_fallback(path, working_dir, options);
}
Ok(output)
}
/// Which optional CLI flags an installed Amp binary advertises in `--help`.
/// `Default` (all `false`) is used when probing fails.
#[derive(Debug, Default, Clone, Copy)]
struct AmpFlags {
execute: bool,
print: bool,
output_format: bool,
dangerously_skip_permissions: bool,
}
/// Probes the Amp binary's `--help` output for known flag names.
///
/// Returns `None` only when the `--help` invocation itself cannot be run;
/// otherwise each field reflects a substring match against the combined
/// stdout/stderr text.
fn detect_amp_flags(path: &Path, working_dir: &Path) -> Option<AmpFlags> {
    let help = Command::new(path)
        .current_dir(working_dir)
        .arg("--help")
        .output()
        .ok()?;
    let combined = format!(
        "{}\n{}",
        String::from_utf8_lossy(&help.stdout),
        String::from_utf8_lossy(&help.stderr)
    );
    let has = |flag: &str| combined.contains(flag);
    Some(AmpFlags {
        execute: has("--execute"),
        print: has("--print"),
        output_format: has("--output-format"),
        dangerously_skip_permissions: has("--dangerously-skip-permissions"),
    })
}
/// Last-resort Amp invocation: tries progressively simpler flag sets until
/// one succeeds, returning the first successful output.
///
/// The final attempt is the bare command (no extra flags); if every attempt
/// fails, the bare attempt's output is returned so the caller still sees a
/// status and stderr. (Previously the bare command was executed a second
/// time after the loop, spawning a redundant duplicate process.)
fn spawn_amp_fallback(
    path: &Path,
    working_dir: &Path,
    options: &SpawnOptions,
) -> Result<std::process::Output, AgentError> {
    /// Builds one invocation: common options, `extra_args`, then the prompt.
    fn build_command(
        path: &Path,
        working_dir: &Path,
        options: &SpawnOptions,
        extra_args: &[&str],
    ) -> Command {
        let mut command = Command::new(path);
        command.current_dir(working_dir);
        if let Some(model) = options.model.as_deref() {
            command.arg("--model").arg(model);
        }
        if let Some(session_id) = options.session_id.as_deref() {
            command.arg("--continue").arg(session_id);
        }
        command.args(extra_args);
        command.arg(&options.prompt);
        for (key, value) in &options.env {
            command.env(key, value);
        }
        command
    }

    let attempts: [&[&str]; 5] = [
        &["--execute"],
        &["--print", "--output-format", "stream-json"],
        &["--output-format", "stream-json"],
        &["--dangerously-skip-permissions"],
        &[],
    ];
    let mut last_output = None;
    for extra_args in attempts {
        let output = build_command(path, working_dir, options, extra_args)
            .output()
            .map_err(AgentError::Io)?;
        if output.status.success() {
            return Ok(output);
        }
        last_output = Some(output);
    }
    // `attempts` is non-empty, so a failing output was always recorded.
    Ok(last_output.expect("at least one amp attempt was made"))
}
/// Searches each directory in the `PATH` environment variable for a file
/// named `binary_name`, returning the first match.
fn find_in_path(binary_name: &str) -> Option<PathBuf> {
    let path_var = std::env::var_os("PATH")?;
    std::env::split_paths(&path_var)
        .map(|dir| dir.join(binary_name))
        .find(|candidate| candidate.exists())
}
/// Fetches `url` with a blocking HTTP GET and returns the full body.
///
/// A non-success status becomes `AgentError::DownloadFailed`; transport
/// errors surface via the `From<reqwest::Error>` conversion.
fn download_bytes(url: &Url) -> Result<Vec<u8>, AgentError> {
    let mut response = Client::builder().build()?.get(url.clone()).send()?;
    if response.status().is_success() {
        let mut buffer = Vec::new();
        response.read_to_end(&mut buffer)?;
        Ok(buffer)
    } else {
        Err(AgentError::DownloadFailed { url: url.clone() })
    }
}
/// Downloads the Claude Code binary for `platform` into `path`.
///
/// When `version` is `None`, the latest version string is fetched from the
/// release bucket's `latest` object first.
fn install_claude(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
let version = match version {
Some(version) => version.to_string(),
None => {
let url = Url::parse(
"https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/latest",
)?;
// The `latest` object body is the version string itself.
let text = String::from_utf8(download_bytes(&url)?).map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
text.trim().to_string()
}
};
let platform_segment = match platform {
Platform::LinuxX64 => "linux-x64",
Platform::LinuxX64Musl => "linux-x64-musl",
Platform::LinuxArm64 => "linux-arm64",
Platform::MacosArm64 => "darwin-arm64",
Platform::MacosX64 => "darwin-x64",
};
// Claude releases ship as a raw (unarchived) executable.
let url = Url::parse(&format!(
"https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/{version}/{platform_segment}/claude"
))?;
let bytes = download_bytes(&url)?;
write_executable(path, &bytes)?;
Ok(())
}
/// Downloads the Amp CLI binary for `platform` into `path`.
///
/// When `version` is `None`, the current version is read from
/// `cli-version.txt` in the public assets bucket.
fn install_amp(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
let version = match version {
Some(version) => version.to_string(),
None => {
let url = Url::parse("https://storage.googleapis.com/amp-public-assets-prod-0/cli/cli-version.txt")?;
let text = String::from_utf8(download_bytes(&url)?).map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
text.trim().to_string()
}
};
// Amp publishes no musl-specific build; reuse the glibc linux-x64 artifact.
let platform_segment = match platform {
Platform::LinuxX64 | Platform::LinuxX64Musl => "linux-x64",
Platform::LinuxArm64 => "linux-arm64",
Platform::MacosArm64 => "darwin-arm64",
Platform::MacosX64 => "darwin-x64",
};
// Amp releases ship as a raw (unarchived) executable.
let url = Url::parse(&format!(
"https://storage.googleapis.com/amp-public-assets-prod-0/cli/{version}/amp-{platform_segment}"
))?;
let bytes = download_bytes(&url)?;
write_executable(path, &bytes)?;
Ok(())
}
/// Downloads and extracts the Codex CLI for `platform` into `path`.
///
/// Codex ships as a `.tar.gz` from GitHub releases containing a binary named
/// `codex-<target>`.
fn install_codex(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
// Codex uses musl for both linux variants.
let target = match platform {
Platform::LinuxX64 | Platform::LinuxX64Musl => "x86_64-unknown-linux-musl",
Platform::LinuxArm64 => "aarch64-unknown-linux-musl",
Platform::MacosArm64 => "aarch64-apple-darwin",
Platform::MacosX64 => "x86_64-apple-darwin",
};
let url = match version {
Some(version) => Url::parse(&format!(
"https://github.com/openai/codex/releases/download/{version}/codex-{target}.tar.gz"
))?,
None => Url::parse(&format!(
"https://github.com/openai/codex/releases/latest/download/codex-{target}.tar.gz"
))?,
};
let bytes = download_bytes(&url)?;
let temp_dir = tempfile::tempdir()?;
let cursor = io::Cursor::new(bytes);
let mut archive = tar::Archive::new(GzDecoder::new(cursor));
archive.unpack(temp_dir.path())?;
// The archive's binary is named after the target triple.
let expected = format!("codex-{target}");
let binary = find_file_recursive(temp_dir.path(), &expected)?
.ok_or_else(|| AgentError::ExtractFailed(format!("missing {expected}")))?;
move_executable(&binary, path)?;
Ok(())
}
/// Downloads and extracts the Opencode CLI for `platform` into `path`.
///
/// macOS releases ship as `.zip`, linux releases as `.tar.gz`; both contain
/// a binary named `opencode`. The previous version duplicated the two macOS
/// arms verbatim and needed an `unreachable!` in the linux catch-all; this
/// maps each platform to an artifact segment + archive kind instead.
fn install_opencode(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
    // (artifact name segment, whether the artifact is a zip)
    let (segment, is_zip) = match platform {
        Platform::MacosArm64 => ("darwin-arm64", true),
        Platform::MacosX64 => ("darwin-x64", true),
        Platform::LinuxX64 => ("linux-x64", false),
        Platform::LinuxX64Musl => ("linux-x64-musl", false),
        Platform::LinuxArm64 => ("linux-arm64", false),
    };
    let ext = if is_zip { "zip" } else { "tar.gz" };
    let url = match version {
        Some(version) => Url::parse(&format!(
            "https://github.com/anomalyco/opencode/releases/download/{version}/opencode-{segment}.{ext}"
        ))?,
        None => Url::parse(&format!(
            "https://github.com/anomalyco/opencode/releases/latest/download/opencode-{segment}.{ext}"
        ))?,
    };
    if is_zip {
        install_zip_binary(path, &url, "opencode")
    } else {
        let bytes = download_bytes(&url)?;
        let temp_dir = tempfile::tempdir()?;
        let mut archive = tar::Archive::new(GzDecoder::new(io::Cursor::new(bytes)));
        archive.unpack(temp_dir.path())?;
        let binary = find_file_recursive(temp_dir.path(), "opencode")?
            .ok_or_else(|| AgentError::ExtractFailed("missing opencode".to_string()))?;
        move_executable(&binary, path)?;
        Ok(())
    }
}
/// Downloads a zip archive from `url`, extracts the entry whose file name is
/// exactly `binary_name`, and installs it at `path`.
///
/// The previous suffix check (`file.name().ends_with(binary_name)`) could
/// match unrelated entries such as `notopencode`; this matches the final
/// path component exactly and skips directory entries.
fn install_zip_binary(path: &Path, url: &Url, binary_name: &str) -> Result<(), AgentError> {
    let bytes = download_bytes(url)?;
    let reader = io::Cursor::new(bytes);
    let mut archive =
        zip::ZipArchive::new(reader).map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
    let temp_dir = tempfile::tempdir()?;
    for i in 0..archive.len() {
        let mut file = archive
            .by_index(i)
            .map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
        let name_matches = Path::new(file.name())
            .file_name()
            .and_then(|name| name.to_str())
            .map_or(false, |name| name == binary_name);
        if file.is_dir() || !name_matches {
            continue;
        }
        let out_path = temp_dir.path().join(binary_name);
        let mut out_file = fs::File::create(&out_path)?;
        io::copy(&mut file, &mut out_file)?;
        move_executable(&out_path, path)?;
        return Ok(());
    }
    Err(AgentError::ExtractFailed(format!("missing {binary_name}")))
}
/// Writes `bytes` to `path` (creating parent directories as needed) and
/// marks the file executable.
fn write_executable(path: &Path, bytes: &[u8]) -> Result<(), AgentError> {
    match path.parent() {
        Some(parent) => fs::create_dir_all(parent)?,
        None => {}
    }
    fs::write(path, bytes)?;
    set_executable(path)
}
/// Installs `source` at `dest` and marks it executable.
///
/// NOTE(review): despite the name this copies rather than renames — the
/// source file is left behind (callers pass paths inside temp dirs that are
/// dropped afterwards).
fn move_executable(source: &Path, dest: &Path) -> Result<(), AgentError> {
if let Some(parent) = dest.parent() {
fs::create_dir_all(parent)?;
}
// Remove any existing file first so the copy replaces it cleanly.
if dest.exists() {
fs::remove_file(dest)?;
}
fs::copy(source, dest)?;
set_executable(dest)?;
Ok(())
}
/// Marks `path` executable (mode 0755) on unix platforms.
#[cfg(unix)]
fn set_executable(path: &Path) -> Result<(), AgentError> {
use std::os::unix::fs::PermissionsExt;
let mut perms = fs::metadata(path)?.permissions();
perms.set_mode(0o755);
fs::set_permissions(path, perms)?;
Ok(())
}
/// No-op on non-unix platforms, where there is no executable bit to set.
#[cfg(not(unix))]
fn set_executable(_path: &Path) -> Result<(), AgentError> {
Ok(())
}
/// Depth-first search under `dir` for the first file whose name equals
/// `filename`; directories are descended into, I/O errors propagate.
fn find_file_recursive(dir: &Path, filename: &str) -> Result<Option<PathBuf>, AgentError> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            let found = find_file_recursive(&path, filename)?;
            if found.is_some() {
                return Ok(found);
            }
        } else if path.file_name().and_then(|s| s.to_str()) == Some(filename) {
            return Ok(Some(path));
        }
    }
    Ok(None)
}

View file

@ -1,335 +1 @@
use std::collections::HashMap; pub use sandbox_daemon_agent_credentials::*;
use std::fs;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
/// A single credential extracted from a local agent-CLI config file or the
/// environment.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProviderCredentials {
/// API key or OAuth access token, depending on `auth_type`.
pub api_key: String,
/// Where the credential came from, e.g. "claude-code", "codex",
/// "opencode", or "environment".
pub source: String,
/// Whether `api_key` is a plain API key or an OAuth access token.
pub auth_type: AuthType,
/// Provider name, e.g. "anthropic" or "openai".
pub provider: String,
}
/// Kind of credential held in `ProviderCredentials::api_key`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AuthType {
/// A long-lived provider API key.
ApiKey,
/// An OAuth access token (possibly expiring).
Oauth,
}
/// Credentials gathered across all known sources, bucketed by provider.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ExtractedCredentials {
/// Anthropic credential, if any source provided one.
pub anthropic: Option<ProviderCredentials>,
/// OpenAI credential, if any source provided one.
pub openai: Option<ProviderCredentials>,
/// Credentials for any other provider, keyed by provider name.
pub other: HashMap<String, ProviderCredentials>,
}
/// Options controlling credential extraction.
#[derive(Debug, Clone, Default)]
pub struct CredentialExtractionOptions {
/// Home directory to search; `None` uses the OS home dir.
pub home_dir: Option<PathBuf>,
/// Whether OAuth tokens count as usable credentials.
pub include_oauth: bool,
}
impl CredentialExtractionOptions {
/// Standard options: OS home dir, OAuth tokens included.
///
/// NOTE(review): this differs from the derived `Default`, which leaves
/// `include_oauth` as `false` — callers using `..Default::default()` get
/// different behavior than `new()`. Confirm this asymmetry is intended.
pub fn new() -> Self {
Self {
home_dir: None,
include_oauth: true,
}
}
}
/// Extracts an Anthropic credential from Claude Code's local config files.
///
/// Config files are tried in priority order for an `sk-ant-` API key; if
/// none yields one and `include_oauth` is set, OAuth credential files are
/// tried next (skipping tokens whose `expiresAt` is in the past).
///
/// BUG FIX: the previous code used `read_json_file(&path)?` inside the
/// config loop, so a missing candidate file returned `None` for the whole
/// function immediately — later config paths and the OAuth fallback were
/// never consulted unless the first file existed. Missing/unparseable files
/// are now skipped with `continue`, matching the OAuth loop below.
pub fn extract_claude_credentials(
    options: &CredentialExtractionOptions,
) -> Option<ProviderCredentials> {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let include_oauth = options.include_oauth;
    // Candidate config files, highest priority first.
    let config_paths = [
        home_dir.join(".claude.json.api"),
        home_dir.join(".claude.json"),
        home_dir.join(".claude.json.nathan"),
    ];
    // JSON key paths that may hold an API key inside each config file.
    let key_paths = [
        vec!["primaryApiKey"],
        vec!["apiKey"],
        vec!["anthropicApiKey"],
        vec!["customApiKey"],
    ];
    for path in config_paths {
        let data = match read_json_file(&path) {
            Some(value) => value,
            None => continue,
        };
        for key_path in &key_paths {
            if let Some(key) = read_string_field(&data, key_path) {
                // Only accept values that look like Anthropic API keys.
                if key.starts_with("sk-ant-") {
                    return Some(ProviderCredentials {
                        api_key: key,
                        source: "claude-code".to_string(),
                        auth_type: AuthType::ApiKey,
                        provider: "anthropic".to_string(),
                    });
                }
            }
        }
    }
    if include_oauth {
        let oauth_paths = [
            home_dir.join(".claude").join(".credentials.json"),
            home_dir.join(".claude-oauth-credentials.json"),
        ];
        for path in oauth_paths {
            let data = match read_json_file(&path) {
                Some(value) => value,
                None => continue,
            };
            let access = read_string_field(&data, &["claudeAiOauth", "accessToken"]);
            if let Some(token) = access {
                // Skip tokens with a parseable, already-past expiry.
                if let Some(expires_at) = read_string_field(&data, &["claudeAiOauth", "expiresAt"]) {
                    if is_expired_rfc3339(&expires_at) {
                        continue;
                    }
                }
                return Some(ProviderCredentials {
                    api_key: token,
                    source: "claude-code".to_string(),
                    auth_type: AuthType::Oauth,
                    provider: "anthropic".to_string(),
                });
            }
        }
    }
    None
}
/// Extracts an OpenAI credential from Codex's `~/.codex/auth.json`.
///
/// A non-empty `OPENAI_API_KEY` field wins; otherwise, when `include_oauth`
/// is set, the `tokens.access_token` field is used.
pub fn extract_codex_credentials(
options: &CredentialExtractionOptions,
) -> Option<ProviderCredentials> {
let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
let include_oauth = options.include_oauth;
let path = home_dir.join(".codex").join("auth.json");
let data = read_json_file(&path)?;
if let Some(key) = data.get("OPENAI_API_KEY").and_then(Value::as_str) {
if !key.is_empty() {
return Some(ProviderCredentials {
api_key: key.to_string(),
source: "codex".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
}
if include_oauth {
// NOTE(review): unlike the opencode path, no expiry check is done here
// because auth.json's token entry carries no expiry field this code reads.
if let Some(token) = read_string_field(&data, &["tokens", "access_token"]) {
return Some(ProviderCredentials {
api_key: token,
source: "codex".to_string(),
auth_type: AuthType::Oauth,
provider: "openai".to_string(),
});
}
}
None
}
/// Extracts credentials from opencode's auth store
/// (`~/.local/share/opencode/auth.json`), bucketing them by provider.
///
/// Each top-level key is a provider whose entry is either `type: "api"`
/// (with `key`) or `type: "oauth"` (with `access` and optional `expires`
/// in epoch milliseconds). OAuth entries are honored only when
/// `include_oauth` is set and the token is not expired; entries without an
/// `expires` field are assumed valid. (The previous code duplicated the
/// token-mapping closure in both expiry arms; this folds the expiry test
/// into one flag.)
pub fn extract_opencode_credentials(
    options: &CredentialExtractionOptions,
) -> ExtractedCredentials {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let include_oauth = options.include_oauth;
    let path = home_dir
        .join(".local")
        .join("share")
        .join("opencode")
        .join("auth.json");
    let mut result = ExtractedCredentials::default();
    let data = match read_json_file(&path) {
        Some(value) => value,
        None => return result,
    };
    let obj = match data.as_object() {
        Some(obj) => obj,
        None => return result,
    };
    for (provider_name, value) in obj {
        let config = match value.as_object() {
            Some(config) => config,
            None => continue,
        };
        let auth_type = config
            .get("type")
            .and_then(Value::as_str)
            .unwrap_or("");
        let credentials = if auth_type == "api" {
            config.get("key").and_then(Value::as_str).map(|key| ProviderCredentials {
                api_key: key.to_string(),
                source: "opencode".to_string(),
                auth_type: AuthType::ApiKey,
                provider: provider_name.to_string(),
            })
        } else if auth_type == "oauth" && include_oauth {
            // Expired only when an `expires` timestamp exists and is past.
            let expired = config
                .get("expires")
                .and_then(Value::as_i64)
                .map_or(false, |expires| expires < current_epoch_millis());
            if expired {
                None
            } else {
                config
                    .get("access")
                    .and_then(Value::as_str)
                    .map(|token| ProviderCredentials {
                        api_key: token.to_string(),
                        source: "opencode".to_string(),
                        auth_type: AuthType::Oauth,
                        provider: provider_name.to_string(),
                    })
            }
        } else {
            None
        };
        if let Some(credentials) = credentials {
            match provider_name.as_str() {
                "anthropic" => result.anthropic = Some(credentials),
                "openai" => result.openai = Some(credentials),
                _ => {
                    result.other.insert(provider_name.to_string(), credentials);
                }
            }
        }
    }
    result
}
/// Gathers credentials from every known source with a fixed precedence:
/// environment variables, then Claude Code / Codex config files, then
/// opencode's auth store. Earlier sources win; opencode's extra providers
/// are merged in without overwriting existing entries.
pub fn extract_all_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
let mut result = ExtractedCredentials::default();
// Environment variables take precedence over any on-disk config.
if let Ok(value) = std::env::var("ANTHROPIC_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
} else if let Ok(value) = std::env::var("CLAUDE_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
if let Ok(value) = std::env::var("OPENAI_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
} else if let Ok(value) = std::env::var("CODEX_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
// Fall back to agent-CLI config files only where the env left gaps.
if result.anthropic.is_none() {
result.anthropic = extract_claude_credentials(options);
}
if result.openai.is_none() {
result.openai = extract_codex_credentials(options);
}
let opencode_credentials = extract_opencode_credentials(options);
if result.anthropic.is_none() {
result.anthropic = opencode_credentials.anthropic.clone();
}
if result.openai.is_none() {
result.openai = opencode_credentials.openai.clone();
}
// Existing "other" entries win over opencode's.
for (key, value) in opencode_credentials.other {
result.other.entry(key).or_insert(value);
}
result
}
/// Convenience wrapper: runs the full extraction and returns just the
/// Anthropic API key, if any source provided one.
pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    credentials.anthropic.map(|cred| cred.api_key)
}
/// Convenience wrapper: runs the full extraction and returns just the
/// OpenAI API key, if any source provided one.
pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    credentials.openai.map(|cred| cred.api_key)
}
/// Exports the extracted Anthropic/OpenAI keys into this process's
/// environment (`ANTHROPIC_API_KEY` / `OPENAI_API_KEY`).
///
/// NOTE(review): `std::env::set_var` mutates process-global state and is not
/// safe to call concurrently with other threads reading the environment.
pub fn set_credentials_as_env_vars(credentials: &ExtractedCredentials) {
if let Some(cred) = &credentials.anthropic {
std::env::set_var("ANTHROPIC_API_KEY", &cred.api_key);
}
if let Some(cred) = &credentials.openai {
std::env::set_var("OPENAI_API_KEY", &cred.api_key);
}
}
/// Reads and parses a JSON file; a missing/unreadable file or malformed
/// JSON both yield `None`.
fn read_json_file(path: &Path) -> Option<Value> {
    fs::read_to_string(path)
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
}
/// Walks `path` keys into a JSON value and returns the string at the leaf;
/// any missing key or non-string leaf yields `None`.
fn read_string_field(value: &Value, path: &[&str]) -> Option<String> {
    path.iter()
        .try_fold(value, |node, key| node.get(*key))?
        .as_str()
        .map(String::from)
}
/// The OS home directory, falling back to the current directory (".") when
/// it cannot be determined.
fn default_home_dir() -> PathBuf {
dirs::home_dir().unwrap_or_else(|| PathBuf::from("."))
}
/// Current UTC time as milliseconds since the unix epoch, matching the
/// units of opencode's `expires` field.
fn current_epoch_millis() -> i64 {
let now = OffsetDateTime::now_utc();
(now.unix_timestamp() * 1000) + (now.millisecond() as i64)
}
/// Whether an RFC 3339 timestamp lies in the past.
///
/// An unparseable timestamp returns `false` (treated as not expired) so a
/// malformed expiry does not discard an otherwise-usable token.
fn is_expired_rfc3339(value: &str) -> bool {
match OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339) {
Ok(expiry) => expiry < OffsetDateTime::now_utc(),
Err(_) => false,
}
}

View file

@ -1,5 +1,4 @@
//! Sandbox daemon core utilities. //! Sandbox daemon core utilities.
pub mod agents;
pub mod credentials; pub mod credentials;
pub mod router; pub mod router;

View file

@ -1,7 +1,11 @@
use std::collections::HashMap; use std::collections::HashMap;
use sandbox_daemon_core::agents::{AgentId, AgentManager, InstallOptions, SpawnOptions}; use sandbox_daemon_agent_management::agents::{
use sandbox_daemon_core::credentials::{extract_all_credentials, CredentialExtractionOptions}; AgentError, AgentId, AgentManager, InstallOptions, SpawnOptions,
};
use sandbox_daemon_agent_management::credentials::{
extract_all_credentials, CredentialExtractionOptions,
};
fn build_env() -> HashMap<String, String> { fn build_env() -> HashMap<String, String> {
let options = CredentialExtractionOptions::new(); let options = CredentialExtractionOptions::new();
@ -21,6 +25,10 @@ fn amp_configured() -> bool {
home.join(".amp").join("config.json").exists() home.join(".amp").join("config.json").exists()
} }
fn prompt_ok(label: &str) -> String {
format!("Respond with exactly the text {label} and nothing else.")
}
#[test] #[test]
fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>> { fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>> {
let temp_dir = tempfile::tempdir()?; let temp_dir = tempfile::tempdir()?;
@ -32,11 +40,19 @@ fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>>
for agent in agents { for agent in agents {
let install = manager.install(agent, InstallOptions::default())?; let install = manager.install(agent, InstallOptions::default())?;
assert!(install.path.exists(), "expected install for {agent}"); assert!(install.path.exists(), "expected install for {agent}");
assert!(manager.is_installed(agent), "expected is_installed for {agent}");
manager.install(
agent,
InstallOptions {
reinstall: true,
version: None,
},
)?;
let version = manager.version(agent)?; let version = manager.version(agent)?;
assert!(version.is_some(), "expected version for {agent}"); assert!(version.is_some(), "expected version for {agent}");
if agent != AgentId::Amp || amp_configured() { if agent != AgentId::Amp || amp_configured() {
let mut spawn = SpawnOptions::new("Respond with exactly the text OK and nothing else."); let mut spawn = SpawnOptions::new(prompt_ok("OK"));
spawn.env = env.clone(); spawn.env = env.clone();
let result = manager.spawn(agent, spawn)?; let result = manager.spawn(agent, spawn)?;
assert!( assert!(
@ -44,8 +60,53 @@ fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>>
"spawn failed for {agent}: {}", "spawn failed for {agent}: {}",
result.stderr result.stderr
); );
let output = format!("{}{}", result.stdout, result.stderr); assert!(
!result.events.is_empty(),
"expected events for {agent} but got none"
);
assert!(
result.session_id.is_some(),
"expected session id for {agent}"
);
let combined = format!("{}{}", result.stdout, result.stderr);
let output = result.result.clone().unwrap_or(combined);
assert!(output.contains("OK"), "expected OK for {agent}, got: {output}"); assert!(output.contains("OK"), "expected OK for {agent}, got: {output}");
if agent == AgentId::Claude || agent == AgentId::Opencode || (agent == AgentId::Amp && amp_configured()) {
let mut resume = SpawnOptions::new(prompt_ok("OK2"));
resume.env = env.clone();
resume.session_id = result.session_id.clone();
let resumed = manager.spawn(agent, resume)?;
assert!(
resumed.status.success(),
"resume spawn failed for {agent}: {}",
resumed.stderr
);
let combined = format!("{}{}", resumed.stdout, resumed.stderr);
let output = resumed.result.clone().unwrap_or(combined);
assert!(output.contains("OK2"), "expected OK2 for {agent}, got: {output}");
} else if agent == AgentId::Codex {
let mut resume = SpawnOptions::new(prompt_ok("OK2"));
resume.env = env.clone();
resume.session_id = result.session_id.clone();
let err = manager.spawn(agent, resume).expect_err("expected resume error for codex");
assert!(matches!(err, AgentError::ResumeUnsupported { .. }));
}
if agent == AgentId::Claude || agent == AgentId::Codex {
let mut plan = SpawnOptions::new(prompt_ok("OK3"));
plan.env = env.clone();
plan.permission_mode = Some("plan".to_string());
let planned = manager.spawn(agent, plan)?;
assert!(
planned.status.success(),
"plan spawn failed for {agent}: {}",
planned.stderr
);
let combined = format!("{}{}", planned.stdout, planned.stderr);
let output = planned.result.clone().unwrap_or(combined);
assert!(output.contains("OK3"), "expected OK3 for {agent}, got: {output}");
}
} }
} }

View file

@ -7,4 +7,6 @@ edition = "2021"
sandbox-daemon-agent-schema = { path = "../agent-schema" } sandbox-daemon-agent-schema = { path = "../agent-schema" }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
schemars = "0.8"
thiserror = "1.0" thiserror = "1.0"
utoipa = { version = "4.2", features = ["axum_extras"] }

File diff suppressed because it is too large Load diff

1
frontend/AGENTS.md Symbolic link
View file

@ -0,0 +1 @@
CLAUDE.md

150
frontend/CLAUDE.md Normal file
View file

@ -0,0 +1,150 @@
## Frontend Style Guide
Examples should follow these design conventions:
**Color Palette (Dark Theme)**
- Primary accent: `#ff4f00` (orange) for interactive elements and highlights
- Background: `#000000` (main), `#1c1c1e` (cards/containers)
- Borders: `#2c2c2e`
- Input backgrounds: `#2c2c2e` with border `#3a3a3c`
- Text: `#ffffff` (primary), `#8e8e93` (secondary/muted)
- Success: `#30d158` (green)
- Warning: `#ff4f00` (orange)
- Danger: `#ff3b30` (red)
- Purple: `#bf5af2` (for special states like rollback)
**Typography**
- UI: System fonts (`-apple-system, BlinkMacSystemFont, 'Segoe UI', 'Inter', Roboto, sans-serif`)
- Code: `ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace`
- Sizes: 14-16px body, 12-13px labels, large numbers 48-72px
**Sizing & Spacing**
- Border radius: 8px (cards/containers/buttons), 6px (inputs/badges)
- Section padding: 20-24px
- Gap between items: 12px
- Transitions: 200ms ease for all interactive states
**Button Styles**
- Padding: 12px 20px
- Border: none
- Border radius: 8px
- Font size: 14px, weight 600
- Hover: none (no hover state)
- Disabled: 50% opacity, `cursor: not-allowed`
**CSS Approach**
- Plain CSS in `<style>` tag within index.html (no preprocessors or Tailwind)
- Class-based selectors with state modifiers (`.active`, `.complete`, `.running`)
- Focus states use primary accent color (`#ff4f00`) for borders with subtle box-shadow
**Spacing System**
- Base unit: 4px
- Scale: 4px, 8px, 12px, 16px, 20px, 24px, 32px, 48px
- Component internal padding: 12-16px
- Section/card padding: 20px
- Card header padding: 16px 20px
- Gap between related items: 8-12px
- Gap between sections: 24-32px
- Margin between major blocks: 32px
**Iconography**
- Icon library: [Lucide](https://lucide.dev/) (React: `lucide-react`)
- Standard sizes: 16px (inline/small), 20px (buttons/UI), 24px (standalone/headers)
- Icon color: inherit from parent text color, or use `currentColor`
- Icon-only buttons must include `aria-label` for accessibility
- Stroke width: 2px (default), 1.5px for smaller icons
**Component Patterns**
*Buttons*
- Primary: `#ff4f00` background, white text
- Secondary: `#2c2c2e` background, white text
- Ghost: transparent background, `#ff4f00` text
- Danger: `#ff3b30` background, white text
- Success: `#30d158` background, white text
- Disabled: 50% opacity, `cursor: not-allowed`
*Form Inputs*
- Background: `#2c2c2e`
- Border: 1px solid `#3a3a3c`
- Border radius: 8px
- Padding: 12px 16px
- Focus: border-color `#ff4f00`, box-shadow `0 0 0 3px rgba(255, 79, 0, 0.2)`
- Placeholder text: `#6e6e73`
*Cards/Containers*
- Background: `#1c1c1e`
- Border: 1px solid `#2c2c2e`
- Border radius: 8px
- Padding: 20px
- Box shadow: `0 1px 3px rgba(0, 0, 0, 0.3)`
- Header style (when applicable):
- Background: `#2c2c2e`
- Padding: 16px 20px
- Font size: 18px, weight 600
- Border bottom: 1px solid `#2c2c2e`
- Border radius: 8px 8px 0 0 (top corners only)
- Negative margin to align with card edges: `-20px -20px 20px -20px`
*Modals/Overlays*
- Backdrop: `rgba(0, 0, 0, 0.75)`
- Modal background: `#1c1c1e`
- Border radius: 8px
- Max-width: 480px (small), 640px (medium), 800px (large)
- Padding: 24px
- Close button: top-right, 8px from edges
*Lists*
- Item padding: 12px 16px
- Dividers: 1px solid `#2c2c2e`
- Hover background: `#2c2c2e`
- Selected/active background: `rgba(255, 79, 0, 0.15)`
*Badges/Tags*
- Padding: 4px 8px
- Border radius: 6px
- Font size: 12px
- Font weight: 500
*Tabs*
- Container: `border-bottom: 1px solid #2c2c2e`, flex-wrap for overflow
- Tab: `padding: 12px 16px`, no background, `border-radius: 0`
- Tab border: `border-bottom: 2px solid transparent`, `margin-bottom: -1px`
- Tab text: `#8e8e93` (muted), font-weight 600, font-size 14px
- Active tab: `color: #ffffff`, `border-bottom-color: #ff4f00`
- Hover: none (no hover state)
- Transition: `color 200ms ease, border-color 200ms ease`
**UI States**
*Loading States*
- Spinner: 20px for inline, 32px for page-level
- Skeleton placeholders: `#2c2c2e` background with subtle pulse animation
- Loading text: "Loading..." in muted color
- Button loading: show spinner, disable interaction, keep button width stable
*Empty States*
- Center content vertically and horizontally
- Icon: 48px, muted color (`#6e6e73`)
- Heading: 18px, primary text color
- Description: 14px, muted color
- Optional action button below description
*Error States*
- Inline errors: `#ff3b30` text below input, 12px font size
- Error banners: `#ff3b30` left border (4px), `rgba(255, 59, 48, 0.1)` background
- Form validation: highlight input border in `#ff3b30`
- Error icon: Lucide `AlertCircle` or `XCircle`
*Disabled States*
- Opacity: 50%
- Cursor: `not-allowed`
- No hover/focus effects
- Preserve layout (don't collapse or hide)
*Success States*
- Color: `#30d158`
- Icon: Lucide `CheckCircle` or `Check`
- Toast/banner: `rgba(48, 209, 88, 0.1)` background with green left border

View file

@ -0,0 +1,565 @@
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Sandbox Daemon Console</title>
    <style>
      /* Design tokens — dark theme palette, spacing scale, radii, motion. */
      :root {
        color-scheme: dark;
        --bg: #000000;
        --surface: #1c1c1e;
        --surface-2: #2c2c2e;
        --border: #2c2c2e;
        --input-bg: #2c2c2e;
        --input-border: #3a3a3c;
        --text: #ffffff;
        --muted: #8e8e93;
        --accent: #ff4f00;
        --success: #30d158;
        /* NOTE(review): warning intentionally reuses the accent color. */
        --warning: #ff4f00;
        --danger: #ff3b30;
        --purple: #bf5af2;
        --shadow: 0 1px 3px rgba(0, 0, 0, 0.3);
        --radius: 8px;
        --radius-sm: 6px;
        --transition: 200ms ease;
        --space-1: 4px;
        --space-2: 8px;
        --space-3: 12px;
        --space-4: 16px;
        --space-5: 20px;
        --space-6: 24px;
        --space-7: 32px;
        --space-8: 48px;
      }
      /* Base reset and page chrome. */
      * {
        box-sizing: border-box;
      }
      body {
        margin: 0;
        min-height: 100vh;
        background: var(--bg);
        color: var(--text);
        font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Inter", Roboto, sans-serif;
        font-size: 15px;
      }
      /* Fixed decorative gradient wash behind the whole app. */
      body::before {
        content: "";
        position: fixed;
        inset: 0;
        background:
          radial-gradient(circle at top right, rgba(255, 79, 0, 0.12), transparent 45%),
          radial-gradient(circle at 20% 30%, rgba(191, 90, 242, 0.08), transparent 40%),
          linear-gradient(120deg, rgba(255, 255, 255, 0.04), transparent 60%);
        pointer-events: none;
        z-index: 0;
      }
      /* React mount point sits above the gradient layer. */
      #root {
        position: relative;
        z-index: 1;
      }
      a {
        color: var(--accent);
        text-decoration: none;
      }
      /* App shell and header. */
      .app {
        min-height: 100vh;
        padding: var(--space-7) var(--space-7) var(--space-8);
        display: flex;
        flex-direction: column;
        gap: var(--space-7);
      }
      .app-header {
        display: flex;
        align-items: center;
        justify-content: space-between;
        gap: var(--space-4);
      }
      .brand {
        display: flex;
        align-items: center;
        gap: var(--space-3);
        font-size: 18px;
        font-weight: 600;
        letter-spacing: 0.4px;
      }
      .brand-mark {
        width: 14px;
        height: 14px;
        border-radius: 50%;
        background: var(--accent);
        box-shadow: 0 0 18px rgba(255, 79, 0, 0.4);
      }
      /* Status pills (connection state in the header). */
      .status-pill {
        display: inline-flex;
        align-items: center;
        gap: var(--space-2);
        padding: 6px 10px;
        border-radius: 999px;
        background: var(--surface-2);
        border: 1px solid var(--border);
        color: var(--muted);
        font-size: 12px;
        font-weight: 600;
      }
      .status-pill.success {
        color: var(--success);
        border-color: rgba(48, 209, 88, 0.4);
      }
      .status-pill.warning {
        color: var(--warning);
        border-color: rgba(255, 79, 0, 0.5);
      }
      .status-dot {
        width: 8px;
        height: 8px;
        border-radius: 50%;
        background: currentColor;
      }
      /* Main layout grid and panels. */
      .grid {
        display: grid;
        grid-template-columns: repeat(3, minmax(0, 1fr));
        gap: var(--space-6);
      }
      .panel {
        background: var(--surface);
        border: 1px solid var(--border);
        border-radius: var(--radius);
        padding: var(--space-6);
        box-shadow: var(--shadow);
        display: flex;
        flex-direction: column;
        gap: var(--space-4);
      }
      /* Negative margins pull the header flush to the panel edges. */
      .panel-header {
        background: var(--surface-2);
        margin: calc(var(--space-6) * -1) calc(var(--space-6) * -1) var(--space-4);
        padding: 16px 20px;
        border-radius: var(--radius) var(--radius) 0 0;
        border-bottom: 1px solid var(--border);
        font-size: 16px;
        font-weight: 600;
        display: flex;
        align-items: center;
        justify-content: space-between;
        gap: var(--space-3);
      }
      .panel-body {
        display: flex;
        flex-direction: column;
        gap: var(--space-3);
      }
      /* Text utilities. */
      .muted {
        color: var(--muted);
      }
      .mono {
        font-family: ui-monospace, SFMono-Regular, "SF Mono", Consolas, monospace;
      }
      .label {
        font-size: 12px;
        color: var(--muted);
        text-transform: uppercase;
        letter-spacing: 0.08em;
      }
      /* Form controls. */
      .field {
        display: flex;
        flex-direction: column;
        gap: var(--space-2);
      }
      .input,
      .textarea,
      .select {
        width: 100%;
        background: var(--input-bg);
        border: 1px solid var(--input-border);
        color: var(--text);
        border-radius: var(--radius);
        padding: 12px 16px;
        font-size: 14px;
        outline: none;
        transition: border-color var(--transition), box-shadow var(--transition);
      }
      .textarea {
        min-height: 120px;
        resize: vertical;
      }
      .input:focus,
      .textarea:focus,
      .select:focus {
        border-color: var(--accent);
        box-shadow: 0 0 0 3px rgba(255, 79, 0, 0.2);
      }
      .input::placeholder,
      .textarea::placeholder {
        color: #6e6e73;
      }
      /* Buttons and variants. */
      .button {
        display: inline-flex;
        align-items: center;
        justify-content: center;
        gap: var(--space-2);
        padding: 12px 20px;
        border-radius: var(--radius);
        border: none;
        font-size: 14px;
        font-weight: 600;
        cursor: pointer;
        transition: opacity var(--transition);
        background: var(--surface-2);
        color: var(--text);
      }
      .button.primary {
        background: var(--accent);
        color: #ffffff;
      }
      .button.secondary {
        background: var(--surface-2);
        color: #ffffff;
      }
      .button.ghost {
        background: transparent;
        color: var(--accent);
      }
      .button.danger {
        background: var(--danger);
        color: #ffffff;
      }
      .button.success {
        background: var(--success);
        color: #ffffff;
      }
      .button:disabled {
        opacity: 0.5;
        cursor: not-allowed;
      }
      .button-icon {
        width: 16px;
        height: 16px;
      }
      /* Small layout helpers. */
      .inline-row {
        display: flex;
        align-items: center;
        gap: var(--space-3);
        flex-wrap: wrap;
      }
      .stack {
        display: flex;
        flex-direction: column;
        gap: var(--space-2);
      }
      /* Inline pills / badges. */
      .pill {
        display: inline-flex;
        align-items: center;
        gap: var(--space-2);
        padding: 4px 8px;
        border-radius: var(--radius-sm);
        font-size: 12px;
        font-weight: 600;
        background: rgba(255, 79, 0, 0.12);
        color: var(--accent);
      }
      .pill.success {
        background: rgba(48, 209, 88, 0.15);
        color: var(--success);
      }
      .pill.danger {
        background: rgba(255, 59, 48, 0.15);
        color: var(--danger);
      }
      .pill.neutral {
        background: rgba(255, 255, 255, 0.08);
        color: var(--muted);
      }
      .divider {
        height: 1px;
        width: 100%;
        background: var(--border);
      }
      /* Cards and code blocks. */
      .card-list {
        display: flex;
        flex-direction: column;
        gap: var(--space-3);
      }
      .card {
        border: 1px solid var(--border);
        border-radius: var(--radius);
        padding: var(--space-4);
        background: rgba(28, 28, 30, 0.6);
        display: flex;
        flex-direction: column;
        gap: var(--space-3);
      }
      .card-title {
        font-weight: 600;
        font-size: 15px;
      }
      .card-meta {
        font-size: 12px;
        color: var(--muted);
      }
      .code-block {
        background: #0b0b0c;
        border: 1px solid var(--border);
        border-radius: var(--radius);
        padding: var(--space-3);
        font-size: 12px;
        line-height: 1.5;
        overflow-x: auto;
      }
      /* Event feed list (scrollable). */
      .event-list {
        display: flex;
        flex-direction: column;
        gap: var(--space-3);
        max-height: 520px;
        overflow-y: auto;
      }
      .event-item {
        border: 1px solid var(--border);
        border-radius: var(--radius);
        padding: var(--space-3);
        background: rgba(28, 28, 30, 0.7);
        display: flex;
        flex-direction: column;
        gap: var(--space-2);
      }
      .event-title {
        display: flex;
        align-items: center;
        justify-content: space-between;
        font-size: 13px;
      }
      .event-type {
        font-weight: 600;
      }
      .event-time {
        color: var(--muted);
        font-size: 11px;
      }
      /* Request log list (scrollable). */
      .log-list {
        display: flex;
        flex-direction: column;
        gap: var(--space-2);
        max-height: 360px;
        overflow-y: auto;
      }
      .log-item {
        display: grid;
        grid-template-columns: auto 1fr auto;
        gap: var(--space-3);
        align-items: center;
        padding: var(--space-3);
        border: 1px solid var(--border);
        border-radius: var(--radius);
        background: rgba(28, 28, 30, 0.7);
        font-size: 12px;
      }
      .log-method {
        font-weight: 600;
        color: var(--accent);
      }
      .log-url {
        word-break: break-all;
        color: var(--muted);
      }
      .log-status {
        font-size: 12px;
        font-weight: 600;
      }
      .log-status.ok {
        color: var(--success);
      }
      .log-status.error {
        color: var(--danger);
      }
      .copy-button {
        background: transparent;
        border: 1px solid var(--border);
        color: var(--text);
        border-radius: var(--radius-sm);
        padding: 6px 10px;
        cursor: pointer;
      }
      /* Error / success banners. */
      .banner {
        border-left: 4px solid var(--danger);
        background: rgba(255, 59, 48, 0.1);
        padding: var(--space-3);
        border-radius: var(--radius-sm);
        color: var(--danger);
        font-size: 13px;
      }
      .success-banner {
        border-left: 4px solid var(--success);
        background: rgba(48, 209, 88, 0.1);
        padding: var(--space-3);
        border-radius: var(--radius-sm);
        color: var(--success);
        font-size: 13px;
      }
      /* Loading spinner and entrance animation. */
      .spinner {
        width: 20px;
        height: 20px;
        border-radius: 50%;
        border: 2px solid rgba(255, 255, 255, 0.2);
        border-top: 2px solid var(--accent);
        animation: spin 1s linear infinite;
      }
      @keyframes spin {
        to {
          transform: rotate(360deg);
        }
      }
      @keyframes rise {
        from {
          opacity: 0;
          transform: translateY(12px);
        }
        to {
          opacity: 1;
          transform: translateY(0);
        }
      }
      .reveal {
        animation: rise 0.6s ease both;
      }
      /* Connect (pre-session) screen. */
      .connect-screen {
        max-width: 980px;
        margin: 0 auto;
        display: grid;
        grid-template-columns: repeat(2, minmax(0, 1fr));
        gap: var(--space-6);
      }
      .connect-hero {
        display: flex;
        flex-direction: column;
        gap: var(--space-4);
        padding: var(--space-6);
        border-radius: var(--radius);
        border: 1px solid var(--border);
        background: rgba(28, 28, 30, 0.6);
      }
      .hero-title {
        font-size: 28px;
        font-weight: 600;
        line-height: 1.1;
      }
      .hero-subtitle {
        color: var(--muted);
        font-size: 14px;
      }
      .callout {
        padding: var(--space-3);
        border-radius: var(--radius);
        border: 1px dashed rgba(255, 79, 0, 0.4);
        color: var(--accent);
        font-size: 13px;
      }
      .tag-list {
        display: flex;
        flex-wrap: wrap;
        gap: var(--space-2);
      }
      /* Responsive breakpoints: 3 → 2 → 1 columns, tighter padding on phones. */
      @media (max-width: 1200px) {
        .grid {
          grid-template-columns: minmax(0, 1fr) minmax(0, 1fr);
        }
      }
      @media (max-width: 980px) {
        .grid {
          grid-template-columns: minmax(0, 1fr);
        }
        .connect-screen {
          grid-template-columns: minmax(0, 1fr);
        }
      }
      @media (max-width: 720px) {
        .app {
          padding: var(--space-6);
        }
        .panel-header {
          flex-direction: column;
          align-items: flex-start;
        }
      }
    </style>
  </head>
  <body>
    <!-- React mount point; the app is bootstrapped by /src/main.tsx. -->
    <div id="root"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>

View file

@ -0,0 +1,24 @@
{
"name": "@sandbox-daemon/web",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"devDependencies": {
"@sandbox-daemon/typescript-sdk": "workspace:*",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@vitejs/plugin-react": "^4.3.1",
"typescript": "^5.7.3",
"vite": "^5.4.7"
},
"dependencies": {
"lucide-react": "^0.469.0",
"react": "^18.3.1",
"react-dom": "^18.3.1"
}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,9 @@
import React from "react";
import ReactDOM from "react-dom/client";
import App from "./App";

// Entry point: mount the console app into the #root node shipped by index.html
// (the "!" assertion is safe because index.html always contains that element).
// StrictMode enables extra development-only checks; it is a no-op in production.
ReactDOM.createRoot(document.getElementById("root")!).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>
);

View file

@ -0,0 +1 @@
/// <reference types="vite/client" />

View file

@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2020",
"useDefineForClassFields": true,
"lib": ["ES2020", "DOM", "DOM.Iterable"],
"module": "ESNext",
"skipLibCheck": true,
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx",
"strict": true
},
"include": ["src"]
}

View file

@ -0,0 +1,10 @@
{
"compilerOptions": {
"composite": true,
"skipLibCheck": true,
"module": "ESNext",
"moduleResolution": "Bundler",
"allowSyntheticDefaultImports": true
},
"include": ["vite.config.ts"]
}

View file

@ -0,0 +1,9 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";

// Vite configuration for the console web app.
export default defineConfig({
  plugins: [react()],
  server: {
    // NOTE(review): pinned dev-server port (Vite's default) — presumably so
    // other tooling can assume a stable origin; confirm before changing.
    port: 5173
  }
});

13
package.json Normal file
View file

@ -0,0 +1,13 @@
{
"name": "sandbox-daemon-workspace",
"private": true,
"packageManager": "pnpm@9.15.0",
"scripts": {
"build": "turbo run build",
"dev": "turbo run dev --parallel",
"generate": "turbo run generate"
},
"devDependencies": {
"turbo": "^2.4.0"
}
}

4
pnpm-workspace.yaml Normal file
View file

@ -0,0 +1,4 @@
# pnpm workspace membership: package.json files under these globs are linked
# together and resolvable via the "workspace:*" protocol.
packages:
  - "frontend/packages/*"
  - "sdks/*"
  - "resources/agent-schemas"

View file

@ -0,0 +1,87 @@
#!/bin/sh
# Installer for the sandbox-daemon release binary.
#
# Detects the current OS/architecture, downloads the matching artifact from
# the release host, and installs it into BIN_DIR, escalating with sudo only
# when the target location is not writable by the current user.
#
# Environment:
#   SANDBOX_DAEMON_VERSION   version to install (defaults to the templated value)
#   SANDBOX_DAEMON_BASE_URL  release download host
#   BIN_DIR                  install directory (default: /usr/local/bin)
# shellcheck enable=add-default-case
# shellcheck enable=avoid-nullary-conditions
# shellcheck enable=check-unassigned-uppercase
# shellcheck enable=deprecate-which
# shellcheck enable=quote-safe-variables
# shellcheck enable=require-variable-braces
set -eu

# Work in a fresh scratch directory so leftovers from an earlier run cannot
# interfere with the download.
WORK_DIR="/tmp/sandbox_daemon_install"
rm -rf "$WORK_DIR"
mkdir -p "$WORK_DIR"
cd "$WORK_DIR"

SANDBOX_DAEMON_VERSION="${SANDBOX_DAEMON_VERSION:-__VERSION__}"
SANDBOX_DAEMON_BASE_URL="${SANDBOX_DAEMON_BASE_URL:-https://releases.rivet.dev}"

# Map uname output to a release artifact name. Only platforms the release
# pipeline actually builds are accepted.
UNAME="$(uname -s)"
ARCH="$(uname -m)"
if [ "$(printf '%s' "$UNAME" | cut -c 1-6)" = "Darwin" ]; then
  case "$ARCH" in
    x86_64)
      FILE_NAME="sandbox-daemon-x86_64-apple-darwin"
      ;;
    arm64)
      FILE_NAME="sandbox-daemon-aarch64-apple-darwin"
      ;;
    *)
      echo "Unknown arch $ARCH" 1>&2
      exit 1
      ;;
  esac
elif [ "$(printf '%s' "$UNAME" | cut -c 1-5)" = "Linux" ]; then
  if [ "$ARCH" = "x86_64" ]; then
    FILE_NAME="sandbox-daemon-x86_64-unknown-linux-musl"
  else
    echo "Unsupported Linux arch $ARCH" 1>&2
    exit 1
  fi
else
  echo "Unable to determine platform" 1>&2
  exit 1
fi

# Default BIN_DIR when unset or empty. ${VAR:-default} is safe under `set -u`,
# so the original `set +u` / `set -u` toggle is no longer needed.
BIN_DIR="${BIN_DIR:-/usr/local/bin}"
INSTALL_PATH="$BIN_DIR/sandbox-daemon"

# Create BIN_DIR if missing. Walk up to the nearest existing ancestor to
# decide whether sudo is required for the mkdir.
if [ ! -d "$BIN_DIR" ]; then
  CHECK_DIR="$BIN_DIR"
  while [ ! -d "$CHECK_DIR" ] && [ "$CHECK_DIR" != "/" ]; do
    CHECK_DIR=$(dirname "$CHECK_DIR")
  done
  if [ ! -w "$CHECK_DIR" ]; then
    echo "> Creating directory $BIN_DIR (requires sudo)"
    sudo mkdir -p "$BIN_DIR"
  else
    echo "> Creating directory $BIN_DIR"
    mkdir -p "$BIN_DIR"
  fi
fi

URL="$SANDBOX_DAEMON_BASE_URL/sandbox-daemon/${SANDBOX_DAEMON_VERSION}/${FILE_NAME}"
echo "> Downloading $URL"
# -f makes curl fail on HTTP errors instead of saving the error page as the binary.
curl -fsSL "$URL" -o sandbox-daemon
chmod +x sandbox-daemon

if [ ! -w "$BIN_DIR" ]; then
  echo "> Installing sandbox-daemon to $INSTALL_PATH (requires sudo)"
  sudo mv ./sandbox-daemon "$INSTALL_PATH"
else
  echo "> Installing sandbox-daemon to $INSTALL_PATH"
  mv ./sandbox-daemon "$INSTALL_PATH"
fi

# Warn if BIN_DIR is not on PATH. The expansion inside the case pattern is
# quoted so glob metacharacters in BIN_DIR cannot alter the match.
case ":$PATH:" in
  *":$BIN_DIR":*) ;;
  *)
    echo "WARNING: $BIN_DIR is not in \$PATH"
    echo "For instructions on how to add it to your PATH, visit:"
    echo "https://opensource.com/article/17/6/set-path-linux"
    ;;
esac

echo "sandbox-daemon installed successfully."

View file

@ -0,0 +1,17 @@
{
"name": "sandbox-daemon-typescript",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"generate:openapi": "cargo run -p sandbox-daemon-openapi-gen -- --out src/generated/openapi.json",
"generate:types": "openapi-typescript src/generated/openapi.json -o src/generated/openapi.ts",
"generate": "npm run generate:openapi && npm run generate:types",
"build": "tsc -p tsconfig.json"
},
"devDependencies": {
"@types/node": "^22.0.0",
"openapi-typescript": "^6.7.0",
"typescript": "^5.7.0"
}
}

View file

@ -0,0 +1,254 @@
import type { components } from "./generated/openapi";

// Request/response types re-exported as flat aliases of the generated OpenAPI
// schema, so SDK consumers never have to import from ./generated directly.
export type AgentInstallRequest = components["schemas"]["AgentInstallRequest"];
export type AgentModeInfo = components["schemas"]["AgentModeInfo"];
export type AgentModesResponse = components["schemas"]["AgentModesResponse"];
export type AgentInfo = components["schemas"]["AgentInfo"];
export type AgentListResponse = components["schemas"]["AgentListResponse"];
export type CreateSessionRequest = components["schemas"]["CreateSessionRequest"];
export type CreateSessionResponse = components["schemas"]["CreateSessionResponse"];
export type MessageRequest = components["schemas"]["MessageRequest"];
export type EventsQuery = components["schemas"]["EventsQuery"];
export type EventsResponse = components["schemas"]["EventsResponse"];
export type QuestionReplyRequest = components["schemas"]["QuestionReplyRequest"];
export type PermissionReplyRequest = components["schemas"]["PermissionReplyRequest"];
export type PermissionReply = components["schemas"]["PermissionReply"];
export type ProblemDetails = components["schemas"]["ProblemDetails"];
export type UniversalEvent = components["schemas"]["UniversalEvent"];
// All daemon endpoints are versioned under this path prefix.
const API_PREFIX = "/v1";

/** Options for constructing a SandboxDaemonClient. */
export interface SandboxDaemonClientOptions {
  /** Daemon origin, e.g. "http://127.0.0.1:8080"; a trailing slash is stripped. */
  baseUrl: string;
  /** Optional bearer token sent as the Authorization header on every request. */
  token?: string;
  /** Custom fetch implementation; defaults to globalThis.fetch. */
  fetch?: typeof fetch;
  /** Default headers applied to every request (per-request headers win). */
  headers?: HeadersInit;
}
/**
 * Error thrown for any non-2xx daemon response. Carries the HTTP status, the
 * parsed RFC 7807 problem document when the body contained one, and the raw
 * Response for further inspection by the caller.
 */
export class SandboxDaemonError extends Error {
  readonly status: number;
  readonly problem?: ProblemDetails;
  readonly response: Response;
  constructor(status: number, problem: ProblemDetails | undefined, response: Response) {
    // Prefer the problem document's title as the message when one was parsed.
    super(problem?.title ?? `Request failed with status ${status}`);
    this.name = "SandboxDaemonError";
    this.status = status;
    this.problem = problem;
    this.response = response;
  }
}
// Scalar values accepted in query strings; null/undefined entries are omitted.
type QueryValue = string | number | boolean | null | undefined;
// Per-request options consumed by the private request helpers.
type RequestOptions = {
  query?: Record<string, QueryValue>;
  body?: unknown;
  headers?: HeadersInit;
  accept?: string;
};
/**
 * Typed HTTP client for the sandbox daemon REST API.
 *
 * All methods throw SandboxDaemonError for non-2xx responses. Headers are
 * layered as: constructor defaults, then Authorization/Accept/Content-Type,
 * then per-request headers (which therefore override everything, including
 * Authorization, when a caller supplies them).
 */
export class SandboxDaemonClient {
  private readonly baseUrl: string;
  private readonly token?: string;
  private readonly fetcher: typeof fetch;
  private readonly defaultHeaders?: HeadersInit;

  constructor(options: SandboxDaemonClientOptions) {
    // Strip a trailing slash so path concatenation never yields "//".
    this.baseUrl = options.baseUrl.replace(/\/$/, "");
    this.token = options.token;
    this.fetcher = options.fetch ?? globalThis.fetch;
    this.defaultHeaders = options.headers;
    if (!this.fetcher) {
      throw new Error("Fetch API is not available; provide a fetch implementation.");
    }
  }

  /** List known agents and their installation state. */
  async listAgents(): Promise<AgentListResponse> {
    return this.requestJson("GET", `${API_PREFIX}/agents`);
  }

  /** Install (or, with `reinstall`, force-reinstall) the given agent. */
  async installAgent(agent: string, request: AgentInstallRequest = {}): Promise<void> {
    await this.requestJson("POST", `${API_PREFIX}/agents/${encodeURIComponent(agent)}/install`, {
      body: request,
    });
  }

  /** Fetch the modes supported by the given agent. */
  async getAgentModes(agent: string): Promise<AgentModesResponse> {
    return this.requestJson("GET", `${API_PREFIX}/agents/${encodeURIComponent(agent)}/modes`);
  }

  /** Create a session with the caller-chosen id. */
  async createSession(sessionId: string, request: CreateSessionRequest): Promise<CreateSessionResponse> {
    return this.requestJson("POST", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}`, {
      body: request,
    });
  }

  /** Send a user message to an existing session. */
  async postMessage(sessionId: string, request: MessageRequest): Promise<void> {
    await this.requestJson("POST", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/messages`, {
      body: request,
    });
  }

  /** Fetch a page of buffered session events. */
  async getEvents(sessionId: string, query?: EventsQuery): Promise<EventsResponse> {
    return this.requestJson("GET", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/events`, {
      query,
    });
  }

  /** Open the server-sent-events endpoint; the caller owns the Response body. */
  async getEventsSse(sessionId: string, query?: EventsQuery): Promise<Response> {
    return this.requestRaw("GET", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/events/sse`, {
      query,
      accept: "text/event-stream",
    });
  }

  /**
   * Consume the SSE endpoint as an async generator of parsed events.
   *
   * Fix: the underlying connection is now cancelled when the generator is
   * closed early (e.g. the caller breaks out of a for-await loop); previously
   * the reader — and thus the HTTP connection — leaked in that case.
   */
  async *streamEvents(sessionId: string, query?: EventsQuery): AsyncGenerator<UniversalEvent, void, void> {
    const response = await this.getEventsSse(sessionId, query);
    if (!response.body) {
      throw new Error("SSE stream is not readable in this environment.");
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        buffer += decoder.decode(value, { stream: true });
        // SSE frames are separated by a blank line ("\n\n").
        let index = buffer.indexOf("\n\n");
        while (index !== -1) {
          const chunk = buffer.slice(0, index);
          buffer = buffer.slice(index + 2);
          const dataLines = chunk
            .split(/\r?\n/)
            .filter((line) => line.startsWith("data:"));
          if (dataLines.length > 0) {
            // Multiple "data:" lines in one frame are joined with newlines,
            // per the SSE event-stream format.
            const payload = dataLines
              .map((line) => line.slice(5).trim())
              .join("\n");
            if (payload) {
              yield JSON.parse(payload) as UniversalEvent;
            }
          }
          index = buffer.indexOf("\n\n");
        }
      }
    } finally {
      // Release the connection on early exit; a no-op once the stream ended.
      await reader.cancel().catch(() => {});
    }
  }

  /** Answer an agent question. */
  async replyQuestion(
    sessionId: string,
    questionId: string,
    request: QuestionReplyRequest,
  ): Promise<void> {
    await this.requestJson(
      "POST",
      `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/questions/${encodeURIComponent(questionId)}/reply`,
      { body: request },
    );
  }

  /** Reject an agent question without answering it. */
  async rejectQuestion(sessionId: string, questionId: string): Promise<void> {
    await this.requestJson(
      "POST",
      `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/questions/${encodeURIComponent(questionId)}/reject`,
    );
  }

  /** Answer a permission request (once / always / reject). */
  async replyPermission(
    sessionId: string,
    permissionId: string,
    request: PermissionReplyRequest,
  ): Promise<void> {
    await this.requestJson(
      "POST",
      `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/permissions/${encodeURIComponent(permissionId)}/reply`,
      { body: request },
    );
  }

  /**
   * Issue a request and parse the JSON body. Returns undefined (cast to T)
   * for 204 responses and for empty bodies.
   */
  private async requestJson<T>(method: string, path: string, options: RequestOptions = {}): Promise<T> {
    const response = await this.requestRaw(method, path, {
      query: options.query,
      body: options.body,
      headers: options.headers,
      accept: options.accept ?? "application/json",
    });
    if (response.status === 204) {
      return undefined as T;
    }
    const text = await response.text();
    if (!text) {
      return undefined as T;
    }
    return JSON.parse(text) as T;
  }

  /**
   * Issue a request and return the raw Response.
   * Throws SandboxDaemonError for any non-ok status.
   */
  private async requestRaw(method: string, path: string, options: RequestOptions = {}): Promise<Response> {
    const url = this.buildUrl(path, options.query);
    const headers = new Headers(this.defaultHeaders ?? undefined);
    if (this.token) {
      headers.set("Authorization", `Bearer ${this.token}`);
    }
    if (options.accept) {
      headers.set("Accept", options.accept);
    }
    const init: RequestInit = { method, headers };
    if (options.body !== undefined) {
      headers.set("Content-Type", "application/json");
      init.body = JSON.stringify(options.body);
    }
    if (options.headers) {
      // Per-request headers are applied last, so they win over defaults.
      const extra = new Headers(options.headers);
      extra.forEach((value, key) => headers.set(key, value));
    }
    const response = await this.fetcher(url, init);
    if (!response.ok) {
      const problem = await this.readProblem(response);
      throw new SandboxDaemonError(response.status, problem, response);
    }
    return response;
  }

  /** Build the absolute URL, skipping null/undefined query values. */
  private buildUrl(path: string, query?: Record<string, QueryValue>): string {
    const url = new URL(`${this.baseUrl}${path}`);
    if (query) {
      Object.entries(query).forEach(([key, value]) => {
        if (value === undefined || value === null) {
          return;
        }
        url.searchParams.set(key, String(value));
      });
    }
    return url.toString();
  }

  /**
   * Best-effort parse of an error body as a ProblemDetails document.
   * Clones the response so the body remains readable by the caller.
   */
  private async readProblem(response: Response): Promise<ProblemDetails | undefined> {
    try {
      const text = await response.clone().text();
      if (!text) {
        return undefined;
      }
      return JSON.parse(text) as ProblemDetails;
    } catch {
      return undefined;
    }
  }
}
/** Convenience factory; equivalent to `new SandboxDaemonClient(options)`. */
export const createSandboxDaemonClient = (options: SandboxDaemonClientOptions): SandboxDaemonClient =>
  new SandboxDaemonClient(options);

View file

@ -0,0 +1,470 @@
/* eslint-disable */
// This file is generated by openapi-typescript. Do not edit by hand.
export interface paths {
"/v1/agents": {
get: {
responses: {
200: {
content: {
"application/json": components["schemas"]["AgentListResponse"];
};
};
};
};
};
"/v1/agents/{agent}/install": {
post: {
parameters: {
path: {
agent: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AgentInstallRequest"];
};
};
responses: {
204: {
description: string;
};
400: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
500: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/agents/{agent}/modes": {
get: {
parameters: {
path: {
agent: string;
};
};
responses: {
200: {
content: {
"application/json": components["schemas"]["AgentModesResponse"];
};
};
400: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}": {
post: {
parameters: {
path: {
session_id: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["CreateSessionRequest"];
};
};
responses: {
200: {
content: {
"application/json": components["schemas"]["CreateSessionResponse"];
};
};
400: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
409: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}/messages": {
post: {
parameters: {
path: {
session_id: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["MessageRequest"];
};
};
responses: {
204: {
description: string;
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}/events": {
get: {
parameters: {
path: {
session_id: string;
};
query?: {
offset?: number | null;
limit?: number | null;
};
};
responses: {
200: {
content: {
"application/json": components["schemas"]["EventsResponse"];
};
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}/events/sse": {
get: {
parameters: {
path: {
session_id: string;
};
query?: {
offset?: number | null;
};
};
responses: {
200: {
description: string;
};
};
};
};
"/v1/sessions/{session_id}/questions/{question_id}/reply": {
post: {
parameters: {
path: {
session_id: string;
question_id: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["QuestionReplyRequest"];
};
};
responses: {
204: {
description: string;
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}/questions/{question_id}/reject": {
post: {
parameters: {
path: {
session_id: string;
question_id: string;
};
};
responses: {
204: {
description: string;
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
"/v1/sessions/{session_id}/permissions/{permission_id}/reply": {
post: {
parameters: {
path: {
session_id: string;
permission_id: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["PermissionReplyRequest"];
};
};
responses: {
204: {
description: string;
};
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
};
}
export interface components {
schemas: {
AgentError: {
type: components["schemas"]["ErrorType"];
message: string;
agent?: string | null;
session_id?: string | null;
details?: unknown | null;
};
AgentInfo: {
id: string;
installed: boolean;
path?: string | null;
version?: string | null;
};
AgentInstallRequest: {
reinstall?: boolean | null;
};
AgentListResponse: {
agents: components["schemas"]["AgentInfo"][];
};
AgentModeInfo: {
id: string;
name: string;
description: string;
};
AgentModesResponse: {
modes: components["schemas"]["AgentModeInfo"][];
};
AttachmentSource:
| {
type: "path";
path: string;
}
| {
type: "url";
url: string;
}
| {
type: "data";
data: string;
encoding?: string | null;
};
CrashInfo: {
message: string;
kind?: string | null;
details?: unknown | null;
};
CreateSessionRequest: {
agent: string;
agentMode?: string | null;
agentVersion?: string | null;
model?: string | null;
permissionMode?: string | null;
token?: string | null;
validateToken?: boolean | null;
variant?: string | null;
};
CreateSessionResponse: {
healthy: boolean;
error?: components["schemas"]["AgentError"] | null;
agentSessionId?: string | null;
};
ErrorType:
| "invalid_request"
| "unsupported_agent"
| "agent_not_installed"
| "install_failed"
| "agent_process_exited"
| "token_invalid"
| "permission_denied"
| "session_not_found"
| "session_already_exists"
| "mode_not_supported"
| "stream_error"
| "timeout";
EventsQuery: {
offset?: number | null;
limit?: number | null;
};
EventsResponse: {
events: components["schemas"]["UniversalEvent"][];
hasMore: boolean;
};
MessageRequest: {
message: string;
};
PermissionReply: "once" | "always" | "reject";
PermissionReplyRequest: {
reply: components["schemas"]["PermissionReply"];
};
PermissionRequest: {
id: string;
sessionId: string;
permission: string;
patterns: string[];
always: string[];
metadata?: Record<string, unknown>;
tool?: components["schemas"]["PermissionToolRef"] | null;
};
PermissionToolRef: {
messageId: string;
callId: string;
};
ProblemDetails: {
type: string;
title: string;
status: number;
detail?: string | null;
instance?: string | null;
[key: string]: unknown;
};
QuestionInfo: {
question: string;
options: components["schemas"]["QuestionOption"][];
header?: string | null;
multiSelect?: boolean | null;
custom?: boolean | null;
};
QuestionOption: {
label: string;
description?: string | null;
};
QuestionReplyRequest: {
answers: string[][];
};
QuestionRequest: {
id: string;
sessionId: string;
questions: components["schemas"]["QuestionInfo"][];
tool?: components["schemas"]["QuestionToolRef"] | null;
};
QuestionToolRef: {
messageId: string;
callId: string;
};
Started: {
message?: string | null;
details?: unknown | null;
};
UniversalEvent: {
id: number;
timestamp: string;
sessionId: string;
agent: string;
agentSessionId?: string | null;
data: components["schemas"]["UniversalEventData"];
};
UniversalEventData:
| { message: components["schemas"]["UniversalMessage"] }
| { started: components["schemas"]["Started"] }
| { error: components["schemas"]["CrashInfo"] }
| { questionAsked: components["schemas"]["QuestionRequest"] }
| { permissionAsked: components["schemas"]["PermissionRequest"] }
| { raw: unknown };
UniversalMessage:
| components["schemas"]["UniversalMessageParsed"]
| {
raw: unknown;
error?: string | null;
};
UniversalMessageParsed: {
role: string;
parts: components["schemas"]["UniversalMessagePart"][];
id?: string | null;
metadata?: Record<string, unknown>;
};
UniversalMessagePart:
| {
type: "text";
text: string;
}
| {
type: "tool_call";
name: string;
input: unknown;
id?: string | null;
}
| {
type: "tool_result";
output: unknown;
id?: string | null;
name?: string | null;
is_error?: boolean | null;
}
| {
type: "function_call";
arguments: unknown;
id?: string | null;
name?: string | null;
raw?: unknown | null;
}
| {
type: "function_result";
result: unknown;
id?: string | null;
name?: string | null;
is_error?: boolean | null;
raw?: unknown | null;
}
| {
type: "file";
source: components["schemas"]["AttachmentSource"];
mime_type?: string | null;
filename?: string | null;
raw?: unknown | null;
}
| {
type: "image";
source: components["schemas"]["AttachmentSource"];
mime_type?: string | null;
alt?: string | null;
raw?: unknown | null;
}
| {
type: "error";
message: string;
}
| {
type: "unknown";
raw: unknown;
};
};
responses: never;
parameters: never;
requestBodies: never;
headers: never;
pathItems: never;
}
export type webhooks = never;

View file

@ -0,0 +1,19 @@
// Public entry point of the TypeScript SDK: the client, its error type, the
// factory, and the request/response type aliases consumers need to call it.
export { SandboxDaemonClient, SandboxDaemonError, createSandboxDaemonClient } from "./client";
export type {
  AgentInfo,
  AgentInstallRequest,
  AgentListResponse,
  AgentModeInfo,
  AgentModesResponse,
  CreateSessionRequest,
  CreateSessionResponse,
  EventsQuery,
  EventsResponse,
  MessageRequest,
  PermissionReply,
  PermissionReplyRequest,
  ProblemDetails,
  QuestionReplyRequest,
  UniversalEvent,
} from "./client";
// Raw generated OpenAPI shapes, for consumers that need the full schema.
export type { components, paths } from "./generated/openapi";

View file

@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022", "DOM"],
"module": "NodeNext",
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"outDir": "dist",
"rootDir": "src",
"resolveJsonModule": true,
"declaration": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

17
turbo.json Normal file
View file

@ -0,0 +1,17 @@
{
"$schema": "https://turbo.build/schema.json",
"tasks": {
"build": {
"dependsOn": ["^build"],
"outputs": ["dist/**", "build/**", "lib/**", "src/generated/**"]
},
"generate": {
"dependsOn": ["^generate"],
"outputs": ["src/generated/**"]
},
"dev": {
"cache": false,
"persistent": true
}
}
}