diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e009cad..476ed12 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -14,15 +14,16 @@ jobs: - uses: dtolnay/rust-toolchain@stable with: components: rustfmt, clippy - - uses: Swatinem/rust-cache@main + - uses: Swatinem/rust-cache@v2 - uses: pnpm/action-setup@v4 - uses: actions/setup-node@v4 with: node-version: 20 cache: pnpm - run: pnpm install + - run: npm install -g tsx - name: Run checks - run: ./scripts/release/main.ts --version 0.0.0 --check + run: ./scripts/release/main.ts --version 0.0.0 --only-steps run-ci-checks - name: Run ACP v1 server tests run: | cargo test -p sandbox-agent-agent-management @@ -31,5 +32,3 @@ jobs: cargo test -p sandbox-agent --lib - name: Run SDK tests run: pnpm --dir sdks/typescript test - - name: Run Inspector browser E2E - run: pnpm --filter @sandbox-agent/inspector test:agent-browser diff --git a/Cargo.toml b/Cargo.toml index ebef66d..5a0581e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ schemars = "0.8" utoipa = { version = "4.2", features = ["axum_extras"] } # Web framework -axum = "0.7" +axum = { version = "0.7", features = ["ws"] } tower = { version = "0.5", features = ["util"] } tower-http = { version = "0.5", features = ["cors", "trace"] } diff --git a/docs/openapi.json b/docs/openapi.json index c6e35f4..d600fda 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -948,6 +948,785 @@ } } } + }, + "/v1/processes": { + "get": { + "tags": [ + "v1" + ], + "operationId": "get_v1_processes", + "responses": { + "200": { + "description": "List processes", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessListResponse" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "post": { + "tags": [ + "v1" + ], + "operationId": 
"post_v1_processes", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessCreateRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Started process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Process limit or state conflict", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/config": { + "get": { + "tags": [ + "v1" + ], + "operationId": "get_v1_processes_config", + "responses": { + "200": { + "description": "Current runtime process config", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessConfig" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_processes_config", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessConfig" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Updated runtime process config", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessConfig" + } + } + } + }, + "400": { + "description": "Invalid config", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } 
+ } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/run": { + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_processes_run", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessRunRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "One-off command result", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessRunResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}": { + "get": { + "tags": [ + "v1" + ], + "operationId": "get_v1_process", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Process details", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + }, + "delete": { + "tags": [ + "v1" + ], + "operationId": "delete_v1_process", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": 
true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "204": { + "description": "Process deleted" + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Process is still running", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/input": { + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_process_input", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInputRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Input accepted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInputResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Process not writable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "413": { + "description": "Input exceeds configured limit", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + 
"/v1/processes/{id}/kill": { + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_process_kill", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "waitMs", + "in": "query", + "description": "Wait up to N ms for process to exit", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Kill signal sent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/logs": { + "get": { + "tags": [ + "v1" + ], + "operationId": "get_v1_process_logs", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "stream", + "in": "query", + "description": "stdout|stderr|combined|pty", + "required": false, + "schema": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessLogsStream" + } + ], + "nullable": true + } + }, + { + "name": "tail", + "in": "query", + "description": "Tail N entries", + "required": false, + "schema": { + "type": "integer", + "nullable": true, + "minimum": 0 + } + }, + { + "name": "follow", + "in": "query", + "description": "Follow via SSE", + "required": false, + "schema": { + "type": "boolean", + "nullable": true + } + }, + { + "name": "since", + "in": "query", + "description": "Only entries with sequence greater than this", + "required": false, + "schema": { + "type": "integer", + 
"format": "int64", + "nullable": true, + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Process logs", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessLogsResponse" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/stop": { + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_process_stop", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "waitMs", + "in": "query", + "description": "Wait up to N ms for process to exit", + "required": false, + "schema": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + ], + "responses": { + "200": { + "description": "Stop signal sent", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/terminal/resize": { + "post": { + "tags": [ + "v1" + ], + "operationId": "post_v1_process_terminal_resize", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ProcessTerminalResizeRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Resize accepted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProcessTerminalResizeResponse" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Not a terminal process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } + }, + "/v1/processes/{id}/terminal/ws": { + "get": { + "tags": [ + "v1" + ], + "operationId": "get_v1_process_terminal_ws", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Process ID", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "access_token", + "in": "query", + "description": "Bearer token alternative for WS auth", + "required": false, + "schema": { + "type": "string", + "nullable": true + } + } + ], + "responses": { + "101": { + "description": "WebSocket upgraded" + }, + "400": { + "description": "Invalid websocket frame or upgrade request", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "404": { + "description": "Unknown process", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "409": { + "description": "Not a terminal process", + "content": { + "application/json": { + "schema": { + 
"$ref": "#/components/schemas/ProblemDetails" + } + } + } + }, + "501": { + "description": "Process API unsupported on this platform", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProblemDetails" + } + } + } + } + } + } } }, "components": { @@ -1596,6 +2375,399 @@ }, "additionalProperties": {} }, + "ProcessConfig": { + "type": "object", + "required": [ + "maxConcurrentProcesses", + "defaultRunTimeoutMs", + "maxRunTimeoutMs", + "maxOutputBytes", + "maxLogBytesPerProcess", + "maxInputBytesPerRequest" + ], + "properties": { + "defaultRunTimeoutMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "maxConcurrentProcesses": { + "type": "integer", + "minimum": 0 + }, + "maxInputBytesPerRequest": { + "type": "integer", + "minimum": 0 + }, + "maxLogBytesPerProcess": { + "type": "integer", + "minimum": 0 + }, + "maxOutputBytes": { + "type": "integer", + "minimum": 0 + }, + "maxRunTimeoutMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + } + } + }, + "ProcessCreateRequest": { + "type": "object", + "required": [ + "command" + ], + "properties": { + "args": { + "type": "array", + "items": { + "type": "string" + } + }, + "command": { + "type": "string" + }, + "cwd": { + "type": "string", + "nullable": true + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "interactive": { + "type": "boolean" + }, + "tty": { + "type": "boolean" + } + } + }, + "ProcessInfo": { + "type": "object", + "required": [ + "id", + "command", + "args", + "tty", + "interactive", + "status", + "createdAtMs" + ], + "properties": { + "args": { + "type": "array", + "items": { + "type": "string" + } + }, + "command": { + "type": "string" + }, + "createdAtMs": { + "type": "integer", + "format": "int64" + }, + "cwd": { + "type": "string", + "nullable": true + }, + "exitCode": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "exitedAtMs": { + "type": "integer", + "format": 
"int64", + "nullable": true + }, + "id": { + "type": "string" + }, + "interactive": { + "type": "boolean" + }, + "pid": { + "type": "integer", + "format": "int32", + "nullable": true, + "minimum": 0 + }, + "status": { + "$ref": "#/components/schemas/ProcessState" + }, + "tty": { + "type": "boolean" + } + } + }, + "ProcessInputRequest": { + "type": "object", + "required": [ + "data" + ], + "properties": { + "data": { + "type": "string" + }, + "encoding": { + "type": "string", + "nullable": true + } + } + }, + "ProcessInputResponse": { + "type": "object", + "required": [ + "bytesWritten" + ], + "properties": { + "bytesWritten": { + "type": "integer", + "minimum": 0 + } + } + }, + "ProcessListResponse": { + "type": "object", + "required": [ + "processes" + ], + "properties": { + "processes": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProcessInfo" + } + } + } + }, + "ProcessLogEntry": { + "type": "object", + "required": [ + "sequence", + "stream", + "timestampMs", + "data", + "encoding" + ], + "properties": { + "data": { + "type": "string" + }, + "encoding": { + "type": "string" + }, + "sequence": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "stream": { + "$ref": "#/components/schemas/ProcessLogsStream" + }, + "timestampMs": { + "type": "integer", + "format": "int64" + } + } + }, + "ProcessLogsQuery": { + "type": "object", + "properties": { + "follow": { + "type": "boolean", + "nullable": true + }, + "since": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + }, + "stream": { + "allOf": [ + { + "$ref": "#/components/schemas/ProcessLogsStream" + } + ], + "nullable": true + }, + "tail": { + "type": "integer", + "nullable": true, + "minimum": 0 + } + } + }, + "ProcessLogsResponse": { + "type": "object", + "required": [ + "processId", + "stream", + "entries" + ], + "properties": { + "entries": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ProcessLogEntry" + } + }, + "processId": { 
+ "type": "string" + }, + "stream": { + "$ref": "#/components/schemas/ProcessLogsStream" + } + } + }, + "ProcessLogsStream": { + "type": "string", + "enum": [ + "stdout", + "stderr", + "combined", + "pty" + ] + }, + "ProcessRunRequest": { + "type": "object", + "required": [ + "command" + ], + "properties": { + "args": { + "type": "array", + "items": { + "type": "string" + } + }, + "command": { + "type": "string" + }, + "cwd": { + "type": "string", + "nullable": true + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "maxOutputBytes": { + "type": "integer", + "nullable": true, + "minimum": 0 + }, + "timeoutMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + }, + "ProcessRunResponse": { + "type": "object", + "required": [ + "timedOut", + "stdout", + "stderr", + "stdoutTruncated", + "stderrTruncated", + "durationMs" + ], + "properties": { + "durationMs": { + "type": "integer", + "format": "int64", + "minimum": 0 + }, + "exitCode": { + "type": "integer", + "format": "int32", + "nullable": true + }, + "stderr": { + "type": "string" + }, + "stderrTruncated": { + "type": "boolean" + }, + "stdout": { + "type": "string" + }, + "stdoutTruncated": { + "type": "boolean" + }, + "timedOut": { + "type": "boolean" + } + } + }, + "ProcessSignalQuery": { + "type": "object", + "properties": { + "waitMs": { + "type": "integer", + "format": "int64", + "nullable": true, + "minimum": 0 + } + } + }, + "ProcessState": { + "type": "string", + "enum": [ + "running", + "exited" + ] + }, + "ProcessTerminalResizeRequest": { + "type": "object", + "required": [ + "cols", + "rows" + ], + "properties": { + "cols": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "rows": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, + "ProcessTerminalResizeResponse": { + "type": "object", + "required": [ + "cols", + "rows" + ], + "properties": { + "cols": { + "type": "integer", + "format": 
"int32", + "minimum": 0 + }, + "rows": { + "type": "integer", + "format": "int32", + "minimum": 0 + } + } + }, "ServerStatus": { "type": "string", "enum": [ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1463987..126b15a 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -17,13 +17,13 @@ importers: version: 2.7.6 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) examples/boxlite: dependencies: '@boxlite-ai/boxlite': specifier: latest - version: 0.2.11 + version: 0.3.0 '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared @@ -33,7 +33,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -45,7 +45,7 @@ importers: dependencies: '@cloudflare/sandbox': specifier: latest - version: 0.7.5 + version: 0.7.12 hono: specifier: ^4.12.2 version: 4.12.2 @@ -61,10 +61,10 @@ importers: devDependencies: '@cloudflare/workers-types': specifier: latest - version: 4.20260304.0 + version: 4.20260305.1 '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 '@types/react': specifier: ^18.3.3 version: 18.3.27 @@ -73,19 +73,19 @@ importers: version: 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': specifier: ^4.5.0 - version: 4.7.0(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + version: 4.7.0(vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) typescript: specifier: latest version: 5.9.3 vite: specifier: ^6.2.0 - version: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 
3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) wrangler: specifier: latest - version: 4.68.1(@cloudflare/workers-types@4.20260304.0) + version: 4.71.0(@cloudflare/workers-types@4.20260305.1) examples/computesdk: dependencies: @@ -101,7 +101,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -110,13 +110,13 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) examples/daytona: dependencies: '@daytonaio/sdk': specifier: latest - version: 0.145.0(ws@8.19.0) + version: 0.149.0(ws@8.19.0) '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared @@ -126,7 +126,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -151,7 +151,7 @@ importers: version: 4.0.1 '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -160,7 +160,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) examples/e2b: dependencies: @@ -176,7 +176,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -185,7 +185,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) examples/file-system: dependencies: @@ -201,7 +201,7 @@ importers: devDependencies: 
'@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -220,7 +220,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -245,7 +245,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 esbuild: specifier: latest version: 0.27.3 @@ -260,7 +260,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 typescript: specifier: latest version: 5.9.3 @@ -276,7 +276,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -294,17 +294,17 @@ importers: version: link:../../sdks/persist-postgres pg: specifier: latest - version: 8.18.0 + version: 8.20.0 sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 '@types/pg': specifier: latest - version: 8.16.0 + version: 8.18.0 tsx: specifier: latest version: 4.21.0 @@ -326,7 +326,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -348,7 +348,7 @@ importers: version: 4.0.1 '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 typescript: specifier: latest version: 5.9.3 @@ -364,7 +364,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -383,7 +383,7 @@ importers: devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 esbuild: specifier: latest version: 0.27.3 @@ -401,14 +401,14 @@ importers: version: link:../shared '@vercel/sandbox': specifier: latest - version: 1.7.1 + version: 1.8.0 sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript devDependencies: 
'@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -417,7 +417,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) frontend/packages/inspector: dependencies: @@ -442,7 +442,7 @@ importers: version: 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': specifier: ^4.3.1 - version: 4.7.0(vite@5.4.21(@types/node@25.3.0)) + version: 4.7.0(vite@5.4.21(@types/node@25.3.5)) fake-indexeddb: specifier: ^6.2.4 version: 6.2.5 @@ -454,25 +454,25 @@ importers: version: 5.9.3 vite: specifier: ^5.4.7 - version: 5.4.21(@types/node@25.3.0) + version: 5.4.21(@types/node@25.3.5) vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) frontend/packages/website: dependencies: '@astrojs/react': specifier: ^4.2.0 - version: 4.4.2(@types/node@25.3.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) + version: 4.4.2(@types/node@25.3.5)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) '@astrojs/sitemap': specifier: ^3.2.0 version: 3.7.0 '@astrojs/tailwind': specifier: ^6.0.0 - version: 6.0.2(astro@5.16.15(@types/node@25.3.0)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2)) + version: 6.0.2(astro@5.16.15(@types/node@25.3.5)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2)) astro: specifier: ^5.1.0 - version: 
5.16.15(@types/node@25.3.0)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) + version: 5.16.15(@types/node@25.3.5)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) framer-motion: specifier: ^12.0.0 version: 12.29.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -556,14 +556,14 @@ importers: dependencies: '@daytonaio/sdk': specifier: latest - version: 0.145.0(ws@8.19.0) + version: 0.149.0(ws@8.19.0) '@e2b/code-interpreter': specifier: latest version: 2.3.3 devDependencies: '@types/node': specifier: latest - version: 25.3.0 + version: 25.3.5 tsx: specifier: latest version: 4.21.0 @@ -614,7 +614,7 @@ importers: devDependencies: vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/cli-shared: devDependencies: @@ -662,7 +662,7 @@ importers: devDependencies: vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/gigacode/platforms/darwin-arm64: {} @@ -784,6 +784,9 @@ importers: '@types/node': specifier: ^22.0.0 version: 22.19.7 + '@types/ws': + specifier: ^8.18.1 + version: 8.18.1 openapi-typescript: specifier: ^6.7.0 version: 6.7.6 @@ -796,6 +799,9 @@ importers: vitest: specifier: ^3.0.0 version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + ws: + specifier: ^8.19.0 + version: 8.19.0 packages: @@ -1123,20 +1129,20 @@ packages: '@balena/dockerignore@1.0.2': resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} - '@boxlite-ai/boxlite-darwin-arm64@0.2.11': - resolution: {integrity: 
sha512-JjNf6S/+XqooWvFX2Zn9XjmeML/e6Errk0jzG77v8YV0k2nNmt8P1nMANb2kMPbsQn93ap9v74VnYesYdKRoNg==} + '@boxlite-ai/boxlite-darwin-arm64@0.3.0': + resolution: {integrity: sha512-V0FeD7VTQ+V4LFAwHzSe2K7hl7IjXKS6u1VrWr/H0zJ8GGZTAi6feI1w+QTmLJMgdlJdIufWsJwY/RsjtwwF/Q==} engines: {node: '>=18.0.0'} cpu: [arm64] os: [darwin] - '@boxlite-ai/boxlite-linux-x64-gnu@0.2.11': - resolution: {integrity: sha512-H3a8FMc6X4KVsmlQKs2xTIlSh4KhiI52MnXV16OwcC6OWQBBadR1N6GCCKojfwpqn6yIsZc2dxoyy25YTYYf9g==} + '@boxlite-ai/boxlite-linux-x64-gnu@0.3.0': + resolution: {integrity: sha512-1VkXxzm+3hmuP6XpbZxPsaf+Tv2gwd7iHAH76f2uWulooxRjATnk+Smhud+FuHvLQIvjr8ERAA26vMbST5OgpQ==} engines: {node: '>=18.0.0'} cpu: [x64] os: [linux] - '@boxlite-ai/boxlite@0.2.11': - resolution: {integrity: sha512-IJ+jyYdsc1hmZknDtqGpRyMAMxoQfF1VFDVuPhiO59fBmoDEI5u69DzoMtyax4gzL3Q46tjYkVBvJhNtSDaxBw==} + '@boxlite-ai/boxlite@0.3.0': + resolution: {integrity: sha512-D9sU7PUzFHlgv6aIGf+h5kp0+C2A05RVX73aaSMK2gWjQgf12lJ/SVg3OiMSmhnV0cZ+Q0hTn+EBnDWpe26cqA==} engines: {node: '>=18.0.0'} peerDependencies: playwright-core: '>=1.58.0' @@ -1181,15 +1187,15 @@ packages: cpu: [x64] os: [win32] - '@cloudflare/containers@0.0.30': - resolution: {integrity: sha512-i148xBgmyn/pje82ZIyuTr/Ae0BT/YWwa1/GTJcw6DxEjUHAzZLaBCiX446U9OeuJ2rBh/L/9FIzxX5iYNt1AQ==} + '@cloudflare/containers@0.1.1': + resolution: {integrity: sha512-YTdobRTnTlUOUPMFemufH367A9Z8pDfZ+UboYMLbGpO0VlvEXZDiioSmXPQMHld2vRtkL31mcRii3bcbQU6fdw==} '@cloudflare/kv-asset-handler@0.4.2': resolution: {integrity: sha512-SIOD2DxrRRwQ+jgzlXCqoEFiKOFqaPjhnNTGKXSRLvp1HiOvapLaFG2kEr9dYQTYe8rKrd9uvDUzmAITeNyaHQ==} engines: {node: '>=18.0.0'} - '@cloudflare/sandbox@0.7.5': - resolution: {integrity: sha512-lOegEUL6eDsHrsxEMxqRcftsp46hn+ilQryCLuSDghHvnCdDAenzyN/E3nVjQdYAZnoh5xsLzis/G295LcZr1w==} + '@cloudflare/sandbox@0.7.12': + resolution: {integrity: sha512-Frk8S/xZ3jDyQIreu66C4fQtfERmG9ZLQT6iJFfJUJN/aMUvHehRyAy34BNfHTXFZc3/YxGcnRBgitsWI9jArg==} peerDependencies: '@openai/agents': ^0.3.3 
'@opencode-ai/sdk': ^1.1.40 @@ -1202,47 +1208,47 @@ packages: '@xterm/xterm': optional: true - '@cloudflare/unenv-preset@2.14.0': - resolution: {integrity: sha512-XKAkWhi1nBdNsSEoNG9nkcbyvfUrSjSf+VYVPfOto3gLTZVc3F4g6RASCMh6IixBKCG2yDgZKQIHGKtjcnLnKg==} + '@cloudflare/unenv-preset@2.15.0': + resolution: {integrity: sha512-EGYmJaGZKWl+X8tXxcnx4v2bOZSjQeNI5dWFeXivgX9+YCT69AkzHHwlNbVpqtEUTbew8eQurpyOpeN8fg00nw==} peerDependencies: unenv: 2.0.0-rc.24 - workerd: ^1.20260218.0 + workerd: 1.20260301.1 || ~1.20260302.1 || ~1.20260303.1 || ~1.20260304.1 || >1.20260305.0 <2.0.0-0 peerDependenciesMeta: workerd: optional: true - '@cloudflare/workerd-darwin-64@1.20260302.0': - resolution: {integrity: sha512-cGtxPByeVrgoqxbmd8qs631wuGwf8yTm/FY44dEW4HdoXrb5jhlE4oWYHFafedkQCvGjY1Vbs3puAiKnuMxTXQ==} + '@cloudflare/workerd-darwin-64@1.20260301.1': + resolution: {integrity: sha512-+kJvwociLrvy1JV9BAvoSVsMEIYD982CpFmo/yMEvBwxDIjltYsLTE8DLi0mCkGsQ8Ygidv2fD9wavzXeiY7OQ==} engines: {node: '>=16'} cpu: [x64] os: [darwin] - '@cloudflare/workerd-darwin-arm64@1.20260302.0': - resolution: {integrity: sha512-WRGqV6RNXM3xoQblJJw1EHKwx9exyhB18cdnToSCUFPObFhk3fzMLoQh7S+nUHUpto6aUrXPVj6R/4G3UPjCxw==} + '@cloudflare/workerd-darwin-arm64@1.20260301.1': + resolution: {integrity: sha512-PPIetY3e67YBr9O4UhILK8nbm5TqUDl14qx4rwFNrRSBOvlzuczzbd4BqgpAtbGVFxKp1PWpjAnBvGU/OI/tLQ==} engines: {node: '>=16'} cpu: [arm64] os: [darwin] - '@cloudflare/workerd-linux-64@1.20260302.0': - resolution: {integrity: sha512-gG423mtUjrmlQT+W2+KisLc6qcGcBLR+QcK5x1gje3bu/dF3oNiYuqY7o58A+sQk6IB849UC4UyNclo1RhP2xw==} + '@cloudflare/workerd-linux-64@1.20260301.1': + resolution: {integrity: sha512-Gu5vaVTZuYl3cHa+u5CDzSVDBvSkfNyuAHi6Mdfut7TTUdcb3V5CIcR/mXRSyMXzEy9YxEWIfdKMxOMBjupvYQ==} engines: {node: '>=16'} cpu: [x64] os: [linux] - '@cloudflare/workerd-linux-arm64@1.20260302.0': - resolution: {integrity: sha512-7M25noGI4WlSBOhrIaY8xZrnn87OQKtJg9YWAO2EFqGjF1Su5QXGaLlQVF4fAKbqTywbHnI8BAuIsIlUSNkhCg==} + 
'@cloudflare/workerd-linux-arm64@1.20260301.1': + resolution: {integrity: sha512-igL1pkyCXW6GiGpjdOAvqMi87UW0LMc/+yIQe/CSzuZJm5GzXoAMrwVTkCFnikk6JVGELrM5x0tGYlxa0sk5Iw==} engines: {node: '>=16'} cpu: [arm64] os: [linux] - '@cloudflare/workerd-windows-64@1.20260302.0': - resolution: {integrity: sha512-jK1L3ADkiWxFzlqZTq2iHW1Bd2Nzu1fmMWCGZw4sMZ2W1B2WCm2wHwO2SX/py4BgylyEN3wuF+5zagbkNKht9A==} + '@cloudflare/workerd-windows-64@1.20260301.1': + resolution: {integrity: sha512-Q0wMJ4kcujXILwQKQFc1jaYamVsNvjuECzvRrTI8OxGFMx2yq9aOsswViE4X1gaS2YQQ5u0JGwuGi5WdT1Lt7A==} engines: {node: '>=16'} cpu: [x64] os: [win32] - '@cloudflare/workers-types@4.20260304.0': - resolution: {integrity: sha512-oQ0QJpWnCWK9tx5q/ZHQeSsf5EcQWa4KqdDMY/R5Ln0ojFzv6UYO0RWsfDPsoXUAwK671VwaXqAW0Mx0uWz7yw==} + '@cloudflare/workers-types@4.20260305.1': + resolution: {integrity: sha512-835BZaIcgjuYIUqgOWJSpwQxFSJ8g/X1OCZFLO7bmirM6TGmVgIGwiGItBgkjUXXCPrYzJEldsJkuFuK7ePuMw==} '@computesdk/cmd@0.4.1': resolution: {integrity: sha512-hhcYrwMnOpRSwWma3gkUeAVsDFG56nURwSaQx8vCepv0IuUv39bK4mMkgszolnUQrVjBDdW7b3lV+l5B2S8fRA==} @@ -1262,14 +1268,14 @@ packages: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} - '@daytonaio/api-client@0.145.0': - resolution: {integrity: sha512-8xLJ1G7C3QJs2KfONcGd4O4ktHtGM4qxWVAcQERHSE1w4hJVrlaUzutMm2qy+HoXtMU1L5h/eFazoxrJ0xWzPw==} + '@daytonaio/api-client@0.149.0': + resolution: {integrity: sha512-tlqVFnJll4JUAY3Ictwl7kGI3jo6HP+AcHl8FsZg/lSG7t/SdlZVO9iPPt6kjxmY3WN8BYRI1NYtIFFh8SJolw==} - '@daytonaio/sdk@0.145.0': - resolution: {integrity: sha512-RZhe5oz9EdC9PP3g95g+jXFkCiQbPJTfSALe9wi4W5n97hA9O6rM5zYRuwB2PJbHA8YC0m2t5pyHRJA9+88r5A==} + '@daytonaio/sdk@0.149.0': + resolution: {integrity: sha512-yu228ZVj0FFlas/VmoirqZ/QJNKuvSf5AiDVkPUdejEYHyh98s8owSEFKgOwajtBwtrNn+ETSunkWroMRbzvQg==} - '@daytonaio/toolbox-api-client@0.145.0': - resolution: {integrity: 
sha512-Twh8FIoPAen+pjFeW03Fcom0fYT+k2grw8Q18aHdMAKQtXmMvA3+Ntim5ooE8AsHHLpCL6w+9ycdsovvzZOAEg==} + '@daytonaio/toolbox-api-client@0.149.0': + resolution: {integrity: sha512-6IiZ+RDDQSRjjKMbmmiQj1uG0u8CxCHxX8YwWVq2Oc/6ACPVBLdNlh4p/xBXUGgxbcPo2ewH1F2y1P2FSUU8aA==} '@e2b/code-interpreter@2.3.3': resolution: {integrity: sha512-WOpSwc1WpvxyOijf6WMbR76BUuvd2O9ddXgCHHi65lkuy6YgQGq7oyd8PNsT331O9Tqbccjy6uF4xanSdLX1UA==} @@ -2826,12 +2832,15 @@ packages: '@types/node@24.10.9': resolution: {integrity: sha512-ne4A0IpG3+2ETuREInjPNhUGis1SFjv1d5asp8MzEAGtOZeTeHVDOYqOgqfhvseqg/iXty2hjBf1zAOb7RNiNw==} - '@types/node@25.3.0': - resolution: {integrity: sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==} + '@types/node@25.3.5': + resolution: {integrity: sha512-oX8xrhvpiyRCQkG1MFchB09f+cXftgIXb3a7UUa4Y3wpmZPw5tyZGTLWhlESOLq1Rq6oDlc8npVU2/9xiCuXMA==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} + '@types/pg@8.18.0': + resolution: {integrity: sha512-gT+oueVQkqnj6ajGJXblFR4iavIXWsGAFCk3dP4Kki5+a9R4NMt0JARdk6s8cUKcfUoqP5dAtDSLU8xYUTFV+Q==} + '@types/prop-types@15.7.15': resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} @@ -2858,6 +2867,9 @@ packages: '@types/unist@3.0.3': resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + '@types/ws@8.18.1': + resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} @@ -2865,8 +2877,8 @@ packages: resolution: {integrity: sha512-UycprH3T6n3jH0k44NHMa7pnFHGu/N05MjojYr+Mc6I7obkoLIJujSWwin1pCvdy/eOxrI/l3uDLQsmcrOb4ug==} engines: {node: '>= 20'} - 
'@vercel/sandbox@1.7.1': - resolution: {integrity: sha512-TI9InUQe7sqyO4/TIiGXC/3RHA0hTt5PpFaTWeWunkbKZae26nuPVsd+p10W/WN2THUKE+NPtTJ21dhp1Yw48w==} + '@vercel/sandbox@1.8.0': + resolution: {integrity: sha512-SbXkg8Fmp8i+I9IdyD4PAAVtxM/KS4ULV4eiEfY/9tab1AF1MPvmEA8/ebvCn7QTWQQ7twwtpJNSPlUVmOBp3w==} '@vitejs/plugin-react@4.7.0': resolution: {integrity: sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==} @@ -4190,8 +4202,8 @@ packages: resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} engines: {node: '>=10'} - miniflare@4.20260302.0: - resolution: {integrity: sha512-joGFywlo7HdfHXXGOkc6tDCVkwjEncM0mwEsMOLWcl+vDVJPj9HRV7JtEa0+lCpNOLdYw7mZNHYe12xz9KtJOw==} + miniflare@4.20260301.1: + resolution: {integrity: sha512-fqkHx0QMKswRH9uqQQQOU/RoaS3Wjckxy3CUX3YGJr0ZIMu7ObvI+NovdYi6RIsSPthNtq+3TPmRNxjeRiasog==} engines: {node: '>=18.0.0'} hasBin: true @@ -4423,6 +4435,9 @@ packages: pg-connection-string@2.11.0: resolution: {integrity: sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==} + pg-connection-string@2.12.0: + resolution: {integrity: sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==} + pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -4432,9 +4447,17 @@ packages: peerDependencies: pg: '>=8.0' + pg-pool@3.13.0: + resolution: {integrity: sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==} + peerDependencies: + pg: '>=8.0' + pg-protocol@1.11.0: resolution: {integrity: sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==} + pg-protocol@1.13.0: + resolution: {integrity: sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==} + 
pg-types@2.2.0: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} engines: {node: '>=4'} @@ -4448,6 +4471,15 @@ packages: pg-native: optional: true + pg@8.20.0: + resolution: {integrity: sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==} + engines: {node: '>= 16.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -5011,7 +5043,7 @@ packages: tar@7.5.6: resolution: {integrity: sha512-xqUeu2JAIJpXyvskvU3uvQW8PAmHrtXp2KDuMJwQqW8Sqq0CaZBAQ+dKS3RBXVhU4wC5NjAdKrmh84241gO9cA==} engines: {node: '>=18'} - deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exhorbitant rates) by contacting i@izs.me tar@7.5.7: resolution: {integrity: sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==} @@ -5484,17 +5516,17 @@ packages: resolution: {integrity: sha512-c9bZp7b5YtRj2wOe6dlj32MK+Bx/M/d+9VB2SHM1OtsUHR0aV0tdP6DWh/iMt0kWi1t5g1Iudu6hQRNd1A4PVA==} engines: {node: '>=18'} - workerd@1.20260302.0: - resolution: {integrity: sha512-FhNdC8cenMDllI6bTktFgxP5Bn5ZEnGtofgKipY6pW9jtq708D1DeGI6vGad78KQLBGaDwFy1eThjCoLYgFfog==} + workerd@1.20260301.1: + resolution: {integrity: sha512-oterQ1IFd3h7PjCfT4znSFOkJCvNQ6YMOyZ40YsnO3nrSpgB4TbJVYWFOnyJAw71/RQuupfVqZZWKvsy8GO3fw==} engines: {node: '>=16'} hasBin: true - wrangler@4.68.1: - resolution: {integrity: sha512-G+TI3k/olEGBAVkPtUlhAX/DIbL/190fv3aK+r+45/wPclNEymjxCc35T8QGTDhc2fEMXiw51L5bH9aNsBg+yQ==} + wrangler@4.71.0: + resolution: {integrity: sha512-j6pSGAncOLNQDRzqtp0EqzYj52CldDP7uz/C9cxVrIgqa5p+cc0b4pIwnapZZAGv9E1Loa3tmPD0aXonH7KTkw==} engines: {node: '>=20.0.0'} hasBin: true peerDependencies: - '@cloudflare/workers-types': ^4.20260302.0 + '@cloudflare/workers-types': ^4.20260226.1 peerDependenciesMeta: '@cloudflare/workers-types': optional: true @@ -5665,15 +5697,15 @@ snapshots: dependencies: prismjs: 1.30.0 - '@astrojs/react@4.4.2(@types/node@25.3.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': + '@astrojs/react@4.4.2(@types/node@25.3.5)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': dependencies: '@types/react': 18.3.27 '@types/react-dom': 18.3.7(@types/react@18.3.27) - '@vitejs/plugin-react': 4.7.0(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + '@vitejs/plugin-react': 4.7.0(vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) react: 19.2.4 
react-dom: 19.2.4(react@19.2.4) ultrahtml: 1.6.0 - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) transitivePeerDependencies: - '@types/node' - jiti @@ -5694,9 +5726,9 @@ snapshots: stream-replace-string: 2.0.0 zod: 3.25.76 - '@astrojs/tailwind@6.0.2(astro@5.16.15(@types/node@25.3.0)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))': + '@astrojs/tailwind@6.0.2(astro@5.16.15(@types/node@25.3.5)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2))(tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2))': dependencies: - astro: 5.16.15(@types/node@25.3.0)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) + astro: 5.16.15(@types/node@25.3.5)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) autoprefixer: 10.4.23(postcss@8.5.6) postcss: 8.5.6 postcss-load-config: 4.0.2(postcss@8.5.6) @@ -6374,16 +6406,16 @@ snapshots: '@balena/dockerignore@1.0.2': {} - '@boxlite-ai/boxlite-darwin-arm64@0.2.11': + '@boxlite-ai/boxlite-darwin-arm64@0.3.0': optional: true - '@boxlite-ai/boxlite-linux-x64-gnu@0.2.11': + '@boxlite-ai/boxlite-linux-x64-gnu@0.3.0': optional: true - '@boxlite-ai/boxlite@0.2.11': + '@boxlite-ai/boxlite@0.3.0': optionalDependencies: - '@boxlite-ai/boxlite-darwin-arm64': 0.2.11 - '@boxlite-ai/boxlite-linux-x64-gnu': 0.2.11 + '@boxlite-ai/boxlite-darwin-arm64': 0.3.0 + '@boxlite-ai/boxlite-linux-x64-gnu': 0.3.0 '@bufbuild/protobuf@2.11.0': {} @@ -6409,37 +6441,37 @@ snapshots: '@cbor-extract/cbor-extract-win32-x64@2.2.0': optional: true - '@cloudflare/containers@0.0.30': {} + '@cloudflare/containers@0.1.1': {} '@cloudflare/kv-asset-handler@0.4.2': {} - '@cloudflare/sandbox@0.7.5': + '@cloudflare/sandbox@0.7.12': dependencies: - '@cloudflare/containers': 0.0.30 + '@cloudflare/containers': 0.1.1 
aws4fetch: 1.0.20 - '@cloudflare/unenv-preset@2.14.0(unenv@2.0.0-rc.24)(workerd@1.20260302.0)': + '@cloudflare/unenv-preset@2.15.0(unenv@2.0.0-rc.24)(workerd@1.20260301.1)': dependencies: unenv: 2.0.0-rc.24 optionalDependencies: - workerd: 1.20260302.0 + workerd: 1.20260301.1 - '@cloudflare/workerd-darwin-64@1.20260302.0': + '@cloudflare/workerd-darwin-64@1.20260301.1': optional: true - '@cloudflare/workerd-darwin-arm64@1.20260302.0': + '@cloudflare/workerd-darwin-arm64@1.20260301.1': optional: true - '@cloudflare/workerd-linux-64@1.20260302.0': + '@cloudflare/workerd-linux-64@1.20260301.1': optional: true - '@cloudflare/workerd-linux-arm64@1.20260302.0': + '@cloudflare/workerd-linux-arm64@1.20260301.1': optional: true - '@cloudflare/workerd-windows-64@1.20260302.0': + '@cloudflare/workerd-windows-64@1.20260301.1': optional: true - '@cloudflare/workers-types@4.20260304.0': {} + '@cloudflare/workers-types@4.20260305.1': {} '@computesdk/cmd@0.4.1': {} @@ -6456,18 +6488,18 @@ snapshots: dependencies: '@jridgewell/trace-mapping': 0.3.9 - '@daytonaio/api-client@0.145.0': + '@daytonaio/api-client@0.149.0': dependencies: axios: 1.13.5 transitivePeerDependencies: - debug - '@daytonaio/sdk@0.145.0(ws@8.19.0)': + '@daytonaio/sdk@0.149.0(ws@8.19.0)': dependencies: '@aws-sdk/client-s3': 3.975.0 '@aws-sdk/lib-storage': 3.975.0(@aws-sdk/client-s3@3.975.0) - '@daytonaio/api-client': 0.145.0 - '@daytonaio/toolbox-api-client': 0.145.0 + '@daytonaio/api-client': 0.149.0 + '@daytonaio/toolbox-api-client': 0.149.0 '@iarna/toml': 2.2.5 '@opentelemetry/api': 1.9.0 '@opentelemetry/exporter-trace-otlp-http': 0.207.0(@opentelemetry/api@1.9.0) @@ -6493,7 +6525,7 @@ snapshots: - supports-color - ws - '@daytonaio/toolbox-api-client@0.145.0': + '@daytonaio/toolbox-api-client@0.149.0': dependencies: axios: 1.13.5 transitivePeerDependencies: @@ -7836,7 +7868,7 @@ snapshots: '@types/better-sqlite3@7.6.13': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 '@types/chai@5.2.3': 
dependencies: @@ -7851,13 +7883,13 @@ snapshots: '@types/docker-modem@3.0.6': dependencies: - '@types/node': 25.3.0 + '@types/node': 25.3.5 '@types/ssh2': 1.15.5 '@types/dockerode@4.0.1': dependencies: '@types/docker-modem': 3.0.6 - '@types/node': 25.3.0 + '@types/node': 25.3.5 '@types/ssh2': 1.15.5 '@types/estree@1.0.8': {} @@ -7892,13 +7924,19 @@ snapshots: dependencies: undici-types: 7.16.0 - '@types/node@25.3.0': + '@types/node@25.3.5': dependencies: undici-types: 7.18.2 '@types/pg@8.16.0': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 + pg-protocol: 1.11.0 + pg-types: 2.2.0 + + '@types/pg@8.18.0': + dependencies: + '@types/node': 25.3.5 pg-protocol: 1.11.0 pg-types: 2.2.0 @@ -7917,7 +7955,7 @@ snapshots: '@types/sax@1.2.7': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 '@types/semver@7.7.1': {} @@ -7927,11 +7965,15 @@ snapshots: '@types/unist@3.0.3': {} + '@types/ws@8.18.1': + dependencies: + '@types/node': 24.10.9 + '@ungap/structured-clone@1.3.0': {} '@vercel/oidc@3.2.0': {} - '@vercel/sandbox@1.7.1': + '@vercel/sandbox@1.8.0': dependencies: '@vercel/oidc': 3.2.0 async-retry: 1.3.3 @@ -7946,7 +7988,7 @@ snapshots: - bare-abort-controller - react-native-b4a - '@vitejs/plugin-react@4.7.0(vite@5.4.21(@types/node@25.3.0))': + '@vitejs/plugin-react@4.7.0(vite@5.4.21(@types/node@25.3.5))': dependencies: '@babel/core': 7.28.6 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.6) @@ -7954,11 +7996,11 @@ snapshots: '@rolldown/pluginutils': 1.0.0-beta.27 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 5.4.21(@types/node@25.3.0) + vite: 5.4.21(@types/node@25.3.5) transitivePeerDependencies: - supports-color - '@vitejs/plugin-react@4.7.0(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': + '@vitejs/plugin-react@4.7.0(vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@babel/core': 7.28.6 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.6) 
@@ -7966,7 +8008,7 @@ snapshots: '@rolldown/pluginutils': 1.0.0-beta.27 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) transitivePeerDependencies: - supports-color @@ -7978,13 +8020,21 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@25.3.0))': + '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@22.19.7))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 5.4.21(@types/node@25.3.0) + vite: 5.4.21(@types/node@22.19.7) + + '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@25.3.5))': + dependencies: + '@vitest/spy': 3.2.4 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 5.4.21(@types/node@25.3.5) '@vitest/pretty-format@3.2.4': dependencies: @@ -8071,7 +8121,7 @@ snapshots: assertion-error@2.0.1: {} - astro@5.16.15(@types/node@25.3.0)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2): + astro@5.16.15(@types/node@25.3.5)(aws4fetch@1.0.20)(jiti@1.21.7)(rollup@4.56.0)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2): dependencies: '@astrojs/compiler': 2.13.0 '@astrojs/internal-helpers': 0.7.5 @@ -8128,8 +8178,8 @@ snapshots: unist-util-visit: 5.1.0 unstorage: 1.17.4(aws4fetch@1.0.20) vfile: 6.0.3 - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - vitefu: 1.1.1(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + vite: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vitefu: 1.1.1(vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) xxhash-wasm: 1.1.0 yargs-parser: 21.1.1 yocto-spinner: 0.2.3 @@ -9648,12 +9698,12 @@ snapshots: mimic-response@3.1.0: {} - miniflare@4.20260302.0: + miniflare@4.20260301.1: dependencies: '@cspotcode/source-map-support': 0.8.1 sharp: 0.34.5 undici: 7.18.2 - workerd: 1.20260302.0 
+ workerd: 1.20260301.1 ws: 8.18.0 youch: 4.1.0-beta.10 transitivePeerDependencies: @@ -9869,14 +9919,22 @@ snapshots: pg-connection-string@2.11.0: {} + pg-connection-string@2.12.0: {} + pg-int8@1.0.1: {} pg-pool@3.11.0(pg@8.18.0): dependencies: pg: 8.18.0 + pg-pool@3.13.0(pg@8.20.0): + dependencies: + pg: 8.20.0 + pg-protocol@1.11.0: {} + pg-protocol@1.13.0: {} + pg-types@2.2.0: dependencies: pg-int8: 1.0.1 @@ -9895,6 +9953,16 @@ snapshots: optionalDependencies: pg-cloudflare: 1.3.0 + pg@8.20.0: + dependencies: + pg-connection-string: 2.12.0 + pg-pool: 3.13.0(pg@8.20.0) + pg-protocol: 1.13.0 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.3.0 + pgpass@1.0.5: dependencies: split2: 4.2.0 @@ -10035,7 +10103,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 25.3.0 + '@types/node': 25.3.5 long: 5.3.2 proxy-addr@2.0.7: @@ -10940,13 +11008,13 @@ snapshots: - tsx - yaml - vite-node@3.2.4(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vite-node@3.2.4(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): dependencies: cac: 6.7.14 debug: 4.4.3 es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) transitivePeerDependencies: - '@types/node' - jiti @@ -10970,13 +11038,13 @@ snapshots: '@types/node': 22.19.7 fsevents: 2.3.3 - vite@5.4.21(@types/node@25.3.0): + vite@5.4.21(@types/node@25.3.5): dependencies: esbuild: 0.21.5 postcss: 8.5.6 rollup: 4.56.0 optionalDependencies: - '@types/node': 25.3.0 + '@types/node': 25.3.5 fsevents: 2.3.3 vite@6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): @@ -10994,7 +11062,7 @@ snapshots: tsx: 4.21.0 yaml: 2.8.2 - vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): dependencies: esbuild: 0.25.12 fdir: 
6.5.0(picomatch@4.0.3) @@ -11003,21 +11071,21 @@ snapshots: rollup: 4.56.0 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 25.3.0 + '@types/node': 25.3.5 fsevents: 2.3.3 jiti: 1.21.7 tsx: 4.21.0 yaml: 2.8.2 - vitefu@1.1.1(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)): + vitefu@1.1.1(vite@6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)): optionalDependencies: - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 6.4.1(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@25.3.0)) + '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@22.19.7)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -11055,11 +11123,11 @@ snapshots: - tsx - yaml - vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@25.3.0)) + '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@25.3.5)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -11077,12 +11145,12 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 5.4.21(@types/node@25.3.0) - vite-node: 3.2.4(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 5.4.21(@types/node@25.3.5) + vite-node: 3.2.4(@types/node@25.3.5)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: '@types/debug': 4.1.12 - '@types/node': 25.3.0 + '@types/node': 25.3.5 transitivePeerDependencies: - jiti - less @@ -11118,26 +11186,26 @@ snapshots: dependencies: string-width: 7.2.0 - workerd@1.20260302.0: + workerd@1.20260301.1: 
optionalDependencies: - '@cloudflare/workerd-darwin-64': 1.20260302.0 - '@cloudflare/workerd-darwin-arm64': 1.20260302.0 - '@cloudflare/workerd-linux-64': 1.20260302.0 - '@cloudflare/workerd-linux-arm64': 1.20260302.0 - '@cloudflare/workerd-windows-64': 1.20260302.0 + '@cloudflare/workerd-darwin-64': 1.20260301.1 + '@cloudflare/workerd-darwin-arm64': 1.20260301.1 + '@cloudflare/workerd-linux-64': 1.20260301.1 + '@cloudflare/workerd-linux-arm64': 1.20260301.1 + '@cloudflare/workerd-windows-64': 1.20260301.1 - wrangler@4.68.1(@cloudflare/workers-types@4.20260304.0): + wrangler@4.71.0(@cloudflare/workers-types@4.20260305.1): dependencies: '@cloudflare/kv-asset-handler': 0.4.2 - '@cloudflare/unenv-preset': 2.14.0(unenv@2.0.0-rc.24)(workerd@1.20260302.0) + '@cloudflare/unenv-preset': 2.15.0(unenv@2.0.0-rc.24)(workerd@1.20260301.1) blake3-wasm: 2.1.5 esbuild: 0.27.3 - miniflare: 4.20260302.0 + miniflare: 4.20260301.1 path-to-regexp: 6.3.0 unenv: 2.0.0-rc.24 - workerd: 1.20260302.0 + workerd: 1.20260301.1 optionalDependencies: - '@cloudflare/workers-types': 4.20260304.0 + '@cloudflare/workers-types': 4.20260305.1 fsevents: 2.3.3 transitivePeerDependencies: - bufferutil diff --git a/sdks/acp-http-client/tests/smoke.test.ts b/sdks/acp-http-client/tests/smoke.test.ts index 2380010..8b92e6c 100644 --- a/sdks/acp-http-client/tests/smoke.test.ts +++ b/sdks/acp-http-client/tests/smoke.test.ts @@ -74,6 +74,10 @@ describe("AcpHttpClient integration", () => { timeoutMs: 30000, env: { XDG_DATA_HOME: dataHome, + HOME: dataHome, + USERPROFILE: dataHome, + APPDATA: join(dataHome, "AppData", "Roaming"), + LOCALAPPDATA: join(dataHome, "AppData", "Local"), }, }); baseUrl = handle.baseUrl; diff --git a/sdks/persist-indexeddb/tests/integration.test.ts b/sdks/persist-indexeddb/tests/integration.test.ts index a30e70e..064c83d 100644 --- a/sdks/persist-indexeddb/tests/integration.test.ts +++ b/sdks/persist-indexeddb/tests/integration.test.ts @@ -60,6 +60,10 @@ describe("IndexedDB persistence 
end-to-end", () => { timeoutMs: 30000, env: { XDG_DATA_HOME: dataHome, + HOME: dataHome, + USERPROFILE: dataHome, + APPDATA: join(dataHome, "AppData", "Roaming"), + LOCALAPPDATA: join(dataHome, "AppData", "Local"), }, }); baseUrl = handle.baseUrl; diff --git a/sdks/persist-postgres/tests/integration.test.ts b/sdks/persist-postgres/tests/integration.test.ts index f453021..9017775 100644 --- a/sdks/persist-postgres/tests/integration.test.ts +++ b/sdks/persist-postgres/tests/integration.test.ts @@ -64,6 +64,10 @@ describe("Postgres persistence driver", () => { timeoutMs: 30000, env: { XDG_DATA_HOME: dataHome, + HOME: dataHome, + USERPROFILE: dataHome, + APPDATA: join(dataHome, "AppData", "Roaming"), + LOCALAPPDATA: join(dataHome, "AppData", "Local"), }, }); baseUrl = handle.baseUrl; diff --git a/sdks/persist-sqlite/tests/integration.test.ts b/sdks/persist-sqlite/tests/integration.test.ts index fb4b99c..5c4948a 100644 --- a/sdks/persist-sqlite/tests/integration.test.ts +++ b/sdks/persist-sqlite/tests/integration.test.ts @@ -55,6 +55,10 @@ describe("SQLite persistence driver", () => { timeoutMs: 30000, env: { XDG_DATA_HOME: dataHome, + HOME: dataHome, + USERPROFILE: dataHome, + APPDATA: join(dataHome, "AppData", "Roaming"), + LOCALAPPDATA: join(dataHome, "AppData", "Local"), }, }); baseUrl = handle.baseUrl; diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 990f952..b9f3716 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -17,8 +17,8 @@ } }, "dependencies": { - "acp-http-client": "workspace:*", - "@sandbox-agent/cli-shared": "workspace:*" + "@sandbox-agent/cli-shared": "workspace:*", + "acp-http-client": "workspace:*" }, "files": [ "dist" @@ -34,10 +34,12 @@ }, "devDependencies": { "@types/node": "^22.0.0", + "@types/ws": "^8.18.1", "openapi-typescript": "^6.7.0", "tsup": "^8.0.0", "typescript": "^5.7.0", - "vitest": "^3.0.0" + "vitest": "^3.0.0", + "ws": "^8.19.0" }, "optionalDependencies": { 
"@sandbox-agent/cli": "workspace:*" diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index 59d8f14..65b8aa5 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -39,6 +39,20 @@ import { type McpConfigQuery, type McpServerConfig, type ProblemDetails, + type ProcessConfig, + type ProcessCreateRequest, + type ProcessInfo, + type ProcessInputRequest, + type ProcessInputResponse, + type ProcessListResponse, + type ProcessLogEntry, + type ProcessLogsQuery, + type ProcessLogsResponse, + type ProcessRunRequest, + type ProcessRunResponse, + type ProcessSignalQuery, + type ProcessTerminalResizeRequest, + type ProcessTerminalResizeResponse, type SessionEvent, type SessionPersistDriver, type SessionRecord, @@ -108,6 +122,27 @@ export interface SessionSendOptions { } export type SessionEventListener = (event: SessionEvent) => void; +export type ProcessLogListener = (entry: ProcessLogEntry) => void; +export type ProcessLogFollowQuery = Omit; + +export interface AgentQueryOptions { + config?: boolean; + noCache?: boolean; +} + +export interface ProcessLogSubscription { + close(): void; + closed: Promise; +} + +export interface ProcessTerminalWebSocketUrlOptions { + accessToken?: string; +} + +export interface ProcessTerminalConnectOptions extends ProcessTerminalWebSocketUrlOptions { + protocols?: string | string[]; + WebSocket?: typeof WebSocket; +} export class SandboxAgentError extends Error { readonly status: number; @@ -696,15 +731,15 @@ export class SandboxAgent { return this.requestHealth(); } - async listAgents(options?: { config?: boolean }): Promise { + async listAgents(options?: AgentQueryOptions): Promise { return this.requestJson("GET", `${API_PREFIX}/agents`, { - query: options?.config ? 
{ config: "true" } : undefined, + query: toAgentQuery(options), }); } - async getAgent(agent: string, options?: { config?: boolean }): Promise { + async getAgent(agent: string, options?: AgentQueryOptions): Promise { return this.requestJson("GET", `${API_PREFIX}/agents/${encodeURIComponent(agent)}`, { - query: options?.config ? { config: "true" } : undefined, + query: toAgentQuery(options), }); } @@ -793,6 +828,134 @@ export class SandboxAgent { await this.requestRaw("DELETE", `${API_PREFIX}/config/skills`, { query }); } + async getProcessConfig(): Promise { + return this.requestJson("GET", `${API_PREFIX}/processes/config`); + } + + async setProcessConfig(config: ProcessConfig): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes/config`, { + body: config, + }); + } + + async createProcess(request: ProcessCreateRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes`, { + body: request, + }); + } + + async runProcess(request: ProcessRunRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes/run`, { + body: request, + }); + } + + async listProcesses(): Promise { + return this.requestJson("GET", `${API_PREFIX}/processes`); + } + + async getProcess(id: string): Promise { + return this.requestJson("GET", `${API_PREFIX}/processes/${encodeURIComponent(id)}`); + } + + async stopProcess(id: string, query?: ProcessSignalQuery): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes/${encodeURIComponent(id)}/stop`, { + query, + }); + } + + async killProcess(id: string, query?: ProcessSignalQuery): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes/${encodeURIComponent(id)}/kill`, { + query, + }); + } + + async deleteProcess(id: string): Promise { + await this.requestRaw("DELETE", `${API_PREFIX}/processes/${encodeURIComponent(id)}`); + } + + async getProcessLogs(id: string, query: ProcessLogFollowQuery = {}): Promise { + return this.requestJson("GET", 
`${API_PREFIX}/processes/${encodeURIComponent(id)}/logs`, { + query, + }); + } + + async followProcessLogs( + id: string, + listener: ProcessLogListener, + query: ProcessLogFollowQuery = {}, + ): Promise { + const abortController = new AbortController(); + const response = await this.requestRaw( + "GET", + `${API_PREFIX}/processes/${encodeURIComponent(id)}/logs`, + { + query: { ...query, follow: true }, + accept: "text/event-stream", + signal: abortController.signal, + }, + ); + + if (!response.body) { + abortController.abort(); + throw new Error("SSE stream is not readable in this environment."); + } + + const closed = consumeProcessLogSse(response.body, listener, abortController.signal); + + return { + close: () => abortController.abort(), + closed, + }; + } + + async sendProcessInput(id: string, request: ProcessInputRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/processes/${encodeURIComponent(id)}/input`, { + body: request, + }); + } + + async resizeProcessTerminal( + id: string, + request: ProcessTerminalResizeRequest, + ): Promise { + return this.requestJson( + "POST", + `${API_PREFIX}/processes/${encodeURIComponent(id)}/terminal/resize`, + { + body: request, + }, + ); + } + + buildProcessTerminalWebSocketUrl( + id: string, + options: ProcessTerminalWebSocketUrlOptions = {}, + ): string { + return toWebSocketUrl( + this.buildUrl(`${API_PREFIX}/processes/${encodeURIComponent(id)}/terminal/ws`, { + access_token: options.accessToken ?? this.token, + }), + ); + } + + connectProcessTerminalWebSocket( + id: string, + options: ProcessTerminalConnectOptions = {}, + ): WebSocket { + const WebSocketCtor = options.WebSocket ?? 
globalThis.WebSocket; + if (!WebSocketCtor) { + throw new Error("WebSocket API is not available; provide a WebSocket implementation."); + } + + return new WebSocketCtor( + this.buildProcessTerminalWebSocketUrl(id, { + accessToken: options.accessToken, + }), + options.protocols, + ); + } + private async getLiveConnection(agent: string): Promise { await this.awaitHealthy(); @@ -1182,6 +1345,17 @@ async function autoAuthenticate(acp: AcpHttpClient, methods: AuthMethod[]): Prom } } +function toAgentQuery(options: AgentQueryOptions | undefined): Record | undefined { + if (!options) { + return undefined; + } + + return { + config: options.config, + no_cache: options.noCache, + }; +} + function normalizeSessionInit( value: Omit | undefined, ): Omit { @@ -1455,6 +1629,92 @@ async function waitForAbortable(promise: Promise, signal: AbortSignal | un }); } +async function consumeProcessLogSse( + body: ReadableStream, + listener: ProcessLogListener, + signal: AbortSignal, +): Promise { + const reader = body.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + try { + while (!signal.aborted) { + const { done, value } = await reader.read(); + if (done) { + return; + } + + buffer += decoder.decode(value, { stream: true }).replace(/\r\n/g, "\n"); + + let separatorIndex = buffer.indexOf("\n\n"); + while (separatorIndex !== -1) { + const chunk = buffer.slice(0, separatorIndex); + buffer = buffer.slice(separatorIndex + 2); + + const entry = parseProcessLogSseChunk(chunk); + if (entry) { + listener(entry); + } + + separatorIndex = buffer.indexOf("\n\n"); + } + } + } catch (error) { + if (signal.aborted || isAbortError(error)) { + return; + } + throw error; + } finally { + reader.releaseLock(); + } +} + +function parseProcessLogSseChunk(chunk: string): ProcessLogEntry | null { + if (!chunk.trim()) { + return null; + } + + let eventName = "message"; + const dataLines: string[] = []; + + for (const line of chunk.split("\n")) { + if (!line || line.startsWith(":")) { + 
continue; + } + + if (line.startsWith("event:")) { + eventName = line.slice(6).trim(); + continue; + } + + if (line.startsWith("data:")) { + dataLines.push(line.slice(5).trimStart()); + } + } + + if (eventName !== "log") { + return null; + } + + const data = dataLines.join("\n"); + if (!data.trim()) { + return null; + } + + return JSON.parse(data) as ProcessLogEntry; +} + +function toWebSocketUrl(url: string): string { + const parsed = new URL(url); + if (parsed.protocol === "http:") { + parsed.protocol = "ws:"; + } else if (parsed.protocol === "https:") { + parsed.protocol = "wss:"; + } + return parsed.toString(); +} + function isAbortError(error: unknown): boolean { return error instanceof Error && error.name === "AbortError"; } diff --git a/sdks/typescript/src/generated/openapi.ts b/sdks/typescript/src/generated/openapi.ts index 91ab56b..a89d796 100644 --- a/sdks/typescript/src/generated/openapi.ts +++ b/sdks/typescript/src/generated/openapi.ts @@ -57,6 +57,39 @@ export interface paths { "/v1/health": { get: operations["get_v1_health"]; }; + "/v1/processes": { + get: operations["get_v1_processes"]; + post: operations["post_v1_processes"]; + }; + "/v1/processes/config": { + get: operations["get_v1_processes_config"]; + post: operations["post_v1_processes_config"]; + }; + "/v1/processes/run": { + post: operations["post_v1_processes_run"]; + }; + "/v1/processes/{id}": { + get: operations["get_v1_process"]; + delete: operations["delete_v1_process"]; + }; + "/v1/processes/{id}/input": { + post: operations["post_v1_process_input"]; + }; + "/v1/processes/{id}/kill": { + post: operations["post_v1_process_kill"]; + }; + "/v1/processes/{id}/logs": { + get: operations["get_v1_process_logs"]; + }; + "/v1/processes/{id}/stop": { + post: operations["post_v1_process_stop"]; + }; + "/v1/processes/{id}/terminal/resize": { + post: operations["post_v1_process_terminal_resize"]; + }; + "/v1/processes/{id}/terminal/ws": { + get: operations["get_v1_process_terminal_ws"]; + }; } 
export type webhooks = Record; @@ -230,6 +263,116 @@ export interface components { type: string; [key: string]: unknown; }; + ProcessConfig: { + /** Format: int64 */ + defaultRunTimeoutMs: number; + maxConcurrentProcesses: number; + maxInputBytesPerRequest: number; + maxLogBytesPerProcess: number; + maxOutputBytes: number; + /** Format: int64 */ + maxRunTimeoutMs: number; + }; + ProcessCreateRequest: { + args?: string[]; + command: string; + cwd?: string | null; + env?: { + [key: string]: string; + }; + interactive?: boolean; + tty?: boolean; + }; + ProcessInfo: { + args: string[]; + command: string; + /** Format: int64 */ + createdAtMs: number; + cwd?: string | null; + /** Format: int32 */ + exitCode?: number | null; + /** Format: int64 */ + exitedAtMs?: number | null; + id: string; + interactive: boolean; + /** Format: int32 */ + pid?: number | null; + status: components["schemas"]["ProcessState"]; + tty: boolean; + }; + ProcessInputRequest: { + data: string; + encoding?: string | null; + }; + ProcessInputResponse: { + bytesWritten: number; + }; + ProcessListResponse: { + processes: components["schemas"]["ProcessInfo"][]; + }; + ProcessLogEntry: { + data: string; + encoding: string; + /** Format: int64 */ + sequence: number; + stream: components["schemas"]["ProcessLogsStream"]; + /** Format: int64 */ + timestampMs: number; + }; + ProcessLogsQuery: { + follow?: boolean | null; + /** Format: int64 */ + since?: number | null; + stream?: components["schemas"]["ProcessLogsStream"] | null; + tail?: number | null; + }; + ProcessLogsResponse: { + entries: components["schemas"]["ProcessLogEntry"][]; + processId: string; + stream: components["schemas"]["ProcessLogsStream"]; + }; + /** @enum {string} */ + ProcessLogsStream: "stdout" | "stderr" | "combined" | "pty"; + ProcessRunRequest: { + args?: string[]; + command: string; + cwd?: string | null; + env?: { + [key: string]: string; + }; + maxOutputBytes?: number | null; + /** Format: int64 */ + timeoutMs?: number | null; + 
}; + ProcessRunResponse: { + /** Format: int64 */ + durationMs: number; + /** Format: int32 */ + exitCode?: number | null; + stderr: string; + stderrTruncated: boolean; + stdout: string; + stdoutTruncated: boolean; + timedOut: boolean; + }; + ProcessSignalQuery: { + /** Format: int64 */ + waitMs?: number | null; + }; + /** @enum {string} */ + ProcessState: "running" | "exited"; + ProcessTerminalResizeRequest: { + /** Format: int32 */ + cols: number; + /** Format: int32 */ + rows: number; + }; + ProcessTerminalResizeResponse: { + /** Format: int32 */ + cols: number; + /** Format: int32 */ + rows: number; + }; /** @enum {string} */ ServerStatus: "running" | "stopped"; ServerStatusInfo: { @@ -748,4 +891,417 @@ export interface operations { }; }; }; + get_v1_processes: { + responses: { + /** @description List processes */ + 200: { + content: { + "application/json": components["schemas"]["ProcessListResponse"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_processes: { + requestBody: { + content: { + "application/json": components["schemas"]["ProcessCreateRequest"]; + }; + }; + responses: { + /** @description Started process */ + 200: { + content: { + "application/json": components["schemas"]["ProcessInfo"]; + }; + }; + /** @description Invalid request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process limit or state conflict */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + get_v1_processes_config: { + responses: { + /** @description Current runtime process config */ + 200: { + content: { + "application/json": 
components["schemas"]["ProcessConfig"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_processes_config: { + requestBody: { + content: { + "application/json": components["schemas"]["ProcessConfig"]; + }; + }; + responses: { + /** @description Updated runtime process config */ + 200: { + content: { + "application/json": components["schemas"]["ProcessConfig"]; + }; + }; + /** @description Invalid config */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_processes_run: { + requestBody: { + content: { + "application/json": components["schemas"]["ProcessRunRequest"]; + }; + }; + responses: { + /** @description One-off command result */ + 200: { + content: { + "application/json": components["schemas"]["ProcessRunResponse"]; + }; + }; + /** @description Invalid request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + get_v1_process: { + parameters: { + path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description Process details */ + 200: { + content: { + "application/json": components["schemas"]["ProcessInfo"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + delete_v1_process: { + parameters: { + 
path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description Process deleted */ + 204: { + content: never; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process is still running */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_process_input: { + parameters: { + path: { + /** @description Process ID */ + id: string; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["ProcessInputRequest"]; + }; + }; + responses: { + /** @description Input accepted */ + 200: { + content: { + "application/json": components["schemas"]["ProcessInputResponse"]; + }; + }; + /** @description Invalid request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process not writable */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Input exceeds configured limit */ + 413: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_process_kill: { + parameters: { + query?: { + /** @description Wait up to N ms for process to exit */ + waitMs?: number | null; + }; + path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description Kill signal sent */ + 200: { + content: { + "application/json": components["schemas"]["ProcessInfo"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": 
components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + get_v1_process_logs: { + parameters: { + query?: { + /** @description stdout|stderr|combined|pty */ + stream?: components["schemas"]["ProcessLogsStream"] | null; + /** @description Tail N entries */ + tail?: number | null; + /** @description Follow via SSE */ + follow?: boolean | null; + /** @description Only entries with sequence greater than this */ + since?: number | null; + }; + path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description Process logs */ + 200: { + content: { + "application/json": components["schemas"]["ProcessLogsResponse"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_process_stop: { + parameters: { + query?: { + /** @description Wait up to N ms for process to exit */ + waitMs?: number | null; + }; + path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description Stop signal sent */ + 200: { + content: { + "application/json": components["schemas"]["ProcessInfo"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + post_v1_process_terminal_resize: { + parameters: { + path: { + /** @description Process ID */ + id: string; + }; + }; + requestBody: { + content: { + "application/json": 
components["schemas"]["ProcessTerminalResizeRequest"]; + }; + }; + responses: { + /** @description Resize accepted */ + 200: { + content: { + "application/json": components["schemas"]["ProcessTerminalResizeResponse"]; + }; + }; + /** @description Invalid request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Not a terminal process */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + get_v1_process_terminal_ws: { + parameters: { + query?: { + /** @description Bearer token alternative for WS auth */ + access_token?: string | null; + }; + path: { + /** @description Process ID */ + id: string; + }; + }; + responses: { + /** @description WebSocket upgraded */ + 101: { + content: never; + }; + /** @description Invalid websocket frame or upgrade request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Unknown process */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Not a terminal process */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Process API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; } diff --git a/sdks/typescript/src/index.ts b/sdks/typescript/src/index.ts index 4e360ae..8273809 100644 --- a/sdks/typescript/src/index.ts +++ b/sdks/typescript/src/index.ts @@ -11,6 +11,12 @@ export { buildInspectorUrl } from "./inspector.ts"; export type { 
SandboxAgentHealthWaitOptions, + AgentQueryOptions, + ProcessLogFollowQuery, + ProcessLogListener, + ProcessLogSubscription, + ProcessTerminalConnectOptions, + ProcessTerminalWebSocketUrlOptions, SandboxAgentConnectOptions, SandboxAgentStartOptions, SessionCreateRequest, @@ -30,6 +36,7 @@ export type { AcpServerInfo, AcpServerListResponse, AgentInfo, + AgentQuery, AgentInstallRequest, AgentInstallResponse, AgentListResponse, @@ -52,6 +59,27 @@ export type { McpConfigQuery, McpServerConfig, ProblemDetails, + ProcessConfig, + ProcessCreateRequest, + ProcessInfo, + ProcessInputRequest, + ProcessInputResponse, + ProcessListResponse, + ProcessLogEntry, + ProcessLogsQuery, + ProcessLogsResponse, + ProcessLogsStream, + ProcessRunRequest, + ProcessRunResponse, + ProcessSignalQuery, + ProcessState, + ProcessTerminalClientFrame, + ProcessTerminalErrorFrame, + ProcessTerminalExitFrame, + ProcessTerminalReadyFrame, + ProcessTerminalResizeRequest, + ProcessTerminalResizeResponse, + ProcessTerminalServerFrame, SessionEvent, SessionPersistDriver, SessionRecord, diff --git a/sdks/typescript/src/types.ts b/sdks/typescript/src/types.ts index 17b321e..aa7a73a 100644 --- a/sdks/typescript/src/types.ts +++ b/sdks/typescript/src/types.ts @@ -6,6 +6,7 @@ export type ProblemDetails = components["schemas"]["ProblemDetails"]; export type HealthResponse = JsonResponse; export type AgentListResponse = JsonResponse; export type AgentInfo = components["schemas"]["AgentInfo"]; +export type AgentQuery = QueryParams; export type AgentInstallRequest = JsonRequestBody; export type AgentInstallResponse = JsonResponse; @@ -31,6 +32,58 @@ export type McpServerConfig = components["schemas"]["McpServerConfig"]; export type SkillsConfigQuery = QueryParams; export type SkillsConfig = components["schemas"]["SkillsConfig"]; +export type ProcessConfig = JsonResponse; +export type ProcessCreateRequest = JsonRequestBody; +export type ProcessInfo = components["schemas"]["ProcessInfo"]; +export type 
ProcessInputRequest = JsonRequestBody; +export type ProcessInputResponse = JsonResponse; +export type ProcessListResponse = JsonResponse; +export type ProcessLogEntry = components["schemas"]["ProcessLogEntry"]; +export type ProcessLogsQuery = QueryParams; +export type ProcessLogsResponse = JsonResponse; +export type ProcessLogsStream = components["schemas"]["ProcessLogsStream"]; +export type ProcessRunRequest = JsonRequestBody; +export type ProcessRunResponse = JsonResponse; +export type ProcessSignalQuery = QueryParams; +export type ProcessState = components["schemas"]["ProcessState"]; +export type ProcessTerminalResizeRequest = JsonRequestBody; +export type ProcessTerminalResizeResponse = JsonResponse; + +export type ProcessTerminalClientFrame = + | { + type: "input"; + data: string; + encoding?: string; + } + | { + type: "resize"; + cols: number; + rows: number; + } + | { + type: "close"; + }; + +export interface ProcessTerminalReadyFrame { + type: "ready"; + processId: string; +} + +export interface ProcessTerminalExitFrame { + type: "exit"; + exitCode?: number | null; +} + +export interface ProcessTerminalErrorFrame { + type: "error"; + message: string; +} + +export type ProcessTerminalServerFrame = + | ProcessTerminalReadyFrame + | ProcessTerminalExitFrame + | ProcessTerminalErrorFrame; + export interface SessionRecord { id: string; agent: string; diff --git a/sdks/typescript/tests/helpers/mock-agent.ts b/sdks/typescript/tests/helpers/mock-agent.ts index 3d5677b..4c6f064 100644 --- a/sdks/typescript/tests/helpers/mock-agent.ts +++ b/sdks/typescript/tests/helpers/mock-agent.ts @@ -1,18 +1,29 @@ import { chmodSync, mkdirSync, writeFileSync } from "node:fs"; import { join } from "node:path"; -export function prepareMockAgentDataHome(dataHome: string): void { - const installDir = join(dataHome, "sandbox-agent", "bin"); - const processDir = join(installDir, "agent_processes"); - mkdirSync(processDir, { recursive: true }); +function candidateInstallDirs(dataHome: 
string): string[] { + const dirs = [join(dataHome, "sandbox-agent", "bin")]; + if (process.platform === "darwin") { + dirs.push(join(dataHome, "Library", "Application Support", "sandbox-agent", "bin")); + } else if (process.platform === "win32") { + dirs.push(join(dataHome, "AppData", "Roaming", "sandbox-agent", "bin")); + } + return dirs; +} - const runner = process.platform === "win32" - ? join(processDir, "mock-acp.cmd") - : join(processDir, "mock-acp"); - - const scriptFile = process.platform === "win32" - ? join(processDir, "mock-acp.js") - : runner; +export function prepareMockAgentDataHome(dataHome: string): Record { + const runtimeEnv: Record = {}; + if (process.platform === "darwin") { + runtimeEnv.HOME = dataHome; + runtimeEnv.XDG_DATA_HOME = join(dataHome, ".local", "share"); + } else if (process.platform === "win32") { + runtimeEnv.USERPROFILE = dataHome; + runtimeEnv.APPDATA = join(dataHome, "AppData", "Roaming"); + runtimeEnv.LOCALAPPDATA = join(dataHome, "AppData", "Local"); + } else { + runtimeEnv.HOME = dataHome; + runtimeEnv.XDG_DATA_HOME = dataHome; + } const nodeScript = String.raw`#!/usr/bin/env node const { createInterface } = require("node:readline"); @@ -127,14 +138,29 @@ rl.on("line", (line) => { }); `; - writeFileSync(scriptFile, nodeScript); + for (const installDir of candidateInstallDirs(dataHome)) { + const processDir = join(installDir, "agent_processes"); + mkdirSync(processDir, { recursive: true }); - if (process.platform === "win32") { - writeFileSync(runner, `@echo off\r\nnode "${scriptFile}" %*\r\n`); + const runner = process.platform === "win32" + ? join(processDir, "mock-acp.cmd") + : join(processDir, "mock-acp"); + + const scriptFile = process.platform === "win32" + ? 
join(processDir, "mock-acp.js") + : runner; + + writeFileSync(scriptFile, nodeScript); + + if (process.platform === "win32") { + writeFileSync(runner, `@echo off\r\nnode "${scriptFile}" %*\r\n`); + } + + chmodSync(scriptFile, 0o755); + if (process.platform === "win32") { + chmodSync(runner, 0o755); + } } - chmodSync(scriptFile, 0o755); - if (process.platform === "win32") { - chmodSync(runner, 0o755); - } + return runtimeEnv; } diff --git a/sdks/typescript/tests/integration.test.ts b/sdks/typescript/tests/integration.test.ts index fd20eef..238c6cb 100644 --- a/sdks/typescript/tests/integration.test.ts +++ b/sdks/typescript/tests/integration.test.ts @@ -12,6 +12,7 @@ import { } from "../src/index.ts"; import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts"; import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts"; +import WebSocket from "ws"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -64,6 +65,107 @@ async function waitFor( throw new Error("timed out waiting for condition"); } +async function waitForAsync( + fn: () => Promise, + timeoutMs = 6000, + stepMs = 30, +): Promise { + const started = Date.now(); + while (Date.now() - started < timeoutMs) { + const value = await fn(); + if (value !== undefined && value !== null) { + return value; + } + await sleep(stepMs); + } + throw new Error("timed out waiting for condition"); +} + +function buildTarArchive(entries: Array<{ name: string; content: string }>): Uint8Array { + const blocks: Buffer[] = []; + + for (const entry of entries) { + const content = Buffer.from(entry.content, "utf8"); + const header = Buffer.alloc(512, 0); + + writeTarString(header, 0, 100, entry.name); + writeTarOctal(header, 100, 8, 0o644); + writeTarOctal(header, 108, 8, 0); + writeTarOctal(header, 116, 8, 0); + writeTarOctal(header, 124, 12, content.length); + writeTarOctal(header, 136, 12, Math.floor(Date.now() / 1000)); + header.fill(0x20, 148, 156); + header[156] = 
"0".charCodeAt(0); + writeTarString(header, 257, 6, "ustar"); + writeTarString(header, 263, 2, "00"); + + let checksum = 0; + for (const byte of header) { + checksum += byte; + } + writeTarChecksum(header, checksum); + + blocks.push(header); + blocks.push(content); + + const remainder = content.length % 512; + if (remainder !== 0) { + blocks.push(Buffer.alloc(512 - remainder, 0)); + } + } + + blocks.push(Buffer.alloc(1024, 0)); + return Buffer.concat(blocks); +} + +function writeTarString(buffer: Buffer, offset: number, length: number, value: string): void { + const bytes = Buffer.from(value, "utf8"); + bytes.copy(buffer, offset, 0, Math.min(bytes.length, length)); +} + +function writeTarOctal(buffer: Buffer, offset: number, length: number, value: number): void { + const rendered = value.toString(8).padStart(length - 1, "0"); + writeTarString(buffer, offset, length, rendered); + buffer[offset + length - 1] = 0; +} + +function writeTarChecksum(buffer: Buffer, checksum: number): void { + const rendered = checksum.toString(8).padStart(6, "0"); + writeTarString(buffer, 148, 6, rendered); + buffer[154] = 0; + buffer[155] = 0x20; +} + +function decodeSocketPayload(data: unknown): string { + if (typeof data === "string") { + return data; + } + if (data instanceof ArrayBuffer) { + return Buffer.from(data).toString("utf8"); + } + if (ArrayBuffer.isView(data)) { + return Buffer.from(data.buffer, data.byteOffset, data.byteLength).toString("utf8"); + } + if (typeof Blob !== "undefined" && data instanceof Blob) { + throw new Error("Blob socket payloads are not supported in this test"); + } + throw new Error(`Unsupported socket payload type: ${typeof data}`); +} + +function decodeProcessLogData(data: string, encoding: string): string { + if (encoding === "base64") { + return Buffer.from(data, "base64").toString("utf8"); + } + return data; +} + +function nodeCommand(source: string): { command: string; args: string[] } { + return { + command: process.execPath, + args: ["-e", 
source], + }; +} + describe("Integration: TypeScript SDK flat session API", () => { let handle: SandboxAgentSpawnHandle; let baseUrl: string; @@ -72,15 +174,13 @@ describe("Integration: TypeScript SDK flat session API", () => { beforeAll(async () => { dataHome = mkdtempSync(join(tmpdir(), "sdk-integration-")); - prepareMockAgentDataHome(dataHome); + const agentEnv = prepareMockAgentDataHome(dataHome); handle = await spawnSandboxAgent({ enabled: true, log: "silent", timeoutMs: 30000, - env: { - XDG_DATA_HOME: dataHome, - }, + env: agentEnv, }); baseUrl = handle.baseUrl; token = handle.token; @@ -122,6 +222,9 @@ describe("Integration: TypeScript SDK flat session API", () => { const fetched = await sdk.getSession(session.id); expect(fetched?.agent).toBe("mock"); + const acpServers = await sdk.listAcpServers(); + expect(acpServers.servers.some((server) => server.agent === "mock")).toBe(true); + const events = await sdk.getEvents({ sessionId: session.id, limit: 100 }); expect(events.items.length).toBeGreaterThan(0); expect(events.items.some((event) => event.sender === "client")).toBe(true); @@ -137,6 +240,64 @@ describe("Integration: TypeScript SDK flat session API", () => { await sdk.dispose(); }); + it("covers agent query flags and filesystem HTTP helpers", async () => { + const sdk = await SandboxAgent.connect({ + baseUrl, + token, + }); + + const directory = mkdtempSync(join(tmpdir(), "sdk-fs-")); + const nestedDir = join(directory, "nested"); + const filePath = join(directory, "notes.txt"); + const movedPath = join(directory, "notes-moved.txt"); + const uploadDir = join(directory, "uploaded"); + + try { + const listedAgents = await sdk.listAgents({ config: true, noCache: true }); + expect(listedAgents.agents.some((agent) => agent.id === "mock")).toBe(true); + + const mockAgent = await sdk.getAgent("mock", { config: true, noCache: true }); + expect(mockAgent.id).toBe("mock"); + expect(Array.isArray(mockAgent.configOptions)).toBe(true); + + await sdk.mkdirFs({ path: 
nestedDir }); + await sdk.writeFsFile({ path: filePath }, "hello from sdk"); + + const bytes = await sdk.readFsFile({ path: filePath }); + expect(new TextDecoder().decode(bytes)).toBe("hello from sdk"); + + const stat = await sdk.statFs({ path: filePath }); + expect(stat.path).toBe(filePath); + expect(stat.size).toBe(bytes.byteLength); + + const entries = await sdk.listFsEntries({ path: directory }); + expect(entries.some((entry) => entry.path === nestedDir)).toBe(true); + expect(entries.some((entry) => entry.path === filePath)).toBe(true); + + const moved = await sdk.moveFs({ + from: filePath, + to: movedPath, + overwrite: true, + }); + expect(moved.to).toBe(movedPath); + + const uploadResult = await sdk.uploadFsBatch( + buildTarArchive([{ name: "batch.txt", content: "batch upload works" }]), + { path: uploadDir }, + ); + expect(uploadResult.paths.some((path) => path.endsWith("batch.txt"))).toBe(true); + + const uploaded = await sdk.readFsFile({ path: join(uploadDir, "batch.txt") }); + expect(new TextDecoder().decode(uploaded)).toBe("batch upload works"); + + const deleted = await sdk.deleteFsEntry({ path: movedPath }); + expect(deleted.path).toBe(movedPath); + } finally { + rmSync(directory, { recursive: true, force: true }); + await sdk.dispose(); + } + }); + it("uses custom fetch for both HTTP helpers and ACP session traffic", async () => { const defaultFetch = globalThis.fetch; if (!defaultFetch) { @@ -168,7 +329,7 @@ describe("Integration: TypeScript SDK flat session API", () => { expect(seenPaths.some((path) => path.startsWith("/v1/acp/"))).toBe(true); await sdk.dispose(); - }); + }, 60_000); it("requires baseUrl when fetch is not provided", async () => { await expect(SandboxAgent.connect({ token } as any)).rejects.toThrow( @@ -425,4 +586,186 @@ describe("Integration: TypeScript SDK flat session API", () => { await sdk.dispose(); rmSync(directory, { recursive: true, force: true }); }); + + it("covers process runtime HTTP helpers, log streaming, and terminal 
websocket access", async () => { + const sdk = await SandboxAgent.connect({ + baseUrl, + token, + }); + + const originalConfig = await sdk.getProcessConfig(); + const updatedConfig = await sdk.setProcessConfig({ + ...originalConfig, + maxOutputBytes: originalConfig.maxOutputBytes + 1, + }); + expect(updatedConfig.maxOutputBytes).toBe(originalConfig.maxOutputBytes + 1); + + const runResult = await sdk.runProcess({ + ...nodeCommand("process.stdout.write('run-stdout'); process.stderr.write('run-stderr');"), + timeoutMs: 5_000, + }); + expect(runResult.stdout).toContain("run-stdout"); + expect(runResult.stderr).toContain("run-stderr"); + + let interactiveProcessId: string | undefined; + let ttyProcessId: string | undefined; + let killProcessId: string | undefined; + + try { + const interactiveProcess = await sdk.createProcess({ + ...nodeCommand(` + process.stdin.setEncoding("utf8"); + process.stdout.write("ready\\n"); + process.stdin.on("data", (chunk) => { + process.stdout.write("echo:" + chunk); + }); + setInterval(() => {}, 1_000); + `), + interactive: true, + }); + interactiveProcessId = interactiveProcess.id; + + const listed = await sdk.listProcesses(); + expect(listed.processes.some((process) => process.id === interactiveProcess.id)).toBe(true); + + const fetched = await sdk.getProcess(interactiveProcess.id); + expect(fetched.status).toBe("running"); + + const initialLogs = await waitForAsync(async () => { + const logs = await sdk.getProcessLogs(interactiveProcess.id, { tail: 10 }); + return logs.entries.some((entry) => decodeProcessLogData(entry.data, entry.encoding).includes("ready")) + ? 
logs + : undefined; + }); + expect( + initialLogs.entries.some((entry) => decodeProcessLogData(entry.data, entry.encoding).includes("ready")), + ).toBe(true); + + const followedLogs: string[] = []; + const subscription = await sdk.followProcessLogs( + interactiveProcess.id, + (entry) => { + followedLogs.push(decodeProcessLogData(entry.data, entry.encoding)); + }, + { tail: 1 }, + ); + + try { + const inputResult = await sdk.sendProcessInput(interactiveProcess.id, { + data: Buffer.from("hello over stdin\n", "utf8").toString("base64"), + encoding: "base64", + }); + expect(inputResult.bytesWritten).toBeGreaterThan(0); + + await waitFor(() => { + const joined = followedLogs.join(""); + return joined.includes("echo:hello over stdin") ? joined : undefined; + }); + } finally { + subscription.close(); + await subscription.closed; + } + + const stopped = await sdk.stopProcess(interactiveProcess.id, { waitMs: 5_000 }); + expect(stopped.status).toBe("exited"); + + await sdk.deleteProcess(interactiveProcess.id); + interactiveProcessId = undefined; + + const ttyProcess = await sdk.createProcess({ + ...nodeCommand(` + process.stdin.setEncoding("utf8"); + process.stdin.on("data", (chunk) => { + process.stdout.write(chunk); + }); + setInterval(() => {}, 1_000); + `), + interactive: true, + tty: true, + }); + ttyProcessId = ttyProcess.id; + + const resized = await sdk.resizeProcessTerminal(ttyProcess.id, { + cols: 120, + rows: 40, + }); + expect(resized.cols).toBe(120); + expect(resized.rows).toBe(40); + + const wsUrl = sdk.buildProcessTerminalWebSocketUrl(ttyProcess.id); + expect(wsUrl.startsWith("ws://") || wsUrl.startsWith("wss://")).toBe(true); + + const ws = sdk.connectProcessTerminalWebSocket(ttyProcess.id, { + WebSocket: WebSocket as unknown as typeof globalThis.WebSocket, + }); + ws.binaryType = "arraybuffer"; + + const socketTextFrames: string[] = []; + const socketBinaryFrames: string[] = []; + ws.addEventListener("message", (event) => { + if (typeof event.data === 
"string") { + socketTextFrames.push(event.data); + return; + } + socketBinaryFrames.push(decodeSocketPayload(event.data)); + }); + + await waitFor(() => { + const ready = socketTextFrames.find((frame) => frame.includes('"type":"ready"')); + return ready; + }); + + ws.send(JSON.stringify({ + type: "input", + data: "hello tty\n", + })); + + await waitFor(() => { + const joined = socketBinaryFrames.join(""); + return joined.includes("hello tty") ? joined : undefined; + }); + + ws.close(); + await waitForAsync(async () => { + const processInfo = await sdk.getProcess(ttyProcess.id); + return processInfo.status === "running" ? processInfo : undefined; + }); + + const killedTty = await sdk.killProcess(ttyProcess.id, { waitMs: 5_000 }); + expect(killedTty.status).toBe("exited"); + + await sdk.deleteProcess(ttyProcess.id); + ttyProcessId = undefined; + + const killProcess = await sdk.createProcess({ + ...nodeCommand("setInterval(() => {}, 1_000);"), + }); + killProcessId = killProcess.id; + + const killed = await sdk.killProcess(killProcess.id, { waitMs: 5_000 }); + expect(killed.status).toBe("exited"); + + await sdk.deleteProcess(killProcess.id); + killProcessId = undefined; + } finally { + await sdk.setProcessConfig(originalConfig); + + if (interactiveProcessId) { + await sdk.killProcess(interactiveProcessId, { waitMs: 5_000 }).catch(() => {}); + await sdk.deleteProcess(interactiveProcessId).catch(() => {}); + } + + if (ttyProcessId) { + await sdk.killProcess(ttyProcessId, { waitMs: 5_000 }).catch(() => {}); + await sdk.deleteProcess(ttyProcessId).catch(() => {}); + } + + if (killProcessId) { + await sdk.killProcess(killProcessId, { waitMs: 5_000 }).catch(() => {}); + await sdk.deleteProcess(killProcessId).catch(() => {}); + } + + await sdk.dispose(); + } + }); }); diff --git a/server/CLAUDE.md b/server/CLAUDE.md index 2c217d7..b56223c 100644 --- a/server/CLAUDE.md +++ b/server/CLAUDE.md @@ -1,17 +1,17 @@ # Server Instructions -## ACP v2 Architecture +## Architecture - 
Public API routes are defined in `server/packages/sandbox-agent/src/router.rs`. -- ACP runtime/process bridge is in `server/packages/sandbox-agent/src/acp_runtime.rs`. -- `/v2` is the only active API surface for sessions/prompts (`/v2/rpc`). +- ACP proxy runtime is in `server/packages/sandbox-agent/src/acp_proxy_runtime.rs`. +- All API endpoints are under `/v1`. - Keep binary filesystem transfer endpoints as dedicated HTTP APIs: - - `GET /v2/fs/file` - - `PUT /v2/fs/file` - - `POST /v2/fs/upload-batch` + - `GET /v1/fs/file` + - `PUT /v1/fs/file` + - `POST /v1/fs/upload-batch` - Rationale: host-owned cross-agent-consistent behavior and large binary transfer needs that ACP JSON-RPC is not suited to stream efficiently. - Maintain ACP variants in parallel only when they share the same underlying filesystem implementation; SDK defaults should still prefer HTTP for large/binary transfers. -- `/v1/*` must remain hard-removed (`410`) and `/opencode/*` stays disabled (`503`) until Phase 7. +- `/opencode/*` stays disabled (`503`) until Phase 7. - Agent install logic (native + ACP agent process + lazy install) is handled by `server/packages/agent-management/`. 
## API Contract Rules @@ -23,14 +23,14 @@ ## Tests -Primary v2 integration coverage: -- `server/packages/sandbox-agent/tests/v2_api.rs` -- `server/packages/sandbox-agent/tests/v2_agent_process_matrix.rs` +Primary v1 integration coverage: +- `server/packages/sandbox-agent/tests/v1_api.rs` +- `server/packages/sandbox-agent/tests/v1_agent_process_matrix.rs` Run: ```bash -cargo test -p sandbox-agent --test v2_api -cargo test -p sandbox-agent --test v2_agent_process_matrix +cargo test -p sandbox-agent --test v1_api +cargo test -p sandbox-agent --test v1_agent_process_matrix ``` ## Migration Docs Sync diff --git a/server/packages/error/src/lib.rs b/server/packages/error/src/lib.rs index 9bc1734..d0e8c0c 100644 --- a/server/packages/error/src/lib.rs +++ b/server/packages/error/src/lib.rs @@ -17,6 +17,7 @@ pub enum ErrorType { PermissionDenied, NotAcceptable, UnsupportedMediaType, + NotFound, SessionNotFound, SessionAlreadyExists, ModeNotSupported, @@ -37,6 +38,7 @@ impl ErrorType { Self::PermissionDenied => "urn:sandbox-agent:error:permission_denied", Self::NotAcceptable => "urn:sandbox-agent:error:not_acceptable", Self::UnsupportedMediaType => "urn:sandbox-agent:error:unsupported_media_type", + Self::NotFound => "urn:sandbox-agent:error:not_found", Self::SessionNotFound => "urn:sandbox-agent:error:session_not_found", Self::SessionAlreadyExists => "urn:sandbox-agent:error:session_already_exists", Self::ModeNotSupported => "urn:sandbox-agent:error:mode_not_supported", @@ -57,6 +59,7 @@ impl ErrorType { Self::PermissionDenied => "Permission Denied", Self::NotAcceptable => "Not Acceptable", Self::UnsupportedMediaType => "Unsupported Media Type", + Self::NotFound => "Not Found", Self::SessionNotFound => "Session Not Found", Self::SessionAlreadyExists => "Session Already Exists", Self::ModeNotSupported => "Mode Not Supported", @@ -77,6 +80,7 @@ impl ErrorType { Self::PermissionDenied => 403, Self::NotAcceptable => 406, Self::UnsupportedMediaType => 415, + Self::NotFound => 
404, Self::SessionNotFound => 404, Self::SessionAlreadyExists => 409, Self::ModeNotSupported => 400, @@ -155,6 +159,8 @@ pub enum SandboxError { NotAcceptable { message: String }, #[error("unsupported media type: {message}")] UnsupportedMediaType { message: String }, + #[error("not found: {resource} {id}")] + NotFound { resource: String, id: String }, #[error("session not found: {session_id}")] SessionNotFound { session_id: String }, #[error("session already exists: {session_id}")] @@ -180,6 +186,7 @@ impl SandboxError { Self::PermissionDenied { .. } => ErrorType::PermissionDenied, Self::NotAcceptable { .. } => ErrorType::NotAcceptable, Self::UnsupportedMediaType { .. } => ErrorType::UnsupportedMediaType, + Self::NotFound { .. } => ErrorType::NotFound, Self::SessionNotFound { .. } => ErrorType::SessionNotFound, Self::SessionAlreadyExists { .. } => ErrorType::SessionAlreadyExists, Self::ModeNotSupported { .. } => ErrorType::ModeNotSupported, @@ -264,6 +271,12 @@ impl SandboxError { map.insert("message".to_string(), Value::String(message.clone())); (None, None, Some(Value::Object(map))) } + Self::NotFound { resource, id } => { + let mut map = Map::new(); + map.insert("resource".to_string(), Value::String(resource.clone())); + map.insert("id".to_string(), Value::String(id.clone())); + (None, None, Some(Value::Object(map))) + } Self::SessionNotFound { session_id } => (None, Some(session_id.clone()), None), Self::SessionAlreadyExists { session_id } => (None, Some(session_id.clone()), None), Self::ModeNotSupported { agent, mode } => { diff --git a/server/packages/sandbox-agent/Cargo.toml b/server/packages/sandbox-agent/Cargo.toml index bb8328c..a8ae1db 100644 --- a/server/packages/sandbox-agent/Cargo.toml +++ b/server/packages/sandbox-agent/Cargo.toml @@ -55,6 +55,7 @@ insta.workspace = true tower.workspace = true tempfile.workspace = true serial_test = "3.2" +tokio-tungstenite = "0.24" [features] test-utils = ["tempfile"] diff --git 
a/server/packages/sandbox-agent/src/lib.rs b/server/packages/sandbox-agent/src/lib.rs index b5031e1..e84b10b 100644 --- a/server/packages/sandbox-agent/src/lib.rs +++ b/server/packages/sandbox-agent/src/lib.rs @@ -3,6 +3,7 @@ mod acp_proxy_runtime; pub mod cli; pub mod daemon; +mod process_runtime; pub mod router; pub mod server_logs; pub mod telemetry; diff --git a/server/packages/sandbox-agent/src/process_runtime.rs b/server/packages/sandbox-agent/src/process_runtime.rs new file mode 100644 index 0000000..4a895ec --- /dev/null +++ b/server/packages/sandbox-agent/src/process_runtime.rs @@ -0,0 +1,1082 @@ +use std::collections::{HashMap, VecDeque}; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::Arc; +use std::time::Instant; + +use base64::engine::general_purpose::STANDARD as BASE64; +use base64::Engine; +use serde::{Deserialize, Serialize}; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt}; +use tokio::process::{Child, ChildStdin, Command}; +use tokio::sync::{broadcast, Mutex, RwLock}; + +use sandbox_agent_error::SandboxError; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessStatus { + Running, + Exited, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessStream { + Stdout, + Stderr, + Pty, +} + +#[derive(Debug, Clone)] +pub struct ProcessStartSpec { + pub command: String, + pub args: Vec, + pub cwd: Option, + pub env: HashMap, + pub tty: bool, + pub interactive: bool, +} + +#[derive(Debug, Clone)] +pub struct RunSpec { + pub command: String, + pub args: Vec, + pub cwd: Option, + pub env: HashMap, + pub timeout_ms: Option, + pub max_output_bytes: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RunOutput { + pub exit_code: Option, + pub timed_out: bool, + pub stdout: String, + pub stderr: String, + pub stdout_truncated: bool, + pub 
stderr_truncated: bool, + pub duration_ms: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProcessLogLine { + pub sequence: u64, + pub stream: ProcessStream, + pub timestamp_ms: i64, + pub data: String, + pub encoding: &'static str, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProcessSnapshot { + pub id: String, + pub command: String, + pub args: Vec, + pub cwd: Option, + pub tty: bool, + pub interactive: bool, + pub status: ProcessStatus, + pub pid: Option, + pub exit_code: Option, + pub created_at_ms: i64, + pub exited_at_ms: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProcessRuntimeConfig { + pub max_concurrent_processes: usize, + pub default_run_timeout_ms: u64, + pub max_run_timeout_ms: u64, + pub max_output_bytes: usize, + pub max_log_bytes_per_process: usize, + pub max_input_bytes_per_request: usize, +} + +impl Default for ProcessRuntimeConfig { + fn default() -> Self { + Self { + max_concurrent_processes: 64, + default_run_timeout_ms: 30_000, + max_run_timeout_ms: 300_000, + max_output_bytes: 1_048_576, + max_log_bytes_per_process: 10_485_760, + max_input_bytes_per_request: 65_536, + } + } +} + +#[derive(Debug, Clone)] +pub struct ProcessRuntime { + config: Arc>, + inner: Arc, +} + +#[derive(Debug)] +struct ProcessRuntimeInner { + next_id: AtomicU64, + processes: RwLock>>, +} + +#[derive(Debug)] +struct ManagedProcess { + id: String, + command: String, + args: Vec, + cwd: Option, + tty: bool, + interactive: bool, + created_at_ms: i64, + pid: Option, + max_log_bytes: usize, + stdin: Mutex>, + #[cfg(unix)] + pty_resize_fd: Mutex>, + status: RwLock, + sequence: AtomicU64, + logs: Mutex>, + total_log_bytes: Mutex, + log_tx: broadcast::Sender, +} + +#[derive(Debug)] +enum ProcessStdin { + Pipe(ChildStdin), + Pty(tokio::fs::File), +} + +#[derive(Debug, Clone)] +struct StoredLog { + 
line: ProcessLogLine, + byte_len: usize, +} + +#[derive(Debug, Clone)] +struct ManagedStatus { + status: ProcessStatus, + exit_code: Option, + exited_at_ms: Option, +} + +struct SpawnedPipeProcess { + process: Arc, + child: Child, + stdout: tokio::process::ChildStdout, + stderr: tokio::process::ChildStderr, +} + +#[cfg(unix)] +struct SpawnedTtyProcess { + process: Arc, + child: Child, + reader: tokio::fs::File, +} + +impl ProcessRuntime { + pub fn new() -> Self { + Self { + config: Arc::new(RwLock::new(ProcessRuntimeConfig::default())), + inner: Arc::new(ProcessRuntimeInner { + next_id: AtomicU64::new(1), + processes: RwLock::new(HashMap::new()), + }), + } + } + + pub async fn get_config(&self) -> ProcessRuntimeConfig { + self.config.read().await.clone() + } + + pub async fn set_config( + &self, + mut value: ProcessRuntimeConfig, + ) -> Result { + if value.max_concurrent_processes == 0 { + return Err(SandboxError::InvalidRequest { + message: "maxConcurrentProcesses must be greater than 0".to_string(), + }); + } + if value.default_run_timeout_ms == 0 || value.max_run_timeout_ms == 0 { + return Err(SandboxError::InvalidRequest { + message: "timeouts must be greater than 0".to_string(), + }); + } + if value.default_run_timeout_ms > value.max_run_timeout_ms { + value.default_run_timeout_ms = value.max_run_timeout_ms; + } + if value.max_output_bytes == 0 + || value.max_log_bytes_per_process == 0 + || value.max_input_bytes_per_request == 0 + { + return Err(SandboxError::InvalidRequest { + message: "byte limits must be greater than 0".to_string(), + }); + } + + *self.config.write().await = value.clone(); + Ok(value) + } + + pub async fn start_process( + &self, + spec: ProcessStartSpec, + ) -> Result { + let config = self.get_config().await; + + let process_refs = { + let processes = self.inner.processes.read().await; + processes.values().cloned().collect::>() + }; + + let mut running_count = 0usize; + for process in process_refs { + if process.status.read().await.status 
== ProcessStatus::Running { + running_count += 1; + } + } + + if running_count >= config.max_concurrent_processes { + return Err(SandboxError::Conflict { + message: format!( + "max concurrent process limit reached ({})", + config.max_concurrent_processes + ), + }); + } + + if spec.command.trim().is_empty() { + return Err(SandboxError::InvalidRequest { + message: "command must not be empty".to_string(), + }); + } + + let id_num = self.inner.next_id.fetch_add(1, Ordering::Relaxed); + let id = format!("proc_{id_num}"); + + if spec.tty { + #[cfg(unix)] + { + let spawned = self + .spawn_tty_process(id.clone(), spec, config.max_log_bytes_per_process) + .await?; + let process = spawned.process.clone(); + self.inner + .processes + .write() + .await + .insert(id, process.clone()); + + let p = process.clone(); + tokio::spawn(async move { + pump_output(p, spawned.reader, ProcessStream::Pty).await; + }); + + let p = process.clone(); + tokio::spawn(async move { + watch_exit(p, spawned.child).await; + }); + + return Ok(process.snapshot().await); + } + #[cfg(not(unix))] + { + return Err(SandboxError::StreamError { + message: "tty process mode is not supported on this platform".to_string(), + }); + } + } + + let spawned = self + .spawn_pipe_process(id.clone(), spec, config.max_log_bytes_per_process) + .await?; + let process = spawned.process.clone(); + self.inner + .processes + .write() + .await + .insert(id, process.clone()); + + let p = process.clone(); + tokio::spawn(async move { + pump_output(p, spawned.stdout, ProcessStream::Stdout).await; + }); + + let p = process.clone(); + tokio::spawn(async move { + pump_output(p, spawned.stderr, ProcessStream::Stderr).await; + }); + + let p = process.clone(); + tokio::spawn(async move { + watch_exit(p, spawned.child).await; + }); + + Ok(process.snapshot().await) + } + + pub async fn run_once(&self, spec: RunSpec) -> Result { + if spec.command.trim().is_empty() { + return Err(SandboxError::InvalidRequest { + message: "command must not be 
empty".to_string(), + }); + } + + let config = self.get_config().await; + let mut timeout_ms = spec.timeout_ms.unwrap_or(config.default_run_timeout_ms); + if timeout_ms == 0 { + timeout_ms = config.default_run_timeout_ms; + } + timeout_ms = timeout_ms.min(config.max_run_timeout_ms); + + let max_output_bytes = spec.max_output_bytes.unwrap_or(config.max_output_bytes); + + let mut cmd = Command::new(&spec.command); + cmd.args(&spec.args) + .stdin(std::process::Stdio::null()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()); + + if let Some(cwd) = &spec.cwd { + cmd.current_dir(cwd); + } + + for (key, value) in &spec.env { + cmd.env(key, value); + } + + let mut child = cmd.spawn().map_err(|err| SandboxError::StreamError { + message: format!("failed to spawn process: {err}"), + })?; + + let stdout = child + .stdout + .take() + .ok_or_else(|| SandboxError::StreamError { + message: "failed to capture stdout".to_string(), + })?; + let stderr = child + .stderr + .take() + .ok_or_else(|| SandboxError::StreamError { + message: "failed to capture stderr".to_string(), + })?; + + let started = Instant::now(); + let stdout_task = tokio::spawn(capture_output(stdout, max_output_bytes)); + let stderr_task = tokio::spawn(capture_output(stderr, max_output_bytes)); + + let wait_result = + tokio::time::timeout(std::time::Duration::from_millis(timeout_ms), child.wait()).await; + + let (exit_code, timed_out) = match wait_result { + Ok(Ok(status)) => (status.code(), false), + Ok(Err(err)) => { + let _ = child.kill().await; + return Err(SandboxError::StreamError { + message: format!("failed to wait on process: {err}"), + }); + } + Err(_) => { + let _ = child.kill().await; + let _ = child.wait().await; + (None, true) + } + }; + + let (stdout, stdout_truncated) = match stdout_task.await { + Ok(Ok(captured)) => captured, + _ => (Vec::new(), false), + }; + let (stderr, stderr_truncated) = match stderr_task.await { + Ok(Ok(captured)) => captured, + _ => (Vec::new(), 
false), + }; + + Ok(RunOutput { + exit_code, + timed_out, + stdout: String::from_utf8_lossy(&stdout).to_string(), + stderr: String::from_utf8_lossy(&stderr).to_string(), + stdout_truncated, + stderr_truncated, + duration_ms: started.elapsed().as_millis() as u64, + }) + } + + pub async fn list_processes(&self) -> Vec { + let processes = self.inner.processes.read().await; + let mut items = Vec::with_capacity(processes.len()); + for process in processes.values() { + items.push(process.snapshot().await); + } + items.sort_by(|a, b| a.id.cmp(&b.id)); + items + } + + pub async fn snapshot(&self, id: &str) -> Result { + Ok(self.lookup_process(id).await?.snapshot().await) + } + + pub async fn is_tty(&self, id: &str) -> Result { + Ok(self.lookup_process(id).await?.tty) + } + + pub async fn max_input_bytes(&self) -> usize { + self.get_config().await.max_input_bytes_per_request + } + + pub async fn delete_process(&self, id: &str) -> Result<(), SandboxError> { + let process = self.lookup_process(id).await?; + let status = process.status.read().await.clone(); + if status.status == ProcessStatus::Running { + return Err(SandboxError::Conflict { + message: "process is still running; stop or kill it before delete".to_string(), + }); + } + + self.inner.processes.write().await.remove(id); + Ok(()) + } + + pub async fn stop_process( + &self, + id: &str, + wait_ms: Option, + ) -> Result { + let process = self.lookup_process(id).await?; + process.send_signal(SIGTERM).await?; + maybe_wait_for_exit(process.clone(), wait_ms.unwrap_or(2_000)).await; + Ok(process.snapshot().await) + } + + pub async fn kill_process( + &self, + id: &str, + wait_ms: Option, + ) -> Result { + let process = self.lookup_process(id).await?; + process.send_signal(SIGKILL).await?; + maybe_wait_for_exit(process.clone(), wait_ms.unwrap_or(1_000)).await; + Ok(process.snapshot().await) + } + + pub async fn write_input(&self, id: &str, data: &[u8]) -> Result { + self.lookup_process(id).await?.write_input(data).await + } + 
+ pub async fn resize_terminal( + &self, + id: &str, + cols: u16, + rows: u16, + ) -> Result<(), SandboxError> { + let process = self.lookup_process(id).await?; + if !process.tty { + return Err(SandboxError::Conflict { + message: "process is not running in tty mode".to_string(), + }); + } + + process.resize_pty(cols, rows).await?; + process.send_signal(SIGWINCH).await + } + + pub async fn logs( + &self, + id: &str, + filter: ProcessLogFilter, + ) -> Result, SandboxError> { + self.lookup_process(id).await?.read_logs(filter).await + } + + pub async fn subscribe_logs( + &self, + id: &str, + ) -> Result, SandboxError> { + let process = self.lookup_process(id).await?; + Ok(process.log_tx.subscribe()) + } + + async fn lookup_process(&self, id: &str) -> Result, SandboxError> { + let process = self.inner.processes.read().await.get(id).cloned(); + process.ok_or_else(|| SandboxError::NotFound { + resource: "process".to_string(), + id: id.to_string(), + }) + } + + async fn spawn_pipe_process( + &self, + id: String, + spec: ProcessStartSpec, + max_log_bytes: usize, + ) -> Result { + let mut cmd = Command::new(&spec.command); + cmd.args(&spec.args) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()); + + if let Some(cwd) = &spec.cwd { + cmd.current_dir(cwd); + } + + for (key, value) in &spec.env { + cmd.env(key, value); + } + + let mut child = cmd.spawn().map_err(|err| SandboxError::StreamError { + message: format!("failed to spawn process: {err}"), + })?; + + let stdin = child.stdin.take(); + let stdout = child + .stdout + .take() + .ok_or_else(|| SandboxError::StreamError { + message: "failed to capture stdout".to_string(), + })?; + let stderr = child + .stderr + .take() + .ok_or_else(|| SandboxError::StreamError { + message: "failed to capture stderr".to_string(), + })?; + let pid = child.id(); + + let (tx, _rx) = broadcast::channel(512); + let process = Arc::new(ManagedProcess { + id, + command: 
spec.command, + args: spec.args, + cwd: spec.cwd, + tty: false, + interactive: spec.interactive, + created_at_ms: now_ms(), + pid, + max_log_bytes, + stdin: Mutex::new(stdin.map(ProcessStdin::Pipe)), + #[cfg(unix)] + pty_resize_fd: Mutex::new(None), + status: RwLock::new(ManagedStatus { + status: ProcessStatus::Running, + exit_code: None, + exited_at_ms: None, + }), + sequence: AtomicU64::new(1), + logs: Mutex::new(VecDeque::new()), + total_log_bytes: Mutex::new(0), + log_tx: tx, + }); + + Ok(SpawnedPipeProcess { + process, + child, + stdout, + stderr, + }) + } + + #[cfg(unix)] + async fn spawn_tty_process( + &self, + id: String, + spec: ProcessStartSpec, + max_log_bytes: usize, + ) -> Result { + use std::os::fd::AsRawFd; + use std::process::Stdio; + + let (master_fd, slave_fd) = open_pty(80, 24)?; + let slave_raw = slave_fd.as_raw_fd(); + + let stdin_fd = dup_fd(slave_raw)?; + let stdout_fd = dup_fd(slave_raw)?; + let stderr_fd = dup_fd(slave_raw)?; + + let mut cmd = Command::new(&spec.command); + cmd.args(&spec.args) + .stdin(Stdio::from(std::fs::File::from(stdin_fd))) + .stdout(Stdio::from(std::fs::File::from(stdout_fd))) + .stderr(Stdio::from(std::fs::File::from(stderr_fd))); + + if let Some(cwd) = &spec.cwd { + cmd.current_dir(cwd); + } + + for (key, value) in &spec.env { + cmd.env(key, value); + } + + unsafe { + cmd.pre_exec(move || { + if libc::setsid() == -1 { + return Err(std::io::Error::last_os_error()); + } + if libc::ioctl(slave_raw, libc::TIOCSCTTY as _, 0) == -1 { + return Err(std::io::Error::last_os_error()); + } + Ok(()) + }); + } + + let child = cmd.spawn().map_err(|err| SandboxError::StreamError { + message: format!("failed to spawn tty process: {err}"), + })?; + + let pid = child.id(); + drop(slave_fd); + + let master_raw = master_fd.as_raw_fd(); + let writer_fd = dup_fd(master_raw)?; + let resize_fd = dup_fd(master_raw)?; + + let reader_file = tokio::fs::File::from_std(std::fs::File::from(master_fd)); + let writer_file = 
tokio::fs::File::from_std(std::fs::File::from(writer_fd)); + let resize_file = std::fs::File::from(resize_fd); + + let (tx, _rx) = broadcast::channel(512); + let process = Arc::new(ManagedProcess { + id, + command: spec.command, + args: spec.args, + cwd: spec.cwd, + tty: true, + interactive: spec.interactive, + created_at_ms: now_ms(), + pid, + max_log_bytes, + stdin: Mutex::new(Some(ProcessStdin::Pty(writer_file))), + pty_resize_fd: Mutex::new(Some(resize_file)), + status: RwLock::new(ManagedStatus { + status: ProcessStatus::Running, + exit_code: None, + exited_at_ms: None, + }), + sequence: AtomicU64::new(1), + logs: Mutex::new(VecDeque::new()), + total_log_bytes: Mutex::new(0), + log_tx: tx, + }); + + Ok(SpawnedTtyProcess { + process, + child, + reader: reader_file, + }) + } +} + +#[derive(Debug, Clone, Copy)] +pub enum ProcessLogFilterStream { + Stdout, + Stderr, + Combined, + Pty, +} + +#[derive(Debug, Clone, Copy)] +pub struct ProcessLogFilter { + pub stream: ProcessLogFilterStream, + pub tail: Option, + pub since: Option, +} + +impl ManagedProcess { + async fn snapshot(&self) -> ProcessSnapshot { + let status = self.status.read().await.clone(); + ProcessSnapshot { + id: self.id.clone(), + command: self.command.clone(), + args: self.args.clone(), + cwd: self.cwd.clone(), + tty: self.tty, + interactive: self.interactive, + status: status.status, + pid: self.pid, + exit_code: status.exit_code, + created_at_ms: self.created_at_ms, + exited_at_ms: status.exited_at_ms, + } + } + + async fn append_log(&self, stream: ProcessStream, data: &[u8]) { + if data.is_empty() { + return; + } + + let stream = if self.tty { ProcessStream::Pty } else { stream }; + let line = ProcessLogLine { + sequence: self.sequence.fetch_add(1, Ordering::Relaxed), + stream, + timestamp_ms: now_ms(), + data: BASE64.encode(data), + encoding: "base64", + }; + let stored = StoredLog { + line: line.clone(), + byte_len: data.len(), + }; + + { + let mut logs = self.logs.lock().await; + let mut total 
= self.total_log_bytes.lock().await; + logs.push_back(stored); + *total += data.len(); + + while *total > self.max_log_bytes { + if let Some(front) = logs.pop_front() { + *total = total.saturating_sub(front.byte_len); + } else { + break; + } + } + } + + let _ = self.log_tx.send(line); + } + + async fn write_input(&self, data: &[u8]) -> Result { + if self.status.read().await.status != ProcessStatus::Running { + return Err(SandboxError::Conflict { + message: "process is not running".to_string(), + }); + } + + let mut guard = self.stdin.lock().await; + let stdin = guard.as_mut().ok_or_else(|| SandboxError::Conflict { + message: "process does not accept stdin".to_string(), + })?; + + match stdin { + ProcessStdin::Pipe(pipe) => { + pipe.write_all(data) + .await + .map_err(|err| SandboxError::StreamError { + message: format!("failed to write stdin: {err}"), + })?; + pipe.flush() + .await + .map_err(|err| SandboxError::StreamError { + message: format!("failed to flush stdin: {err}"), + })?; + } + ProcessStdin::Pty(pty_writer) => { + pty_writer + .write_all(data) + .await + .map_err(|err| SandboxError::StreamError { + message: format!("failed to write PTY input: {err}"), + })?; + pty_writer + .flush() + .await + .map_err(|err| SandboxError::StreamError { + message: format!("failed to flush PTY input: {err}"), + })?; + } + } + + Ok(data.len()) + } + + async fn read_logs( + &self, + filter: ProcessLogFilter, + ) -> Result, SandboxError> { + let logs = self.logs.lock().await; + + let mut entries: Vec = logs + .iter() + .filter_map(|entry| { + if let Some(since) = filter.since { + if entry.line.sequence <= since { + return None; + } + } + if stream_matches(entry.line.stream, filter.stream) { + Some(entry.line.clone()) + } else { + None + } + }) + .collect(); + + if let Some(tail) = filter.tail { + if entries.len() > tail { + let start = entries.len() - tail; + entries = entries.split_off(start); + } + } + + Ok(entries) + } + + async fn send_signal(&self, signal: i32) -> 
Result<(), SandboxError> { + if self.status.read().await.status != ProcessStatus::Running { + return Ok(()); + } + let Some(pid) = self.pid else { + return Ok(()); + }; + + send_signal(pid, signal) + } + + async fn resize_pty(&self, cols: u16, rows: u16) -> Result<(), SandboxError> { + if !self.tty { + return Ok(()); + } + + #[cfg(unix)] + { + use std::os::fd::AsRawFd; + let guard = self.pty_resize_fd.lock().await; + let Some(fd) = guard.as_ref() else { + return Err(SandboxError::Conflict { + message: "PTY resize handle unavailable".to_string(), + }); + }; + resize_pty(fd.as_raw_fd(), cols, rows)?; + } + + #[cfg(not(unix))] + { + let _ = cols; + let _ = rows; + } + + Ok(()) + } +} + +fn stream_matches(stream: ProcessStream, filter: ProcessLogFilterStream) -> bool { + match filter { + ProcessLogFilterStream::Stdout => stream == ProcessStream::Stdout, + ProcessLogFilterStream::Stderr => stream == ProcessStream::Stderr, + ProcessLogFilterStream::Combined => { + stream == ProcessStream::Stdout || stream == ProcessStream::Stderr + } + ProcessLogFilterStream::Pty => stream == ProcessStream::Pty, + } +} + +async fn maybe_wait_for_exit(process: Arc, wait_ms: u64) { + let deadline = tokio::time::Instant::now() + tokio::time::Duration::from_millis(wait_ms); + while tokio::time::Instant::now() < deadline { + if process.status.read().await.status == ProcessStatus::Exited { + break; + } + tokio::time::sleep(tokio::time::Duration::from_millis(25)).await; + } +} + +async fn pump_output(process: Arc, mut reader: R, stream: ProcessStream) +where + R: AsyncRead + Unpin, +{ + let mut buffer = [0_u8; 8192]; + loop { + match reader.read(&mut buffer).await { + Ok(0) => break, + Ok(n) => { + process.append_log(stream, &buffer[..n]).await; + } + Err(err) => { + let msg = format!("\n[process stream error: {err}]\n"); + process + .append_log( + if process.tty { + ProcessStream::Pty + } else { + ProcessStream::Stderr + }, + msg.as_bytes(), + ) + .await; + break; + } + } + } +} + +async fn 
watch_exit(process: Arc, mut child: Child) { + let wait = child.wait().await; + let (exit_code, exited_at_ms) = match wait { + Ok(status) => (status.code(), Some(now_ms())), + Err(_) => (None, Some(now_ms())), + }; + + { + let mut state = process.status.write().await; + state.status = ProcessStatus::Exited; + state.exit_code = exit_code; + state.exited_at_ms = exited_at_ms; + } + + let _ = process.stdin.lock().await.take(); +} + +async fn capture_output(mut reader: R, max_bytes: usize) -> std::io::Result<(Vec, bool)> +where + R: AsyncRead + Unpin, +{ + let mut output = Vec::new(); + let mut buffer = [0_u8; 8192]; + let mut truncated = false; + + loop { + let n = reader.read(&mut buffer).await?; + if n == 0 { + break; + } + + if output.len() < max_bytes { + let remaining = max_bytes - output.len(); + let to_copy = remaining.min(n); + output.extend_from_slice(&buffer[..to_copy]); + if to_copy < n { + truncated = true; + } + } else { + truncated = true; + } + } + + Ok((output, truncated)) +} + +fn now_ms() -> i64 { + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|duration| duration.as_millis() as i64) + .unwrap_or(0) +} + +#[cfg(unix)] +const SIGTERM: i32 = libc::SIGTERM; +#[cfg(unix)] +const SIGKILL: i32 = libc::SIGKILL; +#[cfg(unix)] +const SIGWINCH: i32 = libc::SIGWINCH; + +#[cfg(unix)] +fn send_signal(pid: u32, signal: i32) -> Result<(), SandboxError> { + let result = unsafe { libc::kill(pid as libc::pid_t, signal) }; + if result == 0 { + return Ok(()); + } + + let err = std::io::Error::last_os_error(); + if err.raw_os_error() == Some(libc::ESRCH) { + return Ok(()); + } + + Err(SandboxError::StreamError { + message: format!("failed to signal process {pid}: {err}"), + }) +} + +#[cfg(not(unix))] +const SIGTERM: i32 = 15; +#[cfg(not(unix))] +const SIGKILL: i32 = 9; +#[cfg(not(unix))] +const SIGWINCH: i32 = 28; + +#[cfg(not(unix))] +fn send_signal(_pid: u32, _signal: i32) -> Result<(), SandboxError> { + Err(SandboxError::StreamError { + 
message: "process signaling not supported on this platform".to_string(), + }) +} + +#[cfg(unix)] +fn open_pty( + cols: u16, + rows: u16, +) -> Result<(std::os::fd::OwnedFd, std::os::fd::OwnedFd), SandboxError> { + use std::os::fd::FromRawFd; + + let mut master: libc::c_int = -1; + let mut slave: libc::c_int = -1; + let mut winsize = libc::winsize { + ws_row: rows, + ws_col: cols, + ws_xpixel: 0, + ws_ypixel: 0, + }; + + let rc = unsafe { + libc::openpty( + &mut master, + &mut slave, + std::ptr::null_mut(), + std::ptr::null_mut(), + &mut winsize, + ) + }; + + if rc != 0 { + return Err(SandboxError::StreamError { + message: format!( + "failed to allocate PTY: {}", + std::io::Error::last_os_error() + ), + }); + } + + let master_fd = unsafe { std::os::fd::OwnedFd::from_raw_fd(master) }; + let slave_fd = unsafe { std::os::fd::OwnedFd::from_raw_fd(slave) }; + Ok((master_fd, slave_fd)) +} + +#[cfg(unix)] +fn dup_fd(fd: std::os::fd::RawFd) -> Result { + use std::os::fd::FromRawFd; + + let duplicated = unsafe { libc::dup(fd) }; + if duplicated == -1 { + return Err(SandboxError::StreamError { + message: format!("failed to dup fd: {}", std::io::Error::last_os_error()), + }); + } + + Ok(unsafe { std::os::fd::OwnedFd::from_raw_fd(duplicated) }) +} + +#[cfg(unix)] +fn resize_pty(fd: std::os::fd::RawFd, cols: u16, rows: u16) -> Result<(), SandboxError> { + let winsize = libc::winsize { + ws_row: rows, + ws_col: cols, + ws_xpixel: 0, + ws_ypixel: 0, + }; + + let rc = unsafe { libc::ioctl(fd, libc::TIOCSWINSZ as _, &winsize) }; + if rc == -1 { + return Err(SandboxError::StreamError { + message: format!("failed to resize PTY: {}", std::io::Error::last_os_error()), + }); + } + + Ok(()) +} + +pub fn decode_input_bytes(data: &str, encoding: &str) -> Result, SandboxError> { + match encoding { + "base64" => BASE64 + .decode(data) + .map_err(|err| SandboxError::InvalidRequest { + message: format!("invalid base64 input: {err}"), + }), + "utf8" | "text" => Ok(data.as_bytes().to_vec()), + _ 
=> Err(SandboxError::InvalidRequest { + message: "encoding must be one of: base64, utf8, text".to_string(), + }), + } +} diff --git a/server/packages/sandbox-agent/src/router.rs b/server/packages/sandbox-agent/src/router.rs index 99971ff..110c325 100644 --- a/server/packages/sandbox-agent/src/router.rs +++ b/server/packages/sandbox-agent/src/router.rs @@ -1,4 +1,5 @@ use std::collections::{BTreeMap, HashMap}; +use std::convert::Infallible; use std::fs; use std::io::Cursor; use std::path::{Path as StdPath, PathBuf}; @@ -6,6 +7,7 @@ use std::sync::{Arc, Mutex}; use std::time::Duration; use axum::body::Bytes; +use axum::extract::ws::{Message, WebSocket, WebSocketUpgrade}; use axum::extract::{Path, Query, State}; use axum::http::{header, HeaderMap, Request, StatusCode}; use axum::middleware::Next; @@ -13,6 +15,8 @@ use axum::response::sse::KeepAlive; use axum::response::{IntoResponse, Response, Sse}; use axum::routing::{delete, get, post}; use axum::{Json, Router}; +use futures::stream; +use futures::StreamExt; use sandbox_agent_agent_management::agents::{ AgentId, AgentManager, InstallOptions, InstallResult, InstallSource, InstalledArtifactKind, }; @@ -27,11 +31,16 @@ use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use tar::Archive; +use tokio_stream::wrappers::BroadcastStream; use tower_http::trace::TraceLayer; use tracing::Span; use utoipa::{Modify, OpenApi, ToSchema}; use crate::acp_proxy_runtime::{AcpProxyRuntime, ProxyPostOutcome}; +use crate::process_runtime::{ + decode_input_bytes, ProcessLogFilter, ProcessLogFilterStream, ProcessRuntime, + ProcessRuntimeConfig, ProcessSnapshot, ProcessStartSpec, ProcessStatus, ProcessStream, RunSpec, +}; use crate::ui; mod support; @@ -77,6 +86,7 @@ pub struct AppState { agent_manager: Arc, acp_proxy: Arc, opencode_server_manager: Arc, + process_runtime: Arc, pub(crate) branding: BrandingMode, version_cache: Mutex>, } @@ -100,11 +110,13 @@ impl AppState { auto_restart: true, 
}, )); + let process_runtime = Arc::new(ProcessRuntime::new()); Self { auth, agent_manager, acp_proxy, opencode_server_manager, + process_runtime, branding, version_cache: Mutex::new(HashMap::new()), } @@ -122,6 +134,10 @@ impl AppState { self.opencode_server_manager.clone() } + pub(crate) fn process_runtime(&self) -> Arc { + self.process_runtime.clone() + } + pub(crate) fn purge_version_cache(&self, agent: AgentId) { self.version_cache.lock().unwrap().remove(&agent); } @@ -166,6 +182,28 @@ pub fn build_router_with_state(shared: Arc) -> (Router, Arc) .route("/fs/move", post(post_v1_fs_move)) .route("/fs/stat", get(get_v1_fs_stat)) .route("/fs/upload-batch", post(post_v1_fs_upload_batch)) + .route( + "/processes/config", + get(get_v1_processes_config).post(post_v1_processes_config), + ) + .route("/processes", get(get_v1_processes).post(post_v1_processes)) + .route("/processes/run", post(post_v1_processes_run)) + .route( + "/processes/:id", + get(get_v1_process).delete(delete_v1_process), + ) + .route("/processes/:id/stop", post(post_v1_process_stop)) + .route("/processes/:id/kill", post(post_v1_process_kill)) + .route("/processes/:id/logs", get(get_v1_process_logs)) + .route("/processes/:id/input", post(post_v1_process_input)) + .route( + "/processes/:id/terminal/resize", + post(post_v1_process_terminal_resize), + ) + .route( + "/processes/:id/terminal/ws", + get(get_v1_process_terminal_ws), + ) .route( "/config/mcp", get(get_v1_config_mcp) @@ -295,6 +333,19 @@ pub async fn shutdown_servers(state: &Arc) { post_v1_fs_move, get_v1_fs_stat, post_v1_fs_upload_batch, + get_v1_processes_config, + post_v1_processes_config, + post_v1_processes, + post_v1_processes_run, + get_v1_processes, + get_v1_process, + post_v1_process_stop, + post_v1_process_kill, + delete_v1_process, + get_v1_process_logs, + post_v1_process_input, + post_v1_process_terminal_resize, + get_v1_process_terminal_ws, get_v1_config_mcp, put_v1_config_mcp, delete_v1_config_mcp, @@ -329,6 +380,22 @@ pub async 
fn shutdown_servers(state: &Arc) { FsMoveResponse, FsActionResponse, FsUploadBatchResponse, + ProcessConfig, + ProcessCreateRequest, + ProcessRunRequest, + ProcessRunResponse, + ProcessState, + ProcessInfo, + ProcessListResponse, + ProcessLogsStream, + ProcessLogsQuery, + ProcessLogEntry, + ProcessLogsResponse, + ProcessInputRequest, + ProcessInputResponse, + ProcessSignalQuery, + ProcessTerminalResizeRequest, + ProcessTerminalResizeResponse, AcpPostQuery, AcpServerInfo, AcpServerListResponse, @@ -361,12 +428,21 @@ impl Modify for ServerAddon { pub enum ApiError { #[error(transparent)] Sandbox(#[from] SandboxError), + #[error("problem: {0:?}")] + Problem(ProblemDetails), +} + +impl From for ApiError { + fn from(value: ProblemDetails) -> Self { + Self::Problem(value) + } } impl IntoResponse for ApiError { fn into_response(self) -> Response { let problem = match &self { ApiError::Sandbox(error) => problem_from_sandbox_error(error), + ApiError::Problem(problem) => problem.clone(), }; let status = StatusCode::from_u16(problem.status).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); @@ -1075,6 +1151,678 @@ async fn post_v1_fs_upload_batch( })) } +/// Get process runtime configuration. +/// +/// Returns the current runtime configuration for the process management API, +/// including limits for concurrency, timeouts, and buffer sizes. +#[utoipa::path( + get, + path = "/v1/processes/config", + tag = "v1", + responses( + (status = 200, description = "Current runtime process config", body = ProcessConfig), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn get_v1_processes_config( + State(state): State>, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let config = state.process_runtime().get_config().await; + Ok(Json(map_process_config(config))) +} + +/// Update process runtime configuration. 
+/// +/// Replaces the runtime configuration for the process management API. +/// Validates that all values are non-zero and clamps default timeout to max. +#[utoipa::path( + post, + path = "/v1/processes/config", + tag = "v1", + request_body = ProcessConfig, + responses( + (status = 200, description = "Updated runtime process config", body = ProcessConfig), + (status = 400, description = "Invalid config", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_processes_config( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let runtime = state.process_runtime(); + let updated = runtime + .set_config(into_runtime_process_config(body)) + .await?; + Ok(Json(map_process_config(updated))) +} + +/// Create a long-lived managed process. +/// +/// Spawns a new process with the given command and arguments. Supports both +/// pipe-based and PTY (tty) modes. Returns the process descriptor on success. 
+#[utoipa::path( + post, + path = "/v1/processes", + tag = "v1", + request_body = ProcessCreateRequest, + responses( + (status = 200, description = "Started process", body = ProcessInfo), + (status = 400, description = "Invalid request", body = ProblemDetails), + (status = 409, description = "Process limit or state conflict", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_processes( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let runtime = state.process_runtime(); + let snapshot = runtime + .start_process(ProcessStartSpec { + command: body.command, + args: body.args, + cwd: body.cwd, + env: body.env.into_iter().collect(), + tty: body.tty, + interactive: body.interactive, + }) + .await?; + + Ok(Json(map_process_snapshot(snapshot))) +} + +/// Run a one-shot command. +/// +/// Executes a command to completion and returns its stdout, stderr, exit code, +/// and duration. Supports configurable timeout and output size limits. 
+#[utoipa::path( + post, + path = "/v1/processes/run", + tag = "v1", + request_body = ProcessRunRequest, + responses( + (status = 200, description = "One-off command result", body = ProcessRunResponse), + (status = 400, description = "Invalid request", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_processes_run( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let runtime = state.process_runtime(); + let output = runtime + .run_once(RunSpec { + command: body.command, + args: body.args, + cwd: body.cwd, + env: body.env.into_iter().collect(), + timeout_ms: body.timeout_ms, + max_output_bytes: body.max_output_bytes, + }) + .await?; + + Ok(Json(ProcessRunResponse { + exit_code: output.exit_code, + timed_out: output.timed_out, + stdout: output.stdout, + stderr: output.stderr, + stdout_truncated: output.stdout_truncated, + stderr_truncated: output.stderr_truncated, + duration_ms: output.duration_ms, + })) +} + +/// List all managed processes. +/// +/// Returns a list of all processes (running and exited) currently tracked +/// by the runtime, sorted by process ID. +#[utoipa::path( + get, + path = "/v1/processes", + tag = "v1", + responses( + (status = 200, description = "List processes", body = ProcessListResponse), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn get_v1_processes( + State(state): State>, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let snapshots = state.process_runtime().list_processes().await; + Ok(Json(ProcessListResponse { + processes: snapshots.into_iter().map(map_process_snapshot).collect(), + })) +} + +/// Get a single process by ID. 
+/// +/// Returns the current state of a managed process including its status, +/// PID, exit code, and creation/exit timestamps. +#[utoipa::path( + get, + path = "/v1/processes/{id}", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID") + ), + responses( + (status = 200, description = "Process details", body = ProcessInfo), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn get_v1_process( + State(state): State>, + Path(id): Path, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let snapshot = state.process_runtime().snapshot(&id).await?; + Ok(Json(map_process_snapshot(snapshot))) +} + +/// Send SIGTERM to a process. +/// +/// Sends SIGTERM to the process and optionally waits up to `waitMs` +/// milliseconds for the process to exit before returning. +#[utoipa::path( + post, + path = "/v1/processes/{id}/stop", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID"), + ("waitMs" = Option, Query, description = "Wait up to N ms for process to exit") + ), + responses( + (status = 200, description = "Stop signal sent", body = ProcessInfo), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_process_stop( + State(state): State>, + Path(id): Path, + Query(query): Query, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let snapshot = state + .process_runtime() + .stop_process(&id, query.wait_ms) + .await?; + Ok(Json(map_process_snapshot(snapshot))) +} + +/// Send SIGKILL to a process. +/// +/// Sends SIGKILL to the process and optionally waits up to `waitMs` +/// milliseconds for the process to exit before returning. 
+#[utoipa::path( + post, + path = "/v1/processes/{id}/kill", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID"), + ("waitMs" = Option, Query, description = "Wait up to N ms for process to exit") + ), + responses( + (status = 200, description = "Kill signal sent", body = ProcessInfo), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_process_kill( + State(state): State>, + Path(id): Path, + Query(query): Query, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let snapshot = state + .process_runtime() + .kill_process(&id, query.wait_ms) + .await?; + Ok(Json(map_process_snapshot(snapshot))) +} + +/// Delete a process record. +/// +/// Removes a stopped process from the runtime. Returns 409 if the process +/// is still running; stop or kill it first. +#[utoipa::path( + delete, + path = "/v1/processes/{id}", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID") + ), + responses( + (status = 204, description = "Process deleted"), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 409, description = "Process is still running", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn delete_v1_process( + State(state): State>, + Path(id): Path, +) -> Result { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + state.process_runtime().delete_process(&id).await?; + Ok(StatusCode::NO_CONTENT) +} + +/// Fetch process logs. +/// +/// Returns buffered log entries for a process. Supports filtering by stream +/// type, tail count, and sequence-based resumption. When `follow=true`, +/// returns an SSE stream that replays buffered entries then streams live output. 
+#[utoipa::path( + get, + path = "/v1/processes/{id}/logs", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID"), + ("stream" = Option, Query, description = "stdout|stderr|combined|pty"), + ("tail" = Option, Query, description = "Tail N entries"), + ("follow" = Option, Query, description = "Follow via SSE"), + ("since" = Option, Query, description = "Only entries with sequence greater than this") + ), + responses( + (status = 200, description = "Process logs", body = ProcessLogsResponse), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn get_v1_process_logs( + State(state): State>, + Path(id): Path, + headers: HeaderMap, + Query(query): Query, +) -> Result { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let runtime = state.process_runtime(); + let default_stream = if runtime.is_tty(&id).await? { + ProcessLogsStream::Pty + } else { + ProcessLogsStream::Combined + }; + let requested_stream = query.stream.unwrap_or(default_stream); + let since = match (query.since, parse_last_event_id(&headers)?) 
{ + (Some(query_since), Some(last_event_id)) => Some(query_since.max(last_event_id)), + (Some(query_since), None) => Some(query_since), + (None, Some(last_event_id)) => Some(last_event_id), + (None, None) => None, + }; + let filter = ProcessLogFilter { + stream: into_runtime_log_stream(requested_stream), + tail: query.tail, + since, + }; + + let entries = runtime.logs(&id, filter).await?; + let response_entries: Vec = + entries.iter().cloned().map(map_process_log_line).collect(); + + if query.follow.unwrap_or(false) { + let rx = runtime.subscribe_logs(&id).await?; + let replay_stream = stream::iter(response_entries.into_iter().map(|entry| { + Ok::( + axum::response::sse::Event::default() + .event("log") + .id(entry.sequence.to_string()) + .data(serde_json::to_string(&entry).unwrap_or_else(|_| "{}".to_string())), + ) + })); + + let requested_stream_copy = requested_stream; + let follow_stream = BroadcastStream::new(rx).filter_map(move |item| { + let requested_stream_copy = requested_stream_copy; + async move { + match item { + Ok(line) => { + let entry = map_process_log_line(line); + if process_log_matches(&entry, requested_stream_copy) { + Some(Ok(axum::response::sse::Event::default() + .event("log") + .id(entry.sequence.to_string()) + .data( + serde_json::to_string(&entry) + .unwrap_or_else(|_| "{}".to_string()), + ))) + } else { + None + } + } + Err(_) => None, + } + } + }); + + let stream = replay_stream.chain(follow_stream); + let response = + Sse::new(stream).keep_alive(KeepAlive::new().interval(Duration::from_secs(15))); + return Ok(response.into_response()); + } + + Ok(Json(ProcessLogsResponse { + process_id: id, + stream: requested_stream, + entries: response_entries, + }) + .into_response()) +} + +/// Write input to a process. +/// +/// Sends data to a process's stdin (pipe mode) or PTY writer (tty mode). +/// Data can be encoded as base64, utf8, or text. Returns 413 if the decoded +/// payload exceeds the configured `maxInputBytesPerRequest` limit. 
+#[utoipa::path( + post, + path = "/v1/processes/{id}/input", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID") + ), + request_body = ProcessInputRequest, + responses( + (status = 200, description = "Input accepted", body = ProcessInputResponse), + (status = 400, description = "Invalid request", body = ProblemDetails), + (status = 413, description = "Input exceeds configured limit", body = ProblemDetails), + (status = 409, description = "Process not writable", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_process_input( + State(state): State>, + Path(id): Path, + Json(body): Json, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let encoding = body.encoding.unwrap_or_else(|| "base64".to_string()); + let input = decode_input_bytes(&body.data, &encoding)?; + let runtime = state.process_runtime(); + let max_input = runtime.max_input_bytes().await; + if input.len() > max_input { + return Err(SandboxError::InvalidRequest { + message: format!("input payload exceeds maxInputBytesPerRequest ({max_input})"), + } + .into()); + } + + let bytes_written = runtime.write_input(&id, &input).await?; + Ok(Json(ProcessInputResponse { bytes_written })) +} + +/// Resize a process terminal. +/// +/// Sets the PTY window size (columns and rows) for a tty-mode process and +/// sends SIGWINCH so the child process can adapt. 
+#[utoipa::path( + post, + path = "/v1/processes/{id}/terminal/resize", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID") + ), + request_body = ProcessTerminalResizeRequest, + responses( + (status = 200, description = "Resize accepted", body = ProcessTerminalResizeResponse), + (status = 400, description = "Invalid request", body = ProblemDetails), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 409, description = "Not a terminal process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn post_v1_process_terminal_resize( + State(state): State>, + Path(id): Path, + Json(body): Json, +) -> Result, ApiError> { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + state + .process_runtime() + .resize_terminal(&id, body.cols, body.rows) + .await?; + Ok(Json(ProcessTerminalResizeResponse { + cols: body.cols, + rows: body.rows, + })) +} + +/// Open an interactive WebSocket terminal session. +/// +/// Upgrades the connection to a WebSocket for bidirectional PTY I/O. Accepts +/// `access_token` query param for browser-based auth (WebSocket API cannot +/// send custom headers). Streams raw PTY output as binary frames and accepts +/// JSON control frames for input, resize, and close. 
+#[utoipa::path( + get, + path = "/v1/processes/{id}/terminal/ws", + tag = "v1", + params( + ("id" = String, Path, description = "Process ID"), + ("access_token" = Option, Query, description = "Bearer token alternative for WS auth") + ), + responses( + (status = 101, description = "WebSocket upgraded"), + (status = 400, description = "Invalid websocket frame or upgrade request", body = ProblemDetails), + (status = 404, description = "Unknown process", body = ProblemDetails), + (status = 409, description = "Not a terminal process", body = ProblemDetails), + (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) + ) +)] +async fn get_v1_process_terminal_ws( + State(state): State>, + Path(id): Path, + Query(_query): Query, + ws: WebSocketUpgrade, +) -> Result { + if !process_api_supported() { + return Err(process_api_not_supported().into()); + } + + let runtime = state.process_runtime(); + if !runtime.is_tty(&id).await? { + return Err(SandboxError::Conflict { + message: "process is not running in tty mode".to_string(), + } + .into()); + } + + Ok(ws + .on_upgrade(move |socket| process_terminal_ws_session(socket, runtime, id)) + .into_response()) +} + +#[derive(Debug, Deserialize)] +#[serde(tag = "type", rename_all = "camelCase")] +enum TerminalClientFrame { + Input { + data: String, + #[serde(default)] + encoding: Option, + }, + Resize { + cols: u16, + rows: u16, + }, + Close, +} + +async fn process_terminal_ws_session( + mut socket: WebSocket, + runtime: Arc, + id: String, +) { + let _ = send_ws_json( + &mut socket, + json!({ + "type": "ready", + "processId": &id, + }), + ) + .await; + + let mut log_rx = match runtime.subscribe_logs(&id).await { + Ok(rx) => rx, + Err(err) => { + let _ = send_ws_error(&mut socket, &err.to_string()).await; + let _ = socket.close().await; + return; + } + }; + let mut exit_poll = tokio::time::interval(Duration::from_millis(150)); + + loop { + tokio::select! 
{ + ws_in = socket.recv() => { + match ws_in { + Some(Ok(Message::Binary(_))) => { + let _ = send_ws_error(&mut socket, "binary input is not supported; use text JSON frames").await; + } + Some(Ok(Message::Text(text))) => { + let parsed = serde_json::from_str::(&text); + match parsed { + Ok(TerminalClientFrame::Input { data, encoding }) => { + let input = match decode_input_bytes(&data, encoding.as_deref().unwrap_or("utf8")) { + Ok(input) => input, + Err(err) => { + let _ = send_ws_error(&mut socket, &err.to_string()).await; + continue; + } + }; + let max_input = runtime.max_input_bytes().await; + if input.len() > max_input { + let _ = send_ws_error(&mut socket, &format!("input payload exceeds maxInputBytesPerRequest ({max_input})")).await; + continue; + } + if let Err(err) = runtime.write_input(&id, &input).await { + let _ = send_ws_error(&mut socket, &err.to_string()).await; + } + } + Ok(TerminalClientFrame::Resize { cols, rows }) => { + if let Err(err) = runtime.resize_terminal(&id, cols, rows).await { + let _ = send_ws_error(&mut socket, &err.to_string()).await; + } + } + Ok(TerminalClientFrame::Close) => { + let _ = socket.close().await; + break; + } + Err(err) => { + let _ = send_ws_error(&mut socket, &format!("invalid terminal frame: {err}")).await; + } + } + } + Some(Ok(Message::Ping(payload))) => { + let _ = socket.send(Message::Pong(payload)).await; + } + Some(Ok(Message::Close(_))) | None => break, + Some(Ok(Message::Pong(_))) => {} + Some(Err(_)) => break, + } + } + log_in = log_rx.recv() => { + match log_in { + Ok(line) => { + if line.stream != ProcessStream::Pty { + continue; + } + let bytes = { + use base64::engine::general_purpose::STANDARD as BASE64_ENGINE; + use base64::Engine; + BASE64_ENGINE.decode(&line.data).unwrap_or_default() + }; + if socket.send(Message::Binary(bytes)).await.is_err() { + break; + } + } + Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {} + Err(tokio::sync::broadcast::error::RecvError::Closed) => break, + } + } + 
_ = exit_poll.tick() => { + if let Ok(snapshot) = runtime.snapshot(&id).await { + if snapshot.status == ProcessStatus::Exited { + let _ = send_ws_json( + &mut socket, + json!({ + "type": "exit", + "exitCode": snapshot.exit_code, + }), + ) + .await; + let _ = socket.close().await; + break; + } + } + } + } + } +} + +async fn send_ws_json(socket: &mut WebSocket, payload: Value) -> Result<(), ()> { + socket + .send(Message::Text( + serde_json::to_string(&payload).map_err(|_| ())?, + )) + .await + .map_err(|_| ()) +} + +async fn send_ws_error(socket: &mut WebSocket, message: &str) -> Result<(), ()> { + send_ws_json( + socket, + json!({ + "type": "error", + "message": message, + }), + ) + .await +} + #[utoipa::path( get, path = "/v1/config/mcp", @@ -1386,6 +2134,96 @@ async fn delete_v1_acp( Ok(StatusCode::NO_CONTENT) } +fn process_api_supported() -> bool { + !cfg!(windows) +} + +fn process_api_not_supported() -> ProblemDetails { + ProblemDetails { + type_: ErrorType::InvalidRequest.as_urn().to_string(), + title: "Not Implemented".to_string(), + status: 501, + detail: Some("process API is not implemented on Windows".to_string()), + instance: None, + extensions: serde_json::Map::new(), + } +} + +fn map_process_config(config: ProcessRuntimeConfig) -> ProcessConfig { + ProcessConfig { + max_concurrent_processes: config.max_concurrent_processes, + default_run_timeout_ms: config.default_run_timeout_ms, + max_run_timeout_ms: config.max_run_timeout_ms, + max_output_bytes: config.max_output_bytes, + max_log_bytes_per_process: config.max_log_bytes_per_process, + max_input_bytes_per_request: config.max_input_bytes_per_request, + } +} + +fn into_runtime_process_config(config: ProcessConfig) -> ProcessRuntimeConfig { + ProcessRuntimeConfig { + max_concurrent_processes: config.max_concurrent_processes, + default_run_timeout_ms: config.default_run_timeout_ms, + max_run_timeout_ms: config.max_run_timeout_ms, + max_output_bytes: config.max_output_bytes, + max_log_bytes_per_process: 
config.max_log_bytes_per_process, + max_input_bytes_per_request: config.max_input_bytes_per_request, + } +} + +fn map_process_snapshot(snapshot: ProcessSnapshot) -> ProcessInfo { + ProcessInfo { + id: snapshot.id, + command: snapshot.command, + args: snapshot.args, + cwd: snapshot.cwd, + tty: snapshot.tty, + interactive: snapshot.interactive, + status: match snapshot.status { + ProcessStatus::Running => ProcessState::Running, + ProcessStatus::Exited => ProcessState::Exited, + }, + pid: snapshot.pid, + exit_code: snapshot.exit_code, + created_at_ms: snapshot.created_at_ms, + exited_at_ms: snapshot.exited_at_ms, + } +} + +fn into_runtime_log_stream(stream: ProcessLogsStream) -> ProcessLogFilterStream { + match stream { + ProcessLogsStream::Stdout => ProcessLogFilterStream::Stdout, + ProcessLogsStream::Stderr => ProcessLogFilterStream::Stderr, + ProcessLogsStream::Combined => ProcessLogFilterStream::Combined, + ProcessLogsStream::Pty => ProcessLogFilterStream::Pty, + } +} + +fn map_process_log_line(line: crate::process_runtime::ProcessLogLine) -> ProcessLogEntry { + ProcessLogEntry { + sequence: line.sequence, + stream: match line.stream { + ProcessStream::Stdout => ProcessLogsStream::Stdout, + ProcessStream::Stderr => ProcessLogsStream::Stderr, + ProcessStream::Pty => ProcessLogsStream::Pty, + }, + timestamp_ms: line.timestamp_ms, + data: line.data, + encoding: line.encoding.to_string(), + } +} + +fn process_log_matches(entry: &ProcessLogEntry, stream: ProcessLogsStream) -> bool { + match stream { + ProcessLogsStream::Stdout => entry.stream == ProcessLogsStream::Stdout, + ProcessLogsStream::Stderr => entry.stream == ProcessLogsStream::Stderr, + ProcessLogsStream::Combined => { + entry.stream == ProcessLogsStream::Stdout || entry.stream == ProcessLogsStream::Stderr + } + ProcessLogsStream::Pty => entry.stream == ProcessLogsStream::Pty, + } +} + fn validate_named_query(value: &str, field_name: &str) -> Result<(), SandboxError> { if value.trim().is_empty() { return 
Err(SandboxError::InvalidRequest { diff --git a/server/packages/sandbox-agent/src/router/support.rs b/server/packages/sandbox-agent/src/router/support.rs index 173017d..21dded4 100644 --- a/server/packages/sandbox-agent/src/router/support.rs +++ b/server/packages/sandbox-agent/src/router/support.rs @@ -33,7 +33,17 @@ pub(super) async fn require_token( .and_then(|value| value.to_str().ok()) .and_then(|value| value.strip_prefix("Bearer ")); - if bearer == Some(expected.as_str()) { + let allow_query_token = request.uri().path().ends_with("/terminal/ws"); + let query_token = if allow_query_token { + request + .uri() + .query() + .and_then(|query| query_param(query, "access_token")) + } else { + None + }; + + if bearer == Some(expected.as_str()) || query_token.as_deref() == Some(expected.as_str()) { return Ok(next.run(request).await); } @@ -42,6 +52,53 @@ pub(super) async fn require_token( })) } +fn query_param(query: &str, key: &str) -> Option { + query + .split('&') + .filter_map(|part| part.split_once('=')) + .find_map(|(k, v)| { + if k == key { + Some(percent_decode(v)) + } else { + None + } + }) +} + +fn percent_decode(input: &str) -> String { + let mut output = Vec::with_capacity(input.len()); + let bytes = input.as_bytes(); + let mut i = 0; + while i < bytes.len() { + if bytes[i] == b'%' && i + 2 < bytes.len() { + if let (Some(hi), Some(lo)) = ( + hex_nibble(bytes[i + 1]), + hex_nibble(bytes[i + 2]), + ) { + output.push((hi << 4) | lo); + i += 3; + continue; + } + } + if bytes[i] == b'+' { + output.push(b' '); + } else { + output.push(bytes[i]); + } + i += 1; + } + String::from_utf8(output).unwrap_or_else(|_| input.to_string()) +} + +fn hex_nibble(b: u8) -> Option { + match b { + b'0'..=b'9' => Some(b - b'0'), + b'a'..=b'f' => Some(b - b'a' + 10), + b'A'..=b'F' => Some(b - b'A' + 10), + _ => None, + } +} + pub(super) type PinBoxSseStream = crate::acp_proxy_runtime::PinBoxSseStream; pub(super) fn credentials_available_for( @@ -497,8 +554,17 @@ pub(super) fn 
problem_from_sandbox_error(error: &SandboxError) -> ProblemDetails let mut problem = error.to_problem_details(); match error { - SandboxError::InvalidRequest { .. } => { - problem.status = 400; + SandboxError::InvalidRequest { message } => { + if message.starts_with("input payload exceeds maxInputBytesPerRequest") { + problem.status = 413; + problem.title = "Payload Too Large".to_string(); + } else { + problem.status = 400; + } + } + SandboxError::NotFound { .. } => { + problem.status = 404; + problem.title = "Not Found".to_string(); } SandboxError::Timeout { .. } => { problem.status = 504; diff --git a/server/packages/sandbox-agent/src/router/types.rs b/server/packages/sandbox-agent/src/router/types.rs index 481850b..6d40e2a 100644 --- a/server/packages/sandbox-agent/src/router/types.rs +++ b/server/packages/sandbox-agent/src/router/types.rs @@ -362,3 +362,173 @@ pub struct AcpEnvelope { #[serde(default)] pub error: Option, } + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessConfig { + pub max_concurrent_processes: usize, + pub default_run_timeout_ms: u64, + pub max_run_timeout_ms: u64, + pub max_output_bytes: usize, + pub max_log_bytes_per_process: usize, + pub max_input_bytes_per_request: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessCreateRequest { + pub command: String, + #[serde(default)] + pub args: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cwd: Option, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, + #[serde(default)] + pub tty: bool, + #[serde(default)] + pub interactive: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessRunRequest { + pub command: String, + #[serde(default)] + pub args: Vec, + #[serde(default, skip_serializing_if = 
"Option::is_none")] + pub cwd: Option, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub timeout_ms: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub max_output_bytes: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessRunResponse { + pub exit_code: Option, + pub timed_out: bool, + pub stdout: String, + pub stderr: String, + pub stdout_truncated: bool, + pub stderr_truncated: bool, + pub duration_ms: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessState { + Running, + Exited, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessInfo { + pub id: String, + pub command: String, + pub args: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cwd: Option, + pub tty: bool, + pub interactive: bool, + pub status: ProcessState, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pid: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub exit_code: Option, + pub created_at_ms: i64, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub exited_at_ms: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessListResponse { + pub processes: Vec, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessLogsStream { + Stdout, + Stderr, + Combined, + Pty, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessLogsQuery { + #[serde(default, skip_serializing_if = "Option::is_none")] 
+ pub stream: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub tail: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub follow: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub since: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessLogEntry { + pub sequence: u64, + pub stream: ProcessLogsStream, + pub timestamp_ms: i64, + pub data: String, + pub encoding: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessLogsResponse { + pub process_id: String, + pub stream: ProcessLogsStream, + pub entries: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessInputRequest { + pub data: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub encoding: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessInputResponse { + pub bytes_written: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessSignalQuery { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub wait_ms: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessTerminalResizeRequest { + pub cols: u16, + pub rows: u16, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessTerminalResizeResponse { + pub cols: u16, + pub rows: u16, +} + +#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[serde(rename_all = "camelCase")] +pub struct ProcessWsQuery { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub access_token: 
Option<String>, +} diff --git a/server/packages/sandbox-agent/tests/v1_api.rs b/server/packages/sandbox-agent/tests/v1_api.rs index 89efde0..3dbd5e7 100644 --- a/server/packages/sandbox-agent/tests/v1_api.rs +++ b/server/packages/sandbox-agent/tests/v1_api.rs @@ -1,6 +1,6 @@ use std::fs; use std::io::{Read, Write}; -use std::net::{TcpListener, TcpStream}; +use std::net::{SocketAddr, TcpListener, TcpStream}; use std::path::Path; use std::time::Duration; @@ -14,6 +14,8 @@ use sandbox_agent_agent_management::agents::AgentManager; use serde_json::{json, Value}; use serial_test::serial; use tempfile::TempDir; +use tokio::sync::oneshot; +use tokio::task::JoinHandle; use tower::util::ServiceExt; struct TestApp { @@ -48,6 +50,56 @@ struct EnvVarGuard { previous: Option<OsString>, } +struct LiveServer { + address: SocketAddr, + shutdown_tx: Option<oneshot::Sender<()>>, + task: JoinHandle<()>, +} + +impl LiveServer { + async fn spawn(app: Router) -> Self { + let listener = tokio::net::TcpListener::bind("127.0.0.1:0") + .await + .expect("bind live server"); + let address = listener.local_addr().expect("live server address"); + let (shutdown_tx, shutdown_rx) = oneshot::channel::<()>(); + + let task = tokio::spawn(async move { + let server = axum::serve(listener, app.into_make_service()) + .with_graceful_shutdown(async { + let _ = shutdown_rx.await; + }); + + let _ = server.await; + }); + + Self { + address, + shutdown_tx: Some(shutdown_tx), + task, + } + } + + fn http_url(&self, path: &str) -> String { + format!("http://{}{}", self.address, path) + } + + fn ws_url(&self, path: &str) -> String { + format!("ws://{}{}", self.address, path) + } + + async fn shutdown(mut self) { + if let Some(shutdown_tx) = self.shutdown_tx.take() { + let _ = shutdown_tx.send(()); + } + + let _ = tokio::time::timeout(Duration::from_secs(3), async { + let _ = self.task.await; + }) + .await; + } +} + impl EnvVarGuard { fn set(key: &'static str, value: &str) -> Self { let previous = std::env::var_os(key); @@ -291,3 +343,5 @@ mod 
acp_transport; mod config_endpoints; #[path = "v1_api/control_plane.rs"] mod control_plane; +#[path = "v1_api/processes.rs"] +mod processes; diff --git a/server/packages/sandbox-agent/tests/v1_api/processes.rs b/server/packages/sandbox-agent/tests/v1_api/processes.rs new file mode 100644 index 0000000..aaf072d --- /dev/null +++ b/server/packages/sandbox-agent/tests/v1_api/processes.rs @@ -0,0 +1,661 @@ +use super::*; +use base64::engine::general_purpose::STANDARD as BASE64; +use base64::Engine; +use futures::{SinkExt, StreamExt}; +use tokio_tungstenite::connect_async; +use tokio_tungstenite::tungstenite::Message; + +async fn wait_for_exited(test_app: &TestApp, process_id: &str) { + for _ in 0..30 { + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + &format!("/v1/processes/{process_id}"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let parsed = parse_json(&body); + if parsed["status"] == "exited" { + return; + } + tokio::time::sleep(Duration::from_millis(100)).await; + } + + panic!("process did not exit in time"); +} + +fn decode_log_entries(entries: &[Value]) -> String { + entries + .iter() + .filter_map(|entry| entry.get("data").and_then(Value::as_str)) + .filter_map(|encoded| BASE64.decode(encoded).ok()) + .map(|bytes| String::from_utf8_lossy(&bytes).to_string()) + .collect::<Vec<_>>() + .join("") +} + +async fn recv_ws_message( + ws: &mut tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>, + >, +) -> Message { + tokio::time::timeout(Duration::from_secs(3), ws.next()) + .await + .expect("timed out waiting for websocket frame") + .expect("websocket stream ended") + .expect("websocket frame") +} + +#[tokio::test] +async fn v1_processes_config_round_trip() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/processes/config", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + 
assert_eq!(parse_json(&body)["maxConcurrentProcesses"], 64); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes/config", + Some(json!({ + "maxConcurrentProcesses": 8, + "defaultRunTimeoutMs": 1000, + "maxRunTimeoutMs": 5000, + "maxOutputBytes": 4096, + "maxLogBytesPerProcess": 32768, + "maxInputBytesPerRequest": 1024 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let parsed = parse_json(&body); + assert_eq!(parsed["maxConcurrentProcesses"], 8); + assert_eq!(parsed["defaultRunTimeoutMs"], 1000); +} + +#[tokio::test] +async fn v1_process_lifecycle_requires_stop_before_delete() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "sh", + "args": ["-lc", "sleep 30"], + "tty": false, + "interactive": false + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let (status, _, body) = send_request( + &test_app.app, + Method::DELETE, + &format!("/v1/processes/{process_id}"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::CONFLICT); + assert_eq!(parse_json(&body)["status"], 409); + + let (status, _, _body) = send_request( + &test_app.app, + Method::POST, + &format!("/v1/processes/{process_id}/stop"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + + wait_for_exited(&test_app, &process_id).await; + + let (status, _, _) = send_request( + &test_app.app, + Method::DELETE, + &format!("/v1/processes/{process_id}"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::NO_CONTENT); +} + +#[tokio::test] +async fn v1_process_run_returns_output_and_timeout() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes/run", + Some(json!({ + "command": "sh", + 
"args": ["-lc", "echo hi"], + "timeoutMs": 1000 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let parsed = parse_json(&body); + assert_eq!(parsed["timedOut"], false); + assert_eq!(parsed["exitCode"], 0); + assert!(parsed["stdout"].as_str().unwrap_or_default().contains("hi")); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes/run", + Some(json!({ + "command": "sh", + "args": ["-lc", "sleep 2"], + "timeoutMs": 50 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["timedOut"], true); +} + +#[tokio::test] +async fn v1_process_run_reports_truncation() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes/run", + Some(json!({ + "command": "sh", + "args": ["-lc", "printf 'abcdefghijklmnopqrstuvwxyz'"], + "maxOutputBytes": 5 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let parsed = parse_json(&body); + assert_eq!(parsed["stdoutTruncated"], true); + assert_eq!(parsed["stderrTruncated"], false); + assert_eq!(parsed["stdout"].as_str().unwrap_or_default().len(), 5); +} + +#[tokio::test] +async fn v1_process_tty_input_and_logs() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "cat", + "tty": true, + "interactive": true + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let (status, _, _body) = send_request( + &test_app.app, + Method::POST, + &format!("/v1/processes/{process_id}/input"), + Some(json!({ + "data": "aGVsbG8K", + "encoding": "base64" + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + + tokio::time::sleep(Duration::from_millis(150)).await; + + let (status, _, body) = send_request( + 
&test_app.app, + Method::GET, + &format!("/v1/processes/{process_id}/logs?stream=pty&tail=20"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let entries = parse_json(&body)["entries"] + .as_array() + .cloned() + .unwrap_or_default(); + assert!(!entries.is_empty()); + + let (status, _, _body) = send_request( + &test_app.app, + Method::POST, + &format!("/v1/processes/{process_id}/kill"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + + wait_for_exited(&test_app, &process_id).await; + + let (status, _, _) = send_request( + &test_app.app, + Method::DELETE, + &format!("/v1/processes/{process_id}"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::NO_CONTENT); +} + +#[tokio::test] +async fn v1_process_not_found_returns_404() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/processes/does-not-exist", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::NOT_FOUND); + assert_eq!(parse_json(&body)["status"], 404); +} + +#[tokio::test] +async fn v1_process_input_limit_returns_413() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, _) = send_request( + &test_app.app, + Method::POST, + "/v1/processes/config", + Some(json!({ + "maxConcurrentProcesses": 8, + "defaultRunTimeoutMs": 1000, + "maxRunTimeoutMs": 5000, + "maxOutputBytes": 4096, + "maxLogBytesPerProcess": 32768, + "maxInputBytesPerRequest": 4 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "cat", + "tty": true, + "interactive": true + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + 
&format!("/v1/processes/{process_id}/input"), + Some(json!({ + "data": "aGVsbG8=", + "encoding": "base64" + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::PAYLOAD_TOO_LARGE); + assert_eq!(parse_json(&body)["status"], 413); +} + +#[tokio::test] +async fn v1_tty_process_is_real_terminal() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "sh", + "args": ["-lc", "tty"], + "tty": true, + "interactive": false + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + wait_for_exited(&test_app, &process_id).await; + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + &format!("/v1/processes/{process_id}/logs?stream=pty"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let entries = parse_json(&body)["entries"] + .as_array() + .cloned() + .unwrap_or_default(); + let joined = decode_log_entries(&entries); + assert!(!joined.to_lowercase().contains("not a tty")); + assert!(joined.contains("/dev/")); +} + +#[tokio::test] +async fn v1_process_logs_follow_sse_streams_entries() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "sh", + "args": ["-lc", "echo first; sleep 0.3; echo second"], + "tty": false, + "interactive": false + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let request = Request::builder() + .method(Method::GET) + .uri(format!( + "/v1/processes/{process_id}/logs?stream=stdout&follow=true" + )) + .body(Body::empty()) + .expect("build request"); + let response = test_app + .app + .clone() + .oneshot(request) + .await + .expect("sse 
response"); + assert_eq!(response.status(), StatusCode::OK); + + let mut stream = response.into_body().into_data_stream(); + let chunk = tokio::time::timeout(Duration::from_secs(5), async move { + while let Some(chunk) = stream.next().await { + let bytes = chunk.expect("stream chunk"); + let text = String::from_utf8_lossy(&bytes).to_string(); + if text.contains("data:") { + return text; + } + } + panic!("SSE stream ended before log chunk"); + }) + .await + .expect("timed out reading process log sse"); + + let payload = parse_sse_data(&chunk); + assert!(payload["sequence"].as_u64().is_some()); + assert_eq!(payload["stream"], "stdout"); +} + +#[tokio::test] +async fn v1_access_token_query_only_allows_terminal_ws() { + let test_app = TestApp::new(AuthConfig::with_token("secret-token".to_string())); + + let (status, _, _) = send_request( + &test_app.app, + Method::GET, + "/v1/health?access_token=secret-token", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::UNAUTHORIZED); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "cat", + "tty": true, + "interactive": true + })), + &[("authorization", "Bearer secret-token")], + ) + .await; + assert_eq!(status, StatusCode::OK); + let process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let (status, _, _) = send_request( + &test_app.app, + Method::GET, + &format!("/v1/processes/{process_id}/terminal/ws"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::UNAUTHORIZED); + + let (status, _, _) = send_request( + &test_app.app, + Method::GET, + &format!("/v1/processes/{process_id}/terminal/ws?access_token=secret-token"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn v1_process_terminal_ws_e2e_is_deterministic() { + let test_app = TestApp::new(AuthConfig::disabled()); + let live_server = LiveServer::spawn(test_app.app.clone()).await; + let 
http = reqwest::Client::new(); + + let create_response = http + .post(live_server.http_url("/v1/processes")) + .json(&json!({ + "command": "sh", + "args": ["-lc", "stty -echo; IFS= read -r line; printf 'got:%s\\n' \"$line\""], + "tty": true, + "interactive": true + })) + .send() + .await + .expect("create process response"); + assert_eq!(create_response.status(), reqwest::StatusCode::OK); + let create_body: Value = create_response.json().await.expect("create process json"); + let process_id = create_body["id"] + .as_str() + .expect("process id") + .to_string(); + + let ws_url = live_server.ws_url(&format!("/v1/processes/{process_id}/terminal/ws")); + let (mut ws, _) = connect_async(&ws_url) + .await + .expect("connect websocket"); + + let ready = recv_ws_message(&mut ws).await; + let ready_payload: Value = serde_json::from_str(ready.to_text().expect("ready text frame")) + .expect("ready json"); + assert_eq!(ready_payload["type"], "ready"); + assert_eq!(ready_payload["processId"], process_id); + + ws.send(Message::Text( + json!({ + "type": "input", + "data": "hello from ws\n" + }) + .to_string(), + )) + .await + .expect("send input frame"); + + let mut saw_binary_output = false; + let mut saw_exit = false; + for _ in 0..10 { + let frame = recv_ws_message(&mut ws).await; + match frame { + Message::Binary(bytes) => { + let text = String::from_utf8_lossy(&bytes); + if text.contains("got:hello from ws") { + saw_binary_output = true; + } + } + Message::Text(text) => { + let payload: Value = serde_json::from_str(&text).expect("ws json"); + if payload["type"] == "exit" { + saw_exit = true; + break; + } + assert_ne!(payload["type"], "error"); + } + Message::Close(_) => break, + Message::Ping(_) | Message::Pong(_) => {} + _ => {} + } + } + + assert!(saw_binary_output, "expected pty binary output over websocket"); + assert!(saw_exit, "expected exit control frame over websocket"); + + let _ = ws.close(None).await; + + let delete_response = http + 
.delete(live_server.http_url(&format!("/v1/processes/{process_id}"))) + .send() + .await + .expect("delete process response"); + assert_eq!(delete_response.status(), reqwest::StatusCode::NO_CONTENT); + + live_server.shutdown().await; +} + +#[tokio::test] +async fn v1_process_terminal_ws_auth_e2e() { + let token = "secret-token"; + let test_app = TestApp::new(AuthConfig::with_token(token.to_string())); + let live_server = LiveServer::spawn(test_app.app.clone()).await; + let http = reqwest::Client::new(); + + let create_response = http + .post(live_server.http_url("/v1/processes")) + .bearer_auth(token) + .json(&json!({ + "command": "cat", + "tty": true, + "interactive": true + })) + .send() + .await + .expect("create process response"); + assert_eq!(create_response.status(), reqwest::StatusCode::OK); + let create_body: Value = create_response.json().await.expect("create process json"); + let process_id = create_body["id"] + .as_str() + .expect("process id") + .to_string(); + + let unauth_ws_url = live_server.ws_url(&format!("/v1/processes/{process_id}/terminal/ws")); + let unauth_err = connect_async(&unauth_ws_url) + .await + .expect_err("unauthenticated websocket handshake should fail"); + match unauth_err { + tokio_tungstenite::tungstenite::Error::Http(response) => { + assert_eq!(response.status().as_u16(), 401); + } + other => panic!("unexpected websocket auth error: {other:?}"), + } + + let auth_ws_url = live_server.ws_url(&format!( + "/v1/processes/{process_id}/terminal/ws?access_token={token}" + )); + let (mut ws, _) = connect_async(&auth_ws_url) + .await + .expect("authenticated websocket handshake"); + + let ready = recv_ws_message(&mut ws).await; + let ready_payload: Value = serde_json::from_str(ready.to_text().expect("ready text frame")) + .expect("ready json"); + assert_eq!(ready_payload["type"], "ready"); + assert_eq!(ready_payload["processId"], process_id); + + let _ = ws + .send(Message::Text(json!({ "type": "close" }).to_string())) + .await; + let _ = 
ws.close(None).await; + + let kill_response = http + .post(live_server.http_url(&format!( + "/v1/processes/{process_id}/kill?waitMs=1000" + ))) + .bearer_auth(token) + .send() + .await + .expect("kill process response"); + assert_eq!(kill_response.status(), reqwest::StatusCode::OK); + + let delete_response = http + .delete(live_server.http_url(&format!("/v1/processes/{process_id}"))) + .bearer_auth(token) + .send() + .await + .expect("delete process response"); + assert_eq!(delete_response.status(), reqwest::StatusCode::NO_CONTENT); + + live_server.shutdown().await; +}