feat: add raw session args/opts

This commit is contained in:
Nathan Flurry 2026-02-04 23:41:11 -08:00
parent 7378abee46
commit d51203c628
30 changed files with 4968 additions and 329 deletions

View file

@ -74,7 +74,7 @@ Policy:
Message normalization notes
- user vs assistant: normalized via role in the universal item; provider role fields or item types determine role.
- file artifacts: always represented as content parts (type=file_ref) inside message/tool_result items, not a separate item kind.
- file artifacts: always represented as content parts (type=file_ref) inside message/tool_result items, not a separate item kind. Rename actions use `target_path`.
- reasoning: represented as content parts (type=reasoning) inside message items, with visibility when available.
- subagents: OpenCode subtask parts and Claude Task tool usage are currently normalized into standard message/tool flow (no dedicated subagent fields).
- OpenCode unrolling: message.updated creates/updates the parent message item; tool-related parts emit separate tool item events (item.started/ item.completed) with parent_id pointing to the message item.

View file

@ -17,8 +17,8 @@ Authentication matches `/v1`: if a token is configured, requests must include `A
| GET | /config/providers | Stubbed | Returns/echoes config payloads. | E2E: openapi-coverage |
| GET | /event | SSE stub | Emits compat events for session/message/pty updates only. | E2E: openapi-coverage, events |
| GET | /experimental/resource | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
| GET | /experimental/tool | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
| GET | /experimental/tool/ids | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
| GET | /experimental/tool | Derived | Returns MCP tool metadata for connected servers. | E2E: openapi-coverage, opencode-mcp |
| GET | /experimental/tool/ids | Derived | Returns MCP tool IDs for connected servers. | E2E: openapi-coverage, opencode-mcp |
| DELETE | /experimental/worktree | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
| GET | /experimental/worktree | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
| POST | /experimental/worktree | Stubbed | Experimental endpoints return empty stubs. | E2E: openapi-coverage |
@ -29,7 +29,7 @@ Authentication matches `/v1`: if a token is configured, requests must include `A
| GET | /find | Stubbed | Returns empty results. | E2E: openapi-coverage |
| GET | /find/file | Stubbed | Returns empty results. | E2E: openapi-coverage |
| GET | /find/symbol | Stubbed | Returns empty results. | E2E: openapi-coverage |
| GET | /formatter | Stubbed | | E2E: openapi-coverage |
| GET | /formatter | Derived | Scans workspace files to report formatter availability. | E2E: openapi-coverage, formatter-lsp |
| GET | /global/config | Stubbed | | E2E: openapi-coverage |
| PATCH | /global/config | Stubbed | | E2E: openapi-coverage |
| POST | /global/dispose | Stubbed | | E2E: openapi-coverage |
@ -37,15 +37,15 @@ Authentication matches `/v1`: if a token is configured, requests must include `A
| GET | /global/health | Stubbed | | E2E: openapi-coverage |
| POST | /instance/dispose | Stubbed | | E2E: openapi-coverage |
| POST | /log | Stubbed | | E2E: openapi-coverage |
| GET | /lsp | Stubbed | | E2E: openapi-coverage |
| GET | /mcp | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| DELETE | /mcp/{name}/auth | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp/{name}/auth | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp/{name}/auth/authenticate | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp/{name}/auth/callback | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp/{name}/connect | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| POST | /mcp/{name}/disconnect | Stubbed | Returns disabled/needs_auth stubs. | E2E: openapi-coverage |
| GET | /lsp | Derived | Reports LSP status per language based on workspace scan and PATH. | E2E: openapi-coverage, formatter-lsp |
| GET | /mcp | Stateful | Lists MCP registry status. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp | Stateful | Registers MCP servers and stores config. | E2E: openapi-coverage, opencode-mcp |
| DELETE | /mcp/{name}/auth | Stateful | Clears MCP auth credentials. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp/{name}/auth | Stateful | Starts MCP auth flow. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp/{name}/auth/authenticate | Stateful | Returns MCP auth status. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp/{name}/auth/callback | Stateful | Completes MCP auth and connects. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp/{name}/connect | Stateful | Connects MCP server and loads tools. | E2E: openapi-coverage, opencode-mcp |
| POST | /mcp/{name}/disconnect | Stateful | Disconnects MCP server and clears tools. | E2E: openapi-coverage, opencode-mcp |
| GET | /path | Derived stub | | E2E: openapi-coverage |
| GET | /permission | Stubbed | | E2E: openapi-coverage, permissions |
| POST | /permission/{requestID}/reply | Stubbed | | E2E: openapi-coverage, permissions |
@ -105,4 +105,4 @@ Authentication matches `/v1`: if a token is configured, requests must include `A
| POST | /tui/select-session | Stubbed | Returns true/empty control payloads. | E2E: openapi-coverage |
| POST | /tui/show-toast | Stubbed | Returns true/empty control payloads. | E2E: openapi-coverage |
| POST | /tui/submit-prompt | Stubbed | Returns true/empty control payloads. | E2E: openapi-coverage |
| GET | /vcs | Derived stub | | E2E: openapi-coverage |
| GET | /vcs | Derived stub | | E2E: openapi-coverage |

View file

@ -291,8 +291,9 @@ File reference with optional diff.
| Field | Type | Description |
|-------|------|-------------|
| `path` | string | File path |
| `action` | string | `read`, `write`, `patch` |
| `action` | string | `read`, `write`, `patch`, `rename`, `delete` |
| `diff` | string? | Unified diff (for patches) |
| `target_path` | string? | Destination path for renames |
```json
{

View file

@ -44,11 +44,17 @@ const renderContentPart = (part: ContentPart, index: number) => {
);
}
case "file_ref": {
const { path, action, diff } = part as { path: string; action: string; diff?: string | null };
const { path, action, diff, target_path } = part as {
path: string;
action: string;
diff?: string | null;
target_path?: string | null;
};
const displayPath = target_path && action === "rename" ? `${path} -> ${target_path}` : path;
return (
<div key={key} className="part">
<div className="part-title">file - {action}</div>
<div className="part-body mono">{path}</div>
<div className="part-body mono">{displayPath}</div>
{diff && <pre className="code-block">{diff}</pre>}
</div>
);

View file

@ -131,6 +131,7 @@ export interface components {
action: components["schemas"]["FileAction"];
diff?: string | null;
path: string;
target_path?: string | null;
/** @enum {string} */
type: "file_ref";
}) | {
@ -183,7 +184,7 @@ export interface components {
hasMore: boolean;
};
/** @enum {string} */
FileAction: "read" | "write" | "patch";
FileAction: "read" | "write" | "patch" | "rename" | "delete";
HealthResponse: {
status: string;
};

View file

@ -23,6 +23,7 @@ axum.workspace = true
clap.workspace = true
futures.workspace = true
reqwest.workspace = true
regress.workspace = true
dirs.workspace = true
time.workspace = true
chrono.workspace = true
@ -36,6 +37,7 @@ tracing-logfmt.workspace = true
tracing-subscriber.workspace = true
include_dir.workspace = true
base64.workspace = true
portable-pty = "0.8"
tempfile = { workspace = true, optional = true }
[target.'cfg(unix)'.dependencies]

View file

@ -0,0 +1,337 @@
use std::collections::HashSet;
use std::env;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use serde::Serialize;
// Caps on the workspace scan so probing very large repos stays cheap.
const MAX_SCAN_FILES: usize = 10_000;
const MAX_SCAN_DEPTH: usize = 6;
// Directory names skipped during the scan (VCS, IDE, dependency and build output).
const IGNORE_DIRS: &[&str] = &[
    ".git",
    ".idea",
    ".sandbox-agent",
    ".venv",
    ".vscode",
    "build",
    "dist",
    "node_modules",
    "target",
    "venv",
];
/// Availability report for one formatter, serialized for the compat API.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct FormatterStatus {
    pub name: String,
    // Extensions (with leading dot) the formatter applies to.
    pub extensions: Vec<String>,
    // True when the formatter binary was found in the workspace or on PATH.
    pub enabled: bool,
}
/// Status report for one language server, serialized for the compat API.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LspStatus {
    pub id: String,
    pub name: String,
    // Workspace root the status was computed for.
    pub root: String,
    // "connected" when the server binary is reachable, otherwise "error".
    pub status: String,
}
/// Detects which formatters are relevant/available for a workspace.
#[derive(Debug, Clone)]
pub struct FormatterService {
    formatters: Vec<FormatterDefinition>,
}
/// Static registry of known language servers.
#[derive(Debug, Clone)]
pub struct LspRegistry {
    servers: Vec<LspDefinition>,
}
// Static description of a formatter: how to detect it (extensions/config
// files) and which binaries implement it.
#[derive(Debug, Clone)]
struct FormatterDefinition {
    name: &'static str,
    extensions: &'static [&'static str],
    config_files: &'static [&'static str],
    binaries: &'static [&'static str],
}
// Static description of a language server; `capabilities` is currently
// informational only.
#[derive(Debug, Clone)]
struct LspDefinition {
    id: &'static str,
    name: &'static str,
    extensions: &'static [&'static str],
    binaries: &'static [&'static str],
    #[allow(dead_code)]
    capabilities: &'static [&'static str],
}
impl FormatterService {
    /// Builds the registry of known formatters and the file markers used to
    /// detect each of them in a workspace.
    pub fn new() -> Self {
        Self {
            formatters: vec![
                FormatterDefinition {
                    name: "prettier",
                    extensions: &[
                        ".js", ".jsx", ".ts", ".tsx", ".json", ".css", ".scss", ".md", ".mdx",
                        ".yaml", ".yml", ".html",
                    ],
                    config_files: &[
                        ".prettierrc",
                        ".prettierrc.json",
                        ".prettierrc.yaml",
                        ".prettierrc.yml",
                        ".prettierrc.js",
                        ".prettierrc.cjs",
                        ".prettierrc.mjs",
                        "prettier.config.js",
                        "prettier.config.cjs",
                        "prettier.config.mjs",
                    ],
                    binaries: &["prettier"],
                },
                FormatterDefinition {
                    name: "rustfmt",
                    extensions: &[".rs"],
                    config_files: &["rustfmt.toml"],
                    binaries: &["rustfmt"],
                },
                FormatterDefinition {
                    name: "gofmt",
                    extensions: &[".go"],
                    config_files: &[],
                    binaries: &["gofmt"],
                },
                FormatterDefinition {
                    name: "black",
                    extensions: &[".py"],
                    config_files: &["pyproject.toml", "black.toml", "setup.cfg"],
                    binaries: &["black"],
                },
                FormatterDefinition {
                    name: "shfmt",
                    extensions: &[".sh", ".bash"],
                    config_files: &[],
                    binaries: &["shfmt"],
                },
                FormatterDefinition {
                    name: "stylua",
                    extensions: &[".lua"],
                    config_files: &["stylua.toml"],
                    binaries: &["stylua"],
                },
            ],
        }
    }
    /// Reports which formatters are relevant for `directory`.
    ///
    /// A formatter is included when the workspace scan finds a matching file
    /// extension or one of the formatter's config files; `enabled` reflects
    /// whether a formatter binary is reachable (node_modules/.bin or PATH).
    pub fn status_for_directory(&self, directory: &str) -> Vec<FormatterStatus> {
        let root = resolve_root(directory);
        // Borrow `root` here — it is needed again below for the binary lookup.
        let scan = scan_workspace(&root);
        let mut entries = Vec::new();
        for formatter in &self.formatters {
            let has_extension = formatter
                .extensions
                .iter()
                .any(|ext| scan.extensions.contains(&ext.to_ascii_lowercase()));
            let has_config = formatter
                .config_files
                .iter()
                .any(|name| scan.file_names.contains(&name.to_ascii_lowercase()));
            if !has_extension && !has_config {
                continue;
            }
            let enabled = has_binary_in_workspace(&root, formatter.binaries);
            entries.push(FormatterStatus {
                name: formatter.name.to_string(),
                extensions: formatter
                    .extensions
                    .iter()
                    .map(|ext| ext.to_string())
                    .collect(),
                enabled,
            });
        }
        entries
    }
}
impl LspRegistry {
    /// Builds the static registry of known language servers.
    pub fn new() -> Self {
        Self {
            servers: vec![
                LspDefinition {
                    id: "rust-analyzer",
                    name: "Rust Analyzer",
                    extensions: &[".rs"],
                    binaries: &["rust-analyzer"],
                    capabilities: &["completion", "diagnostics", "formatting"],
                },
                LspDefinition {
                    id: "typescript-language-server",
                    name: "TypeScript Language Server",
                    extensions: &[".ts", ".tsx", ".js", ".jsx"],
                    binaries: &["typescript-language-server", "tsserver"],
                    capabilities: &["completion", "diagnostics", "formatting"],
                },
                LspDefinition {
                    id: "pyright",
                    name: "Pyright",
                    extensions: &[".py"],
                    binaries: &["pyright-langserver", "pyright"],
                    capabilities: &["completion", "diagnostics"],
                },
                LspDefinition {
                    id: "gopls",
                    name: "gopls",
                    extensions: &[".go"],
                    binaries: &["gopls"],
                    capabilities: &["completion", "diagnostics", "formatting"],
                },
            ],
        }
    }
    /// Reports LSP status for each server whose file extensions appear in the
    /// workspace; status is "connected" when the server binary is found,
    /// otherwise "error".
    pub fn status_for_directory(&self, directory: &str) -> Vec<LspStatus> {
        let root = resolve_root(directory);
        // Borrow `root` here — it is used again for the binary lookup and the
        // reported root path below.
        let scan = scan_workspace(&root);
        let mut entries = Vec::new();
        for server in &self.servers {
            let has_extension = server
                .extensions
                .iter()
                .any(|ext| scan.extensions.contains(&ext.to_ascii_lowercase()));
            if !has_extension {
                continue;
            }
            let status = if has_binary_in_workspace(&root, server.binaries) {
                "connected"
            } else {
                "error"
            };
            entries.push(LspStatus {
                id: server.id.to_string(),
                name: server.name.to_string(),
                root: root.to_string_lossy().to_string(),
                status: status.to_string(),
            });
        }
        entries
    }
}
// Aggregated result of a workspace walk: the set of file extensions seen
// (lowercased, with leading dot) and the set of lowercased file names.
#[derive(Default)]
struct WorkspaceScan {
    extensions: HashSet<String>,
    file_names: HashSet<String>,
}
/// Resolves `directory` to an existing directory, falling back to the process
/// working directory (or ".") when the argument is not a directory.
fn resolve_root(directory: &str) -> PathBuf {
    let candidate = PathBuf::from(directory);
    if candidate.is_dir() {
        candidate
    } else {
        env::current_dir().unwrap_or_else(|_| PathBuf::from("."))
    }
}
fn scan_workspace(root: &Path) -> WorkspaceScan {
let mut scan = WorkspaceScan::default();
let mut stack = Vec::new();
let mut files_seen = 0usize;
stack.push((root.to_path_buf(), 0usize));
while let Some((dir, depth)) = stack.pop() {
if depth > MAX_SCAN_DEPTH {
continue;
}
let entries = match std::fs::read_dir(&dir) {
Ok(entries) => entries,
Err(_) => continue,
};
for entry in entries.flatten() {
let path = entry.path();
let file_type = match entry.file_type() {
Ok(file_type) => file_type,
Err(_) => continue,
};
let name = entry.file_name();
if file_type.is_dir() {
if should_skip_dir(&name) {
continue;
}
stack.push((path, depth + 1));
} else if file_type.is_file() {
files_seen += 1;
if files_seen > MAX_SCAN_FILES {
return scan;
}
if let Some(extension) = path.extension().and_then(|ext| ext.to_str()) {
scan.extensions
.insert(format!(".{}", extension.to_ascii_lowercase()));
}
if let Some(name) = name.to_str() {
scan.file_names.insert(name.to_ascii_lowercase());
}
}
}
}
scan
}
/// True when `name` (case-insensitively) matches one of the ignored
/// directory names; non-UTF-8 names are never skipped.
fn should_skip_dir(name: &OsStr) -> bool {
    match name.to_str() {
        Some(text) => IGNORE_DIRS.contains(&text.to_ascii_lowercase().as_str()),
        None => false,
    }
}
/// True when any of `binaries` resolves either inside the workspace's
/// node_modules/.bin directory or on the ambient PATH.
fn has_binary_in_workspace(root: &Path, binaries: &[&str]) -> bool {
    for binary in binaries {
        if binary_exists_in_workspace(root, binary) || binary_exists_in_path(binary) {
            return true;
        }
    }
    false
}
/// Looks for `binary` in the workspace-local `node_modules/.bin` directory.
fn binary_exists_in_workspace(root: &Path, binary: &str) -> bool {
    let bin_dir = root.join("node_modules").join(".bin");
    bin_dir.is_dir() && path_has_binary(&bin_dir, binary)
}
/// Searches every entry of the PATH environment variable for `binary`.
fn binary_exists_in_path(binary: &str) -> bool {
    env::var_os("PATH")
        .map(|paths| env::split_paths(&paths).any(|dir| path_has_binary(&dir, binary)))
        .unwrap_or(false)
}
/// Checks whether `path` contains a regular file named `binary`.
///
/// On Windows, additionally tries each extension listed in `PATHEXT`
/// (e.g. `.EXE`), mirroring how the shell resolves commands. `PATHEXT` is a
/// `;`-separated list, which is exactly what `env::split_paths` splits on
/// for Windows targets.
fn path_has_binary(path: &Path, binary: &str) -> bool {
    if path.join(binary).is_file() {
        return true;
    }
    // On non-Windows targets the exact name is the only candidate.
    if !cfg!(windows) {
        return false;
    }
    let Some(exts) = env::var_os("PATHEXT") else {
        return false;
    };
    env::split_paths(&exts)
        .filter_map(|ext| ext.to_str().map(str::to_owned))
        .any(|ext| path.join(format!("{binary}{ext}")).is_file())
}

View file

@ -1,9 +1,12 @@
//! Sandbox agent core utilities.
mod agent_server_logs;
pub mod formatter_lsp;
pub mod credentials;
pub mod opencode_compat;
pub mod pty;
pub mod router;
pub mod search;
pub mod server_logs;
pub mod telemetry;
pub mod ui;

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,354 @@
use std::collections::HashMap;
use std::io::{Read, Write};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use portable_pty::{native_pty_system, CommandBuilder, MasterPty, PtySize};
use tokio::sync::{broadcast, mpsc, Mutex as AsyncMutex};
use sandbox_agent_error::SandboxError;
// Default terminal geometry used when a PTY is created.
const DEFAULT_ROWS: u16 = 24;
const DEFAULT_COLS: u16 = 80;
// Size of the read buffer used by the output-pump thread.
const OUTPUT_BUFFER_SIZE: usize = 8192;
// Bounded channel capacities for output fan-out and input writes.
const OUTPUT_CHANNEL_CAPACITY: usize = 256;
const INPUT_CHANNEL_CAPACITY: usize = 256;
// How often the exit watcher polls the child for termination.
const EXIT_POLL_INTERVAL_MS: u64 = 200;
/// Public snapshot of a PTY session's metadata and state.
#[derive(Debug, Clone)]
pub struct PtyRecord {
    pub id: String,
    pub title: String,
    pub command: String,
    pub args: Vec<String>,
    pub cwd: String,
    pub status: PtyStatus,
    // Child process id; 0 when the PTY backend did not report one.
    pub pid: i64,
}
/// Lifecycle state of a PTY session.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PtyStatus {
    Running,
    Exited,
}
impl PtyStatus {
    /// Stable string form used in API payloads.
    pub fn as_str(&self) -> &'static str {
        if matches!(self, PtyStatus::Running) {
            "running"
        } else {
            "exited"
        }
    }
}
/// Requested terminal dimensions for a resize.
#[derive(Debug, Clone)]
pub struct PtySizeSpec {
    pub rows: u16,
    pub cols: u16,
}
/// Parameters for creating a new PTY session.
#[derive(Debug, Clone)]
pub struct PtyCreateOptions {
    pub id: String,
    pub title: String,
    pub command: String,
    pub args: Vec<String>,
    pub cwd: String,
    pub env: HashMap<String, String>,
    // When set, the PTY is torn down by `cleanup_session` for this session.
    pub owner_session_id: Option<String>,
}
/// Mutable fields for an existing PTY; `None` fields are left unchanged.
#[derive(Debug, Clone)]
pub struct PtyUpdateOptions {
    pub title: Option<String>,
    pub size: Option<PtySizeSpec>,
}
/// I/O endpoints returned by `connect`: a receiver of output chunks and a
/// sender for input bytes.
#[derive(Debug)]
pub struct PtyIo {
    pub output: mpsc::Receiver<Arc<[u8]>>,
    pub input: mpsc::Sender<Vec<u8>>,
}
/// Events broadcast by the manager.
#[derive(Debug, Clone)]
pub enum PtyEvent {
    Exited { id: String, exit_code: i32 },
}
/// Owns all live PTY sessions and the event broadcast channel.
#[derive(Debug)]
pub struct PtyManager {
    ptys: AsyncMutex<HashMap<String, Arc<PtyHandle>>>,
    event_tx: broadcast::Sender<PtyEvent>,
}
// Per-session shared state; fields individually locked because they are
// touched from async tasks and from the reader/exit-watcher threads.
#[derive(Debug)]
struct PtyHandle {
    record: Mutex<PtyRecordState>,
    master: Mutex<Box<dyn MasterPty + Send>>,
    input_tx: mpsc::Sender<Vec<u8>>,
    output_listeners: Mutex<Vec<mpsc::Sender<Arc<[u8]>>>>,
    owner_session_id: Option<String>,
    child: Mutex<Box<dyn portable_pty::Child + Send>>,
}
// Record plus exit code, kept together under one lock.
#[derive(Debug, Clone)]
struct PtyRecordState {
    record: PtyRecord,
    exit_code: Option<i32>,
}
impl PtyRecordState {
    // Owned copy of the current record for API responses.
    fn snapshot(&self) -> PtyRecord {
        self.record.clone()
    }
}
impl PtyManager {
    /// Creates an empty manager with a broadcast channel for exit events.
    pub fn new() -> Self {
        let (event_tx, _) = broadcast::channel(128);
        Self {
            ptys: AsyncMutex::new(HashMap::new()),
            event_tx,
        }
    }
    /// Subscribes to PTY lifecycle events (currently only `Exited`).
    pub fn subscribe(&self) -> broadcast::Receiver<PtyEvent> {
        self.event_tx.subscribe()
    }
    /// Returns a snapshot of every tracked PTY record.
    pub async fn list(&self) -> Vec<PtyRecord> {
        let ptys = self.ptys.lock().await;
        ptys.values()
            .map(|handle| handle.record.lock().unwrap().snapshot())
            .collect()
    }
    /// Returns a snapshot of the PTY with `id`, if tracked.
    pub async fn get(&self, id: &str) -> Option<PtyRecord> {
        let ptys = self.ptys.lock().await;
        ptys.get(id)
            .map(|handle| handle.record.lock().unwrap().snapshot())
    }
    /// Attaches a new output listener to the PTY with `id` and returns its
    /// I/O endpoints; `None` if the PTY is unknown.
    pub async fn connect(&self, id: &str) -> Option<PtyIo> {
        let ptys = self.ptys.lock().await;
        let handle = ptys.get(id)?.clone();
        // Release the map lock before touching the per-handle lock.
        drop(ptys);
        let (output_tx, output_rx) = mpsc::channel(OUTPUT_CHANNEL_CAPACITY);
        handle
            .output_listeners
            .lock()
            .unwrap()
            .push(output_tx);
        Some(PtyIo {
            output: output_rx,
            input: handle.input_tx.clone(),
        })
    }
    /// Spawns `options.command` inside a new PTY and starts the background
    /// output reader, input writer, and exit watcher for it.
    ///
    /// Returns the initial record; errors surface as `StreamError`.
    pub async fn create(&self, options: PtyCreateOptions) -> Result<PtyRecord, SandboxError> {
        let pty_system = native_pty_system();
        let pty_size = PtySize {
            rows: DEFAULT_ROWS,
            cols: DEFAULT_COLS,
            pixel_width: 0,
            pixel_height: 0,
        };
        let pair = pty_system.openpty(pty_size).map_err(|err| SandboxError::StreamError {
            message: format!("failed to open pty: {err}"),
        })?;
        let mut cmd = CommandBuilder::new(&options.command);
        cmd.args(&options.args);
        cmd.cwd(&options.cwd);
        for (key, value) in &options.env {
            cmd.env(key, value);
        }
        let child = pair
            .slave
            .spawn_command(cmd)
            .map_err(|err| SandboxError::StreamError {
                message: format!("failed to spawn pty command: {err}"),
            })?;
        // pid 0 signals "unknown" when the backend reports no process id.
        let pid = child
            .process_id()
            .map(|value| value as i64)
            .unwrap_or(0);
        let record = PtyRecord {
            id: options.id.clone(),
            title: options.title.clone(),
            command: options.command.clone(),
            args: options.args.clone(),
            cwd: options.cwd.clone(),
            status: PtyStatus::Running,
            pid,
        };
        let record_state = PtyRecordState {
            record: record.clone(),
            exit_code: None,
        };
        let mut master = pair.master;
        let reader = master
            .try_clone_reader()
            .map_err(|err| SandboxError::StreamError {
                message: format!("failed to clone pty reader: {err}"),
            })?;
        let writer = master
            .take_writer()
            .map_err(|err| SandboxError::StreamError {
                message: format!("failed to take pty writer: {err}"),
            })?;
        let (input_tx, input_rx) = mpsc::channel::<Vec<u8>>(INPUT_CHANNEL_CAPACITY);
        let output_listeners = Mutex::new(Vec::new());
        let handle = Arc::new(PtyHandle {
            record: Mutex::new(record_state),
            master: Mutex::new(master),
            input_tx,
            output_listeners,
            owner_session_id: options.owner_session_id.clone(),
            child: Mutex::new(child),
        });
        self.spawn_output_reader(handle.clone(), reader);
        self.spawn_input_writer(writer, input_rx);
        self.spawn_exit_watcher(handle.clone());
        let mut ptys = self.ptys.lock().await;
        ptys.insert(options.id.clone(), handle);
        drop(ptys);
        Ok(record)
    }
    /// Applies title and/or size changes; `Ok(None)` when the PTY is unknown.
    pub async fn update(&self, id: &str, options: PtyUpdateOptions) -> Result<Option<PtyRecord>, SandboxError> {
        let ptys = self.ptys.lock().await;
        let handle = match ptys.get(id) {
            Some(handle) => handle.clone(),
            None => return Ok(None),
        };
        drop(ptys);
        if let Some(title) = options.title {
            let mut record = handle.record.lock().unwrap();
            record.record.title = title;
        }
        if let Some(size) = options.size {
            let pty_size = PtySize {
                rows: size.rows,
                cols: size.cols,
                pixel_width: 0,
                pixel_height: 0,
            };
            handle
                .master
                .lock()
                .unwrap()
                .resize(pty_size)
                .map_err(|err| SandboxError::StreamError {
                    message: format!("failed to resize pty: {err}"),
                })?;
        }
        let record = handle.record.lock().unwrap().snapshot();
        Ok(Some(record))
    }
    /// Removes the PTY from tracking and kills its child (best effort).
    pub async fn remove(&self, id: &str) -> Option<PtyRecord> {
        let mut ptys = self.ptys.lock().await;
        let handle = ptys.remove(id)?;
        drop(ptys);
        let _ = handle.child.lock().unwrap().kill();
        Some(handle.record.lock().unwrap().snapshot())
    }
    /// Removes and kills every PTY whose owner is `session_id`.
    pub async fn cleanup_session(&self, session_id: &str) {
        let mut ptys = self.ptys.lock().await;
        let ids: Vec<String> = ptys
            .iter()
            .filter_map(|(id, handle)| {
                if handle.owner_session_id.as_deref() == Some(session_id) {
                    Some(id.clone())
                } else {
                    None
                }
            })
            .collect();
        for id in ids {
            if let Some(handle) = ptys.remove(&id) {
                let _ = handle.child.lock().unwrap().kill();
            }
        }
    }
    // Dedicated OS thread that pumps PTY output to all listeners; listeners
    // whose channel is closed (or full at send time) are dropped.
    // NOTE(review): blocking_send while holding the listeners lock can stall
    // the reader if a consumer is slow — confirm this is acceptable.
    fn spawn_output_reader(&self, handle: Arc<PtyHandle>, mut reader: Box<dyn Read + Send>) {
        std::thread::spawn(move || {
            let mut buffer = vec![0u8; OUTPUT_BUFFER_SIZE];
            loop {
                let size = match reader.read(&mut buffer) {
                    Ok(0) => break,
                    Ok(size) => size,
                    Err(_) => break,
                };
                let payload: Arc<[u8]> = Arc::from(buffer[..size].to_vec());
                let mut listeners = handle.output_listeners.lock().unwrap();
                listeners.retain(|listener| listener.blocking_send(payload.clone()).is_ok());
            }
        });
    }
    // Async task that forwards queued input bytes to the PTY writer; each
    // write happens on the blocking pool so the runtime is not stalled.
    fn spawn_input_writer(
        &self,
        writer: Box<dyn Write + Send>,
        mut input_rx: mpsc::Receiver<Vec<u8>>,
    ) {
        let writer = Arc::new(Mutex::new(writer));
        tokio::spawn(async move {
            while let Some(chunk) = input_rx.recv().await {
                let writer = writer.clone();
                let result = tokio::task::spawn_blocking(move || {
                    let mut writer = writer.lock().unwrap();
                    writer.write_all(&chunk)?;
                    writer.flush()
                })
                .await;
                if result.is_err() {
                    break;
                }
            }
        });
    }
    // Polling thread that marks the record exited and broadcasts the exit
    // event once the child terminates (wait errors are reported as code 1).
    fn spawn_exit_watcher(&self, handle: Arc<PtyHandle>) {
        let event_tx = self.event_tx.clone();
        std::thread::spawn(move || loop {
            let exit_code = {
                let mut child = handle.child.lock().unwrap();
                match child.try_wait() {
                    Ok(Some(status)) => Some(status.code().unwrap_or(0)),
                    Ok(None) => None,
                    Err(_) => Some(1),
                }
            };
            if let Some(exit_code) = exit_code {
                {
                    let mut record = handle.record.lock().unwrap();
                    record.record.status = PtyStatus::Exited;
                    record.exit_code = Some(exit_code);
                }
                let _ = event_tx.send(PtyEvent::Exited {
                    id: handle.record.lock().unwrap().record.id.clone(),
                    exit_code,
                });
                break;
            }
            std::thread::sleep(Duration::from_millis(EXIT_POLL_INTERVAL_MS));
        });
    }
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,714 @@
use std::collections::HashMap;
use std::fs;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;
use regress::Regex;
use sandbox_agent_error::SandboxError;
use serde::{Deserialize, Serialize};
use tokio::process::Command;
// Result-limit defaults/caps and scan bounds for the fallback scanners.
const DEFAULT_LIMIT: usize = 200;
const MAX_LIMIT: usize = 1_000;
const MAX_FILES: usize = 50_000;
const MAX_FILE_BYTES: u64 = 1_000_000;
// Directory names excluded from the fallback file walks.
const IGNORED_DIRS: [&str; 10] = [
    ".git",
    "node_modules",
    "target",
    "dist",
    "build",
    ".next",
    ".cache",
    ".turbo",
    ".sandbox-agent",
    "coverage",
];
/// What kind of filesystem entry a file search should match.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum FindFileKind {
    File,
    Directory,
    Any,
}
impl Default for FindFileKind {
    // Plain files are the default search target.
    fn default() -> Self {
        Self::File
    }
}
/// Options accepted by the file-finding endpoint.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FindFileOptions {
    #[serde(default)]
    pub kind: FindFileKind,
    #[serde(default)]
    pub case_sensitive: bool,
    #[serde(default)]
    pub limit: Option<usize>,
}
impl Default for FindFileOptions {
    fn default() -> Self {
        Self {
            kind: FindFileKind::default(),
            case_sensitive: false,
            limit: None,
        }
    }
}
// Wrapper mirroring ripgrep's JSON `{"text": ...}` fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TextField {
    pub text: String,
}
/// One text-search hit in ripgrep-compatible shape.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FindTextMatch {
    pub path: TextField,
    pub lines: TextField,
    pub line_number: u64,
    // Byte offset of the match start from the beginning of the file.
    pub absolute_offset: u64,
    pub submatches: Vec<FindTextSubmatch>,
}
/// One submatch within a matched line (byte offsets relative to the line).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FindTextSubmatch {
    #[serde(rename = "match")]
    pub match_field: TextField,
    pub start: u64,
    pub end: u64,
}
/// A symbol found by the regex-based symbol scanner.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SymbolResult {
    pub name: String,
    pub kind: String,
    pub path: String,
    pub line: usize,
    pub column: usize,
}
/// Options for text search.
#[derive(Debug, Clone, Copy, Default)]
pub struct FindTextOptions {
    pub case_sensitive: Option<bool>,
    pub limit: Option<usize>,
}
/// Options for symbol search.
#[derive(Debug, Clone, Copy, Default)]
pub struct FindSymbolOptions {
    pub limit: Option<usize>,
}
// Cached symbols for one file, keyed by its mtime for invalidation.
#[derive(Debug, Clone)]
struct FileSymbols {
    modified: SystemTime,
    symbols: Vec<SymbolResult>,
}
// Per-root symbol cache.
#[derive(Debug, Default, Clone)]
struct SymbolIndex {
    files: HashMap<PathBuf, FileSymbols>,
}
// A compiled line pattern plus the symbol kind it detects.
#[derive(Debug, Clone)]
struct SymbolPattern {
    regex: Regex,
    kind: &'static str,
}
/// Text/file/symbol search backed by ripgrep with pure-Rust fallbacks.
#[derive(Debug, Clone)]
pub struct SearchService {
    symbol_cache: Arc<Mutex<HashMap<PathBuf, SymbolIndex>>>,
    symbol_patterns: Arc<Vec<SymbolPattern>>,
}
impl SearchService {
    /// Builds the service with the line-based regex patterns used by the
    /// fallback symbol scanner (Rust, Python, JS/TS style declarations).
    pub fn new() -> Self {
        let patterns = vec![
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:pub\s+)?(?:async\s+)?fn\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid fn regex"),
                kind: "function",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:def)\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid def regex"),
                kind: "function",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:export\s+)?function\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid function regex"),
                kind: "function",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:pub\s+)?struct\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid struct regex"),
                kind: "struct",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:pub\s+)?enum\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid enum regex"),
                kind: "enum",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:class)\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid class regex"),
                kind: "class",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:interface)\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid interface regex"),
                kind: "interface",
            },
            SymbolPattern {
                regex: Regex::new(r"^\s*(?:trait)\s+([A-Za-z_][A-Za-z0-9_]*)")
                    .expect("valid trait regex"),
                kind: "trait",
            },
        ];
        Self {
            symbol_cache: Arc::new(Mutex::new(HashMap::new())),
            symbol_patterns: Arc::new(patterns),
        }
    }
    /// Validates and canonicalizes a caller-supplied directory string.
    ///
    /// Returns `InvalidRequest` when the value is empty, missing, not
    /// resolvable, or not a directory.
    pub fn resolve_directory(directory: &str) -> Result<PathBuf, SandboxError> {
        let trimmed = directory.trim();
        if trimmed.is_empty() {
            return Err(SandboxError::InvalidRequest {
                message: "directory is required".to_string(),
            });
        }
        let root = PathBuf::from(trimmed);
        if !root.exists() {
            return Err(SandboxError::InvalidRequest {
                message: "directory does not exist".to_string(),
            });
        }
        let canonical = root.canonicalize().map_err(|_| SandboxError::InvalidRequest {
            message: "directory could not be resolved".to_string(),
        })?;
        if !canonical.is_dir() {
            return Err(SandboxError::InvalidRequest {
                message: "directory is not a folder".to_string(),
            });
        }
        Ok(canonical)
    }
    /// Searches file contents under `root` for `pattern`.
    ///
    /// Tries ripgrep first; when `rg` is not on PATH, falls back to the
    /// built-in regex scanner on the blocking pool.
    pub async fn find_text(
        &self,
        root: PathBuf,
        pattern: String,
        options: Option<FindTextOptions>,
    ) -> Result<Vec<FindTextMatch>, SandboxError> {
        let options = options.unwrap_or_default();
        let limit = resolve_limit(options.limit);
        if limit == 0 {
            return Ok(Vec::new());
        }
        match rg_text_matches(&root, &pattern, limit, options.case_sensitive).await {
            Ok(matches) => Ok(matches),
            Err(RgError::NotAvailable) => {
                let service = self.clone();
                let pattern = pattern.clone();
                tokio::task::spawn_blocking(move || {
                    service.find_text_fallback(&root, &pattern, limit, options.case_sensitive)
                })
                .await
                // A panicked/cancelled blocking task surfaces as StreamError.
                .unwrap_or_else(|_| {
                    Err(SandboxError::StreamError {
                        message: "search failed".to_string(),
                    })
                })
            }
            Err(RgError::InvalidPattern(message)) => Err(SandboxError::InvalidRequest { message }),
            Err(RgError::Failed(message)) => Err(SandboxError::StreamError { message }),
        }
    }
    /// Finds files/directories under `root` whose relative path or file name
    /// matches `query` (substring match, see `matches_query`).
    pub async fn find_files(
        &self,
        root: PathBuf,
        query: String,
        options: FindFileOptions,
    ) -> Result<Vec<String>, SandboxError> {
        let limit = resolve_limit(options.limit);
        if limit == 0 {
            return Ok(Vec::new());
        }
        let service = self.clone();
        tokio::task::spawn_blocking(move || service.find_files_blocking(&root, &query, options, limit))
            .await
            .unwrap_or_else(|_| {
                Err(SandboxError::StreamError {
                    message: "search failed".to_string(),
                })
            })
    }
    /// Finds symbols under `root` whose name contains `query`
    /// (case-insensitive), using the cached per-file symbol index.
    pub async fn find_symbols(
        &self,
        root: PathBuf,
        query: String,
        options: Option<FindSymbolOptions>,
    ) -> Result<Vec<SymbolResult>, SandboxError> {
        let options = options.unwrap_or_default();
        let limit = resolve_limit(options.limit);
        if limit == 0 {
            return Ok(Vec::new());
        }
        let service = self.clone();
        tokio::task::spawn_blocking(move || service.find_symbols_blocking(&root, &query, limit))
            .await
            .unwrap_or_else(|_| {
                Err(SandboxError::StreamError {
                    message: "search failed".to_string(),
                })
            })
    }
    // Pure-Rust text search used when ripgrep is unavailable: walks files
    // (bounded by MAX_FILES/MAX_FILE_BYTES) and applies the pattern per line.
    fn find_text_fallback(
        &self,
        root: &Path,
        pattern: &str,
        limit: usize,
        case_sensitive: Option<bool>,
    ) -> Result<Vec<FindTextMatch>, SandboxError> {
        // Only an explicit `Some(false)` enables case-insensitive matching,
        // mirroring the rg flag handling above.
        let regex = if case_sensitive == Some(false) {
            Regex::with_flags(pattern, "i")
        } else {
            Regex::new(pattern)
        }
        .map_err(|err| SandboxError::InvalidRequest {
            message: format!("invalid pattern: {err}"),
        })?;
        let files = collect_files(root, MAX_FILES, FindFileKind::File);
        let mut results = Vec::new();
        for file in files {
            if results.len() >= limit {
                break;
            }
            let metadata = match fs::metadata(&file) {
                Ok(metadata) => metadata,
                Err(_) => continue,
            };
            if metadata.len() > MAX_FILE_BYTES {
                continue;
            }
            let Ok(handle) = fs::File::open(&file) else {
                continue;
            };
            let reader = BufReader::new(handle);
            let mut absolute_offset = 0u64;
            for (index, line) in reader.lines().enumerate() {
                if results.len() >= limit {
                    break;
                }
                let Ok(line) = line else {
                    continue;
                };
                let line_start = absolute_offset;
                // +1 accounts for the newline stripped by `lines()`.
                // NOTE(review): assumes one-byte line endings; offsets drift
                // on CRLF files — confirm acceptable.
                absolute_offset = absolute_offset.saturating_add(line.as_bytes().len() as u64 + 1);
                for matched in regex.find_iter(&line) {
                    let relative = match file.strip_prefix(root) {
                        Ok(path) => path,
                        Err(_) => continue,
                    };
                    let start = matched.range().start as u64;
                    let end = matched.range().end as u64;
                    results.push(FindTextMatch {
                        path: TextField {
                            text: normalize_path(relative),
                        },
                        lines: TextField {
                            text: line.clone(),
                        },
                        line_number: (index + 1) as u64,
                        absolute_offset: line_start.saturating_add(start),
                        submatches: vec![FindTextSubmatch {
                            match_field: TextField {
                                text: matched.as_str().to_string(),
                            },
                            start,
                            end,
                        }],
                    });
                    if results.len() >= limit {
                        break;
                    }
                }
            }
        }
        Ok(results)
    }
    // Blocking file-name search over the bounded workspace walk.
    fn find_files_blocking(
        &self,
        root: &Path,
        query: &str,
        options: FindFileOptions,
        limit: usize,
    ) -> Result<Vec<String>, SandboxError> {
        let files = collect_files(root, MAX_FILES, options.kind);
        let mut results = Vec::new();
        for path in files {
            if results.len() >= limit {
                break;
            }
            let relative = match path.strip_prefix(root) {
                Ok(path) => path,
                Err(_) => continue,
            };
            let path_text = normalize_path(relative);
            if path_text.is_empty() {
                continue;
            }
            let file_name = path
                .file_name()
                .and_then(|name| name.to_str())
                .unwrap_or("");
            let file_name = normalize_query(file_name, options.case_sensitive);
            let candidate = normalize_query(&path_text, options.case_sensitive);
            // Match against either the full relative path or the bare name.
            if matches_query(&candidate, query, options.case_sensitive)
                || matches_query(&file_name, query, options.case_sensitive)
            {
                results.push(path_text);
            }
        }
        Ok(results)
    }
    // Blocking symbol search: refreshes the per-file symbol cache (keyed by
    // mtime) then filters cached symbols by the lowercased query.
    // NOTE(review): the cache mutex is held across file I/O for the whole
    // refresh — confirm contention is acceptable.
    fn find_symbols_blocking(
        &self,
        root: &Path,
        query: &str,
        limit: usize,
    ) -> Result<Vec<SymbolResult>, SandboxError> {
        let files = collect_files(root, MAX_FILES, FindFileKind::File);
        let mut cache = self
            .symbol_cache
            .lock()
            .expect("symbol cache lock");
        let index = cache.entry(root.to_path_buf()).or_default();
        for file in &files {
            let metadata = match fs::metadata(file) {
                Ok(metadata) => metadata,
                Err(_) => continue,
            };
            if metadata.len() > MAX_FILE_BYTES {
                continue;
            }
            let modified = match metadata.modified() {
                Ok(time) => time,
                Err(_) => continue,
            };
            // Re-extract only when the file is new or its mtime changed.
            let needs_refresh = index
                .files
                .get(file)
                .map(|entry| entry.modified != modified)
                .unwrap_or(true);
            if !needs_refresh {
                continue;
            }
            let symbols = extract_symbols(file, root, &self.symbol_patterns);
            index.files.insert(
                file.clone(),
                FileSymbols {
                    modified,
                    symbols,
                },
            );
        }
        let mut results = Vec::new();
        let query = normalize_query(query, false);
        for entry in index.files.values() {
            for symbol in &entry.symbols {
                if results.len() >= limit {
                    break;
                }
                if normalize_query(&symbol.name, false).contains(&query) {
                    results.push(symbol.clone());
                }
            }
            if results.len() >= limit {
                break;
            }
        }
        Ok(results)
    }
}
/// Failure modes when shelling out to ripgrep for text search.
#[derive(Debug)]
enum RgError {
    /// The `rg` binary could not be spawned (not installed / not on PATH).
    NotAvailable,
    /// ripgrep rejected the search pattern; payload is its stderr text.
    InvalidPattern(String),
    /// Any other spawn or execution failure.
    Failed(String),
}
/// Run ripgrep under `root` and parse its `--json` event stream into
/// `FindTextMatch` entries.
///
/// Returns at most `limit` matches in total. `case_sensitive == Some(false)`
/// maps to `--ignore-case`; `None`/`Some(true)` use ripgrep's default.
///
/// # Errors
/// - `RgError::NotAvailable` when the `rg` binary cannot be found.
/// - `RgError::InvalidPattern` when ripgrep reports a pattern parse error.
/// - `RgError::Failed` for any other failure.
async fn rg_text_matches(
    root: &Path,
    pattern: &str,
    limit: usize,
    case_sensitive: Option<bool>,
) -> Result<Vec<FindTextMatch>, RgError> {
    let mut command = Command::new("rg");
    command
        .arg("--json")
        .arg("--line-number")
        .arg("--byte-offset")
        .arg("--with-filename")
        // `--max-count` caps matches per file; the overall cap across
        // files is enforced in the parse loop below.
        .arg("--max-count")
        .arg(limit.to_string());
    if case_sensitive == Some(false) {
        command.arg("--ignore-case");
    }
    // Pass the pattern via `-e` so patterns that begin with `-` are not
    // misinterpreted as command-line flags by ripgrep.
    command.arg("-e").arg(pattern);
    command.current_dir(root);
    let output = match command.output().await {
        Ok(output) => output,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Err(RgError::NotAvailable),
        Err(err) => return Err(RgError::Failed(err.to_string())),
    };
    if !output.status.success() {
        // Exit code 1 means "no matches", which is not an error.
        if output.status.code() == Some(1) {
            return Ok(Vec::new());
        }
        let stderr = String::from_utf8_lossy(&output.stderr).to_string();
        if stderr.contains("regex parse error") || stderr.contains("error parsing") {
            return Err(RgError::InvalidPattern(stderr.trim().to_string()));
        }
        return Err(RgError::Failed("ripgrep failed".to_string()));
    }
    // ripgrep --json emits one JSON object per line; only `type == "match"`
    // events carry match data. Unparseable lines are skipped silently.
    let mut results = Vec::new();
    for line in output.stdout.split(|b| *b == b'\n') {
        if line.is_empty() {
            continue;
        }
        let Ok(value) = serde_json::from_slice::<serde_json::Value>(line) else {
            continue;
        };
        if value.get("type").and_then(|v| v.as_str()) != Some("match") {
            continue;
        }
        let Some(data) = value.get("data") else {
            continue;
        };
        if let Some(entry) = match_from_rg_data(root, data) {
            results.push(entry);
            if results.len() >= limit {
                break;
            }
        }
    }
    Ok(results)
}
/// Convert one ripgrep `--json` "match" event payload into a
/// `FindTextMatch`, normalizing the path to be relative to `root`.
///
/// Returns `None` when required fields (`path.text`, `line_number`,
/// `absolute_offset`, `lines.text`) are missing — e.g. for non-UTF-8 data,
/// where ripgrep emits `bytes` instead of `text`.
fn match_from_rg_data(root: &Path, data: &serde_json::Value) -> Option<FindTextMatch> {
    let path_text = data
        .get("path")
        .and_then(|v| v.get("text"))
        .and_then(|v| v.as_str())?;
    let line_number = data.get("line_number").and_then(|v| v.as_u64())?;
    let absolute_offset = data.get("absolute_offset").and_then(|v| v.as_u64())?;
    // ripgrep includes the trailing newline in `lines.text`; strip it so
    // callers get just the matched line.
    let line_text = data
        .get("lines")
        .and_then(|v| v.get("text"))
        .and_then(|v| v.as_str())?
        .trim_end_matches('\n')
        .to_string();
    // Submatch `start`/`end` are taken verbatim from ripgrep (byte offsets
    // within the matched line, per its JSON schema).
    let submatches = data
        .get("submatches")
        .and_then(|v| v.as_array())
        .map(|matches| {
            matches
                .iter()
                .filter_map(|submatch| {
                    let match_text = submatch
                        .get("match")
                        .and_then(|v| v.get("text"))
                        .and_then(|v| v.as_str())?;
                    let start = submatch.get("start").and_then(|v| v.as_u64())?;
                    let end = submatch.get("end").and_then(|v| v.as_u64())?;
                    Some(FindTextSubmatch {
                        match_field: TextField {
                            text: match_text.to_string(),
                        },
                        start,
                        end,
                    })
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    // ripgrep reports paths relative to its working directory; anchor them
    // at `root` and re-relativize so output is always root-relative with
    // forward slashes.
    let path = Path::new(path_text);
    let absolute = if path.is_absolute() {
        path.to_path_buf()
    } else {
        root.join(path)
    };
    let relative = absolute.strip_prefix(root).unwrap_or(&absolute);
    Some(FindTextMatch {
        path: TextField {
            text: normalize_path(relative),
        },
        lines: TextField { text: line_text },
        line_number,
        absolute_offset,
        submatches,
    })
}
/// Clamp an optional caller-supplied result limit: absent values fall back
/// to `DEFAULT_LIMIT`, and nothing may exceed `MAX_LIMIT`.
fn resolve_limit(limit: Option<usize>) -> usize {
    let requested = match limit {
        Some(value) => value,
        None => DEFAULT_LIMIT,
    };
    requested.min(MAX_LIMIT)
}
/// Prepare a string for comparison: identity for case-sensitive matching,
/// lowercased otherwise.
fn normalize_query(value: &str, case_sensitive: bool) -> String {
    match case_sensitive {
        true => value.to_string(),
        false => value.to_lowercase(),
    }
}
/// Render a path as a string using forward slashes on every platform
/// (backslashes are rewritten, non-UTF-8 components are lossily replaced).
fn normalize_path(path: &Path) -> String {
    let text = path.to_string_lossy();
    text.chars()
        .map(|ch| if ch == '\\' { '/' } else { ch })
        .collect()
}
/// True when the path's final component is one of the directory names
/// listed in `IGNORED_DIRS`; non-UTF-8 names are never ignored.
fn is_ignored_dir(path: &Path) -> bool {
    match path.file_name().and_then(|name| name.to_str()) {
        Some(name) => IGNORED_DIRS.contains(&name),
        None => false,
    }
}
/// Iteratively walk `root`, returning up to `max_files` paths filtered by
/// `kind` (files, directories, or both).
///
/// Symlinks are skipped entirely (not followed), directories matching
/// `is_ignored_dir` are pruned, and unreadable directories are silently
/// ignored. Traversal is depth-first via an explicit stack, so the result
/// order is not sorted.
fn collect_files(root: &Path, max_files: usize, kind: FindFileKind) -> Vec<PathBuf> {
    let mut entries = Vec::new();
    let mut stack = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let entries_iter = match fs::read_dir(&dir) {
            Ok(entries_iter) => entries_iter,
            Err(_) => continue,
        };
        for entry in entries_iter.flatten() {
            // Hard cap: abandon the whole walk once the budget is spent.
            if entries.len() >= max_files {
                return entries;
            }
            let Ok(file_type) = entry.file_type() else {
                continue;
            };
            // `DirEntry::file_type` does not follow symlinks, so this skips
            // both symlinked files and symlinked directories.
            if file_type.is_symlink() {
                continue;
            }
            let path = entry.path();
            if file_type.is_dir() {
                if is_ignored_dir(&path) {
                    continue;
                }
                if matches!(kind, FindFileKind::Directory | FindFileKind::Any) {
                    entries.push(path.clone());
                }
                stack.push(path);
            } else if file_type.is_file() {
                if matches!(kind, FindFileKind::File | FindFileKind::Any) {
                    entries.push(path);
                }
            }
        }
    }
    entries
}
/// Test `candidate` against `query`: glob semantics when the query contains
/// `*` or `?`, plain substring containment otherwise. Both sides are
/// lowercased first unless `case_sensitive` is set.
fn matches_query(candidate: &str, query: &str, case_sensitive: bool) -> bool {
    let candidate = normalize_query(candidate, case_sensitive);
    let query = normalize_query(query, case_sensitive);
    let has_wildcards = query.contains('*') || query.contains('?');
    if has_wildcards {
        glob_match(&query, &candidate)
    } else {
        candidate.contains(&query)
    }
}
/// Match `text` against glob `pattern` (`*` = any run of bytes including
/// empty, `?` = exactly one byte). Matching is byte-wise, not char-wise.
fn glob_match(pattern: &str, text: &str) -> bool {
    glob_match_inner(pattern.as_bytes(), text.as_bytes())
}
/// Byte-wise glob matcher: `*` matches any (possibly empty) run of bytes,
/// `?` matches exactly one byte, any other byte matches itself.
///
/// Implemented with iterative single-star backtracking instead of the
/// previous naive recursion, which was exponential in the worst case
/// (patterns with many `*`s) and could overflow the stack on long inputs.
/// This version runs in O(pattern.len() * text.len()) worst case with no
/// recursion, and returns identical results.
fn glob_match_inner(pattern: &[u8], text: &[u8]) -> bool {
    let mut p = 0; // current index into `pattern`
    let mut t = 0; // current index into `text`
    // Position of the most recent `*` and the text index it was tried at;
    // on mismatch we rewind here and let that `*` absorb one more byte.
    let mut star: Option<usize> = None;
    let mut star_text = 0;
    while t < text.len() {
        if p < pattern.len() && (pattern[p] == b'?' || pattern[p] == text[t]) {
            p += 1;
            t += 1;
        } else if p < pattern.len() && pattern[p] == b'*' {
            star = Some(p);
            star_text = t;
            p += 1;
        } else if let Some(star_p) = star {
            // Backtrack: extend the last `*` by one byte and retry.
            p = star_p + 1;
            star_text += 1;
            t = star_text;
        } else {
            return false;
        }
    }
    // Trailing `*`s may match the empty suffix.
    while p < pattern.len() && pattern[p] == b'*' {
        p += 1;
    }
    p == pattern.len()
}
/// Parse one file line-by-line, applying each `SymbolPattern` regex and
/// collecting capture group 1 as the symbol name.
///
/// Unreadable files yield no symbols; lines that fail to read are skipped.
/// Reported `line`/`column` are 1-based.
fn extract_symbols(path: &Path, root: &Path, patterns: &[SymbolPattern]) -> Vec<SymbolResult> {
    let Ok(file) = fs::File::open(path) else {
        return Vec::new();
    };
    let reader = BufReader::new(file);
    let mut symbols = Vec::new();
    for (index, line) in reader.lines().enumerate() {
        let Ok(line) = line else {
            continue;
        };
        for pattern in patterns {
            for matched in pattern.regex.find_iter(&line) {
                // Capture group 1 is expected to be the identifier.
                // NOTE(review): `matched.group(1)` appears to yield a byte
                // range into `line` (it is used to slice `line` below) —
                // confirm against the regex crate in use; this is not the
                // standard `regex::Match` API.
                let Some(group) = matched.group(1) else {
                    continue;
                };
                let name = line.get(group.clone()).unwrap_or("").to_string();
                if name.is_empty() {
                    continue;
                }
                // Only index files that live under `root`.
                let relative = match path.strip_prefix(root) {
                    Ok(path) => path,
                    Err(_) => continue,
                };
                symbols.push(SymbolResult {
                    name,
                    kind: pattern.kind.to_string(),
                    path: normalize_path(relative),
                    line: index + 1,
                    column: group.start + 1,
                });
            }
        }
    }
    symbols
}

View file

@ -0,0 +1,80 @@
include!("../common/http.rs");
/// Assert that the formatter status payload (a JSON array) contains an
/// entry named `name` that is enabled and advertises file extension `ext`.
/// Panics with a descriptive message on any mismatch.
fn expect_formatter(payload: &serde_json::Value, name: &str, ext: &str) {
    let entries = payload
        .as_array()
        .unwrap_or_else(|| panic!("formatter payload should be array: {payload}"));
    let entry = entries
        .iter()
        .find(|value| value.get("name").and_then(|v| v.as_str()) == Some(name))
        .unwrap_or_else(|| panic!("formatter {name} not found in {entries:?}"));
    let enabled = entry.get("enabled").and_then(|value| value.as_bool());
    assert_eq!(enabled, Some(true), "formatter {name} should be enabled");
    let extensions = entry
        .get("extensions")
        .and_then(|value| value.as_array())
        .unwrap_or_else(|| panic!("formatter {name} extensions missing: {entry}"));
    let has_ext = extensions
        .iter()
        .any(|value| value.as_str() == Some(ext));
    assert!(has_ext, "formatter {name} missing extension {ext}");
}
/// Assert that the LSP status payload (a JSON array) contains server `id`
/// rooted at `root` with a status of "connected" or "error" ("error" is
/// acceptable because the language server binary may be absent in CI).
fn expect_lsp(payload: &serde_json::Value, id: &str, root: &str) {
    let entries = payload
        .as_array()
        .unwrap_or_else(|| panic!("lsp payload should be array: {payload}"));
    let entry = entries
        .iter()
        .find(|value| value.get("id").and_then(|v| v.as_str()) == Some(id))
        .unwrap_or_else(|| panic!("lsp {id} not found in {entries:?}"));
    let entry_root = entry
        .get("root")
        .and_then(|value| value.as_str())
        .unwrap_or("");
    assert_eq!(entry_root, root, "lsp {id} root mismatch");
    let status = entry
        .get("status")
        .and_then(|value| value.as_str())
        .unwrap_or("");
    assert!(
        matches!(status, "connected" | "error"),
        "lsp {id} status unexpected: {status}"
    );
}
/// End-to-end check that the formatter and LSP status endpoints report
/// entries for a workspace containing both Rust and TypeScript sources.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn opencode_formatter_and_lsp_status() {
    let app = TestApp::new();
    let tempdir = tempfile::tempdir().expect("tempdir");
    let root = tempdir.path();
    // Seed the workspace so both language toolchains are detected.
    std::fs::create_dir_all(root.join("src")).expect("create src");
    std::fs::write(root.join("src/main.rs"), "fn main() {}\n").expect("write rs");
    std::fs::write(root.join("src/app.ts"), "export const x = 1;\n").expect("write ts");
    let root_str = root
        .to_str()
        .unwrap_or_else(|| panic!("tempdir path not utf8: {root:?}"));
    // The compat layer resolves the workspace from this request header.
    let formatter_request = Request::builder()
        .method(Method::GET)
        .uri("/opencode/formatter")
        .header("x-opencode-directory", root_str)
        .body(Body::empty())
        .expect("formatter request");
    let (status, _headers, payload) = send_json_request(&app.app, formatter_request).await;
    assert_eq!(status, StatusCode::OK, "formatter status");
    expect_formatter(&payload, "rustfmt", ".rs");
    expect_formatter(&payload, "prettier", ".ts");
    let lsp_request = Request::builder()
        .method(Method::GET)
        .uri("/opencode/lsp")
        .header("x-opencode-directory", root_str)
        .body(Body::empty())
        .expect("lsp request");
    let (status, _headers, payload) = send_json_request(&app.app, lsp_request).await;
    assert_eq!(status, StatusCode::OK, "lsp status");
    expect_lsp(&payload, "rust-analyzer", root_str);
    expect_lsp(&payload, "typescript-language-server", root_str);
}

View file

@ -1,2 +1,3 @@
#[path = "http/agent_endpoints.rs"]
mod agent_endpoints;
mod opencode_formatter_lsp;

View file

@ -13,6 +13,88 @@ import { describe, it, expect, beforeAll, beforeEach, afterEach } from "vitest";
import { createOpencodeClient, type OpencodeClient } from "@opencode-ai/sdk";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
type SseEvent = {
  id?: string;
  data: any;
};

/**
 * Parse one raw SSE frame (the text between blank-line separators) into its
 * `id` and JSON-decoded `data`. Comment lines (starting with ":") and empty
 * lines are ignored; returns null when the frame carries no data lines or
 * the joined data is not valid JSON.
 */
function parseSseMessage(raw: string): SseEvent | null {
  const normalized = raw.replace(/\r\n/g, "\n");
  let id: string | undefined;
  const dataLines: string[] = [];
  for (const line of normalized.split("\n")) {
    if (!line || line.startsWith(":")) continue;
    if (line.startsWith("id:")) {
      id = line.slice(3).trim();
    } else if (line.startsWith("data:")) {
      dataLines.push(line.slice(5).trimStart());
    }
  }
  if (dataLines.length === 0) return null;
  const dataText = dataLines.join("\n");
  try {
    return { id, data: JSON.parse(dataText) };
  } catch {
    return null;
  }
}
/**
 * Open an SSE stream at `url` and collect up to `limit` parsed events,
 * aborting the fetch once the limit or `timeoutMs` is reached.
 *
 * Frames are separated by blank lines ("\n\n"); partial frames are buffered
 * across chunks. Errors raised because we aborted the stream ourselves are
 * swallowed deliberately.
 */
async function collectSseEvents(
  url: string,
  options: { headers: Record<string, string>; limit: number; timeoutMs: number }
): Promise<SseEvent[]> {
  const { headers, limit, timeoutMs } = options;
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), timeoutMs);
  const response = await fetch(url, { headers, signal: controller.signal });
  expect(response.ok).toBe(true);
  if (!response.body) {
    clearTimeout(timeout);
    throw new Error("SSE response missing body");
  }
  const decoder = new TextDecoder();
  let buffer = "";
  const events: SseEvent[] = [];
  try {
    for await (const chunk of response.body as any) {
      buffer += decoder.decode(chunk, { stream: true });
      // Drain every complete frame currently in the buffer.
      let boundary = buffer.indexOf("\n\n");
      while (boundary >= 0) {
        const raw = buffer.slice(0, boundary);
        buffer = buffer.slice(boundary + 2);
        const parsed = parseSseMessage(raw);
        if (parsed) {
          events.push(parsed);
          if (events.length >= limit) {
            // Got enough events — tear the connection down early.
            controller.abort();
            clearTimeout(timeout);
            return events;
          }
        }
        boundary = buffer.indexOf("\n\n");
      }
    }
  } catch (error) {
    // Only suppress failures caused by our own abort (limit/timeout).
    if (!controller.signal.aborted) {
      throw error;
    }
  }
  clearTimeout(timeout);
  return events;
}
describe("OpenCode-compatible Event Streaming", () => {
let handle: SandboxAgentHandle;
let client: OpencodeClient;
@ -125,6 +207,81 @@ describe("OpenCode-compatible Event Streaming", () => {
// Should have received some events
expect(events.length).toBeGreaterThan(0);
});
// Verifies that /opencode/event supports offset-based replay: event ids are
// strictly increasing, and reconnecting with ?offset=N only yields events
// newer than N.
it("should replay ordered events by offset", async () => {
  const headers = { Authorization: `Bearer ${handle.token}` };
  const eventUrl = `${handle.baseUrl}/opencode/event?offset=0`;
  // Start listening before producing any events so none are missed.
  const initialEventsPromise = collectSseEvents(eventUrl, {
    headers,
    limit: 10,
    timeoutMs: 10000,
  });
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "sandbox-agent", modelID: "mock" },
      parts: [{ type: "text", text: "Say hello" }],
    },
  });
  const initialEvents = await initialEventsPromise;
  // Heartbeats carry no ordering guarantees; ignore them throughout.
  const filteredInitial = initialEvents.filter(
    (event) => event.data?.type && event.data.type !== "server.heartbeat"
  );
  expect(filteredInitial.length).toBeGreaterThan(0);
  const ids = filteredInitial
    .map((event) => Number(event.id))
    .filter((value) => Number.isFinite(value));
  expect(ids.length).toBeGreaterThan(0);
  // Ids must be strictly monotonically increasing.
  for (let i = 1; i < ids.length; i += 1) {
    expect(ids[i]).toBeGreaterThan(ids[i - 1]);
  }
  const types = new Set(filteredInitial.map((event) => event.data.type));
  expect(types.has("session.status")).toBe(true);
  expect(types.has("message.updated")).toBe(true);
  expect(types.has("message.part.updated")).toBe(true);
  const partEvent = filteredInitial.find(
    (event) => event.data.type === "message.part.updated"
  );
  expect(partEvent?.data?.properties?.part).toBeDefined();
  // Produce more events, then reconnect from the last seen id.
  const lastId = Math.max(...ids);
  const followupSession = await client.session.create();
  const followupId = followupSession.data?.id!;
  await client.session.prompt({
    path: { id: followupId },
    body: {
      model: { providerID: "sandbox-agent", modelID: "mock" },
      parts: [{ type: "text", text: "Say hi again" }],
    },
  });
  const replayEvents = await collectSseEvents(
    `${handle.baseUrl}/opencode/event?offset=${lastId}`,
    {
      headers,
      limit: 8,
      timeoutMs: 10000,
    }
  );
  const filteredReplay = replayEvents.filter(
    (event) => event.data?.type && event.data.type !== "server.heartbeat"
  );
  expect(filteredReplay.length).toBeGreaterThan(0);
  const replayIds = filteredReplay
    .map((event) => Number(event.id))
    .filter((value) => Number.isFinite(value));
  expect(replayIds.length).toBeGreaterThan(0);
  // Replay must only deliver events strictly after the requested offset.
  expect(Math.min(...replayIds)).toBeGreaterThan(lastId);
});
});
describe("global.event", () => {

View file

@ -0,0 +1,60 @@
import { describe, it, expect, beforeAll, beforeEach, afterEach } from "vitest";
import { createOpencodeClient, type OpencodeClient } from "@opencode-ai/sdk";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";
const __dirname = dirname(fileURLToPath(import.meta.url));
const fixtureRoot = resolve(__dirname, "fixtures/search-repo");
// Exercises the find.text / find.files / find.symbols SDK surface against a
// checked-in fixture repo (tests/fixtures/search-repo). Each test spawns a
// fresh agent pinned to the fixture directory.
describe("OpenCode-compatible Find API", () => {
  let handle: SandboxAgentHandle;
  let client: OpencodeClient;
  beforeAll(async () => {
    await buildSandboxAgent();
  });
  beforeEach(async () => {
    handle = await spawnSandboxAgent({
      opencodeCompat: true,
      env: {
        OPENCODE_COMPAT_DIRECTORY: fixtureRoot,
        OPENCODE_COMPAT_WORKTREE: fixtureRoot,
      },
    });
    client = createOpencodeClient({
      baseUrl: `${handle.baseUrl}/opencode`,
      headers: { Authorization: `Bearer ${handle.token}` },
    });
  });
  afterEach(async () => {
    await handle?.dispose();
  });
  it("should find matching text", async () => {
    // "Needle" appears in the fixture's README.md.
    const response = await client.find.text({
      query: { directory: fixtureRoot, pattern: "Needle" },
    });
    const results = (response as any).data ?? [];
    expect(results.length).toBeGreaterThan(0);
    expect(results.some((match: any) => match.path?.text?.includes("README.md"))).toBe(true);
  });
  it("should find matching files", async () => {
    const response = await client.find.files({
      query: { directory: fixtureRoot, query: "example.ts" },
    });
    const results = (response as any).data ?? [];
    // Paths come back root-relative with forward slashes.
    expect(results).toContain("src/example.ts");
  });
  it("should find matching symbols", async () => {
    // The fixture exports a `greet` function.
    const response = await client.find.symbols({
      query: { directory: fixtureRoot, query: "greet" },
    });
    const results = (response as any).data ?? [];
    expect(results.some((symbol: any) => symbol.name === "greet")).toBe(true);
  });
});

View file

@ -0,0 +1,7 @@
class Finder:
def search(self, value):
return value
def find_symbol(value):
return value

View file

@ -0,0 +1,11 @@
export class SearchWidget {
render() {
return "SearchWidget";
}
}
export function findMatches(input: string) {
return input.includes("match");
}
export const SEARCH_TOKEN = "SearchWidget";

View file

@ -0,0 +1,3 @@
Search fixture repo
Needle line for text search.

View file

@ -0,0 +1,13 @@
export function greet(name: string): string {
return `Hello, ${name}`;
}
export class Greeter {
constructor(private readonly name: string) {}
sayHello(): string {
return greet(this.name);
}
}
export const DEFAULT_MESSAGE = "Needle says hello";

View file

@ -0,0 +1,80 @@
/**
* Tests for OpenCode-compatible formatter + LSP endpoints.
*/
import { describe, it, expect, beforeAll, beforeEach, afterEach } from "vitest";
import { createOpencodeClient, type OpencodeClient } from "@opencode-ai/sdk";
import { mkdtemp, writeFile, rm } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
// Verifies formatter.status and lsp.status against a temp workspace seeded
// with one Rust and one TypeScript file, so both toolchains are reported.
describe("OpenCode-compatible Formatter + LSP status", () => {
  let handle: SandboxAgentHandle;
  let client: OpencodeClient;
  let workspaceDir: string;
  beforeAll(async () => {
    await buildSandboxAgent();
  });
  beforeEach(async () => {
    workspaceDir = await mkdtemp(join(tmpdir(), "opencode-compat-"));
    await writeFile(join(workspaceDir, "main.rs"), "fn main() {}\n");
    await writeFile(join(workspaceDir, "app.ts"), "const value = 1;\n");
    handle = await spawnSandboxAgent({
      opencodeCompat: true,
      env: {
        OPENCODE_COMPAT_DIRECTORY: workspaceDir,
        OPENCODE_COMPAT_WORKTREE: workspaceDir,
      },
    });
    client = createOpencodeClient({
      baseUrl: `${handle.baseUrl}/opencode`,
      headers: { Authorization: `Bearer ${handle.token}` },
    });
  });
  afterEach(async () => {
    await handle?.dispose();
    if (workspaceDir) {
      await rm(workspaceDir, { recursive: true, force: true });
    }
  });
  it("should report formatter status for workspace languages", async () => {
    const response = await client.formatter.status({ query: { directory: workspaceDir } });
    const entries = response.data ?? [];
    expect(Array.isArray(entries)).toBe(true);
    expect(entries.length).toBeGreaterThan(0);
    // Both seeded languages must be represented by extension.
    const hasRust = entries.some((entry: any) => entry.extensions?.includes(".rs"));
    const hasTs = entries.some((entry: any) => entry.extensions?.includes(".ts"));
    expect(hasRust).toBe(true);
    expect(hasTs).toBe(true);
    for (const entry of entries) {
      expect(typeof entry.enabled).toBe("boolean");
    }
  });
  it("should report lsp status for workspace languages", async () => {
    const response = await client.lsp.status({ query: { directory: workspaceDir } });
    const entries = response.data ?? [];
    expect(Array.isArray(entries)).toBe(true);
    expect(entries.length).toBeGreaterThan(0);
    const hasRust = entries.some((entry: any) => entry.id === "rust-analyzer");
    const hasTs = entries.some((entry: any) => entry.id === "typescript-language-server");
    expect(hasRust).toBe(true);
    expect(hasTs).toBe(true);
    for (const entry of entries) {
      expect(entry.root).toBe(workspaceDir);
      // "error" is allowed: the language server binary may be absent in CI.
      expect(["connected", "error"]).toContain(entry.status);
    }
  });
});

View file

@ -9,10 +9,12 @@
},
"devDependencies": {
"@types/node": "^22.0.0",
"@types/ws": "^8.5.10",
"typescript": "^5.7.0",
"vitest": "^3.0.0"
},
"dependencies": {
"@opencode-ai/sdk": "^1.1.21"
"@opencode-ai/sdk": "^1.1.21",
"ws": "^8.18.0"
}
}

View file

@ -0,0 +1,112 @@
/**
* Tests for OpenCode-compatible PTY endpoints.
*/
import { describe, it, expect, beforeAll, beforeEach, afterEach } from "vitest";
import { WebSocket } from "ws";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
// Covers the PTY REST + WebSocket surface: spawn, bidirectional I/O over
// /pty/:id/connect, and teardown via DELETE.
describe("OpenCode-compatible PTY API", () => {
  let handle: SandboxAgentHandle;
  beforeAll(async () => {
    await buildSandboxAgent();
  });
  beforeEach(async () => {
    handle = await spawnSandboxAgent({ opencodeCompat: true });
  });
  afterEach(async () => {
    await handle?.dispose();
  });
  // POST /opencode/pty — spawn a pty running `body.command`.
  async function createPty(body: Record<string, unknown>) {
    const response = await fetch(`${handle.baseUrl}/opencode/pty`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${handle.token}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(body),
    });
    const data = await response.json();
    return { response, data };
  }
  // DELETE /opencode/pty/:id — best-effort cleanup after each test.
  async function deletePty(id: string) {
    await fetch(`${handle.baseUrl}/opencode/pty/${id}`, {
      method: "DELETE",
      headers: { Authorization: `Bearer ${handle.token}` },
    });
  }
  // Open the pty's WebSocket; resolves once the connection is established.
  async function connectPty(id: string): Promise<WebSocket> {
    const wsUrl = `${handle.baseUrl.replace("http", "ws")}/opencode/pty/${id}/connect`;
    return new Promise((resolve, reject) => {
      const ws = new WebSocket(wsUrl, {
        headers: { Authorization: `Bearer ${handle.token}` },
      });
      ws.once("open", () => resolve(ws));
      ws.once("error", (err) => reject(err));
    });
  }
  // Resolve with the first message whose text satisfies `matcher`;
  // non-matching messages are ignored and a timeout rejects the promise.
  function waitForOutput(
    ws: WebSocket,
    matcher: (text: string) => boolean,
    timeoutMs = 5000
  ): Promise<string> {
    return new Promise((resolve, reject) => {
      const timer = setTimeout(() => reject(new Error("timed out waiting for output")), timeoutMs);
      const onMessage = (data: WebSocket.RawData) => {
        const text = data.toString();
        if (matcher(text)) {
          clearTimeout(timer);
          ws.off("message", onMessage);
          resolve(text);
        }
      };
      ws.on("message", onMessage);
    });
  }
  it("should spawn a pty session", async () => {
    const { response, data } = await createPty({ command: "sh" });
    expect(response.ok).toBe(true);
    expect(data.id).toMatch(/^pty_/);
    expect(data.status).toBe("running");
    expect(typeof data.pid).toBe("number");
    await deletePty(data.id);
  });
  it("should capture output from pty", async () => {
    const { data } = await createPty({ command: "sh" });
    const ws = await connectPty(data.id);
    // Register the listener before sending input to avoid a race.
    const outputPromise = waitForOutput(ws, (text) => text.includes("hello-pty"));
    ws.send("echo hello-pty\n");
    const output = await outputPromise;
    expect(output).toContain("hello-pty");
    ws.close();
    await deletePty(data.id);
  });
  it("should echo input back through pty", async () => {
    // `cat` echoes stdin, proving input reaches the child process.
    const { data } = await createPty({ command: "cat" });
    const ws = await connectPty(data.id);
    const outputPromise = waitForOutput(ws, (text) => text.includes("ping-pty"));
    ws.send("ping-pty\n");
    const output = await outputPromise;
    expect(output).toContain("ping-pty");
    ws.close();
    await deletePty(data.id);
  });
});

View file

@ -0,0 +1,97 @@
/**
* Tests for OpenCode-compatible search endpoints.
*/
import { describe, it, expect, beforeAll, beforeEach, afterEach } from "vitest";
import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
const __dirname = dirname(fileURLToPath(import.meta.url));
const fixtureRoot = resolve(__dirname, "fixtures/search-fixture");
// Exercises the raw /opencode/find, /find/file and /find/symbol HTTP
// endpoints (no SDK) against the checked-in search-fixture directory.
describe("OpenCode-compatible Search API", () => {
  let handle: SandboxAgentHandle;
  beforeAll(async () => {
    await buildSandboxAgent();
  });
  beforeEach(async () => {
    handle = await spawnSandboxAgent({
      opencodeCompat: true,
      env: {
        OPENCODE_COMPAT_DIRECTORY: fixtureRoot,
        OPENCODE_COMPAT_WORKTREE: fixtureRoot,
      },
    });
  });
  afterEach(async () => {
    await handle?.dispose();
  });
  it("should return text matches", async () => {
    const url = new URL(`${handle.baseUrl}/opencode/find`);
    url.searchParams.set("pattern", "SearchWidget");
    url.searchParams.set("limit", "10");
    const response = await fetch(url, {
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    expect(response.ok).toBe(true);
    const data = await response.json();
    expect(Array.isArray(data)).toBe(true);
    // Match payloads use ripgrep-style { path: { text }, lines: { text } }.
    const hit = data.find((entry: any) => entry?.path?.text?.endsWith("src/app.ts"));
    expect(hit).toBeDefined();
    expect(hit?.lines?.text).toContain("SearchWidget");
  });
  it("should respect case-insensitive search", async () => {
    // Lowercased pattern only matches when caseSensitive=false is honored.
    const url = new URL(`${handle.baseUrl}/opencode/find`);
    url.searchParams.set("pattern", "searchwidget");
    url.searchParams.set("caseSensitive", "false");
    url.searchParams.set("limit", "10");
    const response = await fetch(url, {
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    expect(response.ok).toBe(true);
    const data = await response.json();
    const hit = data.find((entry: any) => entry?.path?.text?.endsWith("src/app.ts"));
    expect(hit).toBeDefined();
  });
  it("should return file and symbol hits", async () => {
    // File lookup supports glob queries.
    const filesUrl = new URL(`${handle.baseUrl}/opencode/find/file`);
    filesUrl.searchParams.set("query", "src/*.ts");
    filesUrl.searchParams.set("limit", "10");
    const filesResponse = await fetch(filesUrl, {
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    expect(filesResponse.ok).toBe(true);
    const files = await filesResponse.json();
    expect(Array.isArray(files)).toBe(true);
    expect(files).toContain("src/app.ts");
    const symbolsUrl = new URL(`${handle.baseUrl}/opencode/find/symbol`);
    symbolsUrl.searchParams.set("query", "findMatches");
    symbolsUrl.searchParams.set("limit", "10");
    const symbolsResponse = await fetch(symbolsUrl, {
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    expect(symbolsResponse.ok).toBe(true);
    const symbols = await symbolsResponse.json();
    expect(Array.isArray(symbols)).toBe(true);
    // Symbol results are LSP-shaped: { name, location: { uri, ... } }.
    const match = symbols.find((entry: any) => entry?.name === "findMatches");
    expect(match).toBeDefined();
    expect(match?.location?.uri).toContain("src/app.ts");
  });
});

View file

@ -13,12 +13,16 @@
*/
import { describe, it, expect, beforeAll, afterAll, beforeEach, afterEach } from "vitest";
import { mkdtemp } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { createOpencodeClient, type OpencodeClient } from "@opencode-ai/sdk";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
describe("OpenCode-compatible Session API", () => {
let handle: SandboxAgentHandle;
let client: OpencodeClient;
let stateDir: string;
beforeAll(async () => {
// Build the binary if needed
@ -27,7 +31,11 @@ describe("OpenCode-compatible Session API", () => {
beforeEach(async () => {
// Spawn a fresh sandbox-agent instance for each test
handle = await spawnSandboxAgent({ opencodeCompat: true });
stateDir = await mkdtemp(join(tmpdir(), "opencode-state-"));
handle = await spawnSandboxAgent({
opencodeCompat: true,
env: { OPENCODE_COMPAT_STATE: stateDir },
});
client = createOpencodeClient({
baseUrl: `${handle.baseUrl}/opencode`,
headers: { Authorization: `Bearer ${handle.token}` },
@ -145,4 +153,41 @@ describe("OpenCode-compatible Session API", () => {
expect(response.data?.title).toBe("Keep");
});
});
// Restart the agent against the same OPENCODE_COMPAT_STATE directory and
// verify sessions (title edits, share links) survive, and that new ids do
// not collide with persisted ones.
describe("session.persistence", () => {
  it("should persist sessions across restarts", async () => {
    const created = await client.session.create({ body: { title: "Persistent" } });
    const sessionId = created.data?.id!;
    await client.session.update({
      path: { id: sessionId },
      body: { title: "Updated" },
    });
    // Sharing adds state that must also be persisted.
    await fetch(`${handle.baseUrl}/opencode/session/${sessionId}/share`, {
      method: "POST",
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    // Restart the agent pointed at the same state directory.
    await handle.dispose();
    handle = await spawnSandboxAgent({
      opencodeCompat: true,
      env: { OPENCODE_COMPAT_STATE: stateDir },
    });
    client = createOpencodeClient({
      baseUrl: `${handle.baseUrl}/opencode`,
      headers: { Authorization: `Bearer ${handle.token}` },
    });
    const list = await client.session.list();
    const persisted = list.data?.find((session) => session.id === sessionId);
    expect(persisted).toBeDefined();
    expect(persisted?.title).toBe("Updated");
    expect(persisted?.share?.url).toContain(sessionId);
    // New sessions after restart must not reuse persisted ids.
    const next = await client.session.create();
    expect(next.data?.id).not.toBe(sessionId);
  });
});
});

View file

@ -37,6 +37,8 @@ describe("OpenCode-compatible Tool + File Actions", () => {
tool: false,
file: false,
edited: false,
pending: false,
completed: false,
};
const waiter = new Promise<void>((resolve, reject) => {
@ -48,6 +50,12 @@ describe("OpenCode-compatible Tool + File Actions", () => {
const part = event.properties?.part;
if (part?.type === "tool") {
tracker.tool = true;
if (part?.state?.status === "pending") {
tracker.pending = true;
}
if (part?.state?.status === "completed") {
tracker.completed = true;
}
}
if (part?.type === "file") {
tracker.file = true;
@ -56,7 +64,13 @@ describe("OpenCode-compatible Tool + File Actions", () => {
if (event.type === "file.edited") {
tracker.edited = true;
}
if (tracker.tool && tracker.file && tracker.edited) {
if (
tracker.tool &&
tracker.file &&
tracker.edited &&
tracker.pending &&
tracker.completed
) {
clearTimeout(timeout);
resolve();
break;
@ -82,4 +96,49 @@ describe("OpenCode-compatible Tool + File Actions", () => {
expect(tracker.file).toBe(true);
expect(tracker.edited).toBe(true);
});
// Watches the event stream for tool part updates and asserts the full
// lifecycle is observed: pending -> running -> completed (or error).
it("should emit tool lifecycle states", async () => {
  const eventStream = await client.event.subscribe();
  const statuses = new Set<string>();
  const waiter = new Promise<void>((resolve, reject) => {
    const timeout = setTimeout(() => reject(new Error("Timed out waiting for tool lifecycle")), 15_000);
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event.type === "message.part.updated") {
            const part = event.properties?.part;
            if (part?.type === "tool") {
              const status = part?.state?.status;
              if (status) {
                statuses.add(status);
              }
            }
          }
          // Stop once every lifecycle phase has been seen; "error" is an
          // acceptable terminal state alongside "completed".
          if (statuses.has("pending") && statuses.has("running") && (statuses.has("completed") || statuses.has("error"))) {
            clearTimeout(timeout);
            resolve();
            break;
          }
        }
      } catch (err) {
        clearTimeout(timeout);
        reject(err);
      }
    })();
  });
  // The mock model's "tool" prompt triggers a tool invocation.
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "sandbox-agent", modelID: "mock" },
      parts: [{ type: "text", text: "tool" }],
    },
  });
  await waiter;
  expect(statuses.has("pending")).toBe(true);
  expect(statuses.has("running")).toBe(true);
  expect(statuses.has("completed") || statuses.has("error")).toBe(true);
});
});

View file

@ -0,0 +1,186 @@
use std::sync::Arc;
use axum::extract::State;
use axum::http::{HeaderMap, StatusCode};
use axum::response::IntoResponse;
use axum::routing::post;
use axum::{Json, Router};
use serde_json::{json, Value};
use tokio::net::TcpListener;
include!("common/http.rs");
/// Shared state for the fake MCP server: the bearer token it requires on
/// every request.
#[derive(Clone)]
struct McpTestState {
    token: String,
}
/// Minimal JSON-RPC handler emulating an MCP server for tests: rejects
/// requests without the expected bearer token, answers `initialize` and
/// `tools/list` (advertising a single "weather" tool), and returns a
/// JSON-RPC "method not found" error for everything else.
async fn mcp_handler(
    State(state): State<Arc<McpTestState>>,
    headers: HeaderMap,
    Json(body): Json<Value>,
) -> impl IntoResponse {
    let expected = format!("Bearer {}", state.token);
    let auth = headers
        .get(axum::http::header::AUTHORIZATION)
        .and_then(|value| value.to_str().ok());
    if auth != Some(expected.as_str()) {
        return (
            StatusCode::UNAUTHORIZED,
            Json(json!({"error": "unauthorized"})),
        );
    }
    // Echo the request id back per JSON-RPC; default to null when absent.
    let method = body.get("method").and_then(|value| value.as_str()).unwrap_or("");
    let id = body.get("id").cloned().unwrap_or_else(|| json!(null));
    match method {
        "initialize" => (
            StatusCode::OK,
            Json(json!({
                "jsonrpc": "2.0",
                "id": id,
                "result": {"serverInfo": {"name": "mcp-test", "version": "0.1.0"}}
            })),
        ),
        "tools/list" => (
            StatusCode::OK,
            Json(json!({
                "jsonrpc": "2.0",
                "id": id,
                "result": {
                    "tools": [
                        {
                            "name": "weather",
                            "description": "Get weather",
                            "inputSchema": {
                                "type": "object",
                                "properties": {
                                    "city": {"type": "string"}
                                },
                                "required": ["city"]
                            }
                        }
                    ]
                }
            })),
        ),
        _ => (
            StatusCode::BAD_REQUEST,
            Json(json!({
                "jsonrpc": "2.0",
                "id": id,
                "error": {"code": -32601, "message": "method not found"}
            })),
        ),
    }
}
/// Bind the fake MCP server on a random localhost port. Returns the full
/// `/mcp` endpoint URL and the serving task's handle (the task runs until
/// the runtime shuts down or the handle is aborted).
async fn spawn_mcp_server(token: &str) -> (String, tokio::task::JoinHandle<()>) {
    let state = Arc::new(McpTestState {
        token: token.to_string(),
    });
    let app = Router::new().route("/mcp", post(mcp_handler)).with_state(state);
    // Port 0 lets the OS pick a free port, avoiding collisions across tests.
    let listener = TcpListener::bind("127.0.0.1:0")
        .await
        .expect("bind mcp listener");
    let addr = listener.local_addr().expect("local addr");
    let handle = tokio::spawn(async move {
        axum::serve(listener, app).await.expect("mcp server");
    });
    (format!("http://{}/mcp", addr), handle)
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn opencode_mcp_auth_and_tools() {
    // End-to-end flow: register a remote MCP server, complete its auth
    // handshake, connect, and verify its tools appear on the experimental
    // tool endpoints.
    let token = "mcp-test-token";
    let (mcp_url, handle) = spawn_mcp_server(token).await;
    let app = TestApp::new();
    // Register the server. An empty `oauth` object marks it as requiring
    // authentication, so the initial status should be `needs_auth`.
    let (status, payload) = send_json(
        &app.app,
        Method::POST,
        "/opencode/mcp",
        Some(json!({
            "name": "test",
            "config": {
                "type": "remote",
                "url": mcp_url,
                "oauth": {},
                "headers": {}
            }
        })),
    )
    .await;
    assert_eq!(status, StatusCode::OK, "register mcp server");
    assert_eq!(
        payload
            .get("test")
            .and_then(|value| value.get("status"))
            .and_then(|value| value.as_str()),
        Some("needs_auth")
    );
    // Begin auth: the response must include an authorization URL to visit.
    let (status, payload) = send_json(&app.app, Method::POST, "/opencode/mcp/test/auth", None).await;
    assert_eq!(status, StatusCode::OK, "start mcp auth");
    assert!(
        payload
            .get("authorizationUrl")
            .and_then(|value| value.as_str())
            .is_some(),
        "authorizationUrl missing"
    );
    // Complete auth via the callback. The mock server's bearer token doubles
    // as the auth code here; afterwards the server reports `connected`.
    let (status, payload) = send_json(
        &app.app,
        Method::POST,
        "/opencode/mcp/test/auth/callback",
        Some(json!({"code": token})),
    )
    .await;
    assert_eq!(status, StatusCode::OK, "complete mcp auth");
    assert_eq!(
        payload
            .get("status")
            .and_then(|value| value.as_str()),
        Some("connected")
    );
    // Explicit connect returns a bare `true` on success.
    let (status, payload) =
        send_json(&app.app, Method::POST, "/opencode/mcp/test/connect", None).await;
    assert_eq!(status, StatusCode::OK, "connect mcp server");
    assert_eq!(payload, json!(true));
    // Tool IDs are namespaced as `mcp:<server>:<tool>`.
    let (status, payload) = send_json(
        &app.app,
        Method::GET,
        "/opencode/experimental/tool/ids?provider=sandbox-agent&model=mock",
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "tool ids");
    let ids = payload.as_array().expect("tool ids array");
    assert!(
        ids.contains(&Value::String("mcp:test:weather".to_string())),
        "missing tool id"
    );
    // The full tool listing carries the metadata served by `tools/list`.
    let (status, payload) = send_json(
        &app.app,
        Method::GET,
        "/opencode/experimental/tool?provider=sandbox-agent&model=mock",
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "tool list");
    let tools = payload.as_array().expect("tools array");
    let tool = tools
        .iter()
        .find(|tool| tool.get("id").and_then(|value| value.as_str()) == Some("mcp:test:weather"))
        .expect("mcp tool entry");
    assert_eq!(
        tool.get("description").and_then(|value| value.as_str()),
        Some("Get weather")
    );
    // Shut down the mock MCP server task.
    handle.abort();
}

View file

@ -6,6 +6,7 @@ use crate::{
ItemStatus, ReasoningVisibility, SessionEndReason, SessionEndedData, SessionStartedData,
TerminatedBy, UniversalEventData, UniversalEventType, UniversalItem,
};
use serde_json::Value;
/// Convert a Codex ServerNotification to universal events.
pub fn notification_to_universal(
@ -257,6 +258,26 @@ fn thread_item_to_item(item: &schema::ThreadItem, status: ItemStatus) -> Univers
status: exec_status,
..
} => {
if status == ItemStatus::InProgress {
let arguments = serde_json::json!({
"command": command,
"cwd": cwd,
})
.to_string();
return UniversalItem {
item_id: String::new(),
native_item_id: Some(id.clone()),
parent_id: None,
kind: ItemKind::ToolCall,
role: Some(ItemRole::Assistant),
content: vec![ContentPart::ToolCall {
name: "command_execution".to_string(),
arguments,
call_id: id.clone(),
}],
status,
};
}
let mut parts = Vec::new();
if let Some(output) = aggregated_output {
parts.push(ContentPart::ToolResult {
@ -285,20 +306,56 @@ fn thread_item_to_item(item: &schema::ThreadItem, status: ItemStatus) -> Univers
changes,
id,
status: file_status,
} => UniversalItem {
item_id: String::new(),
native_item_id: Some(id.clone()),
parent_id: None,
kind: ItemKind::ToolResult,
role: Some(ItemRole::Tool),
content: vec![ContentPart::Json {
json: serde_json::json!({
} => {
if status == ItemStatus::InProgress {
let arguments = serde_json::json!({
"changes": changes,
"status": format!("{:?}", file_status)
}),
}],
status,
},
})
.to_string();
return UniversalItem {
item_id: String::new(),
native_item_id: Some(id.clone()),
parent_id: None,
kind: ItemKind::ToolCall,
role: Some(ItemRole::Assistant),
content: vec![ContentPart::ToolCall {
name: "file_change".to_string(),
arguments,
call_id: id.clone(),
}],
status,
};
}
let mut parts = Vec::new();
let output = serde_json::json!({
"changes": changes,
"status": format!("{:?}", file_status)
})
.to_string();
parts.push(ContentPart::ToolResult {
call_id: id.clone(),
output,
});
for change in changes {
let (action, target_path) = file_action_from_change_kind(&change.kind);
parts.push(ContentPart::FileRef {
path: change.path.clone(),
action,
diff: Some(change.diff.clone()),
target_path,
});
}
UniversalItem {
item_id: String::new(),
native_item_id: Some(id.clone()),
parent_id: None,
kind: ItemKind::ToolResult,
role: Some(ItemRole::Tool),
content: parts,
status,
}
}
schema::ThreadItem::McpToolCall {
arguments,
error,
@ -433,6 +490,34 @@ fn thread_item_to_item(item: &schema::ThreadItem, status: ItemStatus) -> Univers
}
}
/// Map a Codex `PatchChangeKind` to the universal `FileAction`, plus the
/// rename target path when the change moves a file.
///
/// The schema enum is inspected through its serde JSON form (its `type` tag
/// and optional `move_path` field) so unknown variants degrade gracefully to
/// `Patch` with no target path. A failed serialization is treated as an
/// `update` with no move.
fn file_action_from_change_kind(
    kind: &schema::PatchChangeKind,
) -> (crate::FileAction, Option<String>) {
    let serialized = serde_json::to_value(kind).ok();
    let tag = serialized
        .as_ref()
        .and_then(|json| json.get("type"))
        .and_then(Value::as_str)
        .unwrap_or("update");
    let renamed_to = serialized
        .as_ref()
        .and_then(|json| json.get("move_path"))
        .and_then(Value::as_str)
        .map(str::to_string);
    match (tag, renamed_to) {
        ("add", _) => (crate::FileAction::Write, None),
        ("delete", _) => (crate::FileAction::Delete, None),
        // An update carrying a move target is a rename; otherwise a patch.
        ("update", Some(target)) => (crate::FileAction::Rename, Some(target)),
        _ => (crate::FileAction::Patch, None),
    }
}
fn status_item(label: &str, detail: Option<String>) -> UniversalItem {
UniversalItem {
item_id: String::new(),

View file

@ -477,6 +477,7 @@ fn file_part_to_content(file_part: &schema::FilePart) -> ContentPart {
path,
action,
diff: None,
target_path: None,
}
}

View file

@ -243,6 +243,8 @@ pub enum ContentPart {
path: String,
action: FileAction,
diff: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
target_path: Option<String>,
},
Reasoning {
text: String,
@ -258,12 +260,14 @@ pub enum ContentPart {
},
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
/// Action recorded for a file artifact in a `file_ref` content part.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema, ToSchema)]
#[serde(rename_all = "snake_case")]
pub enum FileAction {
    /// File contents were read.
    Read,
    /// File was created or fully (over)written.
    Write,
    /// File was modified via a diff/patch.
    Patch,
    /// File was moved; the destination is carried in the part's `target_path`.
    Rename,
    /// File was removed.
    Delete,
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]