wip: pi working

This commit is contained in:
Franklin 2026-02-06 16:54:43 -05:00
commit a6064e7027
120 changed files with 15728 additions and 2301 deletions

View file

@ -42,7 +42,7 @@ tempfile = { workspace = true, optional = true }
libc = "0.2"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.52", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console"] }
windows = { version = "0.52", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_Threading"] }
[dev-dependencies]
http-body-util.workspace = true

View file

@ -19,9 +19,22 @@ fn main() {
println!("cargo:rerun-if-env-changed=SANDBOX_AGENT_VERSION");
println!("cargo:rerun-if-changed={}", dist_dir.display());
// Rebuild when the git HEAD changes so BUILD_ID stays current.
let git_head = manifest_dir.join(".git/HEAD");
if git_head.exists() {
println!("cargo:rerun-if-changed={}", git_head.display());
} else {
// In a workspace the .git dir lives at the repo root.
let root_git_head = root_dir.join(".git/HEAD");
if root_git_head.exists() {
println!("cargo:rerun-if-changed={}", root_git_head.display());
}
}
// Generate version constant from environment variable or fallback to Cargo.toml version
let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR"));
generate_version(&out_dir);
generate_build_id(&out_dir);
let skip = env::var("SANDBOX_AGENT_SKIP_INSPECTOR").is_ok();
let out_file = out_dir.join("inspector_assets.rs");
@ -81,3 +94,33 @@ fn generate_version(out_dir: &Path) {
fs::write(&out_file, contents).expect("write version.rs");
}
/// Emit `build_id.rs` into `out_dir`, defining the `BUILD_ID` constant.
///
/// Prefers the short git commit hash; when git is unavailable or fails
/// (e.g. building from a source tarball), falls back to
/// `<package-version>-<unix-timestamp>`.
fn generate_build_id(out_dir: &Path) {
    use std::process::Command;

    // Ask git for the short HEAD hash; any failure collapses to None.
    let git_hash = Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .output()
        .ok()
        .filter(|output| output.status.success())
        .and_then(|output| String::from_utf8(output.stdout).ok())
        .map(|stdout| stdout.trim().to_string());

    let build_id = match git_hash {
        Some(hash) => hash,
        None => {
            // Fallback: use the package version + compile-time timestamp
            let version = env::var("CARGO_PKG_VERSION").unwrap_or_default();
            let timestamp = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_secs().to_string())
                .unwrap_or_default();
            format!("{version}-{timestamp}")
        }
    };

    let contents = format!(
        "/// Unique identifier for this build (git short hash or version-timestamp fallback).\n\
        pub const BUILD_ID: &str = \"{}\";\n",
        build_id
    );
    fs::write(out_dir.join("build_id.rs"), contents).expect("write build_id.rs");
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,476 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Child, Command as ProcessCommand, Stdio};
use std::time::{Duration, Instant};
use reqwest::blocking::Client as HttpClient;
use crate::cli::{CliConfig, CliError};
mod build_id {
include!(concat!(env!("OUT_DIR"), "/build_id.rs"));
}
pub use build_id::BUILD_ID;
const DAEMON_HEALTH_TIMEOUT: Duration = Duration::from_secs(30);
// ---------------------------------------------------------------------------
// Paths
// ---------------------------------------------------------------------------
/// Directory where daemon PID, log, and version files are stored.
///
/// Uses the platform data directory when available; otherwise falls back to
/// a hidden `.sandbox-agent/daemon` directory under the current working dir.
pub fn daemon_state_dir() -> PathBuf {
    match dirs::data_dir() {
        Some(base) => base.join("sandbox-agent").join("daemon"),
        None => PathBuf::from(".").join(".sandbox-agent").join("daemon"),
    }
}
/// Replace every character that is not ASCII alphanumeric with `-` so the
/// host can be embedded safely in a file name.
pub fn sanitize_host(host: &str) -> String {
    let mut sanitized = String::with_capacity(host.len());
    for ch in host.chars() {
        sanitized.push(if ch.is_ascii_alphanumeric() { ch } else { '-' });
    }
    sanitized
}
/// Path of the PID file for the daemon bound to `host:port`.
pub fn daemon_pid_path(host: &str, port: u16) -> PathBuf {
    daemon_state_dir().join(format!("daemon-{}-{}.pid", sanitize_host(host), port))
}
/// Path of the log file for the daemon bound to `host:port`.
pub fn daemon_log_path(host: &str, port: u16) -> PathBuf {
    daemon_state_dir().join(format!("daemon-{}-{}.log", sanitize_host(host), port))
}
/// Path of the version file recording the build id of the daemon bound to
/// `host:port`.
pub fn daemon_version_path(host: &str, port: u16) -> PathBuf {
    daemon_state_dir().join(format!("daemon-{}-{}.version", sanitize_host(host), port))
}
// ---------------------------------------------------------------------------
// PID helpers
// ---------------------------------------------------------------------------
/// Read a PID from `path`.
///
/// Returns `None` when the file is missing, unreadable, or does not contain
/// a valid `u32` (surrounding whitespace is tolerated).
pub fn read_pid(path: &Path) -> Option<u32> {
    fs::read_to_string(path)
        .ok()
        .and_then(|text| text.trim().parse::<u32>().ok())
}
/// Write `pid` (as decimal text) to `path`, creating parent directories as
/// needed.
pub fn write_pid(path: &Path, pid: u32) -> Result<(), CliError> {
    // Ensure the state directory exists before writing.
    path.parent().map(fs::create_dir_all).transpose()?;
    fs::write(path, pid.to_string())?;
    Ok(())
}
/// Delete the PID file at `path`, treating "already gone" as success.
///
/// Uses a single `remove_file` call that tolerates `NotFound` instead of the
/// racy exists-then-remove pattern: the file could disappear between the
/// `exists()` check and the removal (TOCTOU), which would surface a spurious
/// error.
pub fn remove_pid(path: &Path) -> Result<(), CliError> {
    match fs::remove_file(path) {
        Ok(()) => Ok(()),
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()),
        Err(err) => Err(err.into()),
    }
}
// ---------------------------------------------------------------------------
// Version helpers
// ---------------------------------------------------------------------------
/// Read the build id recorded for the daemon at `host:port`, if a version
/// file exists and is readable.
pub fn read_daemon_version(host: &str, port: u16) -> Option<String> {
    fs::read_to_string(daemon_version_path(host, port))
        .ok()
        .map(|text| text.trim().to_string())
}
/// Record this binary's `BUILD_ID` as the running daemon's version for
/// `host:port`, creating the state directory as needed.
pub fn write_daemon_version(host: &str, port: u16) -> Result<(), CliError> {
    let path = daemon_version_path(host, port);
    match path.parent() {
        Some(parent) => fs::create_dir_all(parent)?,
        None => {}
    }
    fs::write(&path, BUILD_ID.as_bytes())?;
    Ok(())
}
/// Delete the daemon's recorded version file for `host:port`, treating
/// "already gone" as success.
///
/// Uses a single `remove_file` call that tolerates `NotFound`, avoiding the
/// TOCTOU race in an exists-then-remove sequence.
pub fn remove_version_file(host: &str, port: u16) -> Result<(), CliError> {
    let path = daemon_version_path(host, port);
    match fs::remove_file(&path) {
        Ok(()) => Ok(()),
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()),
        Err(err) => Err(err.into()),
    }
}
/// True when the build id recorded for the daemon at `host:port` matches
/// this binary's `BUILD_ID`. A missing version file counts as not current.
pub fn is_version_current(host: &str, port: u16) -> bool {
    read_daemon_version(host, port).as_deref() == Some(BUILD_ID)
}
// ---------------------------------------------------------------------------
// Process helpers
// ---------------------------------------------------------------------------
#[cfg(unix)]
pub fn is_process_running(pid: u32) -> bool {
let result = unsafe { libc::kill(pid as i32, 0) };
if result == 0 {
return true;
}
match std::io::Error::last_os_error().raw_os_error() {
Some(code) if code == libc::EPERM => true,
_ => false,
}
}
/// Check whether a process with `pid` is alive (Windows).
///
/// Opens the process with the minimal query right and reads its exit code:
/// 259 (STILL_ACTIVE) means the process has not exited.
/// NOTE(review): a process that legitimately exited with code 259 would be
/// misreported as running — an accepted limitation of this Win32 idiom.
#[cfg(windows)]
pub fn is_process_running(pid: u32) -> bool {
    use windows::Win32::Foundation::CloseHandle;
    use windows::Win32::System::Threading::{
        GetExitCodeProcess, OpenProcess, PROCESS_QUERY_LIMITED_INFORMATION,
    };
    unsafe {
        // OpenProcess fails when the PID does not exist or access is denied;
        // either way the process is treated as not running.
        let handle = match OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, false, pid) {
            Ok(h) => h,
            Err(_) => return false,
        };
        let mut exit_code = 0u32;
        let ok = GetExitCodeProcess(handle, &mut exit_code).is_ok();
        let _ = CloseHandle(handle);
        // 259 == STILL_ACTIVE
        ok && exit_code == 259
    }
}
// ---------------------------------------------------------------------------
// Health checks
// ---------------------------------------------------------------------------
/// One-shot health probe against `{base_url}/v1/health`.
///
/// Returns `Ok(true)` on a 2xx response, `Ok(false)` on any other response
/// or transport error; only HTTP-client construction failures become `Err`.
pub fn check_health(base_url: &str, token: Option<&str>) -> Result<bool, CliError> {
    let client = HttpClient::builder().build()?;
    let mut request = client.get(format!("{base_url}/v1/health"));
    if let Some(token) = token {
        request = request.bearer_auth(token);
    }
    let healthy = matches!(request.send(), Ok(response) if response.status().is_success());
    Ok(healthy)
}
/// Poll `{base_url}/v1/health` until it answers 2xx or `timeout` elapses.
///
/// When `server_child` is provided and that process exits before becoming
/// healthy, fail immediately with its exit status instead of waiting out the
/// timeout. Polls every 200 ms.
pub fn wait_for_health(
    mut server_child: Option<&mut Child>,
    base_url: &str,
    token: Option<&str>,
    timeout: Duration,
) -> Result<(), CliError> {
    let client = HttpClient::builder().build()?;
    let url = format!("{base_url}/v1/health");
    let started = Instant::now();
    while started.elapsed() < timeout {
        // Bail out early if the spawned server has already died.
        if let Some(child) = server_child.as_mut() {
            if let Some(status) = child.try_wait()? {
                return Err(CliError::Server(format!(
                    "sandbox-agent exited before becoming healthy ({status})"
                )));
            }
        }
        let mut request = client.get(&url);
        if let Some(token) = token {
            request = request.bearer_auth(token);
        }
        if matches!(request.send(), Ok(response) if response.status().is_success()) {
            return Ok(());
        }
        std::thread::sleep(Duration::from_millis(200));
    }
    Err(CliError::Server(
        "timed out waiting for sandbox-agent health".to_string(),
    ))
}
// ---------------------------------------------------------------------------
// Spawn
// ---------------------------------------------------------------------------
/// Spawn this binary as a background `server` process, redirecting both
/// stdout and stderr to `log_path`.
///
/// `cli` is currently unused; it is kept in the signature so CLI-level
/// configuration can be threaded through without churning callers.
pub fn spawn_sandbox_agent_daemon(
    cli: &CliConfig,
    host: &str,
    port: u16,
    token: Option<&str>,
    log_path: &Path,
) -> Result<Child, CliError> {
    let _ = cli;
    if let Some(parent) = log_path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Both output streams share the same log file handle.
    let stdout_log = fs::File::create(log_path)?;
    let stderr_log = stdout_log.try_clone()?;
    let port_arg = port.to_string();
    let mut cmd = ProcessCommand::new(std::env::current_exe()?);
    cmd.args(["server", "--host", host, "--port", port_arg.as_str()])
        .env("SANDBOX_AGENT_LOG_STDOUT", "1")
        .stdin(Stdio::null())
        .stdout(Stdio::from(stdout_log))
        .stderr(Stdio::from(stderr_log));
    if let Some(token) = token {
        cmd.args(["--token", token]);
    }
    cmd.spawn().map_err(CliError::from)
}
// ---------------------------------------------------------------------------
// DaemonStatus
// ---------------------------------------------------------------------------
/// Result of probing the daemon for a given host/port.
#[derive(Debug)]
pub enum DaemonStatus {
    /// A live process was found, or the health endpoint answered.
    Running {
        /// Process id from the PID file; 0 is a sentinel meaning the health
        /// check succeeded but no PID file was found.
        pid: u32,
        /// Build id recorded in the version file, if present.
        version: Option<String>,
        /// Whether the recorded build id matches this binary's BUILD_ID.
        version_current: bool,
        /// Location of the daemon's log file.
        log_path: PathBuf,
    },
    /// No live process and no healthy endpoint.
    NotRunning,
}
impl std::fmt::Display for DaemonStatus {
    /// One-line, human-readable status summary.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DaemonStatus::NotRunning => write!(f, "Daemon not running"),
            DaemonStatus::Running {
                pid,
                version,
                version_current,
                log_path,
            } => {
                let version_str = version.as_deref().unwrap_or("unknown");
                let outdated = match *version_current {
                    true => "",
                    false => " [outdated, restart recommended]",
                };
                write!(
                    f,
                    "Daemon running (PID {pid}, build {version_str}, logs: {}){}",
                    log_path.display(),
                    outdated
                )
            }
        }
    }
}
// ---------------------------------------------------------------------------
// High-level commands
// ---------------------------------------------------------------------------
/// Report whether the daemon serving `host:port` appears to be running.
///
/// Resolution order:
/// 1. A live process recorded in the PID file wins.
/// 2. A stale PID file (recorded process is gone) is cleaned up together
///    with the version file.
/// 3. As a fallback, a successful health probe reports `Running` with
///    `pid: 0` — the daemon is alive but its PID file was lost.
pub fn status(host: &str, port: u16, token: Option<&str>) -> Result<DaemonStatus, CliError> {
    let pid_path = daemon_pid_path(host, port);
    let log_path = daemon_log_path(host, port);
    if let Some(pid) = read_pid(&pid_path) {
        if is_process_running(pid) {
            let version = read_daemon_version(host, port);
            let version_current = is_version_current(host, port);
            return Ok(DaemonStatus::Running {
                pid,
                version,
                version_current,
                log_path,
            });
        }
        // Stale PID file
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
    }
    // Also try a health check in case the daemon is running but we lost the PID file
    let base_url = format!("http://{host}:{port}");
    if check_health(&base_url, token)? {
        // pid 0 is a sentinel: healthy endpoint, unknown process id.
        return Ok(DaemonStatus::Running {
            pid: 0,
            version: read_daemon_version(host, port),
            version_current: is_version_current(host, port),
            log_path,
        });
    }
    Ok(DaemonStatus::NotRunning)
}
/// Start the daemon for `host:port` unless one is already healthy.
///
/// Three cases: already healthy (no-op), a live recorded process that is not
/// yet healthy (wait for it), or a cold start — spawn, record PID and build
/// id, then wait for health; on failure the PID/version files are rolled
/// back so the failed spawn is not mistaken for a running daemon later.
pub fn start(cli: &CliConfig, host: &str, port: u16, token: Option<&str>) -> Result<(), CliError> {
    let base_url = format!("http://{host}:{port}");
    let pid_path = daemon_pid_path(host, port);
    let log_path = daemon_log_path(host, port);
    // Already healthy?
    if check_health(&base_url, token)? {
        eprintln!("daemon already running at {base_url}");
        return Ok(());
    }
    // Stale PID?
    if let Some(pid) = read_pid(&pid_path) {
        if is_process_running(pid) {
            // Process exists but has not answered health yet — give it time.
            eprintln!("daemon process {pid} exists; waiting for health");
            return wait_for_health(None, &base_url, token, DAEMON_HEALTH_TIMEOUT);
        }
        let _ = remove_pid(&pid_path);
    }
    eprintln!(
        "starting daemon at {base_url} (logs: {})",
        log_path.display()
    );
    let mut child = spawn_sandbox_agent_daemon(cli, host, port, token, &log_path)?;
    let pid = child.id();
    // Record PID and build id before waiting so `status` can see them.
    write_pid(&pid_path, pid)?;
    write_daemon_version(host, port)?;
    let result = wait_for_health(Some(&mut child), &base_url, token, DAEMON_HEALTH_TIMEOUT);
    if result.is_err() {
        // Roll back state files on failure.
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
        return result;
    }
    eprintln!("daemon started (PID {pid}, logs: {})", log_path.display());
    Ok(())
}
/// Stop the daemon for `host:port` (Unix).
///
/// Sends SIGTERM and polls for up to 5 seconds for a graceful exit, then
/// escalates to SIGKILL. A missing or stale PID file is treated as "already
/// stopped", not an error. PID and version files are removed on every
/// terminating path.
#[cfg(unix)]
pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
    let pid_path = daemon_pid_path(host, port);
    let pid = match read_pid(&pid_path) {
        Some(pid) => pid,
        None => {
            eprintln!("daemon is not running (no PID file)");
            return Ok(());
        }
    };
    if !is_process_running(pid) {
        eprintln!("daemon is not running (stale PID file)");
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
        return Ok(());
    }
    eprintln!("stopping daemon (PID {pid})...");
    // SIGTERM; kill() errors are ignored — the liveness polling below
    // decides the outcome.
    unsafe {
        libc::kill(pid as i32, libc::SIGTERM);
    }
    // Wait up to 5 seconds for graceful exit
    for _ in 0..50 {
        std::thread::sleep(Duration::from_millis(100));
        if !is_process_running(pid) {
            let _ = remove_pid(&pid_path);
            let _ = remove_version_file(host, port);
            eprintln!("daemon stopped");
            return Ok(());
        }
    }
    // SIGKILL
    eprintln!("daemon did not stop gracefully, sending SIGKILL...");
    unsafe {
        libc::kill(pid as i32, libc::SIGKILL);
    }
    // NOTE(review): exit is not re-verified after SIGKILL; the brief sleep
    // just gives the kernel time to reap before state files are removed.
    std::thread::sleep(Duration::from_millis(100));
    let _ = remove_pid(&pid_path);
    let _ = remove_version_file(host, port);
    eprintln!("daemon killed");
    Ok(())
}
/// Stop the daemon for `host:port` (Windows) via `taskkill /F`.
///
/// A missing or stale PID file is treated as "already stopped", not an
/// error. PID and version files are removed on every terminating path.
#[cfg(windows)]
pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
    let pid_path = daemon_pid_path(host, port);
    let pid = match read_pid(&pid_path) {
        Some(pid) => pid,
        None => {
            eprintln!("daemon is not running (no PID file)");
            return Ok(());
        }
    };
    if !is_process_running(pid) {
        eprintln!("daemon is not running (stale PID file)");
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
        return Ok(());
    }
    eprintln!("stopping daemon (PID {pid})...");
    // Use taskkill on Windows; /F forces termination. Errors are ignored.
    let _ = ProcessCommand::new("taskkill")
        .args(["/PID", &pid.to_string(), "/F"])
        .status();
    // NOTE(review): exit is not re-verified after taskkill; the sleep just
    // lets the process table settle before state files are removed.
    std::thread::sleep(Duration::from_millis(500));
    let _ = remove_pid(&pid_path);
    let _ = remove_version_file(host, port);
    eprintln!("daemon stopped");
    Ok(())
}
/// Make sure a healthy, up-to-date daemon is serving `host:port`.
///
/// Cases, in order:
/// - Healthy and built from this binary's BUILD_ID: no-op.
/// - Healthy but built from a different BUILD_ID: stop and restart so the
///   daemon matches this binary.
/// - Unhealthy but a live recorded process exists: wait for it to become
///   healthy.
/// - Otherwise: clean up stale state files and start fresh.
pub fn ensure_running(
    cli: &CliConfig,
    host: &str,
    port: u16,
    token: Option<&str>,
) -> Result<(), CliError> {
    let base_url = format!("http://{host}:{port}");
    let pid_path = daemon_pid_path(host, port);
    // Check if daemon is already healthy
    if check_health(&base_url, token)? {
        // Check build version
        if !is_version_current(host, port) {
            let old = read_daemon_version(host, port).unwrap_or_else(|| "unknown".to_string());
            eprintln!("daemon outdated (build {old} -> {BUILD_ID}), restarting...");
            stop(host, port)?;
            return start(cli, host, port, token);
        }
        let log_path = daemon_log_path(host, port);
        if let Some(pid) = read_pid(&pid_path) {
            eprintln!(
                "daemon already running at {base_url} (PID {pid}, logs: {})",
                log_path.display()
            );
        } else {
            eprintln!("daemon already running at {base_url}");
        }
        return Ok(());
    }
    // Not healthy — check for stale PID
    if let Some(pid) = read_pid(&pid_path) {
        if is_process_running(pid) {
            eprintln!("daemon process {pid} running; waiting for health");
            return wait_for_health(None, &base_url, token, DAEMON_HEALTH_TIMEOUT);
        }
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
    }
    start(cli, host, port, token)
}

View file

@ -1,7 +1,9 @@
//! Sandbox agent core utilities.
mod agent_server_logs;
pub mod cli;
pub mod credentials;
pub mod daemon;
pub mod http_client;
pub mod opencode_compat;
pub mod router;

File diff suppressed because it is too large Load diff

View file

@ -4,11 +4,12 @@
//! stubbed responses with deterministic helpers for snapshot testing. A minimal
//! in-memory state tracks sessions/messages/ptys to keep behavior coherent.
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::convert::Infallible;
use std::str::FromStr;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};
use axum::extract::{Path, Query, State};
use axum::http::{HeaderMap, StatusCode};
@ -23,7 +24,7 @@ use tokio::sync::{broadcast, Mutex};
use tokio::time::interval;
use utoipa::{IntoParams, OpenApi, ToSchema};
use crate::router::{AppState, CreateSessionRequest, PermissionReply};
use crate::router::{AgentModelInfo, AppState, CreateSessionRequest, PermissionReply};
use sandbox_agent_agent_management::agents::AgentId;
use sandbox_agent_error::SandboxError;
use sandbox_agent_universal_agent_schema::{
@ -37,10 +38,10 @@ static MESSAGE_COUNTER: AtomicU64 = AtomicU64::new(1);
static PART_COUNTER: AtomicU64 = AtomicU64::new(1);
static PTY_COUNTER: AtomicU64 = AtomicU64::new(1);
static PROJECT_COUNTER: AtomicU64 = AtomicU64::new(1);
const OPENCODE_PROVIDER_ID: &str = "sandbox-agent";
const OPENCODE_PROVIDER_NAME: &str = "Sandbox Agent";
const OPENCODE_DEFAULT_MODEL_ID: &str = "mock";
const OPENCODE_DEFAULT_PROVIDER_ID: &str = "mock";
const OPENCODE_DEFAULT_AGENT_MODE: &str = "build";
const OPENCODE_MODEL_CACHE_TTL: Duration = Duration::from_secs(30);
#[derive(Clone, Debug)]
struct OpenCodeCompatConfig {
@ -212,6 +213,30 @@ struct OpenCodeSessionRuntime {
part_id_by_message: HashMap<String, String>,
tool_part_by_call: HashMap<String, String>,
tool_message_by_call: HashMap<String, String>,
/// Tool name by call_id, persisted from ToolCall for use in ToolResult events
tool_name_by_call: HashMap<String, String>,
/// Tool arguments by call_id, persisted from ToolCall for use in ToolResult events
tool_args_by_call: HashMap<String, String>,
}
#[derive(Clone, Debug)]
struct OpenCodeModelEntry {
model: AgentModelInfo,
group_id: String,
group_name: String,
}
#[derive(Clone, Debug)]
struct OpenCodeModelCache {
entries: Vec<OpenCodeModelEntry>,
model_lookup: HashMap<String, AgentId>,
group_defaults: HashMap<String, String>,
group_agents: HashMap<String, AgentId>,
group_names: HashMap<String, String>,
default_group: String,
default_model: String,
cached_at: Instant,
had_discovery_errors: bool,
}
pub struct OpenCodeState {
@ -225,6 +250,7 @@ pub struct OpenCodeState {
session_runtime: Mutex<HashMap<String, OpenCodeSessionRuntime>>,
session_streams: Mutex<HashMap<String, bool>>,
event_broadcaster: broadcast::Sender<Value>,
model_cache: Mutex<Option<OpenCodeModelCache>>,
}
impl OpenCodeState {
@ -242,6 +268,7 @@ impl OpenCodeState {
session_runtime: Mutex::new(HashMap::new()),
session_streams: Mutex::new(HashMap::new()),
event_broadcaster,
model_cache: Mutex::new(None),
}
}
@ -371,13 +398,17 @@ async fn ensure_backing_session(
state: &Arc<OpenCodeAppState>,
session_id: &str,
agent: &str,
model: Option<String>,
variant: Option<String>,
) -> Result<(), SandboxError> {
let model = model.filter(|value| !value.trim().is_empty());
let variant = variant.filter(|value| !value.trim().is_empty());
let request = CreateSessionRequest {
agent: agent.to_string(),
agent_mode: None,
permission_mode: None,
model: None,
variant: None,
model: model.clone(),
variant: variant.clone(),
agent_version: None,
};
match state
@ -387,7 +418,15 @@ async fn ensure_backing_session(
.await
{
Ok(_) => Ok(()),
Err(SandboxError::SessionAlreadyExists { .. }) => Ok(()),
Err(SandboxError::SessionAlreadyExists { .. }) => state
.inner
.session_manager()
.set_session_overrides(session_id, model, variant)
.await
.or_else(|err| match err {
SandboxError::SessionNotFound { .. } => Ok(()),
other => Err(other),
}),
Err(err) => Err(err),
}
}
@ -587,12 +626,208 @@ fn default_agent_mode() -> &'static str {
OPENCODE_DEFAULT_AGENT_MODE
}
fn resolve_agent_from_model(provider_id: &str, model_id: &str) -> Option<AgentId> {
if provider_id == OPENCODE_PROVIDER_ID {
AgentId::parse(model_id)
} else {
None
/// Return the OpenCode model cache, rebuilding it when the TTL has expired.
///
/// If the rebuild hit discovery errors and produced an empty model list
/// while the previous cache still had entries, the previous cache is kept
/// (with a refreshed timestamp) so a transient discovery failure does not
/// wipe the model list.
/// NOTE(review): the lock is released between the TTL check and the rebuild,
/// so concurrent callers may rebuild in parallel; last write wins.
async fn opencode_model_cache(state: &OpenCodeAppState) -> OpenCodeModelCache {
    let previous_cache = {
        let cache = state.opencode.model_cache.lock().await;
        if let Some(cache) = cache.as_ref() {
            // Fresh enough — serve it directly.
            if cache.cached_at.elapsed() < OPENCODE_MODEL_CACHE_TTL {
                return cache.clone();
            }
            Some(cache.clone())
        } else {
            None
        }
    };
    let mut cache = build_opencode_model_cache(state).await;
    if let Some(previous_cache) = previous_cache {
        if cache.had_discovery_errors
            && cache.entries.is_empty()
            && !previous_cache.entries.is_empty()
        {
            // Keep serving the stale-but-populated cache instead of an empty one.
            cache = previous_cache;
            cache.cached_at = Instant::now();
        }
    }
    let mut slot = state.opencode.model_cache.lock().await;
    *slot = Some(cache.clone());
    cache
}
/// Discover models from every available agent and assemble the lookup cache.
///
/// For each agent, models are grouped (via `group_for_agent_model`) and a
/// model-id → agent map is kept only while the mapping is unambiguous across
/// agents. Agents whose discovery fails, or that report no models, still get
/// a fallback group so they remain selectable. Discovery failures set
/// `had_discovery_errors` so the caller can decide to keep a previous cache.
async fn build_opencode_model_cache(state: &OpenCodeAppState) -> OpenCodeModelCache {
    let mut entries = Vec::new();
    let mut model_lookup = HashMap::new();
    let mut ambiguous_models = HashSet::new();
    let mut group_defaults: HashMap<String, String> = HashMap::new();
    let mut group_agents: HashMap<String, AgentId> = HashMap::new();
    let mut group_names: HashMap<String, String> = HashMap::new();
    let mut default_model: Option<String> = None;
    let mut had_discovery_errors = false;
    for agent in available_agent_ids() {
        let response = match state.inner.session_manager().agent_models(agent).await {
            Ok(response) => response,
            Err(err) => {
                had_discovery_errors = true;
                // Register a fallback group so the agent still shows up.
                let (group_id, group_name) = fallback_group_for_agent(agent);
                group_agents.entry(group_id.clone()).or_insert(agent);
                group_names.entry(group_id).or_insert(group_name);
                tracing::warn!(
                    target = "sandbox_agent::opencode",
                    ?agent,
                    ?err,
                    "failed to discover models for OpenCode provider"
                );
                continue;
            }
        };
        if response.models.is_empty() {
            // No models reported: keep an empty group for the agent.
            let (group_id, group_name) = fallback_group_for_agent(agent);
            group_agents.entry(group_id.clone()).or_insert(agent);
            group_names.entry(group_id).or_insert(group_name);
        }
        let first_model_id = response.models.first().map(|model| model.id.clone());
        for model in response.models {
            let model_id = model.id.clone();
            let (group_id, group_name) = group_for_agent_model(agent, &model_id);
            // The agent's declared default becomes its group's default.
            if response.default_model.as_deref() == Some(model_id.as_str()) {
                group_defaults.insert(group_id.clone(), model_id.clone());
            }
            group_agents.entry(group_id.clone()).or_insert(agent);
            group_names
                .entry(group_id.clone())
                .or_insert_with(|| group_name.clone());
            // Track model-id → agent only while unique; a collision between
            // two agents makes the id ambiguous and removes the mapping.
            if !ambiguous_models.contains(&model_id) {
                match model_lookup.get(&model_id) {
                    None => {
                        model_lookup.insert(model_id.clone(), agent);
                    }
                    Some(existing) if *existing != agent => {
                        model_lookup.remove(&model_id);
                        ambiguous_models.insert(model_id.clone());
                    }
                    _ => {}
                }
            }
            entries.push(OpenCodeModelEntry {
                model,
                group_id,
                group_name,
            });
        }
        // The first agent that yields a default (or any model) sets the
        // provisional global default.
        if default_model.is_none() {
            default_model = response.default_model.clone().or(first_model_id);
        }
    }
    // Sort each group's entries by model id for stable output.
    let mut groups: BTreeMap<String, Vec<&OpenCodeModelEntry>> = BTreeMap::new();
    for entry in &entries {
        groups
            .entry(entry.group_id.clone())
            .or_default()
            .push(entry);
    }
    for entries in groups.values_mut() {
        entries.sort_by(|a, b| a.model.id.cmp(&b.model.id));
    }
    // The mock model, when present, always wins as the global default.
    if entries
        .iter()
        .any(|entry| entry.model.id == OPENCODE_DEFAULT_MODEL_ID)
    {
        default_model = Some(OPENCODE_DEFAULT_MODEL_ID.to_string());
    }
    let default_model = default_model.unwrap_or_else(|| {
        entries
            .first()
            .map(|entry| entry.model.id.clone())
            .unwrap_or_else(|| OPENCODE_DEFAULT_MODEL_ID.to_string())
    });
    // Default group follows the default model; fall back to the first group.
    let mut default_group = entries
        .iter()
        .find(|entry| entry.model.id == default_model)
        .map(|entry| entry.group_id.clone())
        .unwrap_or_else(|| OPENCODE_DEFAULT_PROVIDER_ID.to_string());
    if !groups.contains_key(&default_group) {
        if let Some((group_id, _)) = groups.iter().next() {
            default_group = group_id.clone();
        }
    }
    // Every group gets a default model (its first entry when none declared).
    for (group_id, entries) in &groups {
        if !group_defaults.contains_key(group_id) {
            if let Some(entry) = entries.first() {
                group_defaults.insert(group_id.clone(), entry.model.id.clone());
            }
        }
    }
    OpenCodeModelCache {
        entries,
        model_lookup,
        group_defaults,
        group_agents,
        group_names,
        default_group,
        default_model,
        cached_at: Instant::now(),
        had_discovery_errors,
    }
}
/// Group id and display name used for an agent that reported no models (or
/// whose model discovery failed).
fn fallback_group_for_agent(agent: AgentId) -> (String, String) {
    let display = agent_display_name(agent).to_string();
    let group_id = if agent == AgentId::Opencode {
        "opencode".to_string()
    } else {
        agent.as_str().to_string()
    };
    (group_id, display)
}
/// Resolve the backing agent for an OpenCode (provider, model) pair.
///
/// Resolution order: known group id, unambiguous discovered model id, a
/// model id that literally names an agent, then heuristics for undiscovered
/// ids: "opencode:<provider>" groups and "a/b"-style ids map to OpenCode,
/// "claude-" prefixes to Claude, Amp's fixed tier names to Amp, and "gpt-"
/// or leading-'o' ids to Codex.
/// NOTE(review): `starts_with('o')` matches ANY id beginning with 'o' —
/// presumably aimed at OpenAI o-series models; confirm it cannot shadow
/// other providers' model ids.
fn resolve_agent_from_model(
    cache: &OpenCodeModelCache,
    provider_id: &str,
    model_id: &str,
) -> Option<AgentId> {
    if let Some(agent) = cache.group_agents.get(provider_id) {
        return Some(*agent);
    }
    if let Some(agent) = cache.model_lookup.get(model_id) {
        return Some(*agent);
    }
    if let Some(agent) = AgentId::parse(model_id) {
        return Some(agent);
    }
    if opencode_group_provider(provider_id).is_some() {
        return Some(AgentId::Opencode);
    }
    if model_id.contains('/') {
        return Some(AgentId::Opencode);
    }
    if model_id.starts_with("claude-") {
        return Some(AgentId::Claude);
    }
    if ["smart", "rush", "deep", "free"].contains(&model_id) {
        return Some(AgentId::Amp);
    }
    if model_id.starts_with("gpt-") || model_id.starts_with('o') {
        return Some(AgentId::Codex);
    }
    None
}
fn normalize_agent_mode(agent: Option<String>) -> String {
@ -607,19 +842,38 @@ async fn resolve_session_agent(
requested_provider: Option<&str>,
requested_model: Option<&str>,
) -> (String, String, String) {
let cache = opencode_model_cache(state).await;
let default_model_id = cache.default_model.clone();
let mut provider_id = requested_provider
.filter(|value| !value.is_empty())
.unwrap_or(OPENCODE_PROVIDER_ID)
.to_string();
let mut model_id = requested_model
.filter(|value| *value != "sandbox-agent")
.map(|value| value.to_string());
let model_id = requested_model
.filter(|value| !value.is_empty())
.unwrap_or(OPENCODE_DEFAULT_MODEL_ID)
.to_string();
let mut resolved_agent = resolve_agent_from_model(&provider_id, &model_id);
.map(|value| value.to_string());
if provider_id.is_none() {
if let Some(model_value) = model_id.as_deref() {
if let Some(entry) = cache
.entries
.iter()
.find(|entry| entry.model.id == model_value)
{
provider_id = Some(entry.group_id.clone());
} else if let Some(agent) = AgentId::parse(model_value) {
provider_id = Some(agent.as_str().to_string());
}
}
}
let mut provider_id = provider_id.unwrap_or_else(|| cache.default_group.clone());
let mut model_id = model_id
.or_else(|| cache.group_defaults.get(&provider_id).cloned())
.unwrap_or_else(|| default_model_id.clone());
let mut resolved_agent = resolve_agent_from_model(&cache, &provider_id, &model_id);
if resolved_agent.is_none() {
provider_id = OPENCODE_PROVIDER_ID.to_string();
model_id = OPENCODE_DEFAULT_MODEL_ID.to_string();
resolved_agent = Some(default_agent_id());
provider_id = cache.default_group.clone();
model_id = default_model_id.clone();
resolved_agent = resolve_agent_from_model(&cache, &provider_id, &model_id)
.or_else(|| Some(default_agent_id()));
}
let mut resolved_agent_id: Option<String> = None;
@ -650,7 +904,7 @@ async fn resolve_session_agent(
fn agent_display_name(agent: AgentId) -> &'static str {
match agent {
AgentId::Claude => "Claude",
AgentId::Claude => "Claude Code",
AgentId::Codex => "Codex",
AgentId::Opencode => "OpenCode",
AgentId::Amp => "Amp",
@ -659,17 +913,63 @@ fn agent_display_name(agent: AgentId) -> &'static str {
}
}
fn model_config_entry(agent: AgentId) -> Value {
/// Provider prefix of an OpenCode-style model id (`"provider/model"`), if
/// the id contains a `/` separator.
fn opencode_model_provider(model_id: &str) -> Option<&str> {
    match model_id.split_once('/') {
        Some((provider, _rest)) => Some(provider),
        None => None,
    }
}
/// Provider name embedded in an `"opencode:<provider>"` group id, if any.
fn opencode_group_provider(group_id: &str) -> Option<&str> {
    match group_id.split_once(':') {
        Some(("opencode", provider)) => Some(provider),
        _ => None,
    }
}
/// Group id and display name for `agent`'s model `model_id`.
///
/// OpenCode models are grouped by their provider prefix
/// (`"opencode:<provider>"`); every other agent forms a single group named
/// after the agent.
fn group_for_agent_model(agent: AgentId, model_id: &str) -> (String, String) {
    if agent != AgentId::Opencode {
        return (
            agent.as_str().to_string(),
            agent_display_name(agent).to_string(),
        );
    }
    let provider = opencode_model_provider(model_id).unwrap_or("unknown");
    (
        format!("opencode:{provider}"),
        format!("OpenCode ({provider})"),
    )
}
/// Model id to pass through to the backing agent, or `None` when the id is
/// blank or is merely an agent-name placeholder.
///
/// For OpenCode, ids without a provider prefix are qualified with the
/// provider taken from an `"opencode:<provider>"` group id when available.
fn backing_model_for_agent(agent: AgentId, provider_id: &str, model_id: &str) -> Option<String> {
    // Blank ids and agent-name placeholders carry no backing model.
    if model_id.trim().is_empty() || AgentId::parse(model_id).is_some() {
        return None;
    }
    // Non-OpenCode agents take the model id verbatim.
    if agent != AgentId::Opencode {
        return Some(model_id.to_string());
    }
    // Already provider-qualified.
    if model_id.contains('/') {
        return Some(model_id.to_string());
    }
    match opencode_group_provider(provider_id) {
        Some(provider) => Some(format!("{provider}/{model_id}")),
        None => Some(model_id.to_string()),
    }
}
fn model_config_entry(entry: &OpenCodeModelEntry) -> Value {
let model_name = entry
.model
.name
.clone()
.unwrap_or_else(|| entry.model.id.clone());
let variants = model_variants_object(&entry.model);
json!({
"id": agent.as_str(),
"providerID": OPENCODE_PROVIDER_ID,
"id": entry.model.id,
"providerID": entry.group_id,
"api": {
"id": "sandbox-agent",
"url": "http://localhost",
"npm": "@sandbox-agent/sdk"
},
"name": agent_display_name(agent),
"family": "sandbox-agent",
"name": model_name,
"family": entry.group_name,
"capabilities": {
"temperature": true,
"reasoning": true,
@ -704,14 +1004,21 @@ fn model_config_entry(agent: AgentId) -> Value {
"options": {},
"headers": {},
"release_date": "2024-01-01",
"variants": {}
"variants": variants
})
}
fn model_summary_entry(agent: AgentId) -> Value {
fn model_summary_entry(entry: &OpenCodeModelEntry) -> Value {
let model_name = entry
.model
.name
.clone()
.unwrap_or_else(|| entry.model.id.clone());
let variants = model_variants_object(&entry.model);
json!({
"id": agent.as_str(),
"name": agent_display_name(agent),
"id": entry.model.id,
"name": model_name,
"family": entry.group_name,
"release_date": "2024-01-01",
"attachment": false,
"reasoning": true,
@ -721,10 +1028,22 @@ fn model_summary_entry(agent: AgentId) -> Value {
"limit": {
"context": 128000,
"output": 4096
}
},
"variants": variants
})
}
/// Build the OpenCode `"variants"` object: one empty-object entry per
/// declared variant, or an empty object when the model declares none.
fn model_variants_object(model: &AgentModelInfo) -> Value {
    match model.variants.as_ref() {
        None => json!({}),
        Some(variants) => {
            let map: serde_json::Map<String, Value> = variants
                .iter()
                .map(|variant| (variant.clone(), json!({})))
                .collect();
            Value::Object(map)
        }
    }
}
fn bad_request(message: &str) -> (StatusCode, Json<Value>) {
(
StatusCode::BAD_REQUEST,
@ -1296,6 +1615,25 @@ async fn apply_universal_event(state: Arc<OpenCodeAppState>, event: UniversalEve
match event.event_type {
UniversalEventType::ItemStarted | UniversalEventType::ItemCompleted => {
if let UniversalEventData::Item(ItemEventData { item }) = &event.data {
// turn.completed or session.idle status → emit session.idle
if event.event_type == UniversalEventType::ItemCompleted
&& item.kind == ItemKind::Status
{
if let Some(ContentPart::Status { label, .. }) = item.content.first() {
if label == "turn.completed" || label == "session.idle" {
let session_id = event.session_id.clone();
state.opencode.emit_event(json!({
"type": "session.status",
"properties": {"sessionID": session_id, "status": {"type": "idle"}}
}));
state.opencode.emit_event(json!({
"type": "session.idle",
"properties": {"sessionID": session_id}
}));
return;
}
}
}
apply_item_event(state, event.clone(), item.clone()).await;
}
}
@ -1540,7 +1878,7 @@ async fn apply_item_event(
let provider_id = runtime
.last_model_provider
.clone()
.unwrap_or_else(|| OPENCODE_PROVIDER_ID.to_string());
.unwrap_or_else(|| OPENCODE_DEFAULT_PROVIDER_ID.to_string());
let model_id = runtime
.last_model_id
.clone()
@ -1587,25 +1925,48 @@ async fn apply_item_event(
.entry(message_id.clone())
.or_insert_with(|| format!("{}_text", message_id))
.clone();
runtime
.text_by_message
.insert(message_id.clone(), text.clone());
let part = build_text_part_with_id(&session_id, &message_id, &part_id, &text);
upsert_message_part(&state.opencode, &session_id, &message_id, part.clone()).await;
state
.opencode
.emit_event(part_event("message.part.updated", &part));
let _ = state
.opencode
.update_runtime(&session_id, |runtime| {
runtime
.text_by_message
.insert(message_id.clone(), text.clone());
runtime
.part_id_by_message
.insert(message_id.clone(), part_id.clone());
})
.await;
if event.event_type == UniversalEventType::ItemStarted {
// For ItemStarted, only store the text in runtime as the initial value
// without emitting a part event. Deltas will handle streaming, and
// ItemCompleted will emit the final text part.
let _ = state
.opencode
.update_runtime(&session_id, |runtime| {
runtime
.text_by_message
.insert(message_id.clone(), String::new());
runtime
.part_id_by_message
.insert(message_id.clone(), part_id.clone());
})
.await;
} else {
// For ItemCompleted, emit the final text part with the complete text.
// Use the accumulated text from deltas if available, otherwise use
// the text from the completed event.
let final_text = runtime
.text_by_message
.get(&message_id)
.filter(|t| !t.is_empty())
.cloned()
.unwrap_or_else(|| text.clone());
let part = build_text_part_with_id(&session_id, &message_id, &part_id, &final_text);
upsert_message_part(&state.opencode, &session_id, &message_id, part.clone()).await;
state
.opencode
.emit_event(part_event("message.part.updated", &part));
let _ = state
.opencode
.update_runtime(&session_id, |runtime| {
runtime
.text_by_message
.insert(message_id.clone(), final_text.clone());
runtime
.part_id_by_message
.insert(message_id.clone(), part_id.clone());
})
.await;
}
}
for part in item.content.iter() {
@ -1635,9 +1996,10 @@ async fn apply_item_event(
.entry(call_id.clone())
.or_insert_with(|| next_id("part_", &PART_COUNTER))
.clone();
let input_value = tool_input_from_arguments(Some(arguments.as_str()));
let state_value = json!({
"status": "pending",
"input": {"arguments": arguments},
"input": input_value,
"raw": arguments,
});
let tool_part = build_tool_part(
@ -1662,6 +2024,12 @@ async fn apply_item_event(
runtime
.tool_message_by_call
.insert(call_id.clone(), message_id.clone());
runtime
.tool_name_by_call
.insert(call_id.clone(), name.clone());
runtime
.tool_args_by_call
.insert(call_id.clone(), arguments.clone());
})
.await;
}
@ -1671,9 +2039,26 @@ async fn apply_item_event(
.entry(call_id.clone())
.or_insert_with(|| next_id("part_", &PART_COUNTER))
.clone();
// Resolve tool name from stored ToolCall data
let tool_name = runtime
.tool_name_by_call
.get(call_id)
.cloned()
.unwrap_or_else(|| "tool".to_string());
// Resolve input from stored ToolCall arguments
let input_value = runtime
.tool_args_by_call
.get(call_id)
.and_then(|args| {
tool_input_from_arguments(Some(args.as_str()))
.as_object()
.cloned()
})
.map(Value::Object)
.unwrap_or_else(|| json!({}));
let state_value = json!({
"status": "completed",
"input": {},
"input": input_value,
"output": output,
"title": "Tool result",
"metadata": {},
@ -1685,7 +2070,7 @@ async fn apply_item_event(
&message_id,
&part_id,
call_id,
"tool",
&tool_name,
state_value,
);
upsert_message_part(&state.opencode, &session_id, &message_id, tool_part.clone())
@ -1736,20 +2121,6 @@ async fn apply_item_event(
_ => {}
}
}
if event.event_type == UniversalEventType::ItemCompleted {
state.opencode.emit_event(json!({
"type": "session.status",
"properties": {
"sessionID": session_id,
"status": {"type": "idle"}
}
}));
state.opencode.emit_event(json!({
"type": "session.idle",
"properties": { "sessionID": session_id }
}));
}
}
async fn apply_tool_item_event(
@ -1821,7 +2192,7 @@ async fn apply_tool_item_event(
let provider_id = runtime
.last_model_provider
.clone()
.unwrap_or_else(|| OPENCODE_PROVIDER_ID.to_string());
.unwrap_or_else(|| OPENCODE_DEFAULT_PROVIDER_ID.to_string());
let model_id = runtime
.last_model_id
.clone()
@ -1878,12 +2249,19 @@ async fn apply_tool_item_event(
.get(&call_id)
.cloned()
.unwrap_or_else(|| next_id("part_", &PART_COUNTER));
// Resolve tool name: prefer current event's data, fall back to stored value from ToolCall
let tool_name = tool_info
.tool_name
.clone()
.or_else(|| runtime.tool_name_by_call.get(&call_id).cloned())
.unwrap_or_else(|| "tool".to_string());
let input_value = tool_input_from_arguments(tool_info.arguments.as_deref());
let raw_args = tool_info.arguments.clone().unwrap_or_default();
// Resolve arguments: prefer current event's data, fall back to stored value from ToolCall
let effective_arguments = tool_info
.arguments
.clone()
.or_else(|| runtime.tool_args_by_call.get(&call_id).cloned());
let input_value = tool_input_from_arguments(effective_arguments.as_deref());
let raw_args = effective_arguments.clone().unwrap_or_default();
let output_value = tool_info
.output
.clone()
@ -1911,7 +2289,7 @@ async fn apply_tool_item_event(
json!({
"status": "error",
"input": input_value,
"error": output_value.unwrap_or_else(|| "Tool failed".to_string()),
"output": output_value.unwrap_or_else(|| "Tool failed".to_string()),
"metadata": {},
"time": {"start": now, "end": now},
})
@ -1963,6 +2341,17 @@ async fn apply_tool_item_event(
runtime
.tool_message_by_call
.insert(call_id.clone(), message_id.clone());
// Persist tool name and arguments from ToolCall for later ToolResult events
if let Some(name) = tool_info.tool_name.as_ref() {
runtime
.tool_name_by_call
.insert(call_id.clone(), name.clone());
}
if let Some(args) = tool_info.arguments.as_ref() {
runtime
.tool_args_by_call
.insert(call_id.clone(), args.clone());
}
})
.await;
}
@ -2037,7 +2426,7 @@ async fn apply_item_delta(
let provider_id = runtime
.last_model_provider
.clone()
.unwrap_or_else(|| OPENCODE_PROVIDER_ID.to_string());
.unwrap_or_else(|| OPENCODE_DEFAULT_PROVIDER_ID.to_string());
let model_id = runtime
.last_model_id
.clone()
@ -2070,9 +2459,11 @@ async fn apply_item_delta(
.unwrap_or_else(|| format!("{}_text", message_id));
let part = build_text_part_with_id(&session_id, &message_id, &part_id, &text);
upsert_message_part(&state.opencode, &session_id, &message_id, part.clone()).await;
state
.opencode
.emit_event(part_event("message.part.updated", &part));
state.opencode.emit_event(part_event_with_delta(
"message.part.updated",
&part,
Some(&delta),
));
let _ = state
.opencode
.update_runtime(&session_id, |runtime| {
@ -2238,9 +2629,10 @@ pub fn build_opencode_router(state: Arc<OpenCodeAppState>) -> Router {
tag = "opencode"
)]
async fn oc_agent_list(State(state): State<Arc<OpenCodeAppState>>) -> impl IntoResponse {
let name = state.inner.branding.product_name();
let agent = json!({
"name": OPENCODE_PROVIDER_NAME,
"description": "Sandbox Agent compatibility layer",
"name": name,
"description": format!("{name} compatibility layer"),
"mode": "all",
"native": false,
"hidden": false,
@ -2287,26 +2679,46 @@ async fn oc_config_patch(Json(body): Json<Value>) -> impl IntoResponse {
responses((status = 200)),
tag = "opencode"
)]
async fn oc_config_providers() -> impl IntoResponse {
let mut models = serde_json::Map::new();
for agent in available_agent_ids() {
models.insert(agent.as_str().to_string(), model_config_entry(agent));
async fn oc_config_providers(State(state): State<Arc<OpenCodeAppState>>) -> impl IntoResponse {
let cache = opencode_model_cache(&state).await;
let mut grouped: BTreeMap<String, Vec<&OpenCodeModelEntry>> = BTreeMap::new();
for entry in &cache.entries {
grouped
.entry(entry.group_id.clone())
.or_default()
.push(entry);
}
for group_id in cache.group_names.keys() {
grouped.entry(group_id.clone()).or_default();
}
let mut providers = Vec::new();
let mut defaults = serde_json::Map::new();
for (group_id, entries) in grouped {
let mut models = serde_json::Map::new();
for entry in entries {
models.insert(entry.model.id.clone(), model_config_entry(entry));
}
let name = cache
.group_names
.get(&group_id)
.cloned()
.unwrap_or_else(|| group_id.clone());
providers.push(json!({
"id": group_id,
"name": name,
"source": "custom",
"env": [],
"key": "",
"options": {},
"models": Value::Object(models),
}));
if let Some(default_model) = cache.group_defaults.get(&group_id) {
defaults.insert(group_id, Value::String(default_model.clone()));
}
}
let providers = json!({
"providers": [
{
"id": OPENCODE_PROVIDER_ID,
"name": OPENCODE_PROVIDER_NAME,
"source": "custom",
"env": [],
"key": "",
"options": {},
"models": Value::Object(models),
}
],
"default": {
OPENCODE_PROVIDER_ID: OPENCODE_DEFAULT_MODEL_ID
}
"providers": providers,
"default": Value::Object(defaults),
});
(StatusCode::OK, Json(providers))
}
@ -2957,6 +3369,9 @@ async fn oc_session_message_create(
.and_then(|v| v.as_str());
let (session_agent, provider_id, model_id) =
resolve_session_agent(&state, &session_id, requested_provider, requested_model).await;
let session_agent_id = AgentId::parse(&session_agent).unwrap_or_else(default_agent_id);
let backing_model = backing_model_for_agent(session_agent_id, &provider_id, &model_id);
let backing_variant = body.variant.clone();
let parts_input = body.parts.unwrap_or_default();
if parts_input.is_empty() {
@ -3020,7 +3435,15 @@ async fn oc_session_message_create(
})
.await;
if let Err(err) = ensure_backing_session(&state, &session_id, &session_agent).await {
if let Err(err) = ensure_backing_session(
&state,
&session_id,
&session_agent,
backing_model,
backing_variant,
)
.await
{
tracing::warn!(
target = "sandbox_agent::opencode",
?err,
@ -3226,7 +3649,7 @@ async fn oc_session_command(
&directory,
&worktree,
&agent,
OPENCODE_PROVIDER_ID,
OPENCODE_DEFAULT_PROVIDER_ID,
OPENCODE_DEFAULT_MODEL_ID,
);
@ -3276,7 +3699,7 @@ async fn oc_session_shell(
.as_ref()
.and_then(|v| v.get("providerID"))
.and_then(|v| v.as_str())
.unwrap_or(OPENCODE_PROVIDER_ID),
.unwrap_or(OPENCODE_DEFAULT_PROVIDER_ID),
body.model
.as_ref()
.and_then(|v| v.get("modelID"))
@ -3584,24 +4007,46 @@ async fn oc_question_reject(
responses((status = 200)),
tag = "opencode"
)]
async fn oc_provider_list() -> impl IntoResponse {
let mut models = serde_json::Map::new();
for agent in available_agent_ids() {
models.insert(agent.as_str().to_string(), model_summary_entry(agent));
async fn oc_provider_list(State(state): State<Arc<OpenCodeAppState>>) -> impl IntoResponse {
let cache = opencode_model_cache(&state).await;
let mut grouped: BTreeMap<String, Vec<&OpenCodeModelEntry>> = BTreeMap::new();
for entry in &cache.entries {
grouped
.entry(entry.group_id.clone())
.or_default()
.push(entry);
}
for group_id in cache.group_names.keys() {
grouped.entry(group_id.clone()).or_default();
}
let mut providers = Vec::new();
let mut defaults = serde_json::Map::new();
let mut connected = Vec::new();
for (group_id, entries) in grouped {
let mut models = serde_json::Map::new();
for entry in entries {
models.insert(entry.model.id.clone(), model_summary_entry(entry));
}
let name = cache
.group_names
.get(&group_id)
.cloned()
.unwrap_or_else(|| group_id.clone());
providers.push(json!({
"id": group_id,
"name": name,
"env": [],
"models": Value::Object(models),
}));
if let Some(default_model) = cache.group_defaults.get(&group_id) {
defaults.insert(group_id.clone(), Value::String(default_model.clone()));
}
connected.push(group_id);
}
let providers = json!({
"all": [
{
"id": OPENCODE_PROVIDER_ID,
"name": OPENCODE_PROVIDER_NAME,
"env": [],
"models": Value::Object(models),
}
],
"default": {
OPENCODE_PROVIDER_ID: OPENCODE_DEFAULT_MODEL_ID
},
"connected": [OPENCODE_PROVIDER_ID]
"all": providers,
"default": Value::Object(defaults),
"connected": connected
});
(StatusCode::OK, Json(providers))
}
@ -3612,11 +4057,13 @@ async fn oc_provider_list() -> impl IntoResponse {
responses((status = 200)),
tag = "opencode"
)]
async fn oc_provider_auth() -> impl IntoResponse {
let auth = json!({
OPENCODE_PROVIDER_ID: []
});
(StatusCode::OK, Json(auth))
/// OpenCode-compat endpoint: report provider auth status.
///
/// Returns a JSON object keyed by provider group id, each mapped to an empty
/// array — i.e. no extra auth steps are required for any group surfaced by
/// the model cache.
async fn oc_provider_auth(State(state): State<Arc<OpenCodeAppState>>) -> impl IntoResponse {
    let cache = opencode_model_cache(&state).await;
    let mut auth_map = serde_json::Map::new();
    // One entry per known provider group; the empty array signals "nothing to do".
    for group_id in cache.group_names.keys() {
        auth_map.insert(group_id.clone(), json!([]));
    }
    (StatusCode::OK, Json(Value::Object(auth_map)))
}
#[utoipa::path(

File diff suppressed because it is too large Load diff

View file

@ -3,6 +3,7 @@ use std::env;
use std::fs;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use reqwest::Client;
@ -11,6 +12,7 @@ use time::OffsetDateTime;
use tokio::time::Instant;
use crate::http_client;
static TELEMETRY_ENABLED: AtomicBool = AtomicBool::new(false);
const TELEMETRY_URL: &str = "https://tc.rivet.dev";
const TELEMETRY_ENV_DEBUG: &str = "SANDBOX_AGENT_TELEMETRY_DEBUG";
@ -21,7 +23,7 @@ const TELEMETRY_INTERVAL_SECS: u64 = 300;
const TELEMETRY_MIN_GAP_SECS: i64 = 300;
#[derive(Debug, Serialize)]
struct TelemetryEvent {
struct TelemetryEvent<D: Serialize> {
// p = project identifier
p: String,
// dt = unix timestamp (seconds)
@ -33,13 +35,13 @@ struct TelemetryEvent {
// ev = event name
ev: String,
// d = data payload
d: TelemetryData,
d: D,
// v = schema version
v: u8,
}
#[derive(Debug, Serialize)]
struct TelemetryData {
struct BeaconData {
version: String,
os: OsInfo,
provider: ProviderInfo,
@ -62,15 +64,17 @@ struct ProviderInfo {
}
pub fn telemetry_enabled(no_telemetry: bool) -> bool {
if no_telemetry {
return false;
}
if cfg!(debug_assertions) {
return env::var(TELEMETRY_ENV_DEBUG)
let enabled = if no_telemetry {
false
} else if cfg!(debug_assertions) {
env::var(TELEMETRY_ENV_DEBUG)
.map(|value| matches!(value.as_str(), "1" | "true" | "TRUE"))
.unwrap_or(false);
}
true
.unwrap_or(false)
} else {
true
};
TELEMETRY_ENABLED.store(enabled, Ordering::Relaxed);
enabled
}
pub fn log_enabled_message() {
@ -107,7 +111,7 @@ async fn attempt_send(client: &Client) {
return;
}
let event = build_event(dt);
let event = build_beacon_event(dt);
if let Err(err) = client.post(TELEMETRY_URL).json(&event).send().await {
tracing::debug!(error = %err, "telemetry request failed");
return;
@ -115,15 +119,12 @@ async fn attempt_send(client: &Client) {
write_last_sent(dt);
}
fn build_event(dt: i64) -> TelemetryEvent {
let eid = load_or_create_id();
TelemetryEvent {
p: "sandbox-agent".to_string(),
fn build_beacon_event(dt: i64) -> TelemetryEvent<BeaconData> {
new_event(
dt,
et: "sandbox".to_string(),
eid,
ev: "entity_beacon".to_string(),
d: TelemetryData {
"sandbox",
"entity_beacon",
BeaconData {
version: env!("CARGO_PKG_VERSION").to_string(),
os: OsInfo {
name: std::env::consts::OS.to_string(),
@ -132,6 +133,23 @@ fn build_event(dt: i64) -> TelemetryEvent {
},
provider: detect_provider(),
},
)
}
/// Assemble a telemetry envelope of the given entity/event type around `data`.
///
/// Every event shares the fixed project id `sandbox-agent`, schema version 1,
/// and the persistent entity id returned by `load_or_create_id()`.
fn new_event<D: Serialize>(
    dt: i64,
    entity_type: &str,
    event_name: &str,
    data: D,
) -> TelemetryEvent<D> {
    TelemetryEvent {
        p: String::from("sandbox-agent"),
        dt,
        et: entity_type.to_owned(),
        eid: load_or_create_id(),
        ev: event_name.to_owned(),
        d: data,
        v: 1,
    }
}
@ -433,3 +451,66 @@ fn metadata_or_none(
Some(map)
}
}
/// Payload serialized into the `session_created` telemetry event.
///
/// Optional fields are omitted from the wire format entirely when `None`
/// (via `skip_serializing_if`) to keep the payload minimal.
#[derive(Debug, Serialize)]
struct SessionCreatedData {
    // Crate version at build time (CARGO_PKG_VERSION).
    version: String,
    // Agent identifier the session was created for.
    agent: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    agent_mode: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    permission_mode: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    model: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    variant: Option<String>,
}
/// Caller-supplied session attributes passed to [`log_session_created`].
///
/// Mirrors `SessionCreatedData` minus the version field, which is filled in
/// by the telemetry layer itself.
pub struct SessionConfig {
    pub agent: String,
    pub agent_mode: Option<String>,
    pub permission_mode: Option<String>,
    pub model: Option<String>,
    pub variant: Option<String>,
}
pub fn log_session_created(config: SessionConfig) {
if !TELEMETRY_ENABLED.load(Ordering::Relaxed) {
return;
}
let event = new_event(
OffsetDateTime::now_utc().unix_timestamp(),
"session",
"session_created",
SessionCreatedData {
version: env!("CARGO_PKG_VERSION").to_string(),
agent: config.agent,
agent_mode: config.agent_mode,
permission_mode: config.permission_mode,
model: config.model,
variant: config.variant,
},
);
spawn_send(event);
}
/// Post one telemetry event on a detached tokio task.
///
/// Failures (client construction or the HTTP send itself) are logged at debug
/// level and otherwise swallowed — telemetry must never affect the caller.
fn spawn_send<D: Serialize + Send + 'static>(event: TelemetryEvent<D>) {
    tokio::spawn(async move {
        let built = Client::builder()
            .timeout(Duration::from_millis(TELEMETRY_TIMEOUT_MS))
            .build();
        match built {
            Err(err) => {
                tracing::debug!(error = %err, "failed to build telemetry client");
            }
            Ok(client) => {
                let sent = client.post(TELEMETRY_URL).json(&event).send().await;
                if let Err(err) = sent {
                    tracing::debug!(error = %err, "telemetry send failed");
                }
            }
        }
    });
}

View file

@ -1,26 +1,22 @@
// Pi RPC integration tests (gated via SANDBOX_TEST_PI + PATH).
include!("../common/http.rs");
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_rpc_session_and_stream() {
fn pi_test_config() -> Option<TestAgentConfig> {
let configs = match test_agents_from_env() {
Ok(configs) => configs,
Err(err) => {
eprintln!("Skipping Pi RPC integration test: {err}");
return;
return None;
}
};
let Some(config) = configs.iter().find(|config| config.agent == AgentId::Pi) else {
return;
};
configs
.into_iter()
.find(|config| config.agent == AgentId::Pi)
}
let app = TestApp::new();
let _guard = apply_credentials(&config.credentials);
install_agent(&app.app, config.agent).await;
let session_id = "pi-rpc-session".to_string();
async fn create_pi_session_with_native(app: &Router, session_id: &str) -> String {
let (status, payload) = send_json(
&app.app,
app,
Method::POST,
&format!("/v1/sessions/{session_id}"),
Some(json!({
@ -33,19 +29,16 @@ async fn pi_rpc_session_and_stream() {
let native_session_id = payload
.get("native_session_id")
.and_then(Value::as_str)
.unwrap_or("");
.unwrap_or("")
.to_string();
assert!(
!native_session_id.is_empty(),
"expected native_session_id for pi session"
);
native_session_id
}
let events = read_turn_stream_events(&app.app, &session_id, Duration::from_secs(120)).await;
assert!(!events.is_empty(), "no events from pi stream");
assert!(
!events.iter().any(is_unparsed_event),
"agent.unparsed event encountered"
);
fn assert_strictly_increasing_sequences(events: &[Value], label: &str) {
let mut last_sequence = 0u64;
for event in events {
let sequence = event
@ -54,8 +47,241 @@ async fn pi_rpc_session_and_stream() {
.expect("missing sequence");
assert!(
sequence > last_sequence,
"sequence did not increase (prev {last_sequence}, next {sequence})"
"{label}: sequence did not increase (prev {last_sequence}, next {sequence})"
);
last_sequence = sequence;
}
}
/// Assert that every event in `events` carries exactly `session_id`.
///
/// Panics with the offending event when an event from another session (or one
/// with a missing/non-string `session_id`) leaked into this session's stream.
fn assert_all_events_for_session(events: &[Value], session_id: &str) {
    for event in events.iter() {
        // Missing or non-string ids compare as the empty string, same as the
        // original unwrap_or_default behavior.
        let actual = match event.get("session_id").and_then(Value::as_str) {
            Some(id) => id,
            None => "",
        };
        assert_eq!(
            actual, session_id,
            "cross-session event detected in {session_id}: {event}"
        );
    }
}
/// Assert that no `item.started` event id repeats within `events`.
///
/// Events that are not `item.started`, or that lack a string
/// `data.item.item_id`, are ignored. `label` prefixes the panic message.
fn assert_item_started_ids_unique(events: &[Value], label: &str) {
    let mut seen = std::collections::HashSet::new();
    let started_ids = events.iter().filter_map(|event| {
        if event.get("type").and_then(Value::as_str) != Some("item.started") {
            return None;
        }
        event
            .get("data")
            .and_then(|data| data.get("item"))
            .and_then(|item| item.get("item_id"))
            .and_then(Value::as_str)
    });
    for item_id in started_ids {
        assert!(
            seen.insert(item_id.to_string()),
            "{label}: duplicate item.started id {item_id}"
        );
    }
}
// Happy-path smoke test: create a pi session, stream one turn, and check the
// stream is non-empty, fully parsed, terminal, and monotonically sequenced.
// Skipped silently when SANDBOX_TEST_PI / env config does not include pi.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_rpc_session_and_stream() {
    let Some(config) = pi_test_config() else {
        return;
    };
    let app = TestApp::new();
    let _guard = apply_credentials(&config.credentials);
    install_agent(&app.app, config.agent).await;
    let session_id = "pi-rpc-session";
    let _native_session_id = create_pi_session_with_native(&app.app, session_id).await;
    let events = read_turn_stream_events(&app.app, session_id, Duration::from_secs(120)).await;
    assert!(!events.is_empty(), "no events from pi stream");
    assert!(
        !events.iter().any(is_unparsed_event),
        "agent.unparsed event encountered"
    );
    assert!(
        should_stop(&events),
        "turn stream did not reach a terminal event"
    );
    assert_strictly_increasing_sequences(&events, "pi_rpc_session_and_stream");
}
// Two pi sessions run a turn concurrently; both must complete cleanly with
// terminal events and no agent.unparsed events. Skipped when pi is not
// configured via the test environment.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_parallel_sessions_turns() {
    let Some(config) = pi_test_config() else {
        return;
    };
    let app = TestApp::new();
    let _guard = apply_credentials(&config.credentials);
    install_agent(&app.app, config.agent).await;
    let session_a = "pi-parallel-a";
    let session_b = "pi-parallel-b";
    create_pi_session_with_native(&app.app, session_a).await;
    create_pi_session_with_native(&app.app, session_b).await;
    // Kick off both prompts at the same time.
    let app_a = app.app.clone();
    let app_b = app.app.clone();
    let send_a = send_message(&app_a, session_a);
    let send_b = send_message(&app_b, session_b);
    tokio::join!(send_a, send_b);
    // Poll both event streams concurrently until each terminates (or times out).
    let app_a = app.app.clone();
    let app_b = app.app.clone();
    let poll_a = poll_events_until(&app_a, session_a, Duration::from_secs(120));
    let poll_b = poll_events_until(&app_b, session_b, Duration::from_secs(120));
    let (events_a, events_b) = tokio::join!(poll_a, poll_b);
    assert!(!events_a.is_empty(), "no events for session A");
    assert!(!events_b.is_empty(), "no events for session B");
    assert!(
        should_stop(&events_a),
        "session A did not reach a terminal event"
    );
    assert!(
        should_stop(&events_b),
        "session B did not reach a terminal event"
    );
    assert!(
        !events_a.iter().any(is_unparsed_event),
        "session A encountered agent.unparsed"
    );
    assert!(
        !events_b.iter().any(is_unparsed_event),
        "session B encountered agent.unparsed"
    );
}
// Cross-session isolation: with two concurrent pi sessions, every event must
// carry its own session id, sequences must stay strictly increasing per
// session, and item.started ids must be unique per session.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_event_isolation() {
    let Some(config) = pi_test_config() else {
        return;
    };
    let app = TestApp::new();
    let _guard = apply_credentials(&config.credentials);
    install_agent(&app.app, config.agent).await;
    let session_a = "pi-isolation-a";
    let session_b = "pi-isolation-b";
    create_pi_session_with_native(&app.app, session_a).await;
    create_pi_session_with_native(&app.app, session_b).await;
    // Run both turns concurrently to maximize the chance of interleaving.
    let app_a = app.app.clone();
    let app_b = app.app.clone();
    let send_a = send_message(&app_a, session_a);
    let send_b = send_message(&app_b, session_b);
    tokio::join!(send_a, send_b);
    let app_a = app.app.clone();
    let app_b = app.app.clone();
    let poll_a = poll_events_until(&app_a, session_a, Duration::from_secs(120));
    let poll_b = poll_events_until(&app_b, session_b, Duration::from_secs(120));
    let (events_a, events_b) = tokio::join!(poll_a, poll_b);
    assert!(should_stop(&events_a), "session A did not complete");
    assert!(should_stop(&events_b), "session B did not complete");
    // Isolation invariants, checked independently per session.
    assert_all_events_for_session(&events_a, session_a);
    assert_all_events_for_session(&events_b, session_b);
    assert_strictly_increasing_sequences(&events_a, "session A");
    assert_strictly_increasing_sequences(&events_b, "session B");
    assert_item_started_ids_unique(&events_a, "session A");
    assert_item_started_ids_unique(&events_b, "session B");
}
// Terminating session A must not break session B: B still completes a turn,
// while A's stream ends with a session.ended event.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_terminate_one_session_does_not_affect_other() {
    let Some(config) = pi_test_config() else {
        return;
    };
    let app = TestApp::new();
    let _guard = apply_credentials(&config.credentials);
    install_agent(&app.app, config.agent).await;
    let session_a = "pi-terminate-a";
    let session_b = "pi-terminate-b";
    create_pi_session_with_native(&app.app, session_a).await;
    create_pi_session_with_native(&app.app, session_b).await;
    // Terminate A before B has done any work.
    let terminate_status = send_status(
        &app.app,
        Method::POST,
        &format!("/v1/sessions/{session_a}/terminate"),
        None,
    )
    .await;
    assert_eq!(
        terminate_status,
        StatusCode::NO_CONTENT,
        "terminate session A"
    );
    // B should still run a full turn to a terminal event.
    send_message(&app.app, session_b).await;
    let events_b = poll_events_until(&app.app, session_b, Duration::from_secs(120)).await;
    assert!(!events_b.is_empty(), "no events for session B");
    assert!(
        should_stop(&events_b),
        "session B did not complete after A terminated"
    );
    // A's (short) stream must contain the session.ended marker.
    let events_a = poll_events_until(&app.app, session_a, Duration::from_secs(10)).await;
    assert!(
        events_a.iter().any(|event| {
            event
                .get("type")
                .and_then(Value::as_str)
                .is_some_and(|ty| ty == "session.ended")
        }),
        "session A missing session.ended after terminate"
    );
}
// Runtime scoping: terminating session A must only stop A's runtime; session
// B's runtime keeps working and its events remain scoped to B.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn pi_runtime_restart_scope() {
    let Some(config) = pi_test_config() else {
        return;
    };
    let app = TestApp::new();
    let _guard = apply_credentials(&config.credentials);
    install_agent(&app.app, config.agent).await;
    let session_a = "pi-restart-scope-a";
    let session_b = "pi-restart-scope-b";
    create_pi_session_with_native(&app.app, session_a).await;
    create_pi_session_with_native(&app.app, session_b).await;
    let terminate_status = send_status(
        &app.app,
        Method::POST,
        &format!("/v1/sessions/{session_a}/terminate"),
        None,
    )
    .await;
    assert_eq!(
        terminate_status,
        StatusCode::NO_CONTENT,
        "terminate session A to stop only its runtime"
    );
    // B's turn must still complete even though A's runtime is gone.
    send_message(&app.app, session_b).await;
    let events_b = poll_events_until(&app.app, session_b, Duration::from_secs(120)).await;
    assert!(
        should_stop(&events_b),
        "session B did not continue after A stopped"
    );
    assert_all_events_for_session(&events_b, session_b);
}

View file

@ -736,6 +736,81 @@ fn normalize_agent_modes(value: &Value) -> Value {
json!({ "modes": normalized })
}
/// Normalize an agent's `/models` response into a stable shape for insta
/// snapshots.
///
/// Always emits `nonEmpty`, `hasDefault`, `defaultInList`, and `hasVariants`
/// (redacted to the literal string `"<redacted>"` for Claude/Opencode, whose
/// variant sets are environment-dependent). For Amp and Mock — agents with a
/// fixed model catalog — the model count, sorted ids, and default model are
/// included verbatim, and for Amp the first model's sorted variant ids too.
fn normalize_agent_models(value: &Value, agent: AgentId) -> Value {
    let models = value
        .get("models")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default();
    let default_model = value.get("defaultModel").and_then(Value::as_str);
    let mut map = Map::new();
    let model_count = models.len();
    map.insert("nonEmpty".to_string(), Value::Bool(model_count > 0));
    map.insert("hasDefault".to_string(), Value::Bool(default_model.is_some()));
    // `is_some_and` over `map_or(false, …)` keeps this consistent with the
    // other Option checks in this function.
    let default_in_list = default_model.is_some_and(|default_id| {
        models
            .iter()
            .any(|model| model.get("id").and_then(Value::as_str) == Some(default_id))
    });
    map.insert("defaultInList".to_string(), Value::Bool(default_in_list));
    let has_variants = models.iter().any(|model| {
        model
            .get("variants")
            .and_then(Value::as_array)
            .is_some_and(|variants| !variants.is_empty())
    });
    match agent {
        AgentId::Claude | AgentId::Opencode => {
            // Variant availability fluctuates for these agents; redact so the
            // snapshot stays stable.
            map.insert(
                "hasVariants".to_string(),
                Value::String("<redacted>".to_string()),
            );
        }
        _ => {
            map.insert("hasVariants".to_string(), Value::Bool(has_variants));
        }
    }
    if matches!(agent, AgentId::Amp | AgentId::Mock) {
        map.insert(
            "modelCount".to_string(),
            Value::Number(model_count.into()),
        );
        let mut ids: Vec<String> = models
            .iter()
            .filter_map(|model| model.get("id").and_then(Value::as_str).map(|id| id.to_string()))
            .collect();
        // Sorted for deterministic snapshot output.
        ids.sort();
        map.insert("ids".to_string(), json!(ids));
        if let Some(default_model) = default_model {
            map.insert(
                "defaultModel".to_string(),
                Value::String(default_model.to_string()),
            );
        }
        // Amp only: snapshot the first model's variant ids (chained `if let`
        // replaces the nested `if` + `if let` of the original).
        if agent == AgentId::Amp {
            if let Some(variants) = models
                .first()
                .and_then(|model| model.get("variants"))
                .and_then(Value::as_array)
            {
                let mut variant_ids: Vec<String> = variants
                    .iter()
                    .filter_map(|variant| variant.as_str().map(|id| id.to_string()))
                    .collect();
                variant_ids.sort();
                map.insert("variants".to_string(), json!(variant_ids));
            }
        }
    }
    Value::Object(map)
}
fn normalize_sessions(value: &Value) -> Value {
let sessions = value
.get("sessions")

View file

@ -162,4 +162,27 @@ async fn agent_endpoints_snapshots() {
insta::assert_yaml_snapshot!(normalize_agent_modes(&modes));
});
}
for config in &configs {
let _guard = apply_credentials(&config.credentials);
let (status, models) = send_json(
&app.app,
Method::GET,
&format!("/v1/agents/{}/models", config.agent.as_str()),
None,
)
.await;
assert_eq!(status, StatusCode::OK, "agent models");
let model_count = models
.get("models")
.and_then(|value| value.as_array())
.map(|models| models.len())
.unwrap_or_default();
assert!(model_count > 0, "agent models should not be empty");
insta::with_settings!({
snapshot_suffix => snapshot_name("agent_models", Some(config.agent)),
}, {
insta::assert_yaml_snapshot!(normalize_agent_models(&models, config.agent));
});
}
}

View file

@ -0,0 +1,19 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
expression: normalize_agent_models(&models, config.agent)
---
nonEmpty: true
hasDefault: true
defaultInList: true
hasVariants: true
modelCount: 4
ids:
- deep
- free
- rush
- smart
defaultModel: smart
variants:
- high
- medium
- xhigh

View file

@ -0,0 +1,8 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
expression: normalize_agent_models(&models, config.agent)
---
nonEmpty: true
hasDefault: true
defaultInList: true
hasVariants: "<redacted>"

View file

@ -0,0 +1,8 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
expression: normalize_agent_models(&models, config.agent)
---
nonEmpty: true
hasDefault: true
defaultInList: true
hasVariants: true

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
expression: normalize_agent_models(&models, config.agent)
---
nonEmpty: true
hasDefault: true
defaultInList: true
hasVariants: false
modelCount: 1
ids:
- mock
defaultModel: mock

View file

@ -0,0 +1,8 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
expression: normalize_agent_models(&models, config.agent)
---
nonEmpty: true
hasDefault: true
defaultInList: true
hasVariants: "<redacted>"

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 145
expression: snapshot_status(status)
---
status: 204

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 129
expression: normalize_agent_list(&agents)
---
agents:
- id: amp
- id: claude
- id: codex
- id: mock
- id: opencode
- id: pi

View file

@ -0,0 +1,14 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 59
expression: "json!({\n \"status\": status.as_u16(), \"payload\": normalize_agent_list(&payload),\n})"
---
payload:
agents:
- id: amp
- id: claude
- id: codex
- id: mock
- id: opencode
- id: pi
status: 200

View file

@ -115,7 +115,7 @@ describe("OpenCode-compatible Event Streaming", () => {
await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Say hello" }],
},
});
@ -145,4 +145,98 @@ describe("OpenCode-compatible Event Streaming", () => {
expect(response.data).toBeDefined();
});
});
describe("session.idle count", () => {
it("should emit exactly one session.idle for echo flow", async () => {
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  const eventStream = await client.event.subscribe();
  const idleEvents: any[] = [];
  // Wait for first idle, then linger 1s for duplicates
  const collectIdle = new Promise<void>((resolve, reject) => {
    let lingerTimer: ReturnType<typeof setTimeout> | null = null;
    const timeout = setTimeout(() => reject(new Error("Timed out waiting for session.idle")), 15_000);
    // Consume the SSE stream in the background; resolve 1s after the first
    // session.idle so any duplicate idle events have time to arrive.
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event.type === "session.idle") {
            idleEvents.push(event);
            if (!lingerTimer) {
              lingerTimer = setTimeout(() => {
                clearTimeout(timeout);
                resolve();
              }, 1000);
            }
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "echo hello" }],
    },
  });
  await collectIdle;
  // Exactly one idle event should have been observed during the linger window.
  expect(idleEvents.length).toBe(1);
});
it("should emit exactly one session.idle for tool flow", async () => {
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  const eventStream = await client.event.subscribe();
  // Capture everything so ordering of tool parts vs. idle can be checked.
  const allEvents: any[] = [];
  const idleEvents: any[] = [];
  // Resolve 1s after the first session.idle so duplicates have time to arrive.
  const collectIdle = new Promise<void>((resolve, reject) => {
    let lingerTimer: ReturnType<typeof setTimeout> | null = null;
    const timeout = setTimeout(() => reject(new Error("Timed out waiting for session.idle")), 15_000);
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          allEvents.push(event);
          if (event.type === "session.idle") {
            idleEvents.push(event);
            if (!lingerTimer) {
              lingerTimer = setTimeout(() => {
                clearTimeout(timeout);
                resolve();
              }, 1000);
            }
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "tool" }],
    },
  });
  await collectIdle;
  expect(idleEvents.length).toBe(1);
  // All tool parts should have been emitted before idle
  const toolParts = allEvents.filter(
    (e) => e.type === "message.part.updated" && e.properties?.part?.type === "tool"
  );
  expect(toolParts.length).toBeGreaterThan(0);
});
});
});

View file

@ -45,7 +45,7 @@ describe("OpenCode-compatible Messaging API", () => {
const response = await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Hello, world!" }],
},
});
@ -58,7 +58,7 @@ describe("OpenCode-compatible Messaging API", () => {
const response = await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Say hello" }],
},
});
@ -72,7 +72,7 @@ describe("OpenCode-compatible Messaging API", () => {
const response = await client.session.promptAsync({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Process this asynchronously" }],
},
});
@ -96,7 +96,7 @@ describe("OpenCode-compatible Messaging API", () => {
await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Test message" }],
},
});
@ -116,7 +116,7 @@ describe("OpenCode-compatible Messaging API", () => {
await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Test" }],
},
});
@ -144,7 +144,7 @@ describe("OpenCode-compatible Messaging API", () => {
await client.session.promptAsync({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "Long running task" }],
},
});

View file

@ -0,0 +1,64 @@
/**
* Tests for OpenCode-compatible provider/model listing.
*/
import { describe, it, expect, beforeAll, afterEach, beforeEach } from "vitest";
import { createOpencodeClient, type OpencodeClient } from "@opencode-ai/sdk";
import { spawnSandboxAgent, buildSandboxAgent, type SandboxAgentHandle } from "./helpers/spawn";
describe("OpenCode-compatible Model API", () => {
  // Fresh agent process + SDK client for every test; torn down in afterEach.
  let handle: SandboxAgentHandle;
  let client: OpencodeClient;

  beforeAll(async () => {
    // Compile the agent binary once for the whole suite.
    await buildSandboxAgent();
  });

  beforeEach(async () => {
    handle = await spawnSandboxAgent({ opencodeCompat: true });
    client = createOpencodeClient({
      baseUrl: `${handle.baseUrl}/opencode`,
      headers: { Authorization: `Bearer ${handle.token}` },
    });
  });

  afterEach(async () => {
    await handle?.dispose();
  });

  it("should list models grouped by agent with real model IDs", async () => {
    const response = await client.provider.list();
    const providers = response.data?.all ?? [];

    // Small lookup helper so each assertion reads as a single expression.
    const byId = (id: string) => providers.find((provider) => provider.id === id);

    // The synthetic "sandbox-agent" provider must no longer be exposed;
    // real backends are surfaced under their own IDs instead.
    expect(byId("sandbox-agent")).toBeUndefined();

    const mockProvider = byId("mock");
    const ampProvider = byId("amp");
    expect(mockProvider).toBeDefined();
    expect(ampProvider).toBeDefined();

    // Each provider advertises its concrete model IDs and family names.
    const mockModels = mockProvider?.models ?? {};
    expect(mockModels["mock"]).toBeDefined();
    expect(mockModels["mock"].id).toBe("mock");
    expect(mockModels["mock"].family).toBe("Mock");

    const ampModels = ampProvider?.models ?? {};
    expect(ampModels["smart"]).toBeDefined();
    expect(ampModels["smart"].id).toBe("smart");
    expect(ampModels["smart"].family).toBe("Amp");

    // Defaults map each provider to its preferred model.
    expect(response.data?.default?.["mock"]).toBe("mock");
    expect(response.data?.default?.["amp"]).toBe("smart");
  });

  it("should keep provider backends visible when discovery is degraded", async () => {
    const response = await client.provider.list();
    const providers = response.data?.all ?? [];
    const ids = providers.map((provider) => provider.id);

    expect(ids.includes("claude")).toBe(true);
    expect(ids.includes("codex")).toBe(true);
    // opencode may appear either as a bare id or namespaced ("opencode:…").
    expect(
      ids.includes("opencode") || ids.some((id) => id.startsWith("opencode:"))
    ).toBe(true);
  });
});

View file

@ -57,7 +57,7 @@ describe("OpenCode-compatible Permission API", () => {
it("should receive permission.asked and reply via global endpoint", async () => {
await client.session.prompt({
sessionID: sessionId,
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: permissionPrompt }],
});
@ -77,7 +77,7 @@ describe("OpenCode-compatible Permission API", () => {
it("should accept permission response for a session", async () => {
await client.session.prompt({
sessionID: sessionId,
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: permissionPrompt }],
});

View file

@ -49,7 +49,7 @@ describe("OpenCode-compatible Question API", () => {
it("should ask a question and accept a reply", async () => {
await client.session.prompt({
sessionID: sessionId,
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: questionPrompt }],
});
@ -67,7 +67,7 @@ describe("OpenCode-compatible Question API", () => {
it("should allow rejecting a question", async () => {
await client.session.prompt({
sessionID: sessionId,
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: questionPrompt }],
});

View file

@ -72,7 +72,7 @@ describe("OpenCode-compatible Tool + File Actions", () => {
await client.session.prompt({
path: { id: sessionId },
body: {
model: { providerID: "sandbox-agent", modelID: "mock" },
model: { providerID: "mock", modelID: "mock" },
parts: [{ type: "text", text: "tool" }],
},
});

View file

@ -0,0 +1,131 @@
---
source: server/packages/sandbox-agent/tests/sessions/multi_turn.rs
assertion_line: 15
expression: value
---
first:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
second:
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 1
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 2
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 3
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 5
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta

View file

@ -0,0 +1,237 @@
---
source: server/packages/sandbox-agent/tests/sessions/permissions.rs
assertion_line: 12
expression: value
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 16
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 17
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 18
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 19
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 20
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 21
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 22
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 23
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 24
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 25
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 26
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 27
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 28
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 29
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 30
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 31
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 32
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 33
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 34
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 35
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 36
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 37
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 38
type: item.delta

View file

@ -0,0 +1,105 @@
---
source: server/packages/sandbox-agent/tests/sessions/questions.rs
assertion_line: 12
expression: value
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
type: item.delta
- question:
id: "<redacted>"
options: 4
status: requested
seq: 16
type: question.requested

View file

@ -0,0 +1,105 @@
---
source: server/packages/sandbox-agent/tests/sessions/session_lifecycle.rs
assertion_line: 12
expression: value
---
session_a:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
session_b:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta

View file

@ -0,0 +1,7 @@
---
source: server/packages/sandbox-agent/tests/sessions/session_lifecycle.rs
assertion_line: 12
expression: value
---
healthy: true
nativeSessionId: "<redacted>"

View file

@ -0,0 +1,46 @@
---
source: server/packages/sandbox-agent/tests/sessions/../common/http.rs
assertion_line: 1001
expression: normalized
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- item:
content_types: []
kind: message
role: assistant
status: completed
seq: 6
type: item.completed

View file

@ -4,9 +4,9 @@ use serde_json::Value;
use crate::amp as schema;
use crate::{
ContentPart, ErrorData, EventConversion, ItemDeltaData, ItemEventData, ItemKind, ItemRole,
ItemStatus, SessionEndReason, SessionEndedData, TerminatedBy, UniversalEventData,
UniversalEventType, UniversalItem,
turn_completed_event, ContentPart, ErrorData, EventConversion, ItemDeltaData, ItemEventData,
ItemKind, ItemRole, ItemStatus, SessionEndReason, SessionEndedData, TerminatedBy,
UniversalEventData, UniversalEventType, UniversalItem,
};
static TEMP_ID: AtomicU64 = AtomicU64::new(1);
@ -99,6 +99,7 @@ pub fn event_to_universal(
));
}
schema::StreamJsonMessageType::Done => {
events.push(turn_completed_event());
events.push(
EventConversion::new(
UniversalEventType::SessionEnded,

View file

@ -3,9 +3,9 @@ use std::sync::atomic::{AtomicU64, Ordering};
use serde_json::Value;
use crate::{
ContentPart, EventConversion, ItemDeltaData, ItemEventData, ItemKind, ItemRole, ItemStatus,
PermissionEventData, PermissionStatus, QuestionEventData, QuestionStatus, SessionStartedData,
UniversalEventData, UniversalEventType, UniversalItem,
turn_completed_event, ContentPart, EventConversion, ItemDeltaData, ItemEventData, ItemKind,
ItemRole, ItemStatus, PermissionEventData, PermissionStatus, QuestionEventData, QuestionStatus,
SessionStartedData, UniversalEventData, UniversalEventType, UniversalItem,
};
static TEMP_ID: AtomicU64 = AtomicU64::new(1);
@ -420,10 +420,13 @@ fn result_event_to_universal(event: &Value, session_id: &str) -> Vec<EventConver
status: ItemStatus::Completed,
};
vec![EventConversion::new(
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item: message_item }),
)]
vec![
EventConversion::new(
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item: message_item }),
),
turn_completed_event(),
]
}
fn claude_message_id(event: &Value, session_id: &str) -> String {

View file

@ -8,8 +8,8 @@ pub use sandbox_agent_extracted_agent_schemas::{amp, claude, codex, opencode, pi
pub mod agents;
pub use agents::{
amp as convert_amp, claude as convert_claude, codex as convert_codex, opencode as convert_opencode,
pi as convert_pi,
amp as convert_amp, claude as convert_claude, codex as convert_codex,
opencode as convert_opencode, pi as convert_pi,
};
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
@ -317,6 +317,27 @@ impl EventConversion {
}
}
/// Builds the synthetic `ItemCompleted` event that marks the end of a turn.
///
/// The event wraps a system-role status item whose single content part is
/// labelled `"turn.completed"`, and is flagged as synthetic so downstream
/// consumers can distinguish it from agent-emitted items.
pub fn turn_completed_event() -> EventConversion {
    // Single status part carrying the turn-boundary marker label.
    let marker = ContentPart::Status {
        label: "turn.completed".to_string(),
        detail: None,
    };

    // Completed system status item with no native/parent identifiers.
    let item = UniversalItem {
        item_id: String::new(),
        native_item_id: None,
        parent_id: None,
        kind: ItemKind::Status,
        role: Some(ItemRole::System),
        content: vec![marker],
        status: ItemStatus::Completed,
    };

    EventConversion::new(
        UniversalEventType::ItemCompleted,
        UniversalEventData::Item(ItemEventData { item }),
    )
    .synthetic()
}
pub fn item_from_text(role: ItemRole, text: String) -> UniversalItem {
UniversalItem {
item_id: String::new(),