Merge branch 'main' into feat/support-pi

This commit is contained in:
Nathan Flurry 2026-02-10 22:27:03 -08:00
commit 4c6c5983c0
156 changed files with 16196 additions and 2338 deletions

View file

@ -63,7 +63,9 @@ pub fn extract_claude_credentials(
];
for path in config_paths {
let data = read_json_file(&path)?;
let Some(data) = read_json_file(&path) else {
continue;
};
for key_path in &key_paths {
if let Some(key) = read_string_field(&data, key_path) {
if key.starts_with("sk-ant-") {

View file

@ -21,6 +21,7 @@ pub enum AgentId {
Opencode,
Amp,
Pi,
Cursor,
Mock,
}
@ -32,54 +33,20 @@ impl AgentId {
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
AgentId::Pi => "pi",
AgentId::Cursor => "cursor",
AgentId::Mock => "mock",
}
}
pub fn binary_name(self) -> &'static str {
match self {
AgentId::Claude => {
if cfg!(windows) {
"claude.exe"
} else {
"claude"
}
}
AgentId::Codex => {
if cfg!(windows) {
"codex.exe"
} else {
"codex"
}
}
AgentId::Opencode => {
if cfg!(windows) {
"opencode.exe"
} else {
"opencode"
}
}
AgentId::Amp => {
if cfg!(windows) {
"amp.exe"
} else {
"amp"
}
}
AgentId::Pi => {
if cfg!(windows) {
"pi.exe"
} else {
"pi"
}
}
AgentId::Mock => {
if cfg!(windows) {
"mock.exe"
} else {
"mock"
}
}
AgentId::Claude => "claude",
AgentId::Codex => "codex",
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
AgentId::Pi => "pi",
AgentId::Cursor => "cursor-agent",
AgentId::Mock => "mock",
}
}
@ -90,6 +57,7 @@ impl AgentId {
"opencode" => Some(AgentId::Opencode),
"amp" => Some(AgentId::Amp),
"pi" => Some(AgentId::Pi),
"cursor" => Some(AgentId::Cursor),
"mock" => Some(AgentId::Mock),
_ => None,
}
@ -191,6 +159,7 @@ impl AgentManager {
}
AgentId::Amp => install_amp(&install_path, self.platform, options.version.as_deref())?,
AgentId::Pi => install_pi(&install_path, self.platform, options.version.as_deref())?,
AgentId::Cursor => install_cursor(&install_path, self.platform, options.version.as_deref())?,
AgentId::Mock => {
if !install_path.exists() {
fs::write(&install_path, b"mock")?;
@ -208,9 +177,7 @@ impl AgentManager {
if agent == AgentId::Mock {
return true;
}
self.binary_path(agent).exists()
|| find_in_path(agent.binary_name()).is_some()
|| default_install_dir().join(agent.binary_name()).exists()
self.binary_path(agent).exists() || find_in_path(agent.binary_name()).is_some()
}
pub fn binary_path(&self, agent: AgentId) -> PathBuf {
@ -305,6 +272,21 @@ impl AgentManager {
if let Some(variant) = options.variant.as_deref() {
command.arg("--variant").arg(variant);
}
if options.permission_mode.as_deref() == Some("bypass") {
command.arg("--dangerously-skip-permissions");
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("-s").arg(session_id);
}
command.arg(&options.prompt);
}
AgentId::Cursor => {
// cursor-agent typically runs as HTTP server on localhost:32123
// For CLI usage similar to opencode
command.arg("run").arg("--format").arg("json");
if let Some(model) = options.model.as_deref() {
command.arg("-m").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("-s").arg(session_id);
}
@ -386,6 +368,12 @@ impl AgentManager {
options.streaming_input = true;
}
let mut command = self.build_command(agent, &options)?;
// Pass environment variables to the agent process (e.g., ANTHROPIC_API_KEY)
for (key, value) in &options.env {
command.env(key, value);
}
if matches!(agent, AgentId::Codex | AgentId::Claude) {
command.stdin(Stdio::piped());
}
@ -687,6 +675,9 @@ impl AgentManager {
if let Some(variant) = options.variant.as_deref() {
command.arg("--variant").arg(variant);
}
if options.permission_mode.as_deref() == Some("bypass") {
command.arg("--dangerously-skip-permissions");
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("-s").arg(session_id);
}
@ -695,6 +686,16 @@ impl AgentManager {
AgentId::Amp => {
return Ok(build_amp_command(&path, &working_dir, options));
}
AgentId::Cursor => {
command.arg("run").arg("--format").arg("json");
if let Some(model) = options.model.as_deref() {
command.arg("-m").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("-s").arg(session_id);
}
command.arg(&options.prompt);
}
AgentId::Pi => {
unreachable!("Pi is handled by router RPC runtime");
}
@ -718,10 +719,6 @@ impl AgentManager {
if let Some(path) = find_in_path(agent.binary_name()) {
return Ok(path);
}
let fallback = default_install_dir().join(agent.binary_name());
if fallback.exists() {
return Ok(fallback);
}
Err(AgentError::BinaryNotFound { agent })
}
}
@ -832,7 +829,13 @@ fn parse_version_output(output: &std::process::Output) -> Option<String> {
.lines()
.map(str::trim)
.find(|line| !line.is_empty())
.map(|line| line.to_string())
.map(|line| {
// Strip trailing metadata like " (released ...)" from version strings
match line.find(" (") {
Some(pos) => line[..pos].to_string(),
None => line.to_string(),
}
})
}
fn parse_jsonl(text: &str) -> Vec<Value> {
@ -1042,6 +1045,14 @@ fn extract_session_id(agent: AgentId, events: &[Value]) -> Option<String> {
return Some(id);
}
}
AgentId::Cursor => {
if let Some(id) = event.get("session_id").and_then(Value::as_str) {
return Some(id.to_string());
}
if let Some(id) = event.get("sessionId").and_then(Value::as_str) {
return Some(id.to_string());
}
}
AgentId::Mock => {}
}
}
@ -1125,6 +1136,7 @@ fn extract_result_text(agent: AgentId, events: &[Value]) -> Option<String> {
}
}
AgentId::Pi => extract_pi_result_text(events),
AgentId::Cursor => None,
AgentId::Mock => None,
}
}
@ -1251,26 +1263,21 @@ fn spawn_amp(
let mut args: Vec<&str> = Vec::new();
if flags.execute {
args.push("--execute");
} else if flags.print {
args.push("--print");
args.push(&options.prompt);
}
if flags.output_format {
args.push("--output-format");
args.push("stream-json");
args.push("--stream-json");
}
if flags.dangerously_skip_permissions && options.permission_mode.as_deref() == Some("bypass") {
args.push("--dangerously-skip-permissions");
args.push("--dangerously-allow-all");
}
let mut command = Command::new(path);
command.current_dir(working_dir);
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--continue").arg(session_id);
}
command.args(&args).arg(&options.prompt);
command.args(&args);
for (key, value) in &options.env {
command.env(key, value);
}
@ -1294,24 +1301,19 @@ fn build_amp_command(path: &Path, working_dir: &Path, options: &SpawnOptions) ->
let flags = detect_amp_flags(path, working_dir).unwrap_or_default();
let mut command = Command::new(path);
command.current_dir(working_dir);
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--continue").arg(session_id);
}
if flags.execute {
command.arg("--execute");
} else if flags.print {
command.arg("--print");
command.arg(&options.prompt);
}
if flags.output_format {
command.arg("--output-format").arg("stream-json");
command.arg("--stream-json");
}
if flags.dangerously_skip_permissions && options.permission_mode.as_deref() == Some("bypass") {
command.arg("--dangerously-skip-permissions");
command.arg("--dangerously-allow-all");
}
command.arg(&options.prompt);
for (key, value) in &options.env {
command.env(key, value);
}
@ -1321,7 +1323,6 @@ fn build_amp_command(path: &Path, working_dir: &Path, options: &SpawnOptions) ->
#[derive(Debug, Default, Clone, Copy)]
struct AmpFlags {
execute: bool,
print: bool,
output_format: bool,
dangerously_skip_permissions: bool,
}
@ -1339,9 +1340,8 @@ fn detect_amp_flags(path: &Path, working_dir: &Path) -> Option<AmpFlags> {
);
Some(AmpFlags {
execute: text.contains("--execute"),
print: text.contains("--print"),
output_format: text.contains("--output-format"),
dangerously_skip_permissions: text.contains("--dangerously-skip-permissions"),
output_format: text.contains("--stream-json"),
dangerously_skip_permissions: text.contains("--dangerously-allow-all"),
})
}
@ -1350,23 +1350,19 @@ fn spawn_amp_fallback(
working_dir: &Path,
options: &SpawnOptions,
) -> Result<std::process::Output, AgentError> {
let mut attempts = vec![
let mut attempts: Vec<Vec<&str>> = vec![
vec!["--execute"],
vec!["--print", "--output-format", "stream-json"],
vec!["--output-format", "stream-json"],
vec!["--dangerously-skip-permissions"],
vec!["stream-json"],
vec!["--dangerously-allow-all"],
vec![],
];
if options.permission_mode.as_deref() != Some("bypass") {
attempts.retain(|args| !args.contains(&"--dangerously-skip-permissions"));
attempts.retain(|args| !args.contains(&"--dangerously-allow-all"));
}
for args in attempts {
let mut command = Command::new(path);
command.current_dir(working_dir);
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--continue").arg(session_id);
}
@ -1385,9 +1381,6 @@ fn spawn_amp_fallback(
let mut command = Command::new(path);
command.current_dir(working_dir);
if let Some(model) = options.model.as_deref() {
command.arg("--model").arg(model);
}
if let Some(session_id) = options.session_id.as_deref() {
command.arg("--continue").arg(session_id);
}
@ -1409,10 +1402,28 @@ fn find_in_path(binary_name: &str) -> Option<PathBuf> {
None
}
fn default_install_dir() -> PathBuf {
dirs::data_dir()
.map(|dir| dir.join("sandbox-agent").join("bin"))
.unwrap_or_else(|| PathBuf::from(".").join(".sandbox-agent").join("bin"))
/// Download the standalone `cursor-agent` binary for `platform` and install it
/// at `path` with the executable bit set.
///
/// `_version` is currently ignored: only "latest" is fetched until a versioned
/// release URL scheme for cursor-agent is confirmed.
///
/// # Errors
/// Returns `AgentError` when the URL fails to parse, the download fails, or
/// the binary cannot be written.
fn install_cursor(path: &Path, platform: Platform, _version: Option<&str>) -> Result<(), AgentError> {
    // Note: cursor-agent binary URL needs to be verified
    // Cursor Pro includes cursor-agent, typically installed via: curl -fsS https://cursor.com/install | bash
    // For sandbox-agent, we need standalone cursor-agent binary
    // TODO: Determine correct download URL for cursor-agent releases
    let platform_segment = match platform {
        Platform::LinuxX64 | Platform::LinuxX64Musl => "linux-x64",
        Platform::LinuxArm64 => "linux-arm64",
        Platform::MacosArm64 => "darwin-arm64",
        Platform::MacosX64 => "darwin-x64",
    };
    // Placeholder URL - needs to be updated with actual cursor-agent release URL.
    // `{platform_segment}` is captured inline; a redundant `name = name`
    // format argument is unnecessary since Rust 2021.
    let url = Url::parse(&format!(
        "https://cursor.com/api/v1/releases/latest/download/cursor-agent-{platform_segment}"
    ))?;
    let bytes = download_bytes(&url)?;
    write_executable(path, &bytes)?;
    Ok(())
}
fn download_bytes(url: &Url) -> Result<Vec<u8>, AgentError> {

View file

@ -159,6 +159,7 @@ pub fn test_agents_from_env() -> Result<Vec<TestAgentConfig>, TestAgentConfigErr
}
credentials_with(anthropic_cred.clone(), openai_cred.clone())
}
AgentId::Cursor => credentials_with(None, None),
AgentId::Mock => credentials_with(None, None),
};
configs.push(TestAgentConfig { agent, credentials });

View file

@ -73,3 +73,32 @@ fn test_amp_message() {
assert!(json.contains("user"));
assert!(json.contains("Hello"));
}
#[test]
fn test_amp_stream_json_message_types() {
    // Helper: parse one raw stream-json line, panicking on malformed input.
    fn parse(raw: &str) -> amp::StreamJsonMessage {
        serde_json::from_str(raw).unwrap()
    }

    // Newer structured message types all round-trip through the parser.
    assert!(matches!(
        parse(r#"{"type":"system","subtype":"init","cwd":"/tmp","session_id":"sess-1","tools":["Bash"],"mcp_servers":[]}"#).type_,
        amp::StreamJsonMessageType::System
    ));
    assert!(matches!(
        parse(r#"{"type":"user","message":{"role":"user","content":"Hello"},"session_id":"sess-1"}"#).type_,
        amp::StreamJsonMessageType::User
    ));
    assert!(matches!(
        parse(r#"{"type":"assistant","message":{"role":"assistant","content":"Hi there"},"session_id":"sess-1"}"#).type_,
        amp::StreamJsonMessageType::Assistant
    ));
    assert!(matches!(
        parse(r#"{"type":"result","subtype":"success","duration_ms":1000,"is_error":false,"num_turns":1,"result":"Done","session_id":"sess-1"}"#).type_,
        amp::StreamJsonMessageType::Result
    ));

    // Legacy message types remain parseable for backward compatibility.
    assert!(matches!(
        parse(r#"{"type":"message","id":"msg-1","content":"Hello"}"#).type_,
        amp::StreamJsonMessageType::Message
    ));
    assert!(matches!(
        parse(r#"{"type":"done"}"#).type_,
        amp::StreamJsonMessageType::Done
    ));
}

View file

@ -36,6 +36,9 @@ tracing-logfmt.workspace = true
tracing-subscriber.workspace = true
include_dir.workspace = true
base64.workspace = true
toml_edit.workspace = true
tar.workspace = true
zip.workspace = true
tempfile = { workspace = true, optional = true }
[target.'cfg(unix)'.dependencies]

View file

@ -17,7 +17,15 @@ fn main() {
println!("cargo:rerun-if-env-changed=SANDBOX_AGENT_SKIP_INSPECTOR");
println!("cargo:rerun-if-env-changed=SANDBOX_AGENT_VERSION");
println!("cargo:rerun-if-changed={}", dist_dir.display());
let dist_exists = dist_dir.exists();
if dist_exists {
println!("cargo:rerun-if-changed={}", dist_dir.display());
} else {
println!(
"cargo:warning=Inspector frontend missing at {}. Embedding disabled; set SANDBOX_AGENT_SKIP_INSPECTOR=1 to silence or build the inspector to embed it.",
dist_dir.display()
);
}
// Rebuild when the git HEAD changes so BUILD_ID stays current.
let git_head = manifest_dir.join(".git/HEAD");
@ -36,7 +44,7 @@ fn main() {
generate_version(&out_dir);
generate_build_id(&out_dir);
let skip = env::var("SANDBOX_AGENT_SKIP_INSPECTOR").is_ok();
let skip = env::var("SANDBOX_AGENT_SKIP_INSPECTOR").is_ok() || !dist_exists;
let out_file = out_dir.join("inspector_assets.rs");
if skip {
@ -44,13 +52,6 @@ fn main() {
return;
}
if !dist_dir.exists() {
panic!(
"Inspector frontend missing at {}. Run `pnpm --filter @sandbox-agent/inspector build` (or `pnpm -C frontend/packages/inspector build`) or set SANDBOX_AGENT_SKIP_INSPECTOR=1 to skip embedding.",
dist_dir.display()
);
}
let dist_literal = quote_path(&dist_dir);
let contents = format!(
"pub const INSPECTOR_ENABLED: bool = true;\n\
@ -98,26 +99,23 @@ fn generate_version(out_dir: &Path) {
fn generate_build_id(out_dir: &Path) {
use std::process::Command;
let build_id = Command::new("git")
let source_id = Command::new("git")
.args(["rev-parse", "--short", "HEAD"])
.output()
.ok()
.filter(|o| o.status.success())
.and_then(|o| String::from_utf8(o.stdout).ok())
.map(|s| s.trim().to_string())
.unwrap_or_else(|| {
// Fallback: use the package version + compile-time timestamp
let version = env::var("CARGO_PKG_VERSION").unwrap_or_default();
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.map(|d| d.as_secs().to_string())
.unwrap_or_default();
format!("{version}-{timestamp}")
});
.unwrap_or_else(|| env::var("CARGO_PKG_VERSION").unwrap_or_default());
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.map(|d| d.as_nanos().to_string())
.unwrap_or_else(|_| "0".to_string());
let build_id = format!("{source_id}-{timestamp}");
let out_file = out_dir.join("build_id.rs");
let contents = format!(
"/// Unique identifier for this build (git short hash or version-timestamp fallback).\n\
"/// Unique identifier for this build (source id + build timestamp).\n\
pub const BUILD_ID: &str = \"{}\";\n",
build_id
);

View file

@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use std::process::{Command as ProcessCommand, Stdio};
@ -13,12 +14,14 @@ mod build_version {
}
use crate::router::{build_router_with_state, shutdown_servers};
use crate::router::{
AgentInstallRequest, AppState, AuthConfig, BrandingMode, CreateSessionRequest, MessageRequest,
PermissionReply, PermissionReplyRequest, QuestionReplyRequest,
AgentInstallRequest, AppState, AuthConfig, BrandingMode, CreateSessionRequest, McpServerConfig,
MessageRequest, PermissionReply, PermissionReplyRequest, QuestionReplyRequest, SkillSource,
SkillsConfig,
};
use crate::router::{
AgentListResponse, AgentModelsResponse, AgentModesResponse, CreateSessionResponse,
EventsResponse, SessionListResponse,
EventsResponse, FsActionResponse, FsEntry, FsMoveRequest, FsMoveResponse, FsStat,
FsUploadBatchResponse, FsWriteResponse, SessionListResponse,
};
use crate::server_logs::ServerLogs;
use crate::telemetry;
@ -68,6 +71,10 @@ pub struct GigacodeCli {
#[arg(long, short = 'n', global = true)]
pub no_token: bool,
/// Bypass all permission checks (auto-approve tool calls).
#[arg(long, global = true)]
pub yolo: bool,
}
#[derive(Subcommand, Debug)]
@ -127,8 +134,9 @@ pub struct OpencodeArgs {
#[arg(long)]
session_title: Option<String>,
/// Bypass all permission checks (auto-approve tool calls).
#[arg(long)]
opencode_bin: Option<PathBuf>,
pub yolo: bool,
}
impl Default for OpencodeArgs {
@ -137,7 +145,7 @@ impl Default for OpencodeArgs {
host: DEFAULT_HOST.to_string(),
port: DEFAULT_PORT,
session_title: None,
opencode_bin: None,
yolo: false,
}
}
}
@ -171,6 +179,10 @@ pub struct DaemonStartArgs {
#[arg(long, short = 'p', default_value_t = DEFAULT_PORT)]
port: u16,
/// If the daemon is already running but outdated, stop and restart it.
#[arg(long, default_value_t = false)]
upgrade: bool,
}
#[derive(Args, Debug)]
@ -197,6 +209,8 @@ pub enum ApiCommand {
Agents(AgentsArgs),
/// Create sessions and interact with session events.
Sessions(SessionsArgs),
/// Manage filesystem entries.
Fs(FsArgs),
}
#[derive(Subcommand, Debug)]
@ -220,6 +234,12 @@ pub struct SessionsArgs {
command: SessionsCommand,
}
#[derive(Args, Debug)]
// Container for the `api fs` subcommand tree; clap dispatches into `FsCommand`.
// Plain `//` comments keep clap's generated --help output unchanged.
pub struct FsArgs {
    #[command(subcommand)]
    // Which filesystem operation to run (entries/read/write/delete/...).
    command: FsCommand,
}
#[derive(Subcommand, Debug)]
pub enum AgentsCommand {
/// List all agents and install status.
@ -267,6 +287,27 @@ pub enum SessionsCommand {
ReplyPermission(PermissionReplyArgs),
}
#[derive(Subcommand, Debug)]
// One variant per filesystem HTTP endpoint exposed by the server; each carries
// its own clap argument struct. Handled by `run_fs`.
pub enum FsCommand {
    /// List directory entries.
    Entries(FsEntriesArgs),
    /// Read a file.
    Read(FsReadArgs),
    /// Write a file.
    Write(FsWriteArgs),
    /// Delete a file or directory.
    Delete(FsDeleteArgs),
    /// Create a directory.
    Mkdir(FsMkdirArgs),
    /// Move a file or directory.
    Move(FsMoveArgs),
    /// Stat a file or directory.
    Stat(FsStatArgs),
    /// Upload a tar archive and extract it.
    // Explicit kebab-case name; the derived default would be "upload-batch"
    // only with clap's rename rules, so it is pinned here.
    #[command(name = "upload-batch")]
    UploadBatch(FsUploadBatchArgs),
}
#[derive(Args, Debug, Clone)]
pub struct ClientArgs {
#[arg(long, short = 'e')]
@ -318,6 +359,10 @@ pub struct CreateSessionArgs {
variant: Option<String>,
#[arg(long, short = 'A')]
agent_version: Option<String>,
#[arg(long)]
mcp_config: Option<PathBuf>,
#[arg(long)]
skill: Vec<PathBuf>,
#[command(flatten)]
client: ClientArgs,
}
@ -401,6 +446,91 @@ pub struct PermissionReplyArgs {
client: ClientArgs,
}
#[derive(Args, Debug)]
// Arguments for `fs entries`: list a directory.
// NOTE: `//` comments are used throughout these arg structs on purpose —
// `///` doc comments on clap-derive fields would change --help output.
pub struct FsEntriesArgs {
    #[arg(long)]
    // Directory to list; server default is used when absent.
    path: Option<String>,
    #[arg(long)]
    // Optional session scope for the filesystem operation.
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs read`: fetch a file's raw bytes.
pub struct FsReadArgs {
    // File path (positional).
    path: String,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs write`: exactly one of --content / --from-file must be
// given (validated in `run_fs`, not by clap).
pub struct FsWriteArgs {
    // Destination file path (positional).
    path: String,
    #[arg(long)]
    // Inline file contents.
    content: Option<String>,
    #[arg(long = "from-file")]
    // Read the contents from a local file instead.
    from_file: Option<PathBuf>,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs delete`: remove a file or (with --recursive) a directory.
pub struct FsDeleteArgs {
    path: String,
    #[arg(long)]
    // Delete directories and their contents.
    recursive: bool,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs mkdir`: create a directory.
pub struct FsMkdirArgs {
    path: String,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs move`: rename/move an entry, optionally clobbering the
// destination.
pub struct FsMoveArgs {
    // Source path (positional).
    from: String,
    // Destination path (positional).
    to: String,
    #[arg(long)]
    // Replace the destination if it already exists.
    overwrite: bool,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs stat`: fetch metadata for a file or directory.
pub struct FsStatArgs {
    path: String,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}

#[derive(Args, Debug)]
// Arguments for `fs upload-batch`: stream a local tar archive to the server
// for extraction.
pub struct FsUploadBatchArgs {
    #[arg(long = "tar")]
    // Local tar archive to upload.
    tar_path: PathBuf,
    #[arg(long)]
    // Extraction root on the server; server default is used when absent.
    path: Option<String>,
    #[arg(long)]
    session_id: Option<String>,
    #[command(flatten)]
    client: ClientArgs,
}
#[derive(Args, Debug)]
pub struct CredentialsExtractArgs {
#[arg(long, short = 'a', value_enum)]
@ -428,6 +558,8 @@ pub struct CredentialsExtractEnvArgs {
#[derive(Debug, Error)]
pub enum CliError {
#[error("missing --token or --no-token for server mode")]
MissingToken,
#[error("invalid cors origin: {0}")]
InvalidCorsOrigin(String),
#[error("invalid cors method: {0}")]
@ -585,6 +717,7 @@ fn run_api(command: &ApiCommand, cli: &CliConfig) -> Result<(), CliError> {
match command {
ApiCommand::Agents(subcommand) => run_agents(&subcommand.command, cli),
ApiCommand::Sessions(subcommand) => run_sessions(&subcommand.command, cli),
ApiCommand::Fs(subcommand) => run_fs(&subcommand.command, cli),
}
}
@ -596,26 +729,57 @@ fn run_opencode(cli: &CliConfig, args: &OpencodeArgs) -> Result<(), CliError> {
};
write_stderr_line(&format!("\nEXPERIMENTAL: Please report bugs to:\n- GitHub: https://github.com/rivet-dev/sandbox-agent/issues\n- Discord: https://rivet.dev/discord\n\n{name} is powered by:\n- OpenCode (TUI): https://opencode.ai/\n- Sandbox Agent SDK (multi-agent compatibility): https://sandboxagent.dev/\n\n"))?;
let yolo = args.yolo;
let token = cli.token.clone();
let base_url = format!("http://{}:{}", args.host, args.port);
let has_proxy_env = std::env::var_os("HTTP_PROXY").is_some()
|| std::env::var_os("http_proxy").is_some()
|| std::env::var_os("HTTPS_PROXY").is_some()
|| std::env::var_os("https_proxy").is_some();
let has_no_proxy_env =
std::env::var_os("NO_PROXY").is_some() || std::env::var_os("no_proxy").is_some();
write_stderr_line(&format!(
"gigacode startup: ensuring daemon at {base_url} (token: {}, proxy env: {}, no_proxy env: {})",
if token.is_some() { "set" } else { "unset" },
if has_proxy_env { "set" } else { "unset" },
if has_no_proxy_env { "set" } else { "unset" }
))?;
crate::daemon::ensure_running(cli, &args.host, args.port, token.as_deref())?;
write_stderr_line("gigacode startup: daemon is healthy")?;
let session_id =
create_opencode_session(&base_url, token.as_deref(), args.session_title.as_deref())?;
write_stdout_line(&format!("OpenCode session: {session_id}"))?;
let attach_session_id = if args.session_title.is_some() || yolo {
write_stderr_line("gigacode startup: creating OpenCode session via /opencode/session")?;
let session_id = create_opencode_session(
&base_url,
token.as_deref(),
args.session_title.as_deref(),
yolo,
)?;
write_stdout_line(&format!("OpenCode session: {session_id}"))?;
Some(session_id)
} else {
write_stderr_line("gigacode startup: attaching OpenCode without precreating a session")?;
None
};
let attach_url = format!("{base_url}/opencode");
let opencode_bin = resolve_opencode_bin(args.opencode_bin.as_ref())?;
write_stderr_line("gigacode startup: resolving OpenCode binary (installing if needed)")?;
let opencode_bin = resolve_opencode_bin()?;
write_stderr_line(&format!(
"gigacode startup: launching OpenCode attach using {}",
opencode_bin.display()
))?;
let mut opencode_cmd = ProcessCommand::new(opencode_bin);
opencode_cmd
.arg("attach")
.arg(&attach_url)
.arg("--session")
.arg(&session_id)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
if let Some(session_id) = attach_session_id.as_deref() {
opencode_cmd.arg("--session").arg(session_id);
}
if let Some(token) = token.as_deref() {
opencode_cmd.arg("--password").arg(token);
}
@ -636,6 +800,9 @@ fn run_opencode(cli: &CliConfig, args: &OpencodeArgs) -> Result<(), CliError> {
fn run_daemon(command: &DaemonCommand, cli: &CliConfig) -> Result<(), CliError> {
let token = cli.token.as_deref();
match command {
DaemonCommand::Start(args) if args.upgrade => {
crate::daemon::ensure_running(cli, &args.host, args.port, token)
}
DaemonCommand::Start(args) => crate::daemon::start(cli, &args.host, args.port, token),
DaemonCommand::Stop(args) => crate::daemon::stop(&args.host, args.port),
DaemonCommand::Status(args) => {
@ -686,6 +853,32 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
}
SessionsCommand::Create(args) => {
let ctx = ClientContext::new(cli, &args.client)?;
let mcp = if let Some(path) = &args.mcp_config {
let text = std::fs::read_to_string(path)?;
let parsed = serde_json::from_str::<
std::collections::BTreeMap<String, McpServerConfig>,
>(&text)?;
Some(parsed)
} else {
None
};
let skills = if args.skill.is_empty() {
None
} else {
Some(SkillsConfig {
sources: args
.skill
.iter()
.map(|path| SkillSource {
source_type: "local".to_string(),
source: path.to_string_lossy().to_string(),
skills: None,
git_ref: None,
subpath: None,
})
.collect(),
})
};
let body = CreateSessionRequest {
agent: args.agent.clone(),
agent_mode: args.agent_mode.clone(),
@ -693,6 +886,10 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
model: args.model.clone(),
variant: args.variant.clone(),
agent_version: args.agent_version.clone(),
directory: None,
title: None,
mcp,
skills,
};
let path = format!("{API_PREFIX}/sessions/{}", args.session_id);
let response = ctx.post(&path, &body)?;
@ -702,6 +899,7 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
let ctx = ClientContext::new(cli, &args.client)?;
let body = MessageRequest {
message: args.message.clone(),
attachments: Vec::new(),
};
let path = format!("{API_PREFIX}/sessions/{}/messages", args.session_id);
let response = ctx.post(&path, &body)?;
@ -711,6 +909,7 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
let ctx = ClientContext::new(cli, &args.client)?;
let body = MessageRequest {
message: args.message.clone(),
attachments: Vec::new(),
};
let path = format!("{API_PREFIX}/sessions/{}/messages/stream", args.session_id);
let response = ctx.post_with_query(
@ -807,18 +1006,145 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
}
}
/// Dispatch an `api fs` subcommand to the matching filesystem HTTP endpoint
/// and print the server's response.
///
/// Every arm builds a `ClientContext` from the global CLI config plus the
/// per-command client args, issues one request, and renders the body as JSON
/// (or raw bytes for `read`). Errors from the HTTP layer and local file I/O
/// surface as `CliError`.
fn run_fs(command: &FsCommand, cli: &CliConfig) -> Result<(), CliError> {
    match command {
        // GET /fs/entries — list a directory. `path` is optional; the server
        // picks its default root when it is absent.
        FsCommand::Entries(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let response = ctx.get_with_query(
                &format!("{API_PREFIX}/fs/entries"),
                &[
                    ("path", args.path.clone()),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_json_response::<Vec<FsEntry>>(response)
        }
        // GET /fs/file — stream the raw file bytes straight to stdout.
        FsCommand::Read(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let response = ctx.get_with_query(
                &format!("{API_PREFIX}/fs/file"),
                &[
                    ("path", Some(args.path.clone())),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_binary_response(response)
        }
        // PUT /fs/file — body comes from exactly one of --content /
        // --from-file; both or neither is a usage error.
        FsCommand::Write(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let body = match (&args.content, &args.from_file) {
                (Some(_), Some(_)) => {
                    return Err(CliError::Server(
                        "use --content or --from-file, not both".to_string(),
                    ))
                }
                (None, None) => {
                    return Err(CliError::Server(
                        "write requires --content or --from-file".to_string(),
                    ))
                }
                (Some(content), None) => content.clone().into_bytes(),
                (None, Some(path)) => std::fs::read(path)?,
            };
            let response = ctx.put_raw_with_query(
                &format!("{API_PREFIX}/fs/file"),
                body,
                "application/octet-stream",
                &[
                    ("path", Some(args.path.clone())),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_json_response::<FsWriteResponse>(response)
        }
        // DELETE /fs/entry — `recursive=true` is only sent when requested.
        FsCommand::Delete(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let response = ctx.delete_with_query(
                &format!("{API_PREFIX}/fs/entry"),
                &[
                    ("path", Some(args.path.clone())),
                    ("session_id", args.session_id.clone()),
                    (
                        "recursive",
                        if args.recursive {
                            Some("true".to_string())
                        } else {
                            None
                        },
                    ),
                ],
            )?;
            print_json_response::<FsActionResponse>(response)
        }
        // POST /fs/mkdir — empty body; path goes in the query string.
        FsCommand::Mkdir(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let response = ctx.post_empty_with_query(
                &format!("{API_PREFIX}/fs/mkdir"),
                &[
                    ("path", Some(args.path.clone())),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_json_response::<FsActionResponse>(response)
        }
        // POST /fs/move — from/to travel in the JSON body; `overwrite` is
        // omitted (None) rather than serialized as false.
        FsCommand::Move(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let body = FsMoveRequest {
                from: args.from.clone(),
                to: args.to.clone(),
                overwrite: if args.overwrite { Some(true) } else { None },
            };
            let response = ctx.post_with_query(
                &format!("{API_PREFIX}/fs/move"),
                &body,
                &[("session_id", args.session_id.clone())],
            )?;
            print_json_response::<FsMoveResponse>(response)
        }
        // GET /fs/stat — metadata for a single entry.
        FsCommand::Stat(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let response = ctx.get_with_query(
                &format!("{API_PREFIX}/fs/stat"),
                &[
                    ("path", Some(args.path.clone())),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_json_response::<FsStat>(response)
        }
        // POST /fs/upload-batch — the local tar file is streamed as the
        // request body (reqwest reads the File lazily).
        FsCommand::UploadBatch(args) => {
            let ctx = ClientContext::new(cli, &args.client)?;
            let file = File::open(&args.tar_path)?;
            let response = ctx.post_raw_with_query(
                &format!("{API_PREFIX}/fs/upload-batch"),
                file,
                "application/x-tar",
                &[
                    ("path", args.path.clone()),
                    ("session_id", args.session_id.clone()),
                ],
            )?;
            print_json_response::<FsUploadBatchResponse>(response)
        }
    }
}
fn create_opencode_session(
base_url: &str,
token: Option<&str>,
title: Option<&str>,
yolo: bool,
) -> Result<String, CliError> {
let client = HttpClient::builder().build()?;
let url = format!("{base_url}/opencode/session");
let body = if let Some(title) = title {
let mut body = if let Some(title) = title {
json!({ "title": title })
} else {
json!({})
};
if yolo {
body["permissionMode"] = json!("bypass");
}
let mut request = client.post(&url).json(&body);
if let Ok(directory) = std::env::current_dir() {
request = request.header(
@ -844,52 +1170,21 @@ fn create_opencode_session(
Ok(session_id.to_string())
}
fn resolve_opencode_bin(explicit: Option<&PathBuf>) -> Result<PathBuf, CliError> {
if let Some(path) = explicit {
return Ok(path.clone());
}
if let Ok(path) = std::env::var("OPENCODE_BIN") {
return Ok(PathBuf::from(path));
}
if let Some(path) = find_in_path("opencode") {
write_stderr_line(&format!(
"using opencode binary from PATH: {}",
path.display()
))?;
return Ok(path);
}
fn resolve_opencode_bin() -> Result<PathBuf, CliError> {
let manager = AgentManager::new(default_install_dir())
.map_err(|err| CliError::Server(err.to_string()))?;
match manager.resolve_binary(AgentId::Opencode) {
Ok(path) => Ok(path),
Err(_) => {
write_stderr_line("opencode not found; installing...")?;
let result = manager
.install(
AgentId::Opencode,
InstallOptions {
reinstall: false,
version: None,
},
)
.map_err(|err| CliError::Server(err.to_string()))?;
Ok(result.path)
}
match manager.install(
AgentId::Opencode,
InstallOptions {
reinstall: false,
version: None,
},
) {
Ok(result) => Ok(result.path),
Err(err) => Err(CliError::Server(err.to_string())),
}
}
/// Search each directory listed in the `PATH` environment variable for
/// `binary_name`, returning the first candidate that exists on disk.
/// Returns `None` when `PATH` is unset or no candidate exists.
fn find_in_path(binary_name: &str) -> Option<PathBuf> {
    let path_var = std::env::var_os("PATH")?;
    std::env::split_paths(&path_var)
        .map(|dir| dir.join(binary_name))
        .find(|candidate| candidate.exists())
}
fn run_credentials(command: &CredentialsCommand) -> Result<(), CliError> {
match command {
CredentialsCommand::Extract(args) => {
@ -1290,9 +1585,75 @@ impl ClientContext {
Ok(request.send()?)
}
/// PUT a raw `body` with the given `content_type` to `path`, attaching only
/// the query parameters whose value is `Some` and requesting a JSON reply.
///
/// Mirrors `post_raw_with_query`; keep the two in sync.
fn put_raw_with_query<B: Into<reqwest::blocking::Body>>(
    &self,
    path: &str,
    body: B,
    content_type: &str,
    query: &[(&str, Option<String>)],
) -> Result<reqwest::blocking::Response, CliError> {
    let mut request = self
        .request(Method::PUT, path)
        .header(reqwest::header::CONTENT_TYPE, content_type)
        .header(reqwest::header::ACCEPT, "application/json");
    // Skip None values so absent options never appear as empty query params.
    for (key, value) in query {
        if let Some(value) = value {
            request = request.query(&[(key, value)]);
        }
    }
    Ok(request.body(body).send()?)
}
/// POST to `path` with no body or query parameters; returns the raw response.
fn post_empty(&self, path: &str) -> Result<reqwest::blocking::Response, CliError> {
    Ok(self.request(Method::POST, path).send()?)
}
/// POST to `path` with an empty body, attaching only the query parameters
/// whose value is `Some`.
fn post_empty_with_query(
    &self,
    path: &str,
    query: &[(&str, Option<String>)],
) -> Result<reqwest::blocking::Response, CliError> {
    // Fold present query pairs onto the builder; None values are dropped so
    // absent options never appear as empty query parameters.
    let request = query
        .iter()
        .filter_map(|(key, value)| value.as_ref().map(|value| (key, value)))
        .fold(self.request(Method::POST, path), |req, pair| {
            req.query(&[pair])
        });
    Ok(request.send()?)
}
fn delete_with_query(
&self,
path: &str,
query: &[(&str, Option<String>)],
) -> Result<reqwest::blocking::Response, CliError> {
let mut request = self.request(Method::DELETE, path);
for (key, value) in query {
if let Some(value) = value {
request = request.query(&[(key, value)]);
}
}
Ok(request.send()?)
}
fn post_raw_with_query<B: Into<reqwest::blocking::Body>>(
&self,
path: &str,
body: B,
content_type: &str,
query: &[(&str, Option<String>)],
) -> Result<reqwest::blocking::Response, CliError> {
let mut request = self
.request(Method::POST, path)
.header(reqwest::header::CONTENT_TYPE, content_type)
.header(reqwest::header::ACCEPT, "application/json");
for (key, value) in query {
if let Some(value) = value {
request = request.query(&[(key, value)]);
}
}
Ok(request.body(body).send()?)
}
}
fn print_json_response<T: serde::de::DeserializeOwned + Serialize>(
@ -1325,6 +1686,25 @@ fn print_text_response(response: reqwest::blocking::Response) -> Result<(), CliE
Ok(())
}
/// Stream a binary HTTP response body to stdout.
///
/// On a non-success status the body is surfaced for diagnostics instead
/// (printed when it is valid UTF-8, otherwise a stderr note), and the
/// function returns `CliError::HttpStatus`.
fn print_binary_response(response: reqwest::blocking::Response) -> Result<(), CliError> {
    let status = response.status();
    let bytes = response.bytes()?;
    if status.is_success() {
        let mut out = std::io::stdout();
        out.write_all(&bytes)?;
        out.flush()?;
        return Ok(());
    }
    match std::str::from_utf8(&bytes) {
        Ok(text) => print_error_body(text)?,
        Err(_) => write_stderr_line("Request failed with non-text response body")?,
    }
    Err(CliError::HttpStatus(status))
}
fn print_empty_response(response: reqwest::blocking::Response) -> Result<(), CliError> {
let status = response.status();
if status.is_success() {

View file

@ -10,10 +10,11 @@ use crate::cli::{CliConfig, CliError};
// Build identifier generated by the build script into OUT_DIR; re-exported
// below so daemon version checks can compare against the current binary.
mod build_id {
    include!(concat!(env!("OUT_DIR"), "/build_id.rs"));
}
pub use build_id::BUILD_ID;
// Overall wait budget for daemon health — presumably the startup wait used by
// callers of wait_for_health; TODO confirm at call sites.
const DAEMON_HEALTH_TIMEOUT: Duration = Duration::from_secs(30);
// Connect timeout applied to health-check HTTP clients.
const HEALTH_CHECK_CONNECT_TIMEOUT: Duration = Duration::from_secs(2);
// Total per-request timeout applied to health-check HTTP clients.
const HEALTH_CHECK_REQUEST_TIMEOUT: Duration = Duration::from_secs(5);
// ---------------------------------------------------------------------------
// Paths
@ -144,16 +145,40 @@ pub fn is_process_running(pid: u32) -> bool {
// ---------------------------------------------------------------------------
/// Probe the daemon's `/v1/health` endpoint once.
///
/// Returns `Ok(true)` when the endpoint answers with a success status, and
/// `Ok(false)` for any non-success status or transport failure (both are
/// logged). Only HTTP client construction can return an `Err`.
pub fn check_health(base_url: &str, token: Option<&str>) -> Result<bool, CliError> {
    let url = format!("{base_url}/v1/health");
    let started_at = Instant::now();
    // Short timeouts: a health probe should fail fast rather than hang.
    let client = HttpClient::builder()
        .connect_timeout(HEALTH_CHECK_CONNECT_TIMEOUT)
        .timeout(HEALTH_CHECK_REQUEST_TIMEOUT)
        .build()?;
    let mut request = client.get(url);
    if let Some(token) = token {
        request = request.bearer_auth(token);
    }
    match request.send() {
        Ok(response) if response.status().is_success() => {
            tracing::info!(
                elapsed_ms = started_at.elapsed().as_millis(),
                "daemon health check succeeded"
            );
            Ok(true)
        }
        Ok(response) => {
            tracing::warn!(
                status = %response.status(),
                elapsed_ms = started_at.elapsed().as_millis(),
                "daemon health check returned non-success status"
            );
            Ok(false)
        }
        Err(err) => {
            tracing::warn!(
                error = %err,
                elapsed_ms = started_at.elapsed().as_millis(),
                "daemon health check request failed"
            );
            Ok(false)
        }
    }
}
@ -163,10 +188,15 @@ pub fn wait_for_health(
token: Option<&str>,
timeout: Duration,
) -> Result<(), CliError> {
let client = HttpClient::builder().build()?;
let client = HttpClient::builder()
.connect_timeout(HEALTH_CHECK_CONNECT_TIMEOUT)
.timeout(HEALTH_CHECK_REQUEST_TIMEOUT)
.build()?;
let deadline = Instant::now() + timeout;
let mut attempts: u32 = 0;
while Instant::now() < deadline {
attempts += 1;
if let Some(child) = server_child.as_mut() {
if let Some(status) = child.try_wait()? {
return Err(CliError::Server(format!(
@ -181,13 +211,43 @@ pub fn wait_for_health(
request = request.bearer_auth(token);
}
match request.send() {
Ok(response) if response.status().is_success() => return Ok(()),
_ => {
Ok(response) if response.status().is_success() => {
tracing::info!(
attempts,
elapsed_ms =
(timeout - deadline.saturating_duration_since(Instant::now())).as_millis(),
"daemon became healthy while waiting"
);
return Ok(());
}
Ok(response) => {
if attempts % 10 == 0 {
tracing::info!(
attempts,
status = %response.status(),
"daemon still not healthy; waiting"
);
}
std::thread::sleep(Duration::from_millis(200));
}
Err(err) => {
if attempts % 10 == 0 {
tracing::warn!(
attempts,
error = %err,
"daemon health poll request failed; still waiting"
);
}
std::thread::sleep(Duration::from_millis(200));
}
}
}
tracing::error!(
attempts,
timeout_ms = timeout.as_millis(),
"timed out waiting for daemon health"
);
Err(CliError::Server(
"timed out waiting for sandbox-agent health".to_string(),
))
@ -198,7 +258,7 @@ pub fn wait_for_health(
// ---------------------------------------------------------------------------
pub fn spawn_sandbox_agent_daemon(
cli: &CliConfig,
_cli: &CliConfig,
host: &str,
port: u16,
token: Option<&str>,
@ -350,25 +410,26 @@ pub fn start(cli: &CliConfig, host: &str, port: u16, token: Option<&str>) -> Res
Ok(())
}
/// Find the PID of a process listening on the given port using lsof.
///
/// Returns `None` when `lsof` cannot be spawned, exits unsuccessfully, or
/// reports no listener. When several PIDs are listed, the first is used.
#[cfg(unix)]
fn find_process_on_port(port: u16) -> Option<u32> {
    let output = std::process::Command::new("lsof")
        .args(["-i", &format!(":{port}"), "-t", "-sTCP:LISTEN"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    // lsof -t returns just the PID(s), one per line
    stdout.lines().next()?.trim().parse::<u32>().ok()
}
/// Stop a process by PID with SIGTERM then SIGKILL if needed.
#[cfg(unix)]
fn stop_process(pid: u32, host: &str, port: u16, pid_path: &Path) -> Result<(), CliError> {
eprintln!("stopping daemon (PID {pid})...");
// SIGTERM
@ -380,7 +441,7 @@ pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
for _ in 0..50 {
std::thread::sleep(Duration::from_millis(100));
if !is_process_running(pid) {
let _ = remove_pid(&pid_path);
let _ = remove_pid(pid_path);
let _ = remove_version_file(host, port);
eprintln!("daemon stopped");
return Ok(());
@ -393,12 +454,50 @@ pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
libc::kill(pid as i32, libc::SIGKILL);
}
std::thread::sleep(Duration::from_millis(100));
let _ = remove_pid(&pid_path);
let _ = remove_pid(pid_path);
let _ = remove_version_file(host, port);
eprintln!("daemon killed");
Ok(())
}
/// Stop the daemon serving `host:port`.
///
/// Normally the PID comes from the daemon's PID file. When the PID file is
/// absent but the daemon still answers health checks, fall back to locating
/// the listener on the port via lsof. Stale PID files are cleaned up.
#[cfg(unix)]
pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
    let base_url = format!("http://{host}:{port}");
    let pid_path = daemon_pid_path(host, port);

    let Some(pid) = read_pid(&pid_path) else {
        // No PID file — the daemon may still be alive (e.g. the PID file was
        // deleted out from under it), so probe health before giving up.
        if check_health(&base_url, None)? {
            eprintln!(
                "daemon is running but PID file missing; finding process on port {port}..."
            );
            return match find_process_on_port(port) {
                Some(pid) => {
                    eprintln!("found daemon process {pid}");
                    stop_process(pid, host, port, &pid_path)
                }
                None => Err(CliError::Server(format!(
                    "daemon is running on port {port} but cannot find PID"
                ))),
            };
        }
        eprintln!("daemon is not running (no PID file)");
        return Ok(());
    };

    if is_process_running(pid) {
        stop_process(pid, host, port, &pid_path)
    } else {
        // Stale PID file: nothing to kill, just clean up the state files.
        eprintln!("daemon is not running (stale PID file)");
        let _ = remove_pid(&pid_path);
        let _ = remove_version_file(host, port);
        Ok(())
    }
}
#[cfg(windows)]
pub fn stop(host: &str, port: u16) -> Result<(), CliError> {
let pid_path = daemon_pid_path(host, port);
@ -440,13 +539,20 @@ pub fn ensure_running(
) -> Result<(), CliError> {
let base_url = format!("http://{host}:{port}");
let pid_path = daemon_pid_path(host, port);
eprintln!(
"checking daemon health at {base_url} (token: {})...",
if token.is_some() { "set" } else { "unset" }
);
// Check if daemon is already healthy
if check_health(&base_url, token)? {
// Check build version
if !is_version_current(host, port) {
let old = read_daemon_version(host, port).unwrap_or_else(|| "unknown".to_string());
eprintln!("daemon outdated (build {old} -> {BUILD_ID}), restarting...");
eprintln!(
"daemon outdated (build {old} -> {}), restarting...",
BUILD_ID
);
stop(host, port)?;
return start(cli, host, port, token);
}

View file

@ -1,5 +1,7 @@
use sandbox_agent::cli::run_sandbox_agent;
fn main() {
if let Err(err) = sandbox_agent::cli::run_sandbox_agent() {
if let Err(err) = run_sandbox_agent() {
tracing::error!(error = %err, "sandbox-agent failed");
std::process::exit(1);
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -15,7 +15,10 @@ pub fn is_enabled() -> bool {
pub fn router() -> Router {
if !INSPECTOR_ENABLED {
return Router::new();
return Router::new()
.route("/ui", get(handle_not_built))
.route("/ui/", get(handle_not_built))
.route("/ui/*path", get(handle_not_built));
}
Router::new()
.route("/ui", get(handle_index))
@ -23,6 +26,18 @@ pub fn router() -> Router {
.route("/ui/*path", get(handle_path))
}
/// Fallback handler used when the inspector frontend assets were excluded
/// from this build; explains how to rebuild with the UI included.
async fn handle_not_built() -> Response {
    const BODY: &str = concat!(
        "Inspector UI was not included in this build.\n\n",
        "To enable it, build the frontend first:\n\n",
        "cd frontend/packages/inspector && pnpm install && pnpm build\n\n",
        "Then rebuild sandbox-agent without SANDBOX_AGENT_SKIP_INSPECTOR.\n"
    );
    Response::builder()
        .status(StatusCode::NOT_FOUND)
        .header(header::CONTENT_TYPE, "text/plain; charset=utf-8")
        .body(Body::from(BODY))
        .unwrap()
}
// Handler for the UI root routes: delegates to serve_path with the empty
// path, which serves the index asset.
async fn handle_index() -> Response {
    serve_path("")
}

View file

@ -1048,6 +1048,13 @@ async fn run_turn_stream_check(app: &Router, config: &TestAgentConfig) {
create_session(app, config.agent, &session_id, test_permission_mode(config.agent)).await;
let events = read_turn_stream_events(app, &session_id, Duration::from_secs(120)).await;
assert!(
events
.iter()
.any(|event| event.get("type").and_then(Value::as_str) == Some("turn.ended")),
"turn stream did not include turn.ended for {}",
config.agent
);
let events = truncate_after_first_stop(&events);
assert!(
!events.is_empty(),

View file

@ -262,3 +262,150 @@ async fn pi_capabilities_and_models_expose_variants() {
}
}
}
// A session created with a valid local skill source should come up healthy.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn create_session_with_skill_sources() {
    let app = TestApp::new();

    // Stage a skill directory containing one skill with a SKILL.md manifest.
    let sources_dir = tempfile::tempdir().expect("create skill dir");
    let skill_root = sources_dir.path().join("my-test-skill");
    std::fs::create_dir_all(&skill_root).expect("create skill subdir");
    std::fs::write(skill_root.join("SKILL.md"), "# Test Skill\nA test skill.")
        .expect("write SKILL.md");

    // Point the new session at the staged directory via a local skill source.
    let body = json!({
        "agent": "mock",
        "skills": {
            "sources": [
                {
                    "type": "local",
                    "source": sources_dir.path().to_string_lossy()
                }
            ]
        }
    });
    let (status, payload) = send_json(
        &app.app,
        Method::POST,
        "/v1/sessions/skill-test-session",
        Some(body),
    )
    .await;

    assert_eq!(
        status,
        StatusCode::OK,
        "create session with skills: {payload}"
    );
    let healthy = payload
        .get("healthy")
        .and_then(Value::as_bool)
        .unwrap_or(false);
    assert!(healthy, "session should be healthy");
}
// A local source may restrict which skills are loaded via a `skills` filter.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn create_session_with_skill_sources_filter() {
    let app = TestApp::new();

    // Stage two skills side by side; only one is requested by the filter.
    let sources_dir = tempfile::tempdir().expect("create skill dir");
    let wanted_dir = sources_dir.path().join("wanted-skill");
    let unwanted_dir = sources_dir.path().join("unwanted-skill");
    std::fs::create_dir_all(&wanted_dir).expect("create wanted dir");
    std::fs::create_dir_all(&unwanted_dir).expect("create unwanted dir");
    std::fs::write(wanted_dir.join("SKILL.md"), "# Wanted").expect("write wanted SKILL.md");
    std::fs::write(unwanted_dir.join("SKILL.md"), "# Unwanted").expect("write unwanted SKILL.md");

    let body = json!({
        "agent": "mock",
        "skills": {
            "sources": [
                {
                    "type": "local",
                    "source": sources_dir.path().to_string_lossy(),
                    "skills": ["wanted-skill"]
                }
            ]
        }
    });
    let (status, payload) = send_json(
        &app.app,
        Method::POST,
        "/v1/sessions/skill-filter-session",
        Some(body),
    )
    .await;

    assert_eq!(
        status,
        StatusCode::OK,
        "create session with skill filter: {payload}"
    );
}
// A local skill source pointing at a nonexistent path must be rejected.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn create_session_with_invalid_skill_source() {
    let app = TestApp::new();

    let body = json!({
        "agent": "mock",
        "skills": {
            "sources": [
                {
                    "type": "local",
                    "source": "/nonexistent/path/to/skills"
                }
            ]
        }
    });
    let (status, _payload) = send_json(
        &app.app,
        Method::POST,
        "/v1/sessions/skill-invalid-session",
        Some(body),
    )
    .await;

    // Any non-OK status counts as rejection (4xx or 5xx).
    assert_ne!(
        status,
        StatusCode::OK,
        "session with invalid skill source should fail"
    );
}
// Filtering for a skill name that is not present in the source must fail.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn create_session_with_skill_filter_no_match() {
    let app = TestApp::new();

    // Stage one real skill, then request a name that does not exist.
    let sources_dir = tempfile::tempdir().expect("create skill dir");
    let alpha_dir = sources_dir.path().join("alpha");
    std::fs::create_dir_all(&alpha_dir).expect("create alpha dir");
    std::fs::write(alpha_dir.join("SKILL.md"), "# Alpha").expect("write SKILL.md");

    let body = json!({
        "agent": "mock",
        "skills": {
            "sources": [
                {
                    "type": "local",
                    "source": sources_dir.path().to_string_lossy(),
                    "skills": ["nonexistent"]
                }
            ]
        }
    });
    let (status, _payload) = send_json(
        &app.app,
        Method::POST,
        "/v1/sessions/skill-nomatch-session",
        Some(body),
    )
    .await;

    assert_ne!(
        status,
        StatusCode::OK,
        "session with no matching skills should fail"
    );
}

View file

@ -0,0 +1,270 @@
// Filesystem HTTP endpoints.
include!("../common/http.rs");
use std::fs as stdfs;
use tar::{Builder, Header};
// End-to-end exercise of the core fs endpoints: write a file, read it back,
// list the containing directory, move the file, then delete it.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fs_read_write_move_delete() {
    let app = TestApp::new();
    // Work inside a tempdir created under the cwd so paths stay in the tree.
    let cwd = std::env::current_dir().expect("cwd");
    let temp = tempfile::tempdir_in(&cwd).expect("tempdir");
    let dir_path = temp.path();
    let file_path = dir_path.join("hello.txt");
    let file_path_str = file_path.to_string_lossy().to_string();
    // PUT /v1/fs/file writes raw bytes to the given path.
    let request = Request::builder()
        .method(Method::PUT)
        .uri(format!("/v1/fs/file?path={file_path_str}"))
        .header(header::CONTENT_TYPE, "application/octet-stream")
        .body(Body::from("hello"))
        .expect("write request");
    let (status, _headers, _payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "write file");
    // GET /v1/fs/file returns the same bytes with an octet-stream type.
    let request = Request::builder()
        .method(Method::GET)
        .uri(format!("/v1/fs/file?path={file_path_str}"))
        .body(Body::empty())
        .expect("read request");
    let (status, headers, bytes) = send_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "read file");
    assert_eq!(
        headers
            .get(header::CONTENT_TYPE)
            .and_then(|value| value.to_str().ok()),
        Some("application/octet-stream")
    );
    assert_eq!(bytes.as_ref(), b"hello");
    // GET /v1/fs/entries lists the directory; the new file must appear.
    let entries_path = dir_path.to_string_lossy().to_string();
    let (status, entries) = send_json(
        &app.app,
        Method::GET,
        &format!("/v1/fs/entries?path={entries_path}"),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "list entries");
    let entry_list = entries.as_array().cloned().unwrap_or_default();
    let entry_names: Vec<String> = entry_list
        .iter()
        .filter_map(|entry| entry.get("name").and_then(|value| value.as_str()))
        .map(|value| value.to_string())
        .collect();
    assert!(entry_names.contains(&"hello.txt".to_string()));
    // POST /v1/fs/move renames the file (overwrite permitted).
    let new_path = dir_path.join("moved.txt");
    let new_path_str = new_path.to_string_lossy().to_string();
    let (status, _payload) = send_json(
        &app.app,
        Method::POST,
        "/v1/fs/move",
        Some(json!({
            "from": file_path_str,
            "to": new_path_str,
            "overwrite": true
        })),
    )
    .await;
    assert_eq!(status, StatusCode::OK, "move file");
    assert!(new_path.exists(), "moved file exists");
    // DELETE /v1/fs/entry removes the moved file from disk.
    let (status, _payload) = send_json(
        &app.app,
        Method::DELETE,
        &format!("/v1/fs/entry?path={}", new_path.to_string_lossy()),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "delete file");
    assert!(!new_path.exists(), "file deleted");
}
// Uploads an in-memory tar archive (one top-level entry, one nested) and
// verifies both entries are extracted under the destination directory.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fs_upload_batch_tar() {
    let app = TestApp::new();
    let cwd = std::env::current_dir().expect("cwd");
    let dest_dir = tempfile::tempdir_in(&cwd).expect("tempdir");
    // Build the archive in memory: "a.txt" then "nested/b.txt".
    let mut builder = Builder::new(Vec::new());
    let mut tar_header = Header::new_gnu();
    let contents = b"hello";
    tar_header.set_size(contents.len() as u64);
    tar_header.set_cksum();
    builder
        .append_data(&mut tar_header, "a.txt", &contents[..])
        .expect("append tar entry");
    let mut tar_header = Header::new_gnu();
    let contents = b"world";
    tar_header.set_size(contents.len() as u64);
    tar_header.set_cksum();
    builder
        .append_data(&mut tar_header, "nested/b.txt", &contents[..])
        .expect("append tar entry");
    let tar_bytes = builder.into_inner().expect("tar bytes");
    // POST the archive to /v1/fs/upload-batch targeting the tempdir.
    let request = Request::builder()
        .method(Method::POST)
        .uri(format!(
            "/v1/fs/upload-batch?path={}",
            dest_dir.path().to_string_lossy()
        ))
        .header(header::CONTENT_TYPE, "application/x-tar")
        .body(Body::from(tar_bytes))
        .expect("tar request");
    let (status, _headers, payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "upload batch");
    // Response lists the extracted paths plus a truncation flag.
    assert!(payload
        .get("paths")
        .and_then(|value| value.as_array())
        .map(|value| !value.is_empty())
        .unwrap_or(false));
    assert!(payload.get("truncated").and_then(|value| value.as_bool()) == Some(false));
    // Both entries, including the nested one, must exist on disk.
    let a_path = dest_dir.path().join("a.txt");
    let b_path = dest_dir.path().join("nested").join("b.txt");
    assert!(a_path.exists(), "a.txt extracted");
    assert!(b_path.exists(), "b.txt extracted");
}
// Relative paths passed with a session_id should resolve against the
// session's working directory (which here is the test cwd).
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fs_relative_paths_use_session_dir() {
    let app = TestApp::new();
    let session_id = "fs-session";

    // Create a session so relative fs paths have a base directory.
    let status = send_status(
        &app.app,
        Method::POST,
        &format!("/v1/sessions/{session_id}"),
        Some(json!({ "agent": "mock" })),
    )
    .await;
    assert_eq!(status, StatusCode::OK, "create session");

    // Build a path relative to the current working directory.
    let cwd = std::env::current_dir().expect("cwd");
    let temp = tempfile::tempdir_in(&cwd).expect("tempdir");
    let relative_path = temp
        .path()
        .strip_prefix(&cwd)
        .expect("strip prefix")
        .join("session.txt");

    // Write through the fs API using the relative path plus the session id.
    let uri = format!(
        "/v1/fs/file?session_id={session_id}&path={}",
        relative_path.to_string_lossy()
    );
    let request = Request::builder()
        .method(Method::PUT)
        .uri(uri)
        .header(header::CONTENT_TYPE, "application/octet-stream")
        .body(Body::from("session"))
        .expect("write request");
    let (status, _headers, _payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "write relative file");

    // The file must land at the cwd-resolved absolute location.
    let content = stdfs::read_to_string(cwd.join(relative_path)).expect("read file");
    assert_eq!(content, "session");
}
// When an archive contains more entries than the response cap, the reported
// path list is truncated to 1024 entries and flagged as such.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fs_upload_batch_truncates_paths() {
    let app = TestApp::new();
    let cwd = std::env::current_dir().expect("cwd");
    let dest_dir = tempfile::tempdir_in(&cwd).expect("tempdir");

    // Build a tar with 1030 empty entries — more than the 1024-path cap.
    let mut builder = Builder::new(Vec::new());
    for index in 0..1030 {
        let mut tar_header = Header::new_gnu();
        tar_header.set_size(0);
        tar_header.set_cksum();
        builder
            .append_data(&mut tar_header, format!("file_{index}.txt"), &[][..])
            .expect("append tar entry");
    }
    let tar_bytes = builder.into_inner().expect("tar bytes");

    let uri = format!(
        "/v1/fs/upload-batch?path={}",
        dest_dir.path().to_string_lossy()
    );
    let request = Request::builder()
        .method(Method::POST)
        .uri(uri)
        .header(header::CONTENT_TYPE, "application/x-tar")
        .body(Body::from(tar_bytes))
        .expect("tar request");
    let (status, _headers, payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "upload batch");

    // Only the first 1024 paths are reported, with truncated = true.
    let reported = payload
        .get("paths")
        .and_then(|value| value.as_array())
        .map(|value| value.len())
        .unwrap_or(0);
    assert_eq!(reported, 1024);
    assert_eq!(
        payload.get("truncated").and_then(|value| value.as_bool()),
        Some(true)
    );
}
// Exercises directory lifecycle endpoints: mkdir, stat on a directory and on
// a file, then recursive delete.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fs_mkdir_stat_and_delete_directory() {
    let app = TestApp::new();
    let cwd = std::env::current_dir().expect("cwd");
    let temp = tempfile::tempdir_in(&cwd).expect("tempdir");
    let dir_path = temp.path().join("nested");
    let dir_path_str = dir_path.to_string_lossy().to_string();
    // POST /v1/fs/mkdir creates the directory on disk.
    let status = send_status(
        &app.app,
        Method::POST,
        &format!("/v1/fs/mkdir?path={dir_path_str}"),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "mkdir");
    assert!(dir_path.exists(), "directory created");
    // GET /v1/fs/stat reports entryType "directory" for it...
    let (status, stat) = send_json(
        &app.app,
        Method::GET,
        &format!("/v1/fs/stat?path={dir_path_str}"),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "stat directory");
    assert_eq!(stat["entryType"], "directory");
    // ...and "file" for a regular file written directly to disk.
    let file_path = dir_path.join("note.txt");
    stdfs::write(&file_path, "content").expect("write file");
    let file_path_str = file_path.to_string_lossy().to_string();
    let (status, stat) = send_json(
        &app.app,
        Method::GET,
        &format!("/v1/fs/stat?path={file_path_str}"),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "stat file");
    assert_eq!(stat["entryType"], "file");
    // DELETE with recursive=true removes the directory and its contents.
    let status = send_status(
        &app.app,
        Method::DELETE,
        &format!("/v1/fs/entry?path={dir_path_str}&recursive=true"),
        None,
    )
    .await;
    assert_eq!(status, StatusCode::OK, "delete directory");
    assert!(!dir_path.exists(), "directory deleted");
}

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 145
expression: snapshot_status(status)
---
status: 204

View file

@ -0,0 +1,5 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
expression: snapshot_status(status)
---
status: 204

View file

@ -0,0 +1,13 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 185
expression: "normalize_agent_models(&models, config.agent)"
---
defaultInList: true
defaultModel: amp-default
hasDefault: true
hasVariants: false
ids:
- amp-default
modelCount: 1
nonEmpty: true

View file

@ -0,0 +1,9 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 185
expression: "normalize_agent_models(&models, config.agent)"
---
defaultInList: true
hasDefault: true
hasVariants: "<redacted>"
nonEmpty: true

View file

@ -0,0 +1,9 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 185
expression: "normalize_agent_models(&models, config.agent)"
---
defaultInList: true
hasDefault: true
hasVariants: false
nonEmpty: true

View file

@ -0,0 +1,8 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
expression: "normalize_agent_models(&models, config.agent)"
---
defaultInList: true
hasDefault: true
hasVariants: "<redacted>"
nonEmpty: true

View file

@ -0,0 +1,9 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 162
expression: normalize_agent_modes(&modes)
---
modes:
- description: true
id: build
name: Build

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
assertion_line: 162
expression: normalize_agent_modes(&modes)
---
modes:
- description: true
id: build
name: Build
- description: true
id: plan
name: Plan

View file

@ -0,0 +1,14 @@
---
source: server/packages/sandbox-agent/tests/http/agent_endpoints.rs
expression: normalize_agent_modes(&modes)
---
modes:
- description: true
id: build
name: Build
- description: true
id: custom
name: Custom
- description: true
id: plan
name: Plan

View file

@ -1,2 +1,4 @@
// HTTP integration test modules located under tests/http/.
#[path = "http/agent_endpoints.rs"]
mod agent_endpoints;
#[path = "http/fs_endpoints.rs"]
mod fs_endpoints;

View file

@ -17,6 +17,25 @@ describe("OpenCode-compatible Event Streaming", () => {
let handle: SandboxAgentHandle;
let client: OpencodeClient;
/** Build a per-run unique session id: prefix + timestamp + random base-36 suffix. */
function uniqueSessionId(prefix: string): string {
  const stamp = Date.now();
  const suffix = Math.random().toString(36).slice(2, 8);
  return `${prefix}-${stamp}-${suffix}`;
}
/**
 * Initialize an OpenCode session over plain HTTP (bypassing the SDK client),
 * asserting the init endpoint responds OK.
 */
async function initSessionViaHttp(
  sessionId: string,
  body: Record<string, unknown>
): Promise<void> {
  const url = `${handle.baseUrl}/opencode/session/${sessionId}/init`;
  const response = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${handle.token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });
  expect(response.ok).toBe(true);
}
beforeAll(async () => {
await buildSandboxAgent();
});
@ -144,6 +163,182 @@ describe("OpenCode-compatible Event Streaming", () => {
expect(response.data).toBeDefined();
});
// Status lifecycle: idle before the first prompt, exactly one "busy" status
// event during the turn, and idle again once session.idle arrives.
it("should be idle before first prompt and return to idle after prompt completion", async () => {
  const sessionId = uniqueSessionId("status-idle");
  await initSessionViaHttp(sessionId, { providerID: "mock", modelID: "mock" });
  // A freshly initialized session must report idle.
  const initial = await client.session.status();
  expect(initial.data?.[sessionId]?.type).toBe("idle");
  const eventStream = await client.event.subscribe();
  const statuses: string[] = [];
  // Collect session.status types for this session until session.idle arrives.
  const collectIdle = new Promise<void>((resolve, reject) => {
    const timeout = setTimeout(
      () => reject(new Error("Timed out waiting for session.idle")),
      15_000
    );
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event?.properties?.sessionID !== sessionId) continue;
          if (event.type === "session.status") {
            const statusType = event?.properties?.status?.type;
            if (typeof statusType === "string") statuses.push(statusType);
          }
          if (event.type === "session.idle") {
            clearTimeout(timeout);
            resolve();
            break;
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "Say hello" }],
    },
  });
  await collectIdle;
  // Exactly one busy transition per turn, then back to idle.
  expect(statuses).toContain("busy");
  expect(statuses.filter((status) => status === "busy")).toHaveLength(1);
  const finalStatus = await client.session.status();
  expect(finalStatus.data?.[sessionId]?.type).toBe("idle");
});
// While a turn is in flight, GET /session/status must report "busy". The
// status endpoint is polled (with short retries) when the busy event arrives,
// to avoid racing the turn's completion.
it("should report busy via /session/status while turn is in flight", async () => {
  const sessionId = uniqueSessionId("status-busy-inflight");
  await initSessionViaHttp(sessionId, { providerID: "mock", modelID: "mock" });
  const eventStream = await client.event.subscribe();
  let busySnapshot: string | undefined;
  const waitForIdle = new Promise<void>((resolve, reject) => {
    const timeout = setTimeout(
      () => reject(new Error("Timed out waiting for busy status snapshot + session.idle")),
      15_000
    );
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event?.properties?.sessionID !== sessionId) continue;
          // First busy event: poll the status endpoint until it reflects busy.
          if (event.type === "session.status" && event?.properties?.status?.type === "busy" && !busySnapshot) {
            for (let attempt = 0; attempt < 5; attempt += 1) {
              const status = await client.session.status();
              busySnapshot = status.data?.[sessionId]?.type;
              if (busySnapshot === "busy") {
                break;
              }
              await new Promise((resolveAttempt) => setTimeout(resolveAttempt, 20));
            }
          }
          if (event.type === "session.idle") {
            clearTimeout(timeout);
            resolve();
            break;
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  // NOTE(review): the "tool" prompt text presumably makes the mock turn long
  // enough to observe busy — confirm in the mock agent implementation.
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "tool" }],
    },
  });
  await waitForIdle;
  expect(busySnapshot).toBe("busy");
});
// A failed turn must emit session.error, and the session must still settle
// back to idle afterwards (errors do not wedge the session).
it("should emit session.error and return idle for failed turns", async () => {
  const sessionId = uniqueSessionId("status-error");
  await initSessionViaHttp(sessionId, { providerID: "mock", modelID: "mock" });
  const eventStream = await client.event.subscribe();
  const errors: any[] = [];
  const idles: any[] = [];
  // Resolve once both a session.error and a session.idle have been observed.
  const collectErrorAndIdle = new Promise<void>((resolve, reject) => {
    const timeout = setTimeout(
      () => reject(new Error("Timed out waiting for session.error + session.idle")),
      15_000
    );
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event?.properties?.sessionID !== sessionId) continue;
          if (event.type === "session.error") {
            errors.push(event);
          }
          if (event.type === "session.idle") {
            idles.push(event);
          }
          if (errors.length > 0 && idles.length > 0) {
            clearTimeout(timeout);
            resolve();
            break;
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  // NOTE(review): the "error" prompt text appears to trigger a failing turn
  // in the mock provider — confirm in the mock agent implementation.
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "error" }],
    },
  });
  await collectErrorAndIdle;
  expect(errors.length).toBeGreaterThan(0);
  const finalStatus = await client.session.status();
  expect(finalStatus.data?.[sessionId]?.type).toBe("idle");
});
// Every connected provider that has a default model should yield an idle
// session immediately after init.
it("should report idle for newly initialized sessions across connected providers", async () => {
  const providersResponse = await fetch(`${handle.baseUrl}/opencode/provider`, {
    headers: { Authorization: `Bearer ${handle.token}` },
  });
  expect(providersResponse.ok).toBe(true);

  const providersData = await providersResponse.json();
  const connected: string[] = providersData.connected ?? [];
  const defaults: Record<string, string> = providersData.default ?? {};

  for (const providerID of connected) {
    const modelID = defaults[providerID];
    if (!modelID) {
      continue;
    }
    // Session ids must be URL-safe, so sanitize the provider id.
    const safeProvider = providerID.replace(/[^a-zA-Z0-9_-]/g, "_");
    const sessionId = uniqueSessionId(`status-${safeProvider}`);
    await initSessionViaHttp(sessionId, { providerID, modelID });
    const status = await client.session.status();
    expect(status.data?.[sessionId]?.type).toBe("idle");
  }
});
});
describe("session.idle count", () => {
@ -238,5 +433,85 @@ describe("OpenCode-compatible Event Streaming", () => {
);
expect(toolParts.length).toBeGreaterThan(0);
});
// Parts returned by the message endpoint must come back in the order they
// first appeared on the event stream. First-seen part ids are recorded for
// one tool-bearing message, then compared against the persisted ordering.
it("should preserve part order based on first stream appearance", async () => {
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  const eventStream = await client.event.subscribe();
  const seenPartIds: string[] = [];
  let targetMessageId: string | null = null;
  const collectIdle = new Promise<void>((resolve, reject) => {
    // After session.idle, linger briefly so trailing part updates are counted.
    let lingerTimer: ReturnType<typeof setTimeout> | null = null;
    const timeout = setTimeout(() => reject(new Error("Timed out waiting for session.idle")), 15_000);
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event?.properties?.sessionID !== sessionId) {
            continue;
          }
          if (event.type === "message.part.updated") {
            const messageId = event.properties?.messageID;
            const partId = event.properties?.part?.id;
            const partType = event.properties?.part?.type;
            // Lock onto the first message that carries a tool part.
            if (!targetMessageId && partType === "tool" && typeof messageId === "string") {
              targetMessageId = messageId;
            }
            // Record each part id once, in order of first appearance.
            if (
              targetMessageId &&
              messageId === targetMessageId &&
              typeof partId === "string" &&
              !seenPartIds.includes(partId)
            ) {
              seenPartIds.push(partId);
            }
          }
          if (event.type === "session.idle") {
            if (!lingerTimer) {
              lingerTimer = setTimeout(() => {
                clearTimeout(timeout);
                resolve();
              }, 500);
            }
          }
        }
      } catch {
        // Stream ended
      }
    })();
  });
  await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "tool" }],
    },
  });
  await collectIdle;
  expect(targetMessageId).toBeTruthy();
  expect(seenPartIds.length).toBeGreaterThan(0);
  // Fetch the persisted message and compare part ordering, restricted to the
  // part ids actually observed on the stream.
  const response = await fetch(
    `${handle.baseUrl}/opencode/session/${sessionId}/message/${targetMessageId}`,
    {
      headers: { Authorization: `Bearer ${handle.token}` },
    }
  );
  expect(response.ok).toBe(true);
  const message = (await response.json()) as any;
  const returnedPartIds = (message?.parts ?? [])
    .map((part: any) => part?.id)
    .filter((id: any) => typeof id === "string");
  const expectedSet = new Set(seenPartIds);
  const returnedFiltered = returnedPartIds.filter((id: string) => expectedSet.has(id));
  expect(returnedFiltered).toEqual(seenPartIds);
});
});
});

View file

@ -53,6 +53,37 @@ describe("OpenCode-compatible Permission API", () => {
throw new Error("Timed out waiting for permission request");
}
// Polls `check` (sync or async) until it returns true, sleeping
// `intervalMs` between attempts; throws once `timeoutMs` has elapsed.
async function waitForCondition(
  check: () => boolean | Promise<boolean>,
  timeoutMs = 10_000,
  intervalMs = 100,
) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (await check()) {
      return;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error("Timed out waiting for condition");
}
// Polls `getValue` (sync or async) until it yields a defined value and
// returns that value; throws once `timeoutMs` has elapsed with only
// `undefined` results.
async function waitForValue<T>(
  getValue: () => T | undefined | Promise<T | undefined>,
  timeoutMs = 10_000,
  intervalMs = 100,
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const candidate = await getValue();
    if (candidate !== undefined) {
      return candidate;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error("Timed out waiting for value");
}
describe("permission.reply (global)", () => {
it("should receive permission.asked and reply via global endpoint", async () => {
await client.session.prompt({
@ -71,6 +102,108 @@ describe("OpenCode-compatible Permission API", () => {
});
expect(response.error).toBeUndefined();
});
// Replying "always" must surface a permission.replied event on the global
// event stream that echoes the request id and the "always" reply.
it("should emit permission.replied with always when reply is always", async () => {
  const eventStream = await client.event.subscribe();
  // Resolve with the first permission.replied event seen; fail after 15s
  // so a broken stream cannot hang the suite.
  const repliedEventPromise = new Promise<any>((resolve, reject) => {
    const timeout = setTimeout(() => reject(new Error("Timed out waiting for permission.replied")), 15_000);
    (async () => {
      try {
        for await (const event of (eventStream as any).stream) {
          if (event.type === "permission.replied") {
            clearTimeout(timeout);
            resolve(event);
            break;
          }
        }
      } catch (err) {
        clearTimeout(timeout);
        reject(err);
      }
    })();
  });
  // Trigger a permission request from the mock model.
  await client.session.prompt({
    sessionID: sessionId,
    model: { providerID: "mock", modelID: "mock" },
    parts: [{ type: "text", text: permissionPrompt }],
  });
  const asked = await waitForPermissionRequest();
  const requestId = asked?.id;
  expect(requestId).toBeDefined();
  const response = await client.permission.reply({
    requestID: requestId,
    reply: "always",
  });
  expect(response.error).toBeUndefined();
  // The replied event must reference the same request and carry "always".
  const replied = await repliedEventPromise;
  expect(replied?.properties?.requestID).toBe(requestId);
  expect(replied?.properties?.reply).toBe("always");
});
// After an "always" reply, a later matching permission should be resolved
// automatically (no second reply call) and removed from the pending list.
it("should auto-reply subsequent matching permissions after always", async () => {
  const eventStream = await client.event.subscribe();
  const repliedEvents: any[] = [];
  // Collect every permission.replied event in the background for the
  // duration of the test.
  (async () => {
    try {
      for await (const event of (eventStream as any).stream) {
        if (event.type === "permission.replied") {
          repliedEvents.push(event);
        }
      }
    } catch {
      // Stream can end during test teardown.
    }
  })();
  await client.session.prompt({
    sessionID: sessionId,
    model: { providerID: "mock", modelID: "mock" },
    parts: [{ type: "text", text: permissionPrompt }],
  });
  const firstAsked = await waitForPermissionRequest();
  const firstRequestId = firstAsked?.id;
  expect(firstRequestId).toBeDefined();
  // Manual "always" reply to the first request.
  const firstReply = await client.permission.reply({
    requestID: firstRequestId,
    reply: "always",
  });
  expect(firstReply.error).toBeUndefined();
  // Wait for the manual reply to be acknowledged before prompting again.
  await waitForCondition(() =>
    repliedEvents.some(
      (event) =>
        event?.properties?.requestID === firstRequestId &&
        event?.properties?.reply === "always",
    ),
  );
  // A second identical prompt should be auto-approved without a reply call.
  await client.session.prompt({
    sessionID: sessionId,
    model: { providerID: "mock", modelID: "mock" },
    parts: [{ type: "text", text: permissionPrompt }],
  });
  const autoReplyEvent = await waitForValue(() =>
    repliedEvents.find(
      (event) =>
        event?.properties?.requestID !== firstRequestId &&
        event?.properties?.reply === "always",
    ),
  );
  const autoRequestId = autoReplyEvent?.properties?.requestID;
  expect(autoRequestId).toBeDefined();
  // The auto-replied permission must no longer appear as pending.
  await waitForCondition(async () => {
    const list = await client.permission.list();
    return !(list.data ?? []).some((item) => item?.id === autoRequestId);
  });
});
});
describe("postSessionIdPermissionsPermissionId (session)", () => {

View file

@ -20,6 +20,90 @@ describe("OpenCode-compatible Session API", () => {
let handle: SandboxAgentHandle;
let client: OpencodeClient;
// POSTs `body` to the OpenCode-compat session-creation endpoint and
// returns the parsed JSON response; asserts the request succeeded.
async function createSessionViaHttp(body: Record<string, unknown>) {
  const url = `${handle.baseUrl}/opencode/session`;
  const response = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${handle.token}`,
    },
    body: JSON.stringify(body),
  });
  expect(response.ok).toBe(true);
  return response.json();
}
async function getBackingSessionPermissionMode(sessionId: string) {
const response = await fetch(`${handle.baseUrl}/v1/sessions`, {
headers: { Authorization: `Bearer ${handle.token}` },
});
expect(response.ok).toBe(true);
const data = await response.json();
const session = (data.sessions ?? []).find((item: any) => item.sessionId === sessionId);
return session?.permissionMode;
}
// Fetches the raw backing-session record for `sessionId` from the
// sandbox-agent /v1/sessions listing, or undefined when absent.
async function getBackingSession(sessionId: string) {
  const response = await fetch(`${handle.baseUrl}/v1/sessions`, {
    headers: { Authorization: `Bearer ${handle.token}` },
  });
  expect(response.ok).toBe(true);
  const payload = await response.json();
  const sessions = payload.sessions ?? [];
  return sessions.find((entry: any) => entry.sessionId === sessionId);
}
// POSTs `body` to the session init endpoint and returns both the raw
// Response and its parsed JSON so callers can assert on either.
async function initSessionViaHttp(
  sessionId: string,
  body: Record<string, unknown> = {}
): Promise<{ response: Response; data: any }> {
  const url = `${handle.baseUrl}/opencode/session/${sessionId}/init`;
  const response = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${handle.token}`,
    },
    body: JSON.stringify(body),
  });
  return { response, data: await response.json() };
}
// Lists all messages for `sessionId` via the OpenCode-compat message
// endpoint; asserts the request succeeded.
async function listMessagesViaHttp(sessionId: string): Promise<any[]> {
  const response = await fetch(
    `${handle.baseUrl}/opencode/session/${sessionId}/message`,
    { headers: { Authorization: `Bearer ${handle.token}` } }
  );
  expect(response.ok).toBe(true);
  return response.json();
}
// Reads the OpenCode-compat provider listing: which providers are
// connected and each provider's default model id. Missing fields are
// normalized to an empty array / object.
async function getProvidersViaHttp(): Promise<{
  connected: string[];
  default: Record<string, string>;
}> {
  const response = await fetch(`${handle.baseUrl}/opencode/provider`, {
    headers: { Authorization: `Bearer ${handle.token}` },
  });
  expect(response.ok).toBe(true);
  const payload = await response.json();
  return {
    connected: payload.connected ?? [],
    default: payload.default ?? {},
  };
}
// Polls the message list for `sessionId` every 100ms until a message with
// role "assistant" appears; throws once `timeoutMs` has elapsed.
async function waitForAssistantMessage(sessionId: string, timeoutMs = 10_000): Promise<any> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const assistant = (await listMessagesViaHttp(sessionId)).find(
      (message) => message?.info?.role === "assistant"
    );
    if (assistant) {
      return assistant;
    }
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
  throw new Error("Timed out waiting for assistant message");
}
beforeAll(async () => {
// Build the binary if needed
await buildSandboxAgent();
@ -63,6 +147,42 @@ describe("OpenCode-compatible Session API", () => {
expect(session1.data?.id).not.toBe(session2.data?.id);
});
// permissionMode "bypass" supplied at creation must be propagated to the
// backing sandbox-agent session once the first prompt runs.
it("should pass permissionMode bypass to backing session", async () => {
  const session = await createSessionViaHttp({ permissionMode: "bypass" });
  const sessionId = session.id as string;
  expect(sessionId).toBeDefined();
  // The backing session is created lazily, so prompt once first.
  const prompt = await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "hello" }],
    },
  });
  expect(prompt.error).toBeUndefined();
  const permissionMode = await getBackingSessionPermissionMode(sessionId);
  expect(permissionMode).toBe("bypass");
});
// The snake_case spelling permission_mode must behave identically to the
// camelCase permissionMode key.
it("should accept permission_mode alias and pass bypass to backing session", async () => {
  const session = await createSessionViaHttp({ permission_mode: "bypass" });
  const sessionId = session.id as string;
  expect(sessionId).toBeDefined();
  const prompt = await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "mock", modelID: "mock" },
      parts: [{ type: "text", text: "hello" }],
    },
  });
  expect(prompt.error).toBeUndefined();
  const permissionMode = await getBackingSessionPermissionMode(sessionId);
  expect(permissionMode).toBe("bypass");
});
});
describe("session.list", () => {
@ -86,6 +206,78 @@ describe("OpenCode-compatible Session API", () => {
});
});
describe("session.init", () => {
// An empty init body must be accepted (returning `true`) and must not
// break the normal prompt -> assistant-message flow afterwards.
it("should accept empty init body and keep message flow working", async () => {
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  expect(sessionId).toBeDefined();
  const initialized = await initSessionViaHttp(sessionId, {});
  expect(initialized.response.ok).toBe(true);
  expect(initialized.data).toBe(true);
  const prompt = await client.session.prompt({
    path: { id: sessionId },
    body: {
      parts: [{ type: "text", text: "hello after init" }],
    } as any,
  });
  expect(prompt.error).toBeUndefined();
  const assistant = await waitForAssistantMessage(sessionId);
  expect(assistant?.info?.role).toBe("assistant");
});
// An explicit provider/model in the init body should be applied to the
// backing session's agent and model selection.
it("should apply explicit init model selection to the backing session", async () => {
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  expect(sessionId).toBeDefined();
  const initialized = await initSessionViaHttp(sessionId, {
    providerID: "codex",
    modelID: "gpt-5",
    messageID: "msg_init",
  });
  expect(initialized.response.ok).toBe(true);
  expect(initialized.data).toBe(true);
  const backingSession = await getBackingSession(sessionId);
  expect(backingSession?.agent).toBe("codex");
  expect(backingSession?.model).toBe("gpt-5");
});
// Regression check: the first prompt right after a codex init must not
// fail (e.g. with session-not-found). Skipped via early return when codex
// is not connected or has no default model in this environment.
it("should accept first prompt after codex init without session-not-found", async () => {
  const providers = await getProvidersViaHttp();
  if (!providers.connected.includes("codex")) {
    return;
  }
  const codexDefaultModel = providers.default?.codex;
  if (!codexDefaultModel) {
    return;
  }
  const session = await client.session.create();
  const sessionId = session.data?.id!;
  expect(sessionId).toBeDefined();
  const initialized = await initSessionViaHttp(sessionId, {
    providerID: "codex",
    modelID: codexDefaultModel,
  });
  expect(initialized.response.ok).toBe(true);
  expect(initialized.data).toBe(true);
  const prompt = await client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: "codex", modelID: codexDefaultModel },
      parts: [{ type: "text", text: "hello after codex init" }],
    },
  });
  expect(prompt.error).toBeUndefined();
});
});
describe("session.get", () => {
it("should retrieve session by ID", async () => {
const created = await client.session.create({ body: { title: "Test" } });
@ -99,6 +291,41 @@ describe("OpenCode-compatible Session API", () => {
expect(response.data?.title).toBe("Test");
});
// session.get must stay responsive while the session's first prompt is
// still in flight (race previously seen after /new-style creation).
// Skipped via early return when no non-mock provider with a default model
// is connected.
it("should keep session.get available during first prompt after /new-style creation", async () => {
  const providers = await getProvidersViaHttp();
  const providerId = providers.connected.find(
    (provider) => provider !== "mock" && typeof providers.default?.[provider] === "string"
  );
  if (!providerId) {
    return;
  }
  const modelId = providers.default?.[providerId];
  if (!modelId) {
    return;
  }
  const created = await client.session.create({ body: { title: "Race Repro" } });
  const sessionId = created.data?.id!;
  expect(sessionId).toBeDefined();
  // Start the prompt but do not await it yet — the point is to query the
  // session while the turn is in flight.
  const promptPromise = client.session.prompt({
    path: { id: sessionId },
    body: {
      model: { providerID: providerId, modelID: modelId },
      parts: [{ type: "text", text: "hello after /new" }],
    },
  });
  await new Promise((resolve) => setTimeout(resolve, 25));
  const getDuringPrompt = await client.session.get({ path: { id: sessionId } });
  expect(getDuringPrompt.error).toBeUndefined();
  expect(getDuringPrompt.data?.id).toBe(sessionId);
  // Best-effort settle; this assertion focuses on availability during the in-flight turn.
  await promptPromise;
});
it("should return error for non-existent session", async () => {
const response = await client.session.get({
path: { id: "non-existent-session-id" },
@ -121,6 +348,34 @@ describe("OpenCode-compatible Session API", () => {
const response = await client.session.get({ path: { id: sessionId } });
expect(response.data?.title).toBe("Updated");
});
// PATCHing any provider/model field after creation must be rejected with
// HTTP 400 and the documented error message, regardless of key spelling.
it("should reject model changes after session creation", async () => {
  const created = await client.session.create({ body: { title: "Original" } });
  const sessionId = created.data?.id!;
  // Cover all three key spellings the endpoint recognizes.
  const payloads = [
    { providerID: "codex", modelID: "gpt-5" },
    { provider_id: "codex", model_id: "gpt-5" },
    { providerId: "codex", modelId: "gpt-5" },
  ];
  for (const payload of payloads) {
    const response = await fetch(`${handle.baseUrl}/opencode/session/${sessionId}`, {
      method: "PATCH",
      headers: {
        Authorization: `Bearer ${handle.token}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(payload),
    });
    const data = await response.json();
    expect(response.status).toBe(400);
    expect(data?.errors?.[0]?.message).toBe(
      "OpenCode compatibility currently does not support changing the model after creating a session. Export with /export and load in to a new session."
    );
  }
});
});
describe("session.delete", () => {

View file

@ -80,3 +80,193 @@ async fn permission_flow_snapshots() {
}
}
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn permission_reply_always_sets_accept_for_session_status() {
let app = TestApp::new();
install_agent(&app.app, AgentId::Mock).await;
let session_id = "perm-always-mock";
create_session(&app.app, AgentId::Mock, session_id, "plan").await;
let status = send_status(
&app.app,
Method::POST,
&format!("/v1/sessions/{session_id}/messages"),
Some(json!({ "message": PERMISSION_PROMPT })),
)
.await;
assert_eq!(status, StatusCode::NO_CONTENT, "send permission prompt");
let events = poll_events_until_match(&app.app, session_id, Duration::from_secs(30), |events| {
find_permission_id(events).is_some() || should_stop(events)
})
.await;
let permission_id = find_permission_id(&events).expect("permission.requested missing");
let status = send_status(
&app.app,
Method::POST,
&format!("/v1/sessions/{session_id}/permissions/{permission_id}/reply"),
Some(json!({ "reply": "always" })),
)
.await;
assert_eq!(status, StatusCode::NO_CONTENT, "reply permission always");
let resolved_events =
poll_events_until_match(&app.app, session_id, Duration::from_secs(30), |events| {
events.iter().any(|event| {
event.get("type").and_then(Value::as_str) == Some("permission.resolved")
&& event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
== Some(permission_id.as_str())
})
})
.await;
let resolved = resolved_events
.iter()
.rev()
.find(|event| {
event.get("type").and_then(Value::as_str) == Some("permission.resolved")
&& event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
== Some(permission_id.as_str())
})
.expect("permission.resolved missing");
let status = resolved
.get("data")
.and_then(|data| data.get("status"))
.and_then(Value::as_str);
assert_eq!(status, Some("accept_for_session"));
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn permission_reply_always_auto_approves_subsequent_permissions() {
let app = TestApp::new();
install_agent(&app.app, AgentId::Mock).await;
let session_id = "perm-always-auto-mock";
create_session(&app.app, AgentId::Mock, session_id, "plan").await;
let first_status = send_status(
&app.app,
Method::POST,
&format!("/v1/sessions/{session_id}/messages"),
Some(json!({ "message": PERMISSION_PROMPT })),
)
.await;
assert_eq!(
first_status,
StatusCode::NO_CONTENT,
"send first permission prompt"
);
let first_events =
poll_events_until_match(&app.app, session_id, Duration::from_secs(30), |events| {
find_permission_id(events).is_some() || should_stop(events)
})
.await;
let first_permission_id =
find_permission_id(&first_events).expect("first permission.requested missing");
let reply_status = send_status(
&app.app,
Method::POST,
&format!("/v1/sessions/{session_id}/permissions/{first_permission_id}/reply"),
Some(json!({ "reply": "always" })),
)
.await;
assert_eq!(
reply_status,
StatusCode::NO_CONTENT,
"reply first permission always"
);
let second_status = send_status(
&app.app,
Method::POST,
&format!("/v1/sessions/{session_id}/messages"),
Some(json!({ "message": PERMISSION_PROMPT })),
)
.await;
assert_eq!(
second_status,
StatusCode::NO_CONTENT,
"send second permission prompt"
);
let events = poll_events_until_match(&app.app, session_id, Duration::from_secs(30), |events| {
let requested_ids = events
.iter()
.filter_map(|event| {
if event.get("type").and_then(Value::as_str) != Some("permission.requested") {
return None;
}
event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
.map(|value| value.to_string())
})
.collect::<Vec<_>>();
if requested_ids.len() < 2 {
return false;
}
let second_permission_id = &requested_ids[1];
events.iter().any(|event| {
event.get("type").and_then(Value::as_str) == Some("permission.resolved")
&& event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
== Some(second_permission_id.as_str())
&& event
.get("data")
.and_then(|data| data.get("status"))
.and_then(Value::as_str)
== Some("accept_for_session")
})
})
.await;
let requested_ids = events
.iter()
.filter_map(|event| {
if event.get("type").and_then(Value::as_str) != Some("permission.requested") {
return None;
}
event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
.map(|value| value.to_string())
})
.collect::<Vec<_>>();
assert!(
requested_ids.len() >= 2,
"expected at least two permission.requested events"
);
let second_permission_id = &requested_ids[1];
let second_resolved = events.iter().any(|event| {
event.get("type").and_then(Value::as_str) == Some("permission.resolved")
&& event
.get("data")
.and_then(|data| data.get("permission_id"))
.and_then(Value::as_str)
== Some(second_permission_id.as_str())
&& event
.get("data")
.and_then(|data| data.get("status"))
.and_then(Value::as_str)
== Some("accept_for_session")
});
assert!(
second_resolved,
"second permission should auto-resolve as accept_for_session"
);
}

View file

@ -82,6 +82,46 @@ async fn http_events_snapshots() {
}
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn accept_edits_noop_for_non_claude() {
    // Creating a non-Claude (mock) session with permissionMode
    // "acceptEdits" should succeed, and the listed session should report
    // the "default" permission mode instead.
    let app = TestApp::new();
    let session_id = "accept-edits-noop";

    let (status, _) = send_json(
        &app.app,
        Method::POST,
        &format!("/v1/sessions/{session_id}"),
        Some(json!({
            "agent": AgentId::Mock.as_str(),
            "permissionMode": "acceptEdits"
        })),
    )
    .await;
    assert_eq!(status, StatusCode::OK, "create session with acceptEdits");

    let (status, body) = send_json(&app.app, Method::GET, "/v1/sessions", None).await;
    assert_eq!(status, StatusCode::OK, "list sessions");

    let listed = body
        .get("sessions")
        .and_then(Value::as_array)
        .expect("sessions list");
    let created = listed
        .iter()
        .find(|entry| entry.get("sessionId").and_then(Value::as_str) == Some(session_id))
        .expect("created session");
    let mode = created
        .get("permissionMode")
        .and_then(Value::as_str)
        .expect("permissionMode");
    assert_eq!(mode, "default");
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn sse_events_snapshots() {
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
@ -125,6 +165,11 @@ async fn turn_stream_route() {
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
for config in &configs {
// OpenCode's embedded bun can hang while installing plugins, which blocks turn streaming.
// OpenCode turn behavior is covered by the dedicated opencode-compat suite.
if config.agent == AgentId::Opencode {
continue;
}
let app = TestApp::new();
let capabilities = fetch_capabilities(&app.app).await;
let caps = capabilities
@ -137,6 +182,34 @@ async fn turn_stream_route() {
}
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn turn_stream_emits_turn_lifecycle_for_mock() {
    // Reading the turn stream for a freshly created mock session should
    // yield exactly one turn.started and one turn.ended event.
    let app = TestApp::new();
    install_agent(&app.app, AgentId::Mock).await;

    let session_id = "turn-lifecycle-mock";
    create_session(
        &app.app,
        AgentId::Mock,
        session_id,
        test_permission_mode(AgentId::Mock),
    )
    .await;

    let events = read_turn_stream_events(&app.app, session_id, Duration::from_secs(30)).await;
    // Count events of a given `type` on the collected stream.
    let count_of = |kind: &str| {
        events
            .iter()
            .filter(|event| event.get("type").and_then(Value::as_str) == Some(kind))
            .count()
    };
    assert_eq!(
        count_of("turn.started"),
        1,
        "expected exactly one turn.started event"
    );
    assert_eq!(
        count_of("turn.ended"),
        1,
        "expected exactly one turn.ended event"
    );
}
async fn run_concurrency_snapshot(app: &Router, config: &TestAgentConfig) {
let _guard = apply_credentials(&config.credentials);
install_agent(app, config.agent).await;

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/multi_turn.rs
assertion_line: 15
expression: value
---
first:
@ -15,19 +16,13 @@ first:
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- item:
content_types:
@ -35,13 +30,13 @@ first:
kind: message
role: assistant
status: in_progress
seq: 5
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
seq: 5
type: item.delta
- item:
content_types:
@ -49,7 +44,7 @@ first:
kind: message
role: assistant
status: completed
seq: 7
seq: 6
type: item.completed
second:
- item:
@ -60,19 +55,13 @@ second:
status: in_progress
seq: 1
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 2
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 3
seq: 2
type: item.completed
- item:
content_types:
@ -80,13 +69,13 @@ second:
kind: message
role: assistant
status: in_progress
seq: 4
seq: 3
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 5
seq: 4
type: item.delta
- item:
content_types:
@ -94,5 +83,5 @@ second:
kind: message
role: assistant
status: completed
seq: 6
seq: 5
type: item.completed

View file

@ -1,108 +0,0 @@
---
source: server/packages/sandbox-agent/tests/sessions/multi_turn.rs
assertion_line: 15
expression: value
---
first:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
second:
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 1
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 2
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 3
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 4
type: item.started
- item:
content_types: []
kind: message
role: assistant
status: completed
seq: 5
type: item.completed

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/permissions.rs
assertion_line: 12
expression: value
---
- metadata: true
@ -14,23 +15,17 @@ expression: value
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- permission:
action: command_execution
id: "<redacted>"
status: requested
seq: 5
seq: 4
type: permission.requested

View file

@ -1,69 +0,0 @@
---
source: server/packages/sandbox-agent/tests/sessions/permissions.rs
assertion_line: 12
expression: value
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/questions.rs
assertion_line: 12
expression: value
---
- metadata: true
@ -14,23 +15,17 @@ expression: value
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- question:
id: "<redacted>"
options: 2
status: requested
seq: 5
seq: 4
type: question.requested

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/questions.rs
assertion_line: 12
expression: value
---
- metadata: true
@ -14,23 +15,17 @@ expression: value
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- question:
id: "<redacted>"
options: 2
status: requested
seq: 5
seq: 4
type: question.requested

View file

@ -1,137 +0,0 @@
---
source: server/packages/sandbox-agent/tests/sessions/questions.rs
assertion_line: 12
expression: value
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 16
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 17
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 18
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 19
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 20
type: item.delta
- item:
content_types:
- text
kind: message
role: assistant
status: completed
seq: 21
type: item.completed

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/session_lifecycle.rs
assertion_line: 12
expression: value
---
session_a:
@ -15,19 +16,13 @@ session_a:
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- item:
content_types:
@ -35,13 +30,13 @@ session_a:
kind: message
role: assistant
status: in_progress
seq: 5
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
seq: 5
type: item.delta
- item:
content_types:
@ -49,7 +44,7 @@ session_a:
kind: message
role: assistant
status: completed
seq: 7
seq: 6
type: item.completed
session_b:
- metadata: true
@ -64,19 +59,13 @@ session_b:
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- item:
content_types:
@ -84,13 +73,13 @@ session_b:
kind: message
role: assistant
status: in_progress
seq: 5
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
seq: 5
type: item.delta
- item:
content_types:
@ -98,5 +87,5 @@ session_b:
kind: message
role: assistant
status: completed
seq: 7
seq: 6
type: item.completed

View file

@ -1,117 +0,0 @@
---
source: server/packages/sandbox-agent/tests/sessions/session_lifecycle.rs
assertion_line: 12
expression: value
---
session_a:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
session_b:
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
type: item.delta

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/session_lifecycle.rs
expression: value
---
hasExpectedFields: true
sessionCount: 1

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/../common/http.rs
assertion_line: 1001
expression: normalized
---
- metadata: true
@ -14,19 +15,13 @@ expression: normalized
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- item:
content_types:
@ -34,13 +29,13 @@ expression: normalized
kind: message
role: assistant
status: in_progress
seq: 5
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
seq: 5
type: item.delta
- item:
content_types:
@ -48,5 +43,5 @@ expression: normalized
kind: message
role: assistant
status: completed
seq: 7
seq: 6
type: item.completed

View file

@ -1,69 +0,0 @@
---
source: server/packages/sandbox-agent/tests/sessions/../common/http.rs
assertion_line: 1001
expression: normalized
---
- metadata: true
seq: 1
session: started
type: session.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
type: item.completed
- item:
content_types:
- text
kind: message
role: assistant
status: in_progress
seq: 5
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta

View file

@ -1,5 +1,6 @@
---
source: server/packages/sandbox-agent/tests/sessions/../common/http.rs
assertion_line: 1039
expression: normalized
---
- metadata: true
@ -14,19 +15,13 @@ expression: normalized
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
- item:
content_types:
- text
kind: message
role: user
status: completed
seq: 4
seq: 3
type: item.completed
- item:
content_types:
@ -34,13 +29,13 @@ expression: normalized
kind: message
role: assistant
status: in_progress
seq: 5
seq: 4
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 6
seq: 5
type: item.delta
- item:
content_types:
@ -48,5 +43,5 @@ expression: normalized
kind: message
role: assistant
status: completed
seq: 7
seq: 6
type: item.completed

View file

@ -7,20 +7,16 @@ expression: normalized
seq: 1
session: started
type: session.started
- seq: 2
type: turn.started
- item:
content_types:
- text
kind: message
role: user
status: in_progress
seq: 2
type: item.started
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 3
type: item.delta
type: item.started
- item:
content_types:
- text
@ -43,33 +39,3 @@ expression: normalized
native_item_id: "<redacted>"
seq: 6
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 7
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
type: item.delta

View file

@ -4,7 +4,7 @@ use serde_json::Value;
use crate::amp as schema;
use crate::{
turn_completed_event, ContentPart, ErrorData, EventConversion, ItemDeltaData, ItemEventData,
turn_ended_event, ContentPart, ErrorData, EventConversion, ItemDeltaData, ItemEventData,
ItemKind, ItemRole, ItemStatus, SessionEndReason, SessionEndedData, TerminatedBy,
UniversalEventData, UniversalEventType, UniversalItem,
};
@ -21,6 +21,72 @@ pub fn event_to_universal(
) -> Result<Vec<EventConversion>, String> {
let mut events = Vec::new();
match event.type_ {
// System init message - contains metadata like cwd, tools, session_id
// We skip this as it's not a user-facing event
schema::StreamJsonMessageType::System => {}
// User message - extract content from the nested message field
schema::StreamJsonMessageType::User => {
if !event.message.is_empty() {
let text = event
.message
.get("content")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let item = UniversalItem {
item_id: next_temp_id("tmp_amp_user"),
native_item_id: event.session_id.clone(),
parent_id: None,
kind: ItemKind::Message,
role: Some(ItemRole::User),
content: vec![ContentPart::Text { text: text.clone() }],
status: ItemStatus::Completed,
};
events.extend(message_events(item, text));
}
}
// Assistant message - extract content from the nested message field
schema::StreamJsonMessageType::Assistant => {
if !event.message.is_empty() {
let text = event
.message
.get("content")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let item = UniversalItem {
item_id: next_temp_id("tmp_amp_assistant"),
native_item_id: event.session_id.clone(),
parent_id: None,
kind: ItemKind::Message,
role: Some(ItemRole::Assistant),
content: vec![ContentPart::Text { text: text.clone() }],
status: ItemStatus::Completed,
};
events.extend(message_events(item, text));
}
}
// Result message - signals completion
schema::StreamJsonMessageType::Result => {
events.push(turn_ended_event(None, None).synthetic());
events.push(
EventConversion::new(
UniversalEventType::SessionEnded,
UniversalEventData::SessionEnded(SessionEndedData {
reason: if event.is_error.unwrap_or(false) {
SessionEndReason::Error
} else {
SessionEndReason::Completed
},
terminated_by: TerminatedBy::Agent,
message: event.result.clone(),
exit_code: None,
stderr: None,
}),
)
.with_raw(serde_json::to_value(event).ok()),
);
}
schema::StreamJsonMessageType::Message => {
let text = event.content.clone().unwrap_or_default();
let item = UniversalItem {
@ -99,7 +165,7 @@ pub fn event_to_universal(
));
}
schema::StreamJsonMessageType::Done => {
events.push(turn_completed_event());
events.push(turn_ended_event(None, None).synthetic());
events.push(
EventConversion::new(
UniversalEventType::SessionEnded,

View file

@ -3,7 +3,7 @@ use std::sync::atomic::{AtomicU64, Ordering};
use serde_json::Value;
use crate::{
turn_completed_event, ContentPart, EventConversion, ItemDeltaData, ItemEventData, ItemKind,
turn_ended_event, ContentPart, EventConversion, ItemDeltaData, ItemEventData, ItemKind,
ItemRole, ItemStatus, PermissionEventData, PermissionStatus, QuestionEventData, QuestionStatus,
SessionStartedData, UniversalEventData, UniversalEventType, UniversalItem,
};
@ -425,7 +425,7 @@ fn result_event_to_universal(event: &Value, session_id: &str) -> Vec<EventConver
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item: message_item }),
),
turn_completed_event(),
turn_ended_event(None, None).synthetic(),
]
}

View file

@ -4,7 +4,7 @@ use crate::codex as schema;
use crate::{
ContentPart, ErrorData, EventConversion, ItemDeltaData, ItemEventData, ItemKind, ItemRole,
ItemStatus, ReasoningVisibility, SessionEndReason, SessionEndedData, SessionStartedData,
TerminatedBy, UniversalEventData, UniversalEventType, UniversalItem,
TerminatedBy, TurnEventData, TurnPhase, UniversalEventData, UniversalEventType, UniversalItem,
};
/// Convert a Codex ServerNotification to universal events.
@ -36,18 +36,26 @@ pub fn notification_to_universal(
Some(params.thread_id.clone()),
raw,
)]),
schema::ServerNotification::TurnStarted(params) => Ok(vec![status_event(
"turn.started",
serde_json::to_string(&params.turn).ok(),
Some(params.thread_id.clone()),
raw,
)]),
schema::ServerNotification::TurnCompleted(params) => Ok(vec![status_event(
"turn.completed",
serde_json::to_string(&params.turn).ok(),
Some(params.thread_id.clone()),
raw,
)]),
schema::ServerNotification::TurnStarted(params) => Ok(vec![EventConversion::new(
UniversalEventType::TurnStarted,
UniversalEventData::Turn(TurnEventData {
phase: TurnPhase::Started,
turn_id: Some(params.turn.id.clone()),
metadata: serde_json::to_value(&params.turn).ok(),
}),
)
.with_native_session(Some(params.thread_id.clone()))
.with_raw(raw)]),
schema::ServerNotification::TurnCompleted(params) => Ok(vec![EventConversion::new(
UniversalEventType::TurnEnded,
UniversalEventData::Turn(TurnEventData {
phase: TurnPhase::Ended,
turn_id: Some(params.turn.id.clone()),
metadata: serde_json::to_value(&params.turn).ok(),
}),
)
.with_native_session(Some(params.thread_id.clone()))
.with_raw(raw)]),
schema::ServerNotification::TurnDiffUpdated(params) => Ok(vec![status_event(
"turn.diff.updated",
serde_json::to_string(params).ok(),

View file

@ -3,8 +3,9 @@ use serde_json::Value;
use crate::opencode as schema;
use crate::{
ContentPart, EventConversion, ItemDeltaData, ItemEventData, ItemKind, ItemRole, ItemStatus,
PermissionEventData, PermissionStatus, QuestionEventData, QuestionStatus, SessionStartedData,
UniversalEventData, UniversalEventType, UniversalItem,
PermissionEventData, PermissionStatus, QuestionEventData, QuestionStatus, ReasoningVisibility,
SessionStartedData, TurnEventData, TurnPhase, UniversalEventData, UniversalEventType,
UniversalItem,
};
pub fn event_to_universal(event: &schema::Event) -> Result<Vec<EventConversion>, String> {
@ -69,27 +70,37 @@ pub fn event_to_universal(event: &schema::Event) -> Result<Vec<EventConversion>,
);
}
schema::Part::ReasoningPart(reasoning_part) => {
let delta_text = delta
let reasoning_text = delta
.as_ref()
.cloned()
.unwrap_or_else(|| reasoning_part.text.clone());
let stub = stub_message_item(&message_id, ItemRole::Assistant);
let reasoning_id = reasoning_part.id.clone();
let mut started = stub_message_item(&reasoning_id, ItemRole::Assistant);
started.parent_id = Some(message_id.clone());
let completed = UniversalItem {
item_id: String::new(),
native_item_id: Some(reasoning_id),
parent_id: Some(message_id.clone()),
kind: ItemKind::Message,
role: Some(ItemRole::Assistant),
content: vec![ContentPart::Reasoning {
text: reasoning_text,
visibility: ReasoningVisibility::Public,
}],
status: ItemStatus::Completed,
};
events.push(
EventConversion::new(
UniversalEventType::ItemStarted,
UniversalEventData::Item(ItemEventData { item: stub }),
UniversalEventData::Item(ItemEventData { item: started }),
)
.synthetic()
.with_raw(raw.clone()),
);
events.push(
EventConversion::new(
UniversalEventType::ItemDelta,
UniversalEventData::ItemDelta(ItemDeltaData {
item_id: String::new(),
native_item_id: Some(message_id.clone()),
delta: delta_text,
}),
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item: completed }),
)
.with_native_session(session_id.clone())
.with_raw(raw.clone()),
@ -207,26 +218,59 @@ pub fn event_to_universal(event: &schema::Event) -> Result<Vec<EventConversion>,
properties,
type_: _,
} = status;
let status_type = serde_json::to_value(&properties.status)
.ok()
.and_then(|value| {
value
.get("type")
.and_then(Value::as_str)
.map(str::to_string)
});
let detail =
serde_json::to_string(&properties.status).unwrap_or_else(|_| "status".to_string());
let item = status_item("session.status", Some(detail));
let conversion = EventConversion::new(
let mut events = vec![EventConversion::new(
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item }),
)
.with_native_session(Some(properties.session_id.clone()))
.with_raw(raw);
Ok(vec![conversion])
.with_raw(raw.clone())];
if matches!(status_type.as_deref(), Some("busy" | "idle")) {
let (event_type, phase) = if status_type.as_deref() == Some("busy") {
(UniversalEventType::TurnStarted, TurnPhase::Started)
} else {
(UniversalEventType::TurnEnded, TurnPhase::Ended)
};
events.push(
EventConversion::new(
event_type,
UniversalEventData::Turn(TurnEventData {
phase,
turn_id: None,
metadata: Some(
serde_json::to_value(&properties.status).unwrap_or(Value::Null),
),
}),
)
.with_native_session(Some(properties.session_id.clone()))
.with_raw(raw),
);
}
Ok(events)
}
schema::Event::SessionIdle(idle) => {
let schema::EventSessionIdle {
properties,
type_: _,
} = idle;
let item = status_item("session.idle", None);
let conversion = EventConversion::new(
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData { item }),
UniversalEventType::TurnEnded,
UniversalEventData::Turn(TurnEventData {
phase: TurnPhase::Ended,
turn_id: None,
metadata: None,
}),
)
.with_native_session(Some(properties.session_id.clone()))
.with_raw(raw);
@ -528,3 +572,50 @@ fn permission_from_opencode(request: &schema::PermissionRequest) -> PermissionEv
metadata: serde_json::to_value(request).ok(),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn reasoning_part_updates_stay_typed_not_text_delta() {
let event = schema::Event::MessagePartUpdated(schema::EventMessagePartUpdated {
properties: schema::EventMessagePartUpdatedProperties {
delta: Some("Preparing friendly brief response".to_string()),
part: schema::Part::ReasoningPart(schema::ReasoningPart {
id: "part_reason_1".to_string(),
message_id: "msg_1".to_string(),
metadata: serde_json::Map::new(),
session_id: "ses_1".to_string(),
text: "Preparing".to_string(),
time: schema::ReasoningPartTime {
end: None,
start: 0.0,
},
type_: "reasoning".to_string(),
}),
},
type_: "message.part.updated".to_string(),
});
let converted = event_to_universal(&event).expect("conversion succeeds");
assert_eq!(converted.len(), 2);
assert!(converted
.iter()
.all(|entry| entry.event_type != UniversalEventType::ItemDelta));
let completed = converted
.iter()
.find(|entry| entry.event_type == UniversalEventType::ItemCompleted)
.expect("item.completed exists");
let UniversalEventData::Item(ItemEventData { item }) = &completed.data else {
panic!("expected item payload");
};
assert_eq!(item.native_item_id.as_deref(), Some("part_reason_1"));
assert!(matches!(
item.content.first(),
Some(ContentPart::Reasoning { text, .. })
if text == "Preparing friendly brief response"
));
}
}

View file

@ -40,6 +40,10 @@ pub enum UniversalEventType {
SessionStarted,
#[serde(rename = "session.ended")]
SessionEnded,
#[serde(rename = "turn.started")]
TurnStarted,
#[serde(rename = "turn.ended")]
TurnEnded,
#[serde(rename = "item.started")]
ItemStarted,
#[serde(rename = "item.delta")]
@ -63,6 +67,7 @@ pub enum UniversalEventType {
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
#[serde(untagged)]
pub enum UniversalEventData {
Turn(TurnEventData),
SessionStarted(SessionStartedData),
SessionEnded(SessionEndedData),
Item(ItemEventData),
@ -93,6 +98,22 @@ pub struct SessionEndedData {
pub stderr: Option<StderrOutput>,
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
pub struct TurnEventData {
pub phase: TurnPhase,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub turn_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub metadata: Option<Value>,
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
#[serde(rename_all = "snake_case")]
pub enum TurnPhase {
Started,
Ended,
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
pub struct StderrOutput {
/// First N lines of stderr (if truncated) or full stderr (if not truncated)
@ -161,8 +182,9 @@ pub struct PermissionEventData {
#[serde(rename_all = "snake_case")]
pub enum PermissionStatus {
Requested,
Approved,
Denied,
Accept,
AcceptForSession,
Reject,
}
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)]
@ -317,25 +339,26 @@ impl EventConversion {
}
}
pub fn turn_completed_event() -> EventConversion {
pub fn turn_started_event(turn_id: Option<String>, metadata: Option<Value>) -> EventConversion {
EventConversion::new(
UniversalEventType::ItemCompleted,
UniversalEventData::Item(ItemEventData {
item: UniversalItem {
item_id: String::new(),
native_item_id: None,
parent_id: None,
kind: ItemKind::Status,
role: Some(ItemRole::System),
content: vec![ContentPart::Status {
label: "turn.completed".to_string(),
detail: None,
}],
status: ItemStatus::Completed,
},
UniversalEventType::TurnStarted,
UniversalEventData::Turn(TurnEventData {
phase: TurnPhase::Started,
turn_id,
metadata,
}),
)
}
pub fn turn_ended_event(turn_id: Option<String>, metadata: Option<Value>) -> EventConversion {
EventConversion::new(
UniversalEventType::TurnEnded,
UniversalEventData::Turn(TurnEventData {
phase: TurnPhase::Ended,
turn_id,
metadata,
}),
)
.synthetic()
}
pub fn item_from_text(role: ItemRole, text: String) -> UniversalItem {