chore(release): update version to 0.1.11

This commit is contained in:
Nathan Flurry 2026-02-09 02:20:00 -08:00
parent 4c8d93e077
commit ee61645498
19 changed files with 136 additions and 114 deletions

View file

@@ -3,7 +3,7 @@ resolver = "2"
members = ["server/packages/*", "gigacode"]
[workspace.package]
version = "0.1.10"
version = "0.1.11"
edition = "2021"
authors = [ "Rivet Gaming, LLC <developer@rivet.gg>" ]
license = "Apache-2.0"
@@ -12,12 +12,12 @@ description = "Universal API for automatic coding agents in sandboxes. Supports
[workspace.dependencies]
# Internal crates
sandbox-agent = { version = "0.1.10", path = "server/packages/sandbox-agent" }
sandbox-agent-error = { version = "0.1.10", path = "server/packages/error" }
sandbox-agent-agent-management = { version = "0.1.10", path = "server/packages/agent-management" }
sandbox-agent-agent-credentials = { version = "0.1.10", path = "server/packages/agent-credentials" }
sandbox-agent-universal-agent-schema = { version = "0.1.10", path = "server/packages/universal-agent-schema" }
sandbox-agent-extracted-agent-schemas = { version = "0.1.10", path = "server/packages/extracted-agent-schemas" }
sandbox-agent = { version = "0.1.11", path = "server/packages/sandbox-agent" }
sandbox-agent-error = { version = "0.1.11", path = "server/packages/error" }
sandbox-agent-agent-management = { version = "0.1.11", path = "server/packages/agent-management" }
sandbox-agent-agent-credentials = { version = "0.1.11", path = "server/packages/agent-credentials" }
sandbox-agent-universal-agent-schema = { version = "0.1.11", path = "server/packages/universal-agent-schema" }
sandbox-agent-extracted-agent-schemas = { version = "0.1.11", path = "server/packages/extracted-agent-schemas" }
# Serialization
serde = { version = "1.0", features = ["derive"] }

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-shared",
"version": "0.1.10",
"version": "0.1.11",
"description": "Shared helpers for sandbox-agent CLI and SDK",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli",
"version": "0.1.10",
"version": "0.1.11",
"description": "CLI for sandbox-agent - run AI coding agents in sandboxes",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-darwin-arm64",
"version": "0.1.10",
"version": "0.1.11",
"description": "sandbox-agent CLI binary for macOS ARM64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-darwin-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "sandbox-agent CLI binary for macOS x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-linux-arm64",
"version": "0.1.10",
"version": "0.1.11",
"description": "sandbox-agent CLI binary for Linux arm64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-linux-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "sandbox-agent CLI binary for Linux x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/cli-win32-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "sandbox-agent CLI binary for Windows x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode",
"version": "0.1.10",
"version": "0.1.11",
"description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode-darwin-arm64",
"version": "0.1.10",
"version": "0.1.11",
"description": "gigacode CLI binary for macOS arm64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode-darwin-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "gigacode CLI binary for macOS x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode-linux-arm64",
"version": "0.1.10",
"version": "0.1.11",
"description": "gigacode CLI binary for Linux arm64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode-linux-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "gigacode CLI binary for Linux x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "@sandbox-agent/gigacode-win32-x64",
"version": "0.1.10",
"version": "0.1.11",
"description": "gigacode CLI binary for Windows x64",
"license": "Apache-2.0",
"repository": {

View file

@@ -1,6 +1,6 @@
{
"name": "sandbox-agent",
"version": "0.1.10",
"version": "0.1.11",
"description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.",
"license": "Apache-2.0",
"repository": {

View file

@@ -19,9 +19,9 @@ use crate::router::{
SkillsConfig,
};
use crate::router::{
AgentListResponse, AgentModelsResponse, AgentModesResponse, CreateSessionResponse, EventsResponse,
FsActionResponse, FsEntry, FsMoveRequest, FsMoveResponse, FsStat, FsUploadBatchResponse,
FsWriteResponse, SessionListResponse,
AgentListResponse, AgentModelsResponse, AgentModesResponse, CreateSessionResponse,
EventsResponse, FsActionResponse, FsEntry, FsMoveRequest, FsMoveResponse, FsStat,
FsUploadBatchResponse, FsWriteResponse, SessionListResponse,
};
use crate::server_logs::ServerLogs;
use crate::telemetry;
@@ -855,10 +855,9 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
let ctx = ClientContext::new(cli, &args.client)?;
let mcp = if let Some(path) = &args.mcp_config {
let text = std::fs::read_to_string(path)?;
let parsed =
serde_json::from_str::<std::collections::BTreeMap<String, McpServerConfig>>(
&text,
)?;
let parsed = serde_json::from_str::<
std::collections::BTreeMap<String, McpServerConfig>,
>(&text)?;
Some(parsed)
} else {
None

View file

@@ -18,6 +18,7 @@ use axum::response::{IntoResponse, Response, Sse};
use axum::routing::{delete, get, post};
use axum::Json;
use axum::Router;
use base64::Engine;
use futures::{stream, StreamExt};
use reqwest::Client;
use sandbox_agent_error::{AgentError, ErrorType, ProblemDetails, SandboxError};
@@ -33,14 +34,13 @@ use sandbox_agent_universal_agent_schema::{
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::{json, Map, Value};
use tar::Archive;
use tokio::sync::futures::OwnedNotified;
use tokio::sync::{broadcast, mpsc, oneshot, Mutex, Notify};
use tokio::time::sleep;
use tokio_stream::wrappers::BroadcastStream;
use tower_http::trace::TraceLayer;
use base64::Engine;
use tar::Archive;
use toml_edit::{value, Array, DocumentMut, Item, Table};
use tower_http::trace::TraceLayer;
use tracing::Span;
use utoipa::{Modify, OpenApi, ToSchema};
@@ -4896,11 +4896,7 @@ pub struct HealthResponse {
#[serde(rename_all = "camelCase")]
pub struct FsPathQuery {
pub path: String,
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
#[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
pub session_id: Option<String>,
}
@@ -4909,22 +4905,14 @@ pub struct FsPathQuery {
pub struct FsEntriesQuery {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub path: Option<String>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
#[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
pub session_id: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct FsSessionQuery {
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
#[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
pub session_id: Option<String>,
}
@@ -4932,11 +4920,7 @@ pub struct FsSessionQuery {
#[serde(rename_all = "camelCase")]
pub struct FsDeleteQuery {
pub path: String,
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
#[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
pub session_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub recursive: Option<bool>,
@@ -4947,11 +4931,7 @@ pub struct FsDeleteQuery {
pub struct FsUploadBatchQuery {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub path: Option<String>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
#[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
pub session_id: Option<String>,
}
@@ -5079,7 +5059,11 @@ pub enum McpServerConfig {
command: McpCommand,
#[serde(default)]
args: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none", alias = "environment")]
#[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "environment"
)]
env: Option<BTreeMap<String, String>>,
#[serde(default, skip_serializing_if = "Option::is_none")]
enabled: Option<bool>,
@@ -5782,10 +5766,11 @@ async fn fs_entries(
} else {
FsEntryType::File
};
let modified = metadata
.modified()
.ok()
.and_then(|time| chrono::DateTime::<chrono::Utc>::from(time).to_rfc3339().into());
let modified = metadata.modified().ok().and_then(|time| {
chrono::DateTime::<chrono::Utc>::from(time)
.to_rfc3339()
.into()
});
entries.push(FsEntry {
name: entry.file_name().to_string_lossy().to_string(),
path: path.to_string_lossy().to_string(),
@@ -5986,10 +5971,11 @@ async fn fs_stat(
} else {
FsEntryType::File
};
let modified = metadata
.modified()
.ok()
.and_then(|time| chrono::DateTime::<chrono::Utc>::from(time).to_rfc3339().into());
let modified = metadata.modified().ok().and_then(|time| {
chrono::DateTime::<chrono::Utc>::from(time)
.to_rfc3339()
.into()
});
Ok(Json(FsStat {
path: target.to_string_lossy().to_string(),
entry_type,
@@ -6058,7 +6044,9 @@ async fn fs_upload_batch(
if let Some(parent) = dest.parent() {
fs::create_dir_all(parent).map_err(|err| map_fs_error(parent, err))?;
}
entry.unpack(&dest).map_err(|err| SandboxError::StreamError {
entry
.unpack(&dest)
.map_err(|err| SandboxError::StreamError {
message: err.to_string(),
})?;
if extracted.len() < 1024 {
@@ -6911,7 +6899,10 @@ mod tests {
let result = resolve_skill_source(&source, tmp.path());
assert!(result.is_err());
let msg = result.unwrap_err().to_string();
assert!(msg.contains("unsupported"), "expected 'unsupported' in: {msg}");
assert!(
msg.contains("unsupported"),
"expected 'unsupported' in: {msg}"
);
}
#[test]
@@ -6940,11 +6931,7 @@
// Verify symlinks were created
for root in SKILL_ROOTS {
let link = work.path().join(root).join("alpha");
assert!(
link.exists(),
"expected skill link at {}",
link.display()
);
assert!(link.exists(), "expected skill link at {}", link.display());
assert!(link.join("SKILL.md").exists());
}
}
@@ -6998,7 +6985,10 @@ mod tests {
assert!(result.is_err());
let msg = result.unwrap_err().to_string();
assert!(msg.contains("no skills found"), "expected 'no skills found' in: {msg}");
assert!(
msg.contains("no skills found"),
"expected 'no skills found' in: {msg}"
);
}
#[test]
@@ -7090,7 +7080,10 @@ mod tests {
// ensure_skill_link should replace the dangling symlink
let result = ensure_skill_link(skill.path(), &dest);
assert!(result.is_ok(), "should replace dangling symlink: {result:?}");
assert!(
result.is_ok(),
"should replace dangling symlink: {result:?}"
);
assert!(dest.exists(), "link should now point to valid target");
assert!(dest.join("SKILL.md").exists());
}
@@ -7178,15 +7171,27 @@ mod tests {
}
// Verify files were extracted without the prefix directory
assert!(dest.join("SKILL.md").exists(), "SKILL.md should exist at root");
assert_eq!(fs::read_to_string(dest.join("SKILL.md")).unwrap(), "# Test Skill");
assert!(dest.join("sub/nested.txt").exists(), "nested file should exist");
assert!(
dest.join("SKILL.md").exists(),
"SKILL.md should exist at root"
);
assert_eq!(
fs::read_to_string(dest.join("SKILL.md")).unwrap(),
"# Test Skill"
);
assert!(
dest.join("sub/nested.txt").exists(),
"nested file should exist"
);
assert_eq!(
fs::read_to_string(dest.join("sub/nested.txt")).unwrap(),
"nested content"
);
// Ensure no prefix directory leaked through
assert!(!dest.join("owner-repo-abc123").exists(), "prefix dir should be stripped");
assert!(
!dest.join("owner-repo-abc123").exists(),
"prefix dir should be stripped"
);
}
}
@@ -7521,10 +7526,7 @@ fn ensure_skill_link(target: &StdPath, dest: &StdPath) -> Result<(), SandboxErro
}
}
return Err(SandboxError::InvalidRequest {
message: format!(
"skill path conflict: {} already exists",
dest.display()
),
message: format!("skill path conflict: {} already exists", dest.display()),
});
}
// Remove dangling symlinks (exists() follows symlinks and returns false for dangling ones)
@@ -7637,8 +7639,12 @@ fn write_codex_mcp_config(mcp: &BTreeMap<String, McpServerConfig>) -> Result<(),
} else {
DocumentMut::new()
};
let mcp_item = doc.entry("mcp_servers").or_insert(Item::Table(Table::new()));
let mcp_table = mcp_item.as_table_mut().ok_or_else(|| SandboxError::InvalidRequest {
let mcp_item = doc
.entry("mcp_servers")
.or_insert(Item::Table(Table::new()));
let mcp_table = mcp_item
.as_table_mut()
.ok_or_else(|| SandboxError::InvalidRequest {
message: "invalid Codex config.toml: mcp_servers must be a table".to_string(),
})?;
for (name, config) in mcp {
@@ -7660,10 +7666,10 @@ fn apply_amp_mcp_config(
agent_manager: &AgentManager,
mcp: &BTreeMap<String, McpServerConfig>,
) -> Result<(), SandboxError> {
let path = agent_manager
.resolve_binary(AgentId::Amp)
.map_err(|_| SandboxError::AgentNotInstalled {
let path = agent_manager.resolve_binary(AgentId::Amp).map_err(|_| {
SandboxError::AgentNotInstalled {
agent: "amp".to_string(),
}
})?;
let cwd = std::env::current_dir().map_err(|err| SandboxError::StreamError {
message: err.to_string(),
@@ -7783,10 +7789,7 @@ fn opencode_mcp_config(config: &McpServerConfig) -> Result<Value, SandboxError>
fn claude_mcp_entry(config: &McpServerConfig) -> Result<Value, SandboxError> {
match config {
McpServerConfig::Local {
command,
args,
env,
..
command, args, env, ..
} => {
let (cmd_name, cmd_args) = mcp_command_parts(command, args)?;
let mut map = Map::new();
@@ -7923,11 +7926,10 @@ fn mcp_command_parts(
});
}
let mut iter = values.iter();
let cmd = iter
.next()
.map(|value| value.to_string())
.ok_or_else(|| SandboxError::InvalidRequest {
let cmd = iter.next().map(|value| value.to_string()).ok_or_else(|| {
SandboxError::InvalidRequest {
message: "mcp command cannot be empty".to_string(),
}
})?;
let mut cmd_args = iter.map(|value| value.to_string()).collect::<Vec<_>>();
cmd_args.extend(args.iter().cloned());
@@ -8045,12 +8047,10 @@ fn opencode_file_part_input(attachment: &MessageAttachment) -> Value {
map.insert("type".to_string(), json!("file"));
map.insert(
"mime".to_string(),
json!(
attachment
json!(attachment
.mime
.clone()
.unwrap_or_else(|| "application/octet-stream".to_string())
),
.unwrap_or_else(|| "application/octet-stream".to_string())),
);
map.insert("url".to_string(), json!(url));
if let Some(filename) = filename {

View file

@@ -195,7 +195,8 @@ async fn create_session_with_skill_sources() {
let skill_dir = tempfile::tempdir().expect("create skill dir");
let skill_path = skill_dir.path().join("my-test-skill");
std::fs::create_dir_all(&skill_path).expect("create skill subdir");
std::fs::write(skill_path.join("SKILL.md"), "# Test Skill\nA test skill.").expect("write SKILL.md");
std::fs::write(skill_path.join("SKILL.md"), "# Test Skill\nA test skill.")
.expect("write SKILL.md");
// Create session with local skill source
let (status, payload) = send_json(
@@ -215,9 +216,16 @@ async fn create_session_with_skill_sources() {
})),
)
.await;
assert_eq!(status, StatusCode::OK, "create session with skills: {payload}");
assert_eq!(
status,
StatusCode::OK,
"create session with skills: {payload}"
);
assert!(
payload.get("healthy").and_then(Value::as_bool).unwrap_or(false),
payload
.get("healthy")
.and_then(Value::as_bool)
.unwrap_or(false),
"session should be healthy"
);
}
@@ -254,7 +262,11 @@ async fn create_session_with_skill_sources_filter() {
})),
)
.await;
assert_eq!(status, StatusCode::OK, "create session with skill filter: {payload}");
assert_eq!(
status,
StatusCode::OK,
"create session with skill filter: {payload}"
);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -280,7 +292,11 @@ async fn create_session_with_invalid_skill_source() {
)
.await;
// Should fail with a 4xx or 5xx error
assert_ne!(status, StatusCode::OK, "session with invalid skill source should fail");
assert_ne!(
status,
StatusCode::OK,
"session with invalid skill source should fail"
);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -311,5 +327,9 @@ async fn create_session_with_skill_filter_no_match() {
})),
)
.await;
assert_ne!(status, StatusCode::OK, "session with no matching skills should fail");
assert_ne!(
status,
StatusCode::OK,
"session with no matching skills should fail"
);
}

View file

@@ -209,7 +209,10 @@ async fn fs_upload_batch_truncates_paths() {
.cloned()
.unwrap_or_default();
assert_eq!(paths.len(), 1024);
assert_eq!(payload.get("truncated").and_then(|value| value.as_bool()), Some(true));
assert_eq!(
payload.get("truncated").and_then(|value| value.as_bool()),
Some(true)
);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]