chore(release): update version to 0.1.11

This commit is contained in:
Nathan Flurry 2026-02-09 02:20:00 -08:00
parent 4c8d93e077
commit ee61645498
19 changed files with 136 additions and 114 deletions

View file

@@ -3,7 +3,7 @@ resolver = "2"
members = ["server/packages/*", "gigacode"] members = ["server/packages/*", "gigacode"]
[workspace.package] [workspace.package]
version = "0.1.10" version = "0.1.11"
edition = "2021" edition = "2021"
authors = [ "Rivet Gaming, LLC <developer@rivet.gg>" ] authors = [ "Rivet Gaming, LLC <developer@rivet.gg>" ]
license = "Apache-2.0" license = "Apache-2.0"
@@ -12,12 +12,12 @@ description = "Universal API for automatic coding agents in sandboxes. Supports
[workspace.dependencies] [workspace.dependencies]
# Internal crates # Internal crates
sandbox-agent = { version = "0.1.10", path = "server/packages/sandbox-agent" } sandbox-agent = { version = "0.1.11", path = "server/packages/sandbox-agent" }
sandbox-agent-error = { version = "0.1.10", path = "server/packages/error" } sandbox-agent-error = { version = "0.1.11", path = "server/packages/error" }
sandbox-agent-agent-management = { version = "0.1.10", path = "server/packages/agent-management" } sandbox-agent-agent-management = { version = "0.1.11", path = "server/packages/agent-management" }
sandbox-agent-agent-credentials = { version = "0.1.10", path = "server/packages/agent-credentials" } sandbox-agent-agent-credentials = { version = "0.1.11", path = "server/packages/agent-credentials" }
sandbox-agent-universal-agent-schema = { version = "0.1.10", path = "server/packages/universal-agent-schema" } sandbox-agent-universal-agent-schema = { version = "0.1.11", path = "server/packages/universal-agent-schema" }
sandbox-agent-extracted-agent-schemas = { version = "0.1.10", path = "server/packages/extracted-agent-schemas" } sandbox-agent-extracted-agent-schemas = { version = "0.1.11", path = "server/packages/extracted-agent-schemas" }
# Serialization # Serialization
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-shared", "name": "@sandbox-agent/cli-shared",
"version": "0.1.10", "version": "0.1.11",
"description": "Shared helpers for sandbox-agent CLI and SDK", "description": "Shared helpers for sandbox-agent CLI and SDK",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli", "name": "@sandbox-agent/cli",
"version": "0.1.10", "version": "0.1.11",
"description": "CLI for sandbox-agent - run AI coding agents in sandboxes", "description": "CLI for sandbox-agent - run AI coding agents in sandboxes",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-darwin-arm64", "name": "@sandbox-agent/cli-darwin-arm64",
"version": "0.1.10", "version": "0.1.11",
"description": "sandbox-agent CLI binary for macOS ARM64", "description": "sandbox-agent CLI binary for macOS ARM64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-darwin-x64", "name": "@sandbox-agent/cli-darwin-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "sandbox-agent CLI binary for macOS x64", "description": "sandbox-agent CLI binary for macOS x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-linux-arm64", "name": "@sandbox-agent/cli-linux-arm64",
"version": "0.1.10", "version": "0.1.11",
"description": "sandbox-agent CLI binary for Linux arm64", "description": "sandbox-agent CLI binary for Linux arm64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-linux-x64", "name": "@sandbox-agent/cli-linux-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "sandbox-agent CLI binary for Linux x64", "description": "sandbox-agent CLI binary for Linux x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/cli-win32-x64", "name": "@sandbox-agent/cli-win32-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "sandbox-agent CLI binary for Windows x64", "description": "sandbox-agent CLI binary for Windows x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode", "name": "@sandbox-agent/gigacode",
"version": "0.1.10", "version": "0.1.11",
"description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)", "description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode-darwin-arm64", "name": "@sandbox-agent/gigacode-darwin-arm64",
"version": "0.1.10", "version": "0.1.11",
"description": "gigacode CLI binary for macOS arm64", "description": "gigacode CLI binary for macOS arm64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode-darwin-x64", "name": "@sandbox-agent/gigacode-darwin-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "gigacode CLI binary for macOS x64", "description": "gigacode CLI binary for macOS x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode-linux-arm64", "name": "@sandbox-agent/gigacode-linux-arm64",
"version": "0.1.10", "version": "0.1.11",
"description": "gigacode CLI binary for Linux arm64", "description": "gigacode CLI binary for Linux arm64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode-linux-x64", "name": "@sandbox-agent/gigacode-linux-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "gigacode CLI binary for Linux x64", "description": "gigacode CLI binary for Linux x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "@sandbox-agent/gigacode-win32-x64", "name": "@sandbox-agent/gigacode-win32-x64",
"version": "0.1.10", "version": "0.1.11",
"description": "gigacode CLI binary for Windows x64", "description": "gigacode CLI binary for Windows x64",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -1,6 +1,6 @@
{ {
"name": "sandbox-agent", "name": "sandbox-agent",
"version": "0.1.10", "version": "0.1.11",
"description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.",
"license": "Apache-2.0", "license": "Apache-2.0",
"repository": { "repository": {

View file

@@ -19,9 +19,9 @@ use crate::router::{
SkillsConfig, SkillsConfig,
}; };
use crate::router::{ use crate::router::{
AgentListResponse, AgentModelsResponse, AgentModesResponse, CreateSessionResponse, EventsResponse, AgentListResponse, AgentModelsResponse, AgentModesResponse, CreateSessionResponse,
FsActionResponse, FsEntry, FsMoveRequest, FsMoveResponse, FsStat, FsUploadBatchResponse, EventsResponse, FsActionResponse, FsEntry, FsMoveRequest, FsMoveResponse, FsStat,
FsWriteResponse, SessionListResponse, FsUploadBatchResponse, FsWriteResponse, SessionListResponse,
}; };
use crate::server_logs::ServerLogs; use crate::server_logs::ServerLogs;
use crate::telemetry; use crate::telemetry;
@@ -855,10 +855,9 @@ fn run_sessions(command: &SessionsCommand, cli: &CliConfig) -> Result<(), CliErr
let ctx = ClientContext::new(cli, &args.client)?; let ctx = ClientContext::new(cli, &args.client)?;
let mcp = if let Some(path) = &args.mcp_config { let mcp = if let Some(path) = &args.mcp_config {
let text = std::fs::read_to_string(path)?; let text = std::fs::read_to_string(path)?;
let parsed = let parsed = serde_json::from_str::<
serde_json::from_str::<std::collections::BTreeMap<String, McpServerConfig>>( std::collections::BTreeMap<String, McpServerConfig>,
&text, >(&text)?;
)?;
Some(parsed) Some(parsed)
} else { } else {
None None

View file

@@ -18,6 +18,7 @@ use axum::response::{IntoResponse, Response, Sse};
use axum::routing::{delete, get, post}; use axum::routing::{delete, get, post};
use axum::Json; use axum::Json;
use axum::Router; use axum::Router;
use base64::Engine;
use futures::{stream, StreamExt}; use futures::{stream, StreamExt};
use reqwest::Client; use reqwest::Client;
use sandbox_agent_error::{AgentError, ErrorType, ProblemDetails, SandboxError}; use sandbox_agent_error::{AgentError, ErrorType, ProblemDetails, SandboxError};
@@ -33,14 +34,13 @@ use sandbox_agent_universal_agent_schema::{
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{json, Map, Value}; use serde_json::{json, Map, Value};
use tar::Archive;
use tokio::sync::futures::OwnedNotified; use tokio::sync::futures::OwnedNotified;
use tokio::sync::{broadcast, mpsc, oneshot, Mutex, Notify}; use tokio::sync::{broadcast, mpsc, oneshot, Mutex, Notify};
use tokio::time::sleep; use tokio::time::sleep;
use tokio_stream::wrappers::BroadcastStream; use tokio_stream::wrappers::BroadcastStream;
use tower_http::trace::TraceLayer;
use base64::Engine;
use tar::Archive;
use toml_edit::{value, Array, DocumentMut, Item, Table}; use toml_edit::{value, Array, DocumentMut, Item, Table};
use tower_http::trace::TraceLayer;
use tracing::Span; use tracing::Span;
use utoipa::{Modify, OpenApi, ToSchema}; use utoipa::{Modify, OpenApi, ToSchema};
@@ -4896,11 +4896,7 @@ pub struct HealthResponse {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct FsPathQuery { pub struct FsPathQuery {
pub path: String, pub path: String,
#[serde( #[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
pub session_id: Option<String>, pub session_id: Option<String>,
} }
@@ -4909,22 +4905,14 @@ pub struct FsPathQuery {
pub struct FsEntriesQuery { pub struct FsEntriesQuery {
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub path: Option<String>, pub path: Option<String>,
#[serde( #[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
pub session_id: Option<String>, pub session_id: Option<String>,
} }
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)] #[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct FsSessionQuery { pub struct FsSessionQuery {
#[serde( #[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
pub session_id: Option<String>, pub session_id: Option<String>,
} }
@@ -4932,11 +4920,7 @@ pub struct FsSessionQuery {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct FsDeleteQuery { pub struct FsDeleteQuery {
pub path: String, pub path: String,
#[serde( #[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
pub session_id: Option<String>, pub session_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub recursive: Option<bool>, pub recursive: Option<bool>,
@@ -4947,11 +4931,7 @@ pub struct FsDeleteQuery {
pub struct FsUploadBatchQuery { pub struct FsUploadBatchQuery {
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub path: Option<String>, pub path: Option<String>,
#[serde( #[serde(default, skip_serializing_if = "Option::is_none", alias = "session_id")]
default,
skip_serializing_if = "Option::is_none",
alias = "session_id"
)]
pub session_id: Option<String>, pub session_id: Option<String>,
} }
@@ -5079,7 +5059,11 @@ pub enum McpServerConfig {
command: McpCommand, command: McpCommand,
#[serde(default)] #[serde(default)]
args: Vec<String>, args: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none", alias = "environment")] #[serde(
default,
skip_serializing_if = "Option::is_none",
alias = "environment"
)]
env: Option<BTreeMap<String, String>>, env: Option<BTreeMap<String, String>>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
enabled: Option<bool>, enabled: Option<bool>,
@@ -5782,10 +5766,11 @@ async fn fs_entries(
} else { } else {
FsEntryType::File FsEntryType::File
}; };
let modified = metadata let modified = metadata.modified().ok().and_then(|time| {
.modified() chrono::DateTime::<chrono::Utc>::from(time)
.ok() .to_rfc3339()
.and_then(|time| chrono::DateTime::<chrono::Utc>::from(time).to_rfc3339().into()); .into()
});
entries.push(FsEntry { entries.push(FsEntry {
name: entry.file_name().to_string_lossy().to_string(), name: entry.file_name().to_string_lossy().to_string(),
path: path.to_string_lossy().to_string(), path: path.to_string_lossy().to_string(),
@@ -5986,10 +5971,11 @@ async fn fs_stat(
} else { } else {
FsEntryType::File FsEntryType::File
}; };
let modified = metadata let modified = metadata.modified().ok().and_then(|time| {
.modified() chrono::DateTime::<chrono::Utc>::from(time)
.ok() .to_rfc3339()
.and_then(|time| chrono::DateTime::<chrono::Utc>::from(time).to_rfc3339().into()); .into()
});
Ok(Json(FsStat { Ok(Json(FsStat {
path: target.to_string_lossy().to_string(), path: target.to_string_lossy().to_string(),
entry_type, entry_type,
@@ -6058,7 +6044,9 @@ async fn fs_upload_batch(
if let Some(parent) = dest.parent() { if let Some(parent) = dest.parent() {
fs::create_dir_all(parent).map_err(|err| map_fs_error(parent, err))?; fs::create_dir_all(parent).map_err(|err| map_fs_error(parent, err))?;
} }
entry.unpack(&dest).map_err(|err| SandboxError::StreamError { entry
.unpack(&dest)
.map_err(|err| SandboxError::StreamError {
message: err.to_string(), message: err.to_string(),
})?; })?;
if extracted.len() < 1024 { if extracted.len() < 1024 {
@@ -6911,7 +6899,10 @@ mod tests {
let result = resolve_skill_source(&source, tmp.path()); let result = resolve_skill_source(&source, tmp.path());
assert!(result.is_err()); assert!(result.is_err());
let msg = result.unwrap_err().to_string(); let msg = result.unwrap_err().to_string();
assert!(msg.contains("unsupported"), "expected 'unsupported' in: {msg}"); assert!(
msg.contains("unsupported"),
"expected 'unsupported' in: {msg}"
);
} }
#[test] #[test]
@@ -6940,11 +6931,7 @@ mod tests {
// Verify symlinks were created // Verify symlinks were created
for root in SKILL_ROOTS { for root in SKILL_ROOTS {
let link = work.path().join(root).join("alpha"); let link = work.path().join(root).join("alpha");
assert!( assert!(link.exists(), "expected skill link at {}", link.display());
link.exists(),
"expected skill link at {}",
link.display()
);
assert!(link.join("SKILL.md").exists()); assert!(link.join("SKILL.md").exists());
} }
} }
@@ -6998,7 +6985,10 @@ mod tests {
assert!(result.is_err()); assert!(result.is_err());
let msg = result.unwrap_err().to_string(); let msg = result.unwrap_err().to_string();
assert!(msg.contains("no skills found"), "expected 'no skills found' in: {msg}"); assert!(
msg.contains("no skills found"),
"expected 'no skills found' in: {msg}"
);
} }
#[test] #[test]
@@ -7090,7 +7080,10 @@ mod tests {
// ensure_skill_link should replace the dangling symlink // ensure_skill_link should replace the dangling symlink
let result = ensure_skill_link(skill.path(), &dest); let result = ensure_skill_link(skill.path(), &dest);
assert!(result.is_ok(), "should replace dangling symlink: {result:?}"); assert!(
result.is_ok(),
"should replace dangling symlink: {result:?}"
);
assert!(dest.exists(), "link should now point to valid target"); assert!(dest.exists(), "link should now point to valid target");
assert!(dest.join("SKILL.md").exists()); assert!(dest.join("SKILL.md").exists());
} }
@@ -7178,15 +7171,27 @@ mod tests {
} }
// Verify files were extracted without the prefix directory // Verify files were extracted without the prefix directory
assert!(dest.join("SKILL.md").exists(), "SKILL.md should exist at root"); assert!(
assert_eq!(fs::read_to_string(dest.join("SKILL.md")).unwrap(), "# Test Skill"); dest.join("SKILL.md").exists(),
assert!(dest.join("sub/nested.txt").exists(), "nested file should exist"); "SKILL.md should exist at root"
);
assert_eq!(
fs::read_to_string(dest.join("SKILL.md")).unwrap(),
"# Test Skill"
);
assert!(
dest.join("sub/nested.txt").exists(),
"nested file should exist"
);
assert_eq!( assert_eq!(
fs::read_to_string(dest.join("sub/nested.txt")).unwrap(), fs::read_to_string(dest.join("sub/nested.txt")).unwrap(),
"nested content" "nested content"
); );
// Ensure no prefix directory leaked through // Ensure no prefix directory leaked through
assert!(!dest.join("owner-repo-abc123").exists(), "prefix dir should be stripped"); assert!(
!dest.join("owner-repo-abc123").exists(),
"prefix dir should be stripped"
);
} }
} }
@@ -7521,10 +7526,7 @@ fn ensure_skill_link(target: &StdPath, dest: &StdPath) -> Result<(), SandboxErro
} }
} }
return Err(SandboxError::InvalidRequest { return Err(SandboxError::InvalidRequest {
message: format!( message: format!("skill path conflict: {} already exists", dest.display()),
"skill path conflict: {} already exists",
dest.display()
),
}); });
} }
// Remove dangling symlinks (exists() follows symlinks and returns false for dangling ones) // Remove dangling symlinks (exists() follows symlinks and returns false for dangling ones)
@@ -7637,8 +7639,12 @@ fn write_codex_mcp_config(mcp: &BTreeMap<String, McpServerConfig>) -> Result<(),
} else { } else {
DocumentMut::new() DocumentMut::new()
}; };
let mcp_item = doc.entry("mcp_servers").or_insert(Item::Table(Table::new())); let mcp_item = doc
let mcp_table = mcp_item.as_table_mut().ok_or_else(|| SandboxError::InvalidRequest { .entry("mcp_servers")
.or_insert(Item::Table(Table::new()));
let mcp_table = mcp_item
.as_table_mut()
.ok_or_else(|| SandboxError::InvalidRequest {
message: "invalid Codex config.toml: mcp_servers must be a table".to_string(), message: "invalid Codex config.toml: mcp_servers must be a table".to_string(),
})?; })?;
for (name, config) in mcp { for (name, config) in mcp {
@@ -7660,10 +7666,10 @@ fn apply_amp_mcp_config(
agent_manager: &AgentManager, agent_manager: &AgentManager,
mcp: &BTreeMap<String, McpServerConfig>, mcp: &BTreeMap<String, McpServerConfig>,
) -> Result<(), SandboxError> { ) -> Result<(), SandboxError> {
let path = agent_manager let path = agent_manager.resolve_binary(AgentId::Amp).map_err(|_| {
.resolve_binary(AgentId::Amp) SandboxError::AgentNotInstalled {
.map_err(|_| SandboxError::AgentNotInstalled {
agent: "amp".to_string(), agent: "amp".to_string(),
}
})?; })?;
let cwd = std::env::current_dir().map_err(|err| SandboxError::StreamError { let cwd = std::env::current_dir().map_err(|err| SandboxError::StreamError {
message: err.to_string(), message: err.to_string(),
@@ -7783,10 +7789,7 @@ fn opencode_mcp_config(config: &McpServerConfig) -> Result<Value, SandboxError>
fn claude_mcp_entry(config: &McpServerConfig) -> Result<Value, SandboxError> { fn claude_mcp_entry(config: &McpServerConfig) -> Result<Value, SandboxError> {
match config { match config {
McpServerConfig::Local { McpServerConfig::Local {
command, command, args, env, ..
args,
env,
..
} => { } => {
let (cmd_name, cmd_args) = mcp_command_parts(command, args)?; let (cmd_name, cmd_args) = mcp_command_parts(command, args)?;
let mut map = Map::new(); let mut map = Map::new();
@@ -7923,11 +7926,10 @@ fn mcp_command_parts(
}); });
} }
let mut iter = values.iter(); let mut iter = values.iter();
let cmd = iter let cmd = iter.next().map(|value| value.to_string()).ok_or_else(|| {
.next() SandboxError::InvalidRequest {
.map(|value| value.to_string())
.ok_or_else(|| SandboxError::InvalidRequest {
message: "mcp command cannot be empty".to_string(), message: "mcp command cannot be empty".to_string(),
}
})?; })?;
let mut cmd_args = iter.map(|value| value.to_string()).collect::<Vec<_>>(); let mut cmd_args = iter.map(|value| value.to_string()).collect::<Vec<_>>();
cmd_args.extend(args.iter().cloned()); cmd_args.extend(args.iter().cloned());
@@ -8045,12 +8047,10 @@ fn opencode_file_part_input(attachment: &MessageAttachment) -> Value {
map.insert("type".to_string(), json!("file")); map.insert("type".to_string(), json!("file"));
map.insert( map.insert(
"mime".to_string(), "mime".to_string(),
json!( json!(attachment
attachment
.mime .mime
.clone() .clone()
.unwrap_or_else(|| "application/octet-stream".to_string()) .unwrap_or_else(|| "application/octet-stream".to_string())),
),
); );
map.insert("url".to_string(), json!(url)); map.insert("url".to_string(), json!(url));
if let Some(filename) = filename { if let Some(filename) = filename {

View file

@@ -195,7 +195,8 @@ async fn create_session_with_skill_sources() {
let skill_dir = tempfile::tempdir().expect("create skill dir"); let skill_dir = tempfile::tempdir().expect("create skill dir");
let skill_path = skill_dir.path().join("my-test-skill"); let skill_path = skill_dir.path().join("my-test-skill");
std::fs::create_dir_all(&skill_path).expect("create skill subdir"); std::fs::create_dir_all(&skill_path).expect("create skill subdir");
std::fs::write(skill_path.join("SKILL.md"), "# Test Skill\nA test skill.").expect("write SKILL.md"); std::fs::write(skill_path.join("SKILL.md"), "# Test Skill\nA test skill.")
.expect("write SKILL.md");
// Create session with local skill source // Create session with local skill source
let (status, payload) = send_json( let (status, payload) = send_json(
@@ -215,9 +216,16 @@ async fn create_session_with_skill_sources() {
})), })),
) )
.await; .await;
assert_eq!(status, StatusCode::OK, "create session with skills: {payload}"); assert_eq!(
status,
StatusCode::OK,
"create session with skills: {payload}"
);
assert!( assert!(
payload.get("healthy").and_then(Value::as_bool).unwrap_or(false), payload
.get("healthy")
.and_then(Value::as_bool)
.unwrap_or(false),
"session should be healthy" "session should be healthy"
); );
} }
@@ -254,7 +262,11 @@ async fn create_session_with_skill_sources_filter() {
})), })),
) )
.await; .await;
assert_eq!(status, StatusCode::OK, "create session with skill filter: {payload}"); assert_eq!(
status,
StatusCode::OK,
"create session with skill filter: {payload}"
);
} }
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -280,7 +292,11 @@ async fn create_session_with_invalid_skill_source() {
) )
.await; .await;
// Should fail with a 4xx or 5xx error // Should fail with a 4xx or 5xx error
assert_ne!(status, StatusCode::OK, "session with invalid skill source should fail"); assert_ne!(
status,
StatusCode::OK,
"session with invalid skill source should fail"
);
} }
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -311,5 +327,9 @@ async fn create_session_with_skill_filter_no_match() {
})), })),
) )
.await; .await;
assert_ne!(status, StatusCode::OK, "session with no matching skills should fail"); assert_ne!(
status,
StatusCode::OK,
"session with no matching skills should fail"
);
} }

View file

@@ -209,7 +209,10 @@ async fn fs_upload_batch_truncates_paths() {
.cloned() .cloned()
.unwrap_or_default(); .unwrap_or_default();
assert_eq!(paths.len(), 1024); assert_eq!(paths.len(), 1024);
assert_eq!(payload.get("truncated").and_then(|value| value.as_bool()), Some(true)); assert_eq!(
payload.get("truncated").and_then(|value| value.as_bool()),
Some(true)
);
} }
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] #[tokio::test(flavor = "multi_thread", worker_threads = 2)]