chore: sync workspace changes

This commit is contained in:
Nathan Flurry 2026-01-25 01:57:16 -08:00
parent 30d3aca1ee
commit f92ecd9b9a
38 changed files with 4829 additions and 1219 deletions

View file

@ -0,0 +1,10 @@
# Crate manifest: extracts provider API credentials from local agent CLI config files.
[package]
name = "sandbox-daemon-agent-credentials"
version = "0.1.0"
edition = "2021"

[dependencies]
serde = { version = "1.0", features = ["derive"] }  # derive Serialize/Deserialize on credential types
serde_json = "1.0"                                  # parse agent config/auth JSON files
dirs = "5.0"                                        # home-directory discovery
time = { version = "0.3", features = ["parsing", "formatting"] }  # RFC 3339 expiry parsing

View file

@ -0,0 +1,371 @@
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
/// A single credential discovered for an upstream model provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProviderCredentials {
    /// The secret itself — either an API key or an OAuth access token.
    pub api_key: String,
    /// Which tool's config this was read from (e.g. "claude-code", "codex",
    /// "opencode", "amp", or "environment").
    pub source: String,
    /// Whether `api_key` is a long-lived API key or an OAuth access token.
    pub auth_type: AuthType,
    /// Provider identifier, e.g. "anthropic" or "openai".
    pub provider: String,
}
/// Kind of credential stored in [`ProviderCredentials::api_key`].
/// Serialized as "api_key" / "oauth".
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AuthType {
    /// Long-lived API key.
    ApiKey,
    /// OAuth access token (may expire).
    Oauth,
}
/// Aggregate result of scanning all known credential sources.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ExtractedCredentials {
    /// Best credential found for Anthropic, if any.
    pub anthropic: Option<ProviderCredentials>,
    /// Best credential found for OpenAI, if any.
    pub openai: Option<ProviderCredentials>,
    /// Credentials for any other providers, keyed by provider name
    /// (populated only from opencode's auth file).
    pub other: HashMap<String, ProviderCredentials>,
}
/// Inputs controlling how credential extraction scans the filesystem.
#[derive(Debug, Clone, Default)]
pub struct CredentialExtractionOptions {
    /// Override for the home directory; `None` falls back to `default_home_dir()`.
    pub home_dir: Option<PathBuf>,
    /// Whether OAuth tokens may be returned in addition to plain API keys.
    // NOTE(review): `#[derive(Default)]` yields `include_oauth: false`, while
    // `new()` below sets it to `true` — confirm which default callers expect;
    // the two constructors currently disagree.
    pub include_oauth: bool,
}
impl CredentialExtractionOptions {
    /// Construct options with OAuth extraction enabled and no home override.
    pub fn new() -> Self {
        Self {
            home_dir: None,
            include_oauth: true,
        }
    }
}
/// Extract Anthropic credentials from Claude Code's config files.
///
/// Scans the candidate config files for an API key (any field whose value
/// starts with `sk-ant-`), then — if `options.include_oauth` — falls back to
/// Claude's OAuth credential files, skipping tokens whose `expiresAt`
/// (RFC 3339) is in the past. Returns the first usable credential found.
pub fn extract_claude_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let include_oauth = options.include_oauth;
    let config_paths = [
        home_dir.join(".claude.json.api"),
        home_dir.join(".claude.json"),
        home_dir.join(".claude.json.nathan"),
    ];
    let key_paths = [
        vec!["primaryApiKey"],
        vec!["apiKey"],
        vec!["anthropicApiKey"],
        vec!["customApiKey"],
    ];
    for path in config_paths {
        // BUG FIX: this was `read_json_file(&path)?`, which returned `None`
        // for the whole function as soon as one candidate file was missing —
        // skipping the remaining config files AND the OAuth fallback below.
        // A missing/unparseable file should simply be skipped.
        let Some(data) = read_json_file(&path) else {
            continue;
        };
        for key_path in &key_paths {
            if let Some(key) = read_string_field(&data, key_path) {
                // Only accept values that look like Anthropic API keys.
                if key.starts_with("sk-ant-") {
                    return Some(ProviderCredentials {
                        api_key: key,
                        source: "claude-code".to_string(),
                        auth_type: AuthType::ApiKey,
                        provider: "anthropic".to_string(),
                    });
                }
            }
        }
    }
    if include_oauth {
        let oauth_paths = [
            home_dir.join(".claude").join(".credentials.json"),
            home_dir.join(".claude-oauth-credentials.json"),
        ];
        for path in oauth_paths {
            let Some(data) = read_json_file(&path) else {
                continue;
            };
            if let Some(token) = read_string_field(&data, &["claudeAiOauth", "accessToken"]) {
                // Skip tokens with a parseable, already-passed expiry; tokens
                // with no expiry field (or an unparseable one) are kept.
                if let Some(expires_at) =
                    read_string_field(&data, &["claudeAiOauth", "expiresAt"])
                {
                    if is_expired_rfc3339(&expires_at) {
                        continue;
                    }
                }
                return Some(ProviderCredentials {
                    api_key: token,
                    source: "claude-code".to_string(),
                    auth_type: AuthType::Oauth,
                    provider: "anthropic".to_string(),
                });
            }
        }
    }
    None
}
/// Extract OpenAI credentials from Codex's `~/.codex/auth.json`.
///
/// Prefers a non-empty `OPENAI_API_KEY` entry; when OAuth is allowed, falls
/// back to `tokens.access_token`.
pub fn extract_codex_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let auth_file = home_dir.join(".codex").join("auth.json");
    let data = read_json_file(&auth_file)?;

    // A plain API key wins over an OAuth token.
    let api_key = data
        .get("OPENAI_API_KEY")
        .and_then(Value::as_str)
        .filter(|key| !key.is_empty());
    if let Some(key) = api_key {
        return Some(ProviderCredentials {
            api_key: key.to_string(),
            source: "codex".to_string(),
            auth_type: AuthType::ApiKey,
            provider: "openai".to_string(),
        });
    }

    if options.include_oauth {
        if let Some(token) = read_string_field(&data, &["tokens", "access_token"]) {
            return Some(ProviderCredentials {
                api_key: token,
                source: "codex".to_string(),
                auth_type: AuthType::Oauth,
                provider: "openai".to_string(),
            });
        }
    }
    None
}
/// Extract credentials for every provider listed in opencode's auth file
/// (`~/.local/share/opencode/auth.json`).
///
/// Entries of `type == "api"` yield API keys; entries of `type == "oauth"`
/// yield access tokens when OAuth is allowed and the `expires` epoch-millis
/// timestamp (if present) has not passed. Anthropic/OpenAI entries land in
/// their dedicated slots; everything else goes into `other`.
pub fn extract_opencode_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let auth_path = home_dir
        .join(".local")
        .join("share")
        .join("opencode")
        .join("auth.json");

    let mut result = ExtractedCredentials::default();
    let Some(data) = read_json_file(&auth_path) else {
        return result;
    };
    let Some(entries) = data.as_object() else {
        return result;
    };

    for (provider_name, entry) in entries {
        let Some(config) = entry.as_object() else {
            continue;
        };
        let credentials = match config.get("type").and_then(Value::as_str).unwrap_or("") {
            "api" => config
                .get("key")
                .and_then(Value::as_str)
                .map(|key| ProviderCredentials {
                    api_key: key.to_string(),
                    source: "opencode".to_string(),
                    auth_type: AuthType::ApiKey,
                    provider: provider_name.to_string(),
                }),
            "oauth" if options.include_oauth => {
                // A missing `expires` field means the token never expires.
                let still_valid = config
                    .get("expires")
                    .and_then(Value::as_i64)
                    .map_or(true, |expires| expires >= current_epoch_millis());
                if still_valid {
                    config
                        .get("access")
                        .and_then(Value::as_str)
                        .map(|token| ProviderCredentials {
                            api_key: token.to_string(),
                            source: "opencode".to_string(),
                            auth_type: AuthType::Oauth,
                            provider: provider_name.to_string(),
                        })
                } else {
                    None
                }
            }
            _ => None,
        };
        if let Some(credentials) = credentials {
            match provider_name.as_str() {
                "anthropic" => result.anthropic = Some(credentials),
                "openai" => result.openai = Some(credentials),
                _ => {
                    result.other.insert(provider_name.clone(), credentials);
                }
            }
        }
    }
    result
}
/// Extract an Anthropic credential from amp's `~/.amp/config.json`.
///
/// Probes a fixed list of known key locations and returns the first
/// non-empty match, always treated as an API key.
pub fn extract_amp_credentials(options: &CredentialExtractionOptions) -> Option<ProviderCredentials> {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let config_path = home_dir.join(".amp").join("config.json");
    let data = read_json_file(&config_path)?;

    // Known locations for the key, in priority order.
    const KEY_PATHS: &[&[&str]] = &[
        &["anthropicApiKey"],
        &["anthropic_api_key"],
        &["apiKey"],
        &["api_key"],
        &["accessToken"],
        &["access_token"],
        &["token"],
        &["auth", "anthropicApiKey"],
        &["auth", "apiKey"],
        &["auth", "token"],
        &["anthropic", "apiKey"],
        &["anthropic", "token"],
    ];

    let api_key = KEY_PATHS.iter().find_map(|key_path| {
        read_string_field(&data, key_path).filter(|key| !key.is_empty())
    })?;
    Some(ProviderCredentials {
        api_key,
        source: "amp".to_string(),
        auth_type: AuthType::ApiKey,
        provider: "anthropic".to_string(),
    })
}
/// Scan every known source and merge the results.
///
/// Priority per provider: environment variables, then amp (Anthropic only),
/// then Claude Code / Codex config files, then opencode. Earlier sources win;
/// opencode's extra providers are merged into `other` without overwriting.
pub fn extract_all_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
    // Helper: wrap an environment variable's value as an API-key credential.
    let env_credential = |var: &str, provider: &str| {
        std::env::var(var).ok().map(|value| ProviderCredentials {
            api_key: value,
            source: "environment".to_string(),
            auth_type: AuthType::ApiKey,
            provider: provider.to_string(),
        })
    };

    let mut result = ExtractedCredentials::default();
    result.anthropic = env_credential("ANTHROPIC_API_KEY", "anthropic")
        .or_else(|| env_credential("CLAUDE_API_KEY", "anthropic"));
    result.openai = env_credential("OPENAI_API_KEY", "openai")
        .or_else(|| env_credential("CODEX_API_KEY", "openai"));

    if result.anthropic.is_none() {
        result.anthropic = extract_amp_credentials(options);
    }
    if result.anthropic.is_none() {
        result.anthropic = extract_claude_credentials(options);
    }
    if result.openai.is_none() {
        result.openai = extract_codex_credentials(options);
    }

    let opencode_credentials = extract_opencode_credentials(options);
    result.anthropic = result.anthropic.or(opencode_credentials.anthropic);
    result.openai = result.openai.or(opencode_credentials.openai);
    for (key, value) in opencode_credentials.other {
        // Keep any credential already present for this provider.
        result.other.entry(key).or_insert(value);
    }
    result
}
/// Convenience wrapper: the Anthropic key/token from a full extraction, if any.
pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    Some(credentials.anthropic?.api_key)
}
/// Convenience wrapper: the OpenAI key/token from a full extraction, if any.
pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    Some(credentials.openai?.api_key)
}
/// Export the extracted Anthropic/OpenAI credentials into this process's
/// environment (`ANTHROPIC_API_KEY` / `OPENAI_API_KEY`). Missing entries are
/// left untouched; `other` providers are not exported.
pub fn set_credentials_as_env_vars(credentials: &ExtractedCredentials) {
    let exports = [
        ("ANTHROPIC_API_KEY", credentials.anthropic.as_ref()),
        ("OPENAI_API_KEY", credentials.openai.as_ref()),
    ];
    for (var, credential) in exports {
        if let Some(credential) = credential {
            std::env::set_var(var, &credential.api_key);
        }
    }
}
/// Read and parse a JSON file; any I/O or parse error yields `None`.
fn read_json_file(path: &Path) -> Option<Value> {
    fs::read_to_string(path)
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
}
/// Walk `path` through nested JSON objects and return the string at the end,
/// or `None` if any segment is missing or the leaf is not a string.
fn read_string_field(value: &Value, path: &[&str]) -> Option<String> {
    path.iter()
        .try_fold(value, |current, key| current.get(*key))?
        .as_str()
        .map(str::to_string)
}
/// The user's home directory, falling back to the current directory when it
/// cannot be determined.
fn default_home_dir() -> PathBuf {
    match dirs::home_dir() {
        Some(home) => home,
        None => PathBuf::from("."),
    }
}
/// Current UTC time as milliseconds since the Unix epoch.
fn current_epoch_millis() -> i64 {
    let now = OffsetDateTime::now_utc();
    now.unix_timestamp() * 1000 + i64::from(now.millisecond())
}
/// True when `value` parses as an RFC 3339 timestamp that lies in the past.
/// Unparseable values are treated as not expired (best-effort keep).
fn is_expired_rfc3339(value: &str) -> bool {
    OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339)
        .map(|expiry| expiry < OffsetDateTime::now_utc())
        .unwrap_or(false)
}

View file

@ -0,0 +1,40 @@
use std::env;
use std::fs;
use std::path::PathBuf;
/// Emit the generated OpenAPI schema.
///
/// Usage: `[--stdout] [--out PATH | --out=PATH | PATH]`. `--stdout` prints the
/// schema and exits; otherwise it is written to `--out` / the first positional
/// argument / `openapi.json` (in that priority).
fn main() {
    let mut out: Option<PathBuf> = None;
    let mut stdout = false;
    // FIX: dropped the unused `.peekable()` (nothing ever called `peek`).
    let mut args = env::args().skip(1);
    while let Some(arg) = args.next() {
        if arg == "--stdout" {
            stdout = true;
        } else if arg == "--out" {
            // FIX: `--out` with no following value was silently ignored,
            // falling through to the default path; now it is a usage error.
            match args.next() {
                Some(value) => out = Some(PathBuf::from(value)),
                None => {
                    eprintln!("--out requires a value");
                    std::process::exit(2);
                }
            }
        } else if let Some(value) = arg.strip_prefix("--out=") {
            out = Some(PathBuf::from(value));
        } else if out.is_none() {
            // First positional argument is the output path.
            out = Some(PathBuf::from(arg));
        }
    }
    let schema = sandbox_daemon_openapi_gen::OPENAPI_JSON;
    if stdout {
        println!("{schema}");
        return;
    }
    let out = out.unwrap_or_else(|| PathBuf::from("openapi.json"));
    if let Err(err) = fs::write(&out, schema) {
        eprintln!("failed to write {}: {err}", out.display());
        std::process::exit(1);
    }
}

View file

@ -1,683 +0,0 @@
use std::collections::HashMap;
use std::fmt;
use std::fs;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::process::{Command, ExitStatus};
use flate2::read::GzDecoder;
use reqwest::blocking::Client;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
/// The coding agents this crate can install and run.
/// Serialized as lowercase strings ("claude", "codex", ...).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AgentId {
    Claude,
    Codex,
    Opencode,
    Amp,
}
impl AgentId {
pub fn as_str(self) -> &'static str {
match self {
AgentId::Claude => "claude",
AgentId::Codex => "codex",
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
}
}
pub fn binary_name(self) -> &'static str {
match self {
AgentId::Claude => "claude",
AgentId::Codex => "codex",
AgentId::Opencode => "opencode",
AgentId::Amp => "amp",
}
}
}
impl fmt::Display for AgentId {
    /// Display as the canonical lowercase identifier.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
/// OS/architecture combinations with published agent binaries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Platform {
    LinuxX64,
    /// x86_64 Linux built against musl libc.
    LinuxX64Musl,
    LinuxArm64,
    MacosArm64,
    MacosX64,
}
impl Platform {
    /// Detect the platform of the running binary from compile-time constants.
    ///
    /// musl vs glibc matters only on x86_64 Linux; other targets share one
    /// artifact. Unknown OS/arch pairs yield `AgentError::UnsupportedPlatform`.
    pub fn detect() -> Result<Self, AgentError> {
        let os = std::env::consts::OS;
        let arch = std::env::consts::ARCH;
        match (os, arch) {
            ("linux", "x86_64") => {
                if cfg!(target_env = "musl") {
                    Ok(Self::LinuxX64Musl)
                } else {
                    Ok(Self::LinuxX64)
                }
            }
            ("linux", "aarch64") => Ok(Self::LinuxArm64),
            ("macos", "aarch64") => Ok(Self::MacosArm64),
            ("macos", "x86_64") => Ok(Self::MacosX64),
            _ => Err(AgentError::UnsupportedPlatform {
                os: os.to_string(),
                arch: arch.to_string(),
            }),
        }
    }
}
/// Installs agent CLI binaries into a directory and spawns them.
#[derive(Debug, Clone)]
pub struct AgentManager {
    // Directory that receives installed binaries.
    install_dir: PathBuf,
    // Platform whose release artifacts are downloaded.
    platform: Platform,
}
impl AgentManager {
    /// Create a manager installing into `install_dir`, auto-detecting the
    /// current platform.
    ///
    /// # Errors
    /// Fails with `UnsupportedPlatform` on unknown OS/arch combinations.
    pub fn new(install_dir: impl Into<PathBuf>) -> Result<Self, AgentError> {
        Ok(Self {
            install_dir: install_dir.into(),
            platform: Platform::detect()?,
        })
    }

    /// Create a manager with an explicitly chosen platform (e.g. for tests
    /// or preparing binaries for another machine).
    pub fn with_platform(
        install_dir: impl Into<PathBuf>,
        platform: Platform,
    ) -> Self {
        Self {
            install_dir: install_dir.into(),
            platform,
        }
    }

    /// Install `agent` into the managed directory and report its path/version.
    ///
    /// If the binary already exists and `options.reinstall` is false, no
    /// download happens. Version probing is best-effort: probe failures are
    /// swallowed and surface as `version: None`.
    pub fn install(&self, agent: AgentId, options: InstallOptions) -> Result<InstallResult, AgentError> {
        let install_path = self.binary_path(agent);
        if install_path.exists() && !options.reinstall {
            return Ok(InstallResult {
                path: install_path,
                version: self.version(agent).unwrap_or(None),
            });
        }
        fs::create_dir_all(&self.install_dir)?;
        // Each agent has its own release/download scheme.
        match agent {
            AgentId::Claude => install_claude(&install_path, self.platform, options.version.as_deref())?,
            AgentId::Codex => install_codex(&install_path, self.platform, options.version.as_deref())?,
            AgentId::Opencode => install_opencode(&install_path, self.platform, options.version.as_deref())?,
            AgentId::Amp => install_amp(&install_path, self.platform, options.version.as_deref())?,
        }
        Ok(InstallResult {
            path: install_path,
            version: self.version(agent).unwrap_or(None),
        })
    }

    /// True when the agent binary exists in the install dir or on `PATH`.
    pub fn is_installed(&self, agent: AgentId) -> bool {
        self.binary_path(agent).exists() || find_in_path(agent.binary_name()).is_some()
    }

    /// Path where the agent binary is (or would be) installed.
    pub fn binary_path(&self, agent: AgentId) -> PathBuf {
        self.install_dir.join(agent.binary_name())
    }

    /// Best-effort version probe.
    ///
    /// Tries `--version`, `version`, then `-V` and returns the first
    /// non-empty output line of the first successful invocation, or `None`.
    ///
    /// # Errors
    /// Fails only when the binary cannot be located at all.
    pub fn version(&self, agent: AgentId) -> Result<Option<String>, AgentError> {
        let path = self.resolve_binary(agent)?;
        let attempts = [vec!["--version"], vec!["version"], vec!["-V"]];
        for args in attempts {
            let output = Command::new(&path).args(args).output();
            if let Ok(output) = output {
                if output.status.success() {
                    if let Some(version) = parse_version_output(&output) {
                        return Ok(Some(version));
                    }
                }
            }
        }
        Ok(None)
    }

    /// Run `agent` once with `options`, blocking until it exits and capturing
    /// stdout/stderr.
    ///
    /// Each agent has its own CLI shape; flag names and argument ordering
    /// below are deliberate and must match the respective CLI. The prompt is
    /// always passed as the final positional argument.
    pub fn spawn(&self, agent: AgentId, options: SpawnOptions) -> Result<SpawnResult, AgentError> {
        let path = self.resolve_binary(agent)?;
        // Default to the current directory when no working dir was given.
        let working_dir = options
            .working_dir
            .clone()
            .unwrap_or_else(|| std::env::current_dir().unwrap_or_default());
        let mut command = Command::new(&path);
        command.current_dir(&working_dir);
        match agent {
            AgentId::Claude => {
                // Non-interactive streaming-JSON run with permission prompts disabled.
                command
                    .arg("--print")
                    .arg("--output-format")
                    .arg("stream-json")
                    .arg("--verbose")
                    .arg("--dangerously-skip-permissions");
                if let Some(model) = options.model.as_deref() {
                    command.arg("--model").arg(model);
                }
                if let Some(session_id) = options.session_id.as_deref() {
                    command.arg("--resume").arg(session_id);
                }
                // Only the "plan" permission mode is forwarded; other values
                // are ignored.
                if let Some(permission_mode) = options.permission_mode.as_deref() {
                    if permission_mode == "plan" {
                        command.arg("--permission-mode").arg("plan");
                    }
                }
                command.arg(&options.prompt);
            }
            AgentId::Codex => {
                command
                    .arg("exec")
                    .arg("--json")
                    .arg("--dangerously-bypass-approvals-and-sandbox");
                if let Some(model) = options.model.as_deref() {
                    command.arg("-m").arg(model);
                }
                command.arg(&options.prompt);
            }
            AgentId::Opencode => {
                command
                    .arg("run")
                    .arg("--format")
                    .arg("json");
                if let Some(model) = options.model.as_deref() {
                    command.arg("-m").arg(model);
                }
                if let Some(agent_mode) = options.agent_mode.as_deref() {
                    command.arg("--agent").arg(agent_mode);
                }
                if let Some(variant) = options.variant.as_deref() {
                    command.arg("--variant").arg(variant);
                }
                if let Some(session_id) = options.session_id.as_deref() {
                    command.arg("-s").arg(session_id);
                }
                command.arg(&options.prompt);
            }
            AgentId::Amp => {
                // amp's flags vary by version; spawn_amp probes and adapts,
                // so it builds its own command and returns early here.
                let output = spawn_amp(&path, &working_dir, &options)?;
                return Ok(SpawnResult {
                    status: output.status,
                    stdout: String::from_utf8_lossy(&output.stdout).to_string(),
                    stderr: String::from_utf8_lossy(&output.stderr).to_string(),
                });
            }
        }
        for (key, value) in options.env {
            command.env(key, value);
        }
        let output = command.output().map_err(AgentError::Io)?;
        Ok(SpawnResult {
            status: output.status,
            stdout: String::from_utf8_lossy(&output.stdout).to_string(),
            stderr: String::from_utf8_lossy(&output.stderr).to_string(),
        })
    }

    /// Locate the agent binary: prefer the managed install dir, then `PATH`.
    fn resolve_binary(&self, agent: AgentId) -> Result<PathBuf, AgentError> {
        let path = self.binary_path(agent);
        if path.exists() {
            return Ok(path);
        }
        if let Some(path) = find_in_path(agent.binary_name()) {
            return Ok(path);
        }
        Err(AgentError::BinaryNotFound { agent })
    }
}
/// Options controlling [`AgentManager::install`].
///
/// The previous hand-written `Default` impl produced exactly the derived
/// defaults (`false` / `None`), so it is replaced with `#[derive(Default)]`.
#[derive(Debug, Clone, Default)]
pub struct InstallOptions {
    /// Re-download even if the binary already exists.
    pub reinstall: bool,
    /// Pin a specific release; `None` resolves the latest.
    pub version: Option<String>,
}
/// Outcome of a successful install.
#[derive(Debug, Clone)]
pub struct InstallResult {
    /// Where the binary was installed (or already lived).
    pub path: PathBuf,
    /// Probed version string, if the binary reported one.
    pub version: Option<String>,
}
/// Options for a single agent run via [`AgentManager::spawn`].
///
/// `Default` is derived so `new` can use struct-update syntax instead of
/// spelling out every `None`/empty field.
#[derive(Debug, Clone, Default)]
pub struct SpawnOptions {
    /// The user prompt, passed as the final positional CLI argument.
    pub prompt: String,
    /// Model override, forwarded to the agent's model flag.
    pub model: Option<String>,
    /// opencode `--variant` value.
    pub variant: Option<String>,
    /// opencode `--agent` value.
    pub agent_mode: Option<String>,
    /// Claude permission mode (only "plan" is forwarded).
    pub permission_mode: Option<String>,
    /// Session to resume/continue, where the agent supports it.
    pub session_id: Option<String>,
    /// Working directory for the agent process; `None` uses the current dir.
    pub working_dir: Option<PathBuf>,
    /// Extra environment variables for the agent process.
    pub env: HashMap<String, String>,
}
impl SpawnOptions {
    /// Options for `prompt` with every other field at its default.
    pub fn new(prompt: impl Into<String>) -> Self {
        Self {
            prompt: prompt.into(),
            ..Self::default()
        }
    }
}
/// Captured result of a completed agent run.
#[derive(Debug, Clone)]
pub struct SpawnResult {
    /// Exit status of the agent process.
    pub status: ExitStatus,
    /// Captured stdout, lossily decoded as UTF-8.
    pub stdout: String,
    /// Captured stderr, lossily decoded as UTF-8.
    pub stderr: String,
}
/// Errors produced while installing or running agents.
#[derive(Debug, Error)]
pub enum AgentError {
    /// The running OS/arch has no published agent binaries.
    #[error("unsupported platform {os}/{arch}")]
    UnsupportedPlatform { os: String, arch: String },
    /// An agent name that this crate does not know.
    #[error("unsupported agent {agent}")]
    UnsupportedAgent { agent: String },
    /// The agent binary was neither installed nor found on `PATH`.
    #[error("binary not found for {agent}")]
    BinaryNotFound { agent: AgentId },
    /// A download returned a non-success HTTP status.
    #[error("download failed: {url}")]
    DownloadFailed { url: Url },
    /// Transport-level HTTP failure.
    #[error("http error: {0}")]
    Http(#[from] reqwest::Error),
    /// A release URL could not be constructed.
    #[error("url parse error: {0}")]
    UrlParse(#[from] url::ParseError),
    /// Filesystem or process I/O failure.
    #[error("io error: {0}")]
    Io(#[from] io::Error),
    /// Archive extraction or post-processing failure.
    #[error("extract failed: {0}")]
    ExtractFailed(String),
}
/// Pull a version string out of a version-probe invocation: the first
/// non-empty (trimmed) line of stdout, falling back to stderr.
fn parse_version_output(output: &std::process::Output) -> Option<String> {
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    stdout
        .lines()
        .chain(stderr.lines())
        .map(str::trim)
        .find(|line| !line.is_empty())
        .map(str::to_string)
}
/// Run amp once, adapting to whichever CLI flags this amp build supports.
///
/// Supported flags are probed via `--help` (`detect_amp_flags`); if the run
/// then fails with an unknown-flag style error, retry with trial-and-error
/// flag sets (`spawn_amp_fallback`).
fn spawn_amp(
    path: &Path,
    working_dir: &Path,
    options: &SpawnOptions,
) -> Result<std::process::Output, AgentError> {
    // If probing fails entirely, assume no optional flags are supported.
    let flags = detect_amp_flags(path, working_dir).unwrap_or_default();
    let mut args: Vec<&str> = Vec::new();
    // Prefer --execute; fall back to --print when only that exists.
    if flags.execute {
        args.push("--execute");
    } else if flags.print {
        args.push("--print");
    }
    if flags.output_format {
        args.push("--output-format");
        args.push("stream-json");
    }
    if flags.dangerously_skip_permissions {
        args.push("--dangerously-skip-permissions");
    }
    let mut command = Command::new(path);
    command.current_dir(working_dir);
    if let Some(model) = options.model.as_deref() {
        command.arg("--model").arg(model);
    }
    if let Some(session_id) = options.session_id.as_deref() {
        command.arg("--continue").arg(session_id);
    }
    // Prompt is always the final positional argument.
    command.args(&args).arg(&options.prompt);
    for (key, value) in &options.env {
        command.env(key, value);
    }
    let output = command.output().map_err(AgentError::Io)?;
    if output.status.success() {
        return Ok(output);
    }
    // Heuristic: these stderr fragments indicate a flag mismatch (the --help
    // probe lied) rather than a genuine run failure, so retry with
    // alternative flag combinations.
    let stderr = String::from_utf8_lossy(&output.stderr);
    if stderr.contains("unknown option")
        || stderr.contains("unknown flag")
        || stderr.contains("User message must be provided")
    {
        return spawn_amp_fallback(path, working_dir, options);
    }
    Ok(output)
}
/// Which optional CLI flags an amp build advertises in its `--help` output.
#[derive(Debug, Default, Clone, Copy)]
struct AmpFlags {
    // Supports `--execute`.
    execute: bool,
    // Supports `--print`.
    print: bool,
    // Supports `--output-format`.
    output_format: bool,
    // Supports `--dangerously-skip-permissions`.
    dangerously_skip_permissions: bool,
}
/// Probe which flags this amp build supports by scanning its `--help` output
/// (stdout and stderr combined). Returns `None` if amp could not be run.
fn detect_amp_flags(path: &Path, working_dir: &Path) -> Option<AmpFlags> {
    let help = Command::new(path)
        .current_dir(working_dir)
        .arg("--help")
        .output()
        .ok()?;
    let mut help_text = String::from_utf8_lossy(&help.stdout).into_owned();
    help_text.push('\n');
    help_text.push_str(&String::from_utf8_lossy(&help.stderr));
    Some(AmpFlags {
        execute: help_text.contains("--execute"),
        print: help_text.contains("--print"),
        output_format: help_text.contains("--output-format"),
        dangerously_skip_permissions: help_text.contains("--dangerously-skip-permissions"),
    })
}
/// Retry amp with progressively simpler flag sets when `--help` probing gave
/// wrong answers (older/newer CLIs). Returns the first successful output, or
/// the output of a final bare invocation when every attempt fails.
///
/// Refactored: the model/session/env/command boilerplate was duplicated three
/// times; it now lives in one local helper.
fn spawn_amp_fallback(
    path: &Path,
    working_dir: &Path,
    options: &SpawnOptions,
) -> Result<std::process::Output, AgentError> {
    // Build the invariant part of the command: binary, cwd, model/session
    // flags, and extra environment. Callers append flags + the prompt.
    fn base_command(path: &Path, working_dir: &Path, options: &SpawnOptions) -> Command {
        let mut command = Command::new(path);
        command.current_dir(working_dir);
        if let Some(model) = options.model.as_deref() {
            command.arg("--model").arg(model);
        }
        if let Some(session_id) = options.session_id.as_deref() {
            command.arg("--continue").arg(session_id);
        }
        for (key, value) in &options.env {
            command.env(key, value);
        }
        command
    }

    // Flag sets to try, most capable first; the empty set is a bare call.
    let attempts: [&[&str]; 5] = [
        &["--execute"],
        &["--print", "--output-format", "stream-json"],
        &["--output-format", "stream-json"],
        &["--dangerously-skip-permissions"],
        &[],
    ];
    for args in attempts {
        let mut command = base_command(path, working_dir, options);
        command.args(args).arg(&options.prompt);
        let output = command.output().map_err(AgentError::Io)?;
        if output.status.success() {
            return Ok(output);
        }
    }

    // Last resort: rerun the bare invocation and surface whatever it
    // produced, success or not (preserves the original behavior).
    let mut command = base_command(path, working_dir, options);
    command.arg(&options.prompt);
    command.output().map_err(AgentError::Io)
}
/// Search every `PATH` entry for `binary_name` and return the first existing
/// candidate, or `None` (including when `PATH` is unset).
fn find_in_path(binary_name: &str) -> Option<PathBuf> {
    let path_var = std::env::var_os("PATH")?;
    std::env::split_paths(&path_var)
        .map(|dir| dir.join(binary_name))
        .find(|candidate| candidate.exists())
}
/// Fetch `url` and return the response body, failing on non-success statuses.
fn download_bytes(url: &Url) -> Result<Vec<u8>, AgentError> {
    let mut response = Client::builder().build()?.get(url.clone()).send()?;
    if !response.status().is_success() {
        return Err(AgentError::DownloadFailed { url: url.clone() });
    }
    let mut body = Vec::new();
    response.read_to_end(&mut body)?;
    Ok(body)
}
/// Download the Claude Code binary for `platform` into `path`.
///
/// When no version is pinned, the release bucket's `latest` file is fetched
/// to resolve the current version string.
fn install_claude(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
    const BASE: &str = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases";
    let version = match version {
        Some(pinned) => pinned.to_string(),
        None => {
            let latest_url = Url::parse(&format!("{BASE}/latest"))?;
            let body = download_bytes(&latest_url)?;
            String::from_utf8(body)
                .map_err(|err| AgentError::ExtractFailed(err.to_string()))?
                .trim()
                .to_string()
        }
    };
    let segment = match platform {
        Platform::LinuxX64 => "linux-x64",
        Platform::LinuxX64Musl => "linux-x64-musl",
        Platform::LinuxArm64 => "linux-arm64",
        Platform::MacosArm64 => "darwin-arm64",
        Platform::MacosX64 => "darwin-x64",
    };
    // Claude ships a bare executable (no archive).
    let binary_url = Url::parse(&format!("{BASE}/{version}/{segment}/claude"))?;
    write_executable(path, &download_bytes(&binary_url)?)
}
/// Download the amp binary for `platform` into `path`.
///
/// When no version is pinned, `cli-version.txt` in the release bucket is
/// fetched to resolve the current version string.
fn install_amp(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
    const BASE: &str = "https://storage.googleapis.com/amp-public-assets-prod-0/cli";
    let version = match version {
        Some(pinned) => pinned.to_string(),
        None => {
            let version_url = Url::parse(&format!("{BASE}/cli-version.txt"))?;
            let body = download_bytes(&version_url)?;
            String::from_utf8(body)
                .map_err(|err| AgentError::ExtractFailed(err.to_string()))?
                .trim()
                .to_string()
        }
    };
    // amp publishes one glibc-compatible x64 Linux artifact.
    let segment = match platform {
        Platform::LinuxX64 | Platform::LinuxX64Musl => "linux-x64",
        Platform::LinuxArm64 => "linux-arm64",
        Platform::MacosArm64 => "darwin-arm64",
        Platform::MacosX64 => "darwin-x64",
    };
    let binary_url = Url::parse(&format!("{BASE}/{version}/amp-{segment}"))?;
    write_executable(path, &download_bytes(&binary_url)?)
}
/// Download and extract the Codex binary for `platform` into `path`.
///
/// Codex releases are tar.gz archives on GitHub containing a binary named
/// `codex-{target}`.
fn install_codex(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
    let target = match platform {
        Platform::LinuxX64 | Platform::LinuxX64Musl => "x86_64-unknown-linux-musl",
        Platform::LinuxArm64 => "aarch64-unknown-linux-musl",
        Platform::MacosArm64 => "aarch64-apple-darwin",
        Platform::MacosX64 => "x86_64-apple-darwin",
    };
    let url = match version {
        Some(version) => Url::parse(&format!(
            "https://github.com/openai/codex/releases/download/{version}/codex-{target}.tar.gz"
        ))?,
        None => Url::parse(&format!(
            "https://github.com/openai/codex/releases/latest/download/codex-{target}.tar.gz"
        ))?,
    };
    let archive_bytes = download_bytes(&url)?;
    let scratch = tempfile::tempdir()?;
    tar::Archive::new(GzDecoder::new(io::Cursor::new(archive_bytes))).unpack(scratch.path())?;
    let binary_name = format!("codex-{target}");
    let extracted = find_file_recursive(scratch.path(), &binary_name)?
        .ok_or_else(|| AgentError::ExtractFailed(format!("missing {binary_name}")))?;
    move_executable(&extracted, path)?;
    Ok(())
}
/// Download and extract the opencode binary for `platform` into `path`.
///
/// macOS releases ship as zip archives; Linux releases as tar.gz. The two
/// previously-duplicated macOS branches are collapsed into a single
/// segment/format table, so URL construction happens exactly once.
fn install_opencode(path: &Path, platform: Platform, version: Option<&str>) -> Result<(), AgentError> {
    // (release asset suffix, whether the asset is a zip archive)
    let (segment, is_zip) = match platform {
        Platform::MacosArm64 => ("darwin-arm64", true),
        Platform::MacosX64 => ("darwin-x64", true),
        Platform::LinuxX64 => ("linux-x64", false),
        Platform::LinuxX64Musl => ("linux-x64-musl", false),
        Platform::LinuxArm64 => ("linux-arm64", false),
    };
    let extension = if is_zip { "zip" } else { "tar.gz" };
    let url = match version {
        Some(version) => Url::parse(&format!(
            "https://github.com/anomalyco/opencode/releases/download/{version}/opencode-{segment}.{extension}"
        ))?,
        None => Url::parse(&format!(
            "https://github.com/anomalyco/opencode/releases/latest/download/opencode-{segment}.{extension}"
        ))?,
    };
    if is_zip {
        return install_zip_binary(path, &url, "opencode");
    }
    let bytes = download_bytes(&url)?;
    let temp_dir = tempfile::tempdir()?;
    let cursor = io::Cursor::new(bytes);
    let mut archive = tar::Archive::new(GzDecoder::new(cursor));
    archive.unpack(temp_dir.path())?;
    let binary = find_file_recursive(temp_dir.path(), "opencode")?
        .ok_or_else(|| AgentError::ExtractFailed("missing opencode".to_string()))?;
    move_executable(&binary, path)?;
    Ok(())
}
/// Download a zip archive from `url`, find the first entry whose name ends
/// with `binary_name`, and install it (executable) at `path`.
fn install_zip_binary(path: &Path, url: &Url, binary_name: &str) -> Result<(), AgentError> {
    let bytes = download_bytes(url)?;
    let mut archive = zip::ZipArchive::new(io::Cursor::new(bytes))
        .map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
    let scratch = tempfile::tempdir()?;
    for index in 0..archive.len() {
        let mut entry = archive
            .by_index(index)
            .map_err(|err| AgentError::ExtractFailed(err.to_string()))?;
        if !entry.name().ends_with(binary_name) {
            continue;
        }
        let extracted = scratch.path().join(binary_name);
        let mut out_file = fs::File::create(&extracted)?;
        io::copy(&mut entry, &mut out_file)?;
        return move_executable(&extracted, path);
    }
    Err(AgentError::ExtractFailed(format!("missing {binary_name}")))
}
/// Write `bytes` to `path` (creating parent directories) and mark the file
/// executable.
fn write_executable(path: &Path, bytes: &[u8]) -> Result<(), AgentError> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    fs::write(path, bytes)?;
    set_executable(path)
}
/// Install `source` at `dest` (copy, not rename — `source` may live on a
/// different filesystem, e.g. a temp dir), replacing any existing file and
/// marking the result executable.
fn move_executable(source: &Path, dest: &Path) -> Result<(), AgentError> {
    if let Some(parent) = dest.parent() {
        fs::create_dir_all(parent)?;
    }
    if dest.exists() {
        fs::remove_file(dest)?;
    }
    fs::copy(source, dest)?;
    set_executable(dest)
}
/// Mark `path` executable (mode 0o755) on Unix.
#[cfg(unix)]
fn set_executable(path: &Path) -> Result<(), AgentError> {
    use std::os::unix::fs::PermissionsExt;
    let mut perms = fs::metadata(path)?.permissions();
    perms.set_mode(0o755);
    fs::set_permissions(path, perms)?;
    Ok(())
}
/// No-op on non-Unix platforms: there is no executable bit to set.
#[cfg(not(unix))]
fn set_executable(_path: &Path) -> Result<(), AgentError> {
    Ok(())
}
/// Depth-first search under `dir` for a file named exactly `filename`.
/// Returns the first match, or `Ok(None)` when nothing matches.
fn find_file_recursive(dir: &Path, filename: &str) -> Result<Option<PathBuf>, AgentError> {
    for entry in fs::read_dir(dir)? {
        let candidate = entry?.path();
        if candidate.is_dir() {
            if let Some(found) = find_file_recursive(&candidate, filename)? {
                return Ok(Some(found));
            }
        } else if candidate.file_name().and_then(|name| name.to_str()) == Some(filename) {
            return Ok(Some(candidate));
        }
    }
    Ok(None)
}

View file

@ -1,335 +1 @@
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProviderCredentials {
pub api_key: String,
pub source: String,
pub auth_type: AuthType,
pub provider: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AuthType {
ApiKey,
Oauth,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ExtractedCredentials {
pub anthropic: Option<ProviderCredentials>,
pub openai: Option<ProviderCredentials>,
pub other: HashMap<String, ProviderCredentials>,
}
#[derive(Debug, Clone, Default)]
pub struct CredentialExtractionOptions {
pub home_dir: Option<PathBuf>,
pub include_oauth: bool,
}
impl CredentialExtractionOptions {
pub fn new() -> Self {
Self {
home_dir: None,
include_oauth: true,
}
}
}
pub fn extract_claude_credentials(
options: &CredentialExtractionOptions,
) -> Option<ProviderCredentials> {
let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
let include_oauth = options.include_oauth;
let config_paths = [
home_dir.join(".claude.json.api"),
home_dir.join(".claude.json"),
home_dir.join(".claude.json.nathan"),
];
let key_paths = [
vec!["primaryApiKey"],
vec!["apiKey"],
vec!["anthropicApiKey"],
vec!["customApiKey"],
];
for path in config_paths {
let data = read_json_file(&path)?;
for key_path in &key_paths {
if let Some(key) = read_string_field(&data, key_path) {
if key.starts_with("sk-ant-") {
return Some(ProviderCredentials {
api_key: key,
source: "claude-code".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
}
}
}
if include_oauth {
let oauth_paths = [
home_dir.join(".claude").join(".credentials.json"),
home_dir.join(".claude-oauth-credentials.json"),
];
for path in oauth_paths {
let data = match read_json_file(&path) {
Some(value) => value,
None => continue,
};
let access = read_string_field(&data, &["claudeAiOauth", "accessToken"]);
if let Some(token) = access {
if let Some(expires_at) = read_string_field(&data, &["claudeAiOauth", "expiresAt"]) {
if is_expired_rfc3339(&expires_at) {
continue;
}
}
return Some(ProviderCredentials {
api_key: token,
source: "claude-code".to_string(),
auth_type: AuthType::Oauth,
provider: "anthropic".to_string(),
});
}
}
}
None
}
pub fn extract_codex_credentials(
options: &CredentialExtractionOptions,
) -> Option<ProviderCredentials> {
let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
let include_oauth = options.include_oauth;
let path = home_dir.join(".codex").join("auth.json");
let data = read_json_file(&path)?;
if let Some(key) = data.get("OPENAI_API_KEY").and_then(Value::as_str) {
if !key.is_empty() {
return Some(ProviderCredentials {
api_key: key.to_string(),
source: "codex".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
}
if include_oauth {
if let Some(token) = read_string_field(&data, &["tokens", "access_token"]) {
return Some(ProviderCredentials {
api_key: token,
source: "codex".to_string(),
auth_type: AuthType::Oauth,
provider: "openai".to_string(),
});
}
}
None
}
/// Reads opencode credentials from `~/.local/share/opencode/auth.json`.
///
/// The file maps provider names to entries whose `type` is either `"api"`
/// (API key under `key`) or `"oauth"` (token under `access`, with an
/// optional epoch-milliseconds `expires`). OAuth entries are skipped when
/// `options.include_oauth` is false or the token has expired; entries
/// without an `expires` field are treated as still valid.
///
/// Anthropic and OpenAI land in their dedicated slots of the returned
/// [`ExtractedCredentials`]; every other provider goes into `other`.
pub fn extract_opencode_credentials(
    options: &CredentialExtractionOptions,
) -> ExtractedCredentials {
    let home_dir = options.home_dir.clone().unwrap_or_else(default_home_dir);
    let path = home_dir
        .join(".local")
        .join("share")
        .join("opencode")
        .join("auth.json");
    let mut result = ExtractedCredentials::default();
    let data = match read_json_file(&path) {
        Some(value) => value,
        None => return result,
    };
    let obj = match data.as_object() {
        Some(obj) => obj,
        None => return result,
    };
    for (provider_name, value) in obj {
        let config = match value.as_object() {
            Some(config) => config,
            None => continue,
        };
        let auth_type = config.get("type").and_then(Value::as_str).unwrap_or("");
        let credentials = match auth_type {
            "api" => config
                .get("key")
                .and_then(Value::as_str)
                .map(|key| ProviderCredentials {
                    api_key: key.to_string(),
                    source: "opencode".to_string(),
                    auth_type: AuthType::ApiKey,
                    provider: provider_name.clone(),
                }),
            "oauth" if options.include_oauth => {
                // `expires` is epoch millis; a missing field means "no expiry".
                let expired = config
                    .get("expires")
                    .and_then(Value::as_i64)
                    .map_or(false, |expires| expires < current_epoch_millis());
                if expired {
                    None
                } else {
                    config
                        .get("access")
                        .and_then(Value::as_str)
                        .map(|token| ProviderCredentials {
                            api_key: token.to_string(),
                            source: "opencode".to_string(),
                            auth_type: AuthType::Oauth,
                            provider: provider_name.clone(),
                        })
                }
            }
            _ => None,
        };
        if let Some(credentials) = credentials {
            // Move the value into exactly one slot; no clones needed.
            match provider_name.as_str() {
                "anthropic" => result.anthropic = Some(credentials),
                "openai" => result.openai = Some(credentials),
                _ => {
                    result.other.insert(provider_name.clone(), credentials);
                }
            }
        }
    }
    result
}
pub fn extract_all_credentials(options: &CredentialExtractionOptions) -> ExtractedCredentials {
let mut result = ExtractedCredentials::default();
if let Ok(value) = std::env::var("ANTHROPIC_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
} else if let Ok(value) = std::env::var("CLAUDE_API_KEY") {
result.anthropic = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
if let Ok(value) = std::env::var("OPENAI_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
} else if let Ok(value) = std::env::var("CODEX_API_KEY") {
result.openai = Some(ProviderCredentials {
api_key: value,
source: "environment".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
if result.anthropic.is_none() {
result.anthropic = extract_claude_credentials(options);
}
if result.openai.is_none() {
result.openai = extract_codex_credentials(options);
}
let opencode_credentials = extract_opencode_credentials(options);
if result.anthropic.is_none() {
result.anthropic = opencode_credentials.anthropic.clone();
}
if result.openai.is_none() {
result.openai = opencode_credentials.openai.clone();
}
for (key, value) in opencode_credentials.other {
result.other.entry(key).or_insert(value);
}
result
}
/// Convenience wrapper: resolves credentials from all sources and returns
/// just the Anthropic key/token, if one was found.
pub fn get_anthropic_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    credentials.anthropic.map(|cred| cred.api_key)
}
/// Convenience wrapper: resolves credentials from all sources and returns
/// just the OpenAI key/token, if one was found.
pub fn get_openai_api_key(options: &CredentialExtractionOptions) -> Option<String> {
    let credentials = extract_all_credentials(options);
    credentials.openai.map(|cred| cred.api_key)
}
/// Exports the resolved Anthropic/OpenAI keys into this process's
/// environment under the standard variable names, so child tooling can
/// pick them up. Slots that are `None` are left untouched.
pub fn set_credentials_as_env_vars(credentials: &ExtractedCredentials) {
    let exports = [
        ("ANTHROPIC_API_KEY", credentials.anthropic.as_ref()),
        ("OPENAI_API_KEY", credentials.openai.as_ref()),
    ];
    for (name, cred) in exports {
        if let Some(cred) = cred {
            std::env::set_var(name, &cred.api_key);
        }
    }
}
/// Reads `path` and parses it as JSON; `None` on any read or parse failure.
fn read_json_file(path: &Path) -> Option<Value> {
    fs::read_to_string(path)
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
}
/// Walks `path` key-by-key into nested JSON objects and returns the string
/// at the leaf; `None` if any key is missing or the leaf is not a string.
fn read_string_field(value: &Value, path: &[&str]) -> Option<String> {
    path.iter()
        .try_fold(value, |node, key| node.get(*key))?
        .as_str()
        .map(|s| s.to_owned())
}
/// The platform home directory, falling back to the current directory
/// when it cannot be determined.
fn default_home_dir() -> PathBuf {
    match dirs::home_dir() {
        Some(home) => home,
        None => PathBuf::from("."),
    }
}
/// Current wall-clock time as milliseconds since the Unix epoch.
fn current_epoch_millis() -> i64 {
    // Truncating the nanosecond timestamp yields whole milliseconds,
    // equivalent to seconds*1000 + sub-second millis for current times.
    (OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000_000) as i64
}
/// True when `value` parses as an RFC 3339 timestamp lying in the past.
/// Unparsable input is treated as not expired (deliberate best effort).
fn is_expired_rfc3339(value: &str) -> bool {
    OffsetDateTime::parse(value, &time::format_description::well_known::Rfc3339)
        .map_or(false, |expiry| expiry < OffsetDateTime::now_utc())
}
pub use sandbox_daemon_agent_credentials::*;

View file

@ -1,5 +1,4 @@
//! Sandbox daemon core utilities.
pub mod agents;
pub mod credentials;
pub mod router;

View file

@ -1,7 +1,11 @@
use std::collections::HashMap;
use sandbox_daemon_core::agents::{AgentId, AgentManager, InstallOptions, SpawnOptions};
use sandbox_daemon_core::credentials::{extract_all_credentials, CredentialExtractionOptions};
use sandbox_daemon_agent_management::agents::{
AgentError, AgentId, AgentManager, InstallOptions, SpawnOptions,
};
use sandbox_daemon_agent_management::credentials::{
extract_all_credentials, CredentialExtractionOptions,
};
fn build_env() -> HashMap<String, String> {
let options = CredentialExtractionOptions::new();
@ -21,6 +25,10 @@ fn amp_configured() -> bool {
home.join(".amp").join("config.json").exists()
}
fn prompt_ok(label: &str) -> String {
format!("Respond with exactly the text {label} and nothing else.")
}
#[test]
fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>> {
let temp_dir = tempfile::tempdir()?;
@ -32,11 +40,19 @@ fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>>
for agent in agents {
let install = manager.install(agent, InstallOptions::default())?;
assert!(install.path.exists(), "expected install for {agent}");
assert!(manager.is_installed(agent), "expected is_installed for {agent}");
manager.install(
agent,
InstallOptions {
reinstall: true,
version: None,
},
)?;
let version = manager.version(agent)?;
assert!(version.is_some(), "expected version for {agent}");
if agent != AgentId::Amp || amp_configured() {
let mut spawn = SpawnOptions::new("Respond with exactly the text OK and nothing else.");
let mut spawn = SpawnOptions::new(prompt_ok("OK"));
spawn.env = env.clone();
let result = manager.spawn(agent, spawn)?;
assert!(
@ -44,8 +60,53 @@ fn test_agents_install_version_spawn() -> Result<(), Box<dyn std::error::Error>>
"spawn failed for {agent}: {}",
result.stderr
);
let output = format!("{}{}", result.stdout, result.stderr);
assert!(
!result.events.is_empty(),
"expected events for {agent} but got none"
);
assert!(
result.session_id.is_some(),
"expected session id for {agent}"
);
let combined = format!("{}{}", result.stdout, result.stderr);
let output = result.result.clone().unwrap_or(combined);
assert!(output.contains("OK"), "expected OK for {agent}, got: {output}");
if agent == AgentId::Claude || agent == AgentId::Opencode || (agent == AgentId::Amp && amp_configured()) {
let mut resume = SpawnOptions::new(prompt_ok("OK2"));
resume.env = env.clone();
resume.session_id = result.session_id.clone();
let resumed = manager.spawn(agent, resume)?;
assert!(
resumed.status.success(),
"resume spawn failed for {agent}: {}",
resumed.stderr
);
let combined = format!("{}{}", resumed.stdout, resumed.stderr);
let output = resumed.result.clone().unwrap_or(combined);
assert!(output.contains("OK2"), "expected OK2 for {agent}, got: {output}");
} else if agent == AgentId::Codex {
let mut resume = SpawnOptions::new(prompt_ok("OK2"));
resume.env = env.clone();
resume.session_id = result.session_id.clone();
let err = manager.spawn(agent, resume).expect_err("expected resume error for codex");
assert!(matches!(err, AgentError::ResumeUnsupported { .. }));
}
if agent == AgentId::Claude || agent == AgentId::Codex {
let mut plan = SpawnOptions::new(prompt_ok("OK3"));
plan.env = env.clone();
plan.permission_mode = Some("plan".to_string());
let planned = manager.spawn(agent, plan)?;
assert!(
planned.status.success(),
"plan spawn failed for {agent}: {}",
planned.stderr
);
let combined = format!("{}{}", planned.stdout, planned.stderr);
let output = planned.result.clone().unwrap_or(combined);
assert!(output.contains("OK3"), "expected OK3 for {agent}, got: {output}");
}
}
}

View file

@ -7,4 +7,6 @@ edition = "2021"
sandbox-daemon-agent-schema = { path = "../agent-schema" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
schemars = "0.8"
thiserror = "1.0"
utoipa = { version = "4.2", features = ["axum_extras"] }

File diff suppressed because it is too large Load diff