diff --git a/frontend/packages/inspector/src/App.tsx b/frontend/packages/inspector/src/App.tsx
index cd4e118..52264f6 100644
--- a/frontend/packages/inspector/src/App.tsx
+++ b/frontend/packages/inspector/src/App.tsx
@@ -977,6 +977,8 @@ export default function App() {
onInstallAgent={installAgent}
agentsLoading={agentsLoading}
agentsError={agentsError}
+ baseUrl={endpoint}
+ token={token}
/>
diff --git a/frontend/packages/inspector/src/components/debug/DebugPanel.tsx b/frontend/packages/inspector/src/components/debug/DebugPanel.tsx
index 9c5de66..8af6e69 100644
--- a/frontend/packages/inspector/src/components/debug/DebugPanel.tsx
+++ b/frontend/packages/inspector/src/components/debug/DebugPanel.tsx
@@ -1,11 +1,12 @@
-import { Cloud, PlayCircle, Terminal } from "lucide-react";
+import { Cloud, PlayCircle, Terminal, Cpu } from "lucide-react";
import type { AgentInfo, AgentModeInfo, UniversalEvent } from "sandbox-agent";
import AgentsTab from "./AgentsTab";
import EventsTab from "./EventsTab";
+import ProcessesTab from "./ProcessesTab";
import RequestLogTab from "./RequestLogTab";
import type { RequestLog } from "../../types/requestLog";
-export type DebugTab = "log" | "events" | "agents";
+export type DebugTab = "log" | "events" | "agents" | "processes";
const DebugPanel = ({
debugTab,
@@ -24,7 +25,9 @@ const DebugPanel = ({
onRefreshAgents,
onInstallAgent,
agentsLoading,
- agentsError
+ agentsError,
+ baseUrl,
+ token
}: {
debugTab: DebugTab;
onDebugTabChange: (tab: DebugTab) => void;
@@ -43,6 +46,8 @@ const DebugPanel = ({
onInstallAgent: (agentId: string, reinstall: boolean) => void;
agentsLoading: boolean;
agentsError: string | null;
+ baseUrl: string;
+ token?: string;
}) => {
return (
@@ -60,6 +65,10 @@ const DebugPanel = ({
Agents
+
@@ -92,6 +101,13 @@ const DebugPanel = ({
error={agentsError}
/>
)}
+
+ {debugTab === "processes" && (
+
+ )}
);
diff --git a/frontend/packages/inspector/src/components/debug/ProcessesTab.tsx b/frontend/packages/inspector/src/components/debug/ProcessesTab.tsx
new file mode 100644
index 0000000..3544491
--- /dev/null
+++ b/frontend/packages/inspector/src/components/debug/ProcessesTab.tsx
@@ -0,0 +1,388 @@
+import { useCallback, useEffect, useRef, useState } from "react";
+import { Play, Square, Skull, Trash2, RefreshCw, ChevronDown, ChevronRight, Terminal } from "lucide-react";
+
+export interface ProcessInfo {
+ id: string;
+ command: string;
+ args: string[];
+ status: "starting" | "running" | "stopped" | "killed";
+ exitCode?: number | null;
+ logPaths: {
+ stdout: string;
+ stderr: string;
+ combined: string;
+ };
+ startedAt: number;
+ stoppedAt?: number | null;
+ cwd?: string | null;
+}
+
+export interface ProcessListResponse {
+ processes: ProcessInfo[];
+}
+
+export interface LogsResponse {
+ content: string;
+ lines: number;
+}
+
+interface ProcessesTabProps {
+ baseUrl: string;
+ token?: string;
+}
+
+const formatTimestamp = (ts: number) => {
+ return new Date(ts * 1000).toLocaleString();
+};
+
+const formatDuration = (startedAt: number, stoppedAt?: number | null) => {
+ const end = stoppedAt ?? Math.floor(Date.now() / 1000);
+ const duration = end - startedAt;
+ if (duration < 60) return `${duration}s`;
+ if (duration < 3600) return `${Math.floor(duration / 60)}m ${duration % 60}s`;
+ return `${Math.floor(duration / 3600)}h ${Math.floor((duration % 3600) / 60)}m`;
+};
+
+const StatusBadge = ({ status, exitCode }: { status: string; exitCode?: number | null }) => {
+  const colors: Record<string, string> = {
+ starting: "var(--color-warning)",
+ running: "var(--color-success)",
+ stopped: exitCode === 0 ? "var(--color-muted)" : "var(--color-error)",
+ killed: "var(--color-error)"
+ };
+
+ return (
+
+ {status}
+ {status === "stopped" && exitCode !== undefined && exitCode !== null && ` (${exitCode})`}
+
+ );
+};
+
+const ProcessesTab = ({ baseUrl, token }: ProcessesTabProps) => {
+  const [processes, setProcesses] = useState<ProcessInfo[]>([]);
+  const [loading, setLoading] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+  const [expandedId, setExpandedId] = useState<string | null>(null);
+  const [logs, setLogs] = useState<Record<string, string>>({});
+  const [logsLoading, setLogsLoading] = useState<Record<string, boolean>>({});
+  const [stripTimestamps, setStripTimestamps] = useState(false);
+  const [logStream, setLogStream] = useState<"combined" | "stdout" | "stderr">("combined");
+  const refreshTimerRef = useRef<number | null>(null);
+
+ const fetchWithAuth = useCallback(async (url: string, options: RequestInit = {}) => {
+    const headers: Record<string, string> = {
+      "Content-Type": "application/json",
+      ...(options.headers as Record<string, string> || {})
+ };
+ if (token) {
+ headers["Authorization"] = `Bearer ${token}`;
+ }
+ return fetch(url, { ...options, headers });
+ }, [token]);
+
+ const fetchProcesses = useCallback(async () => {
+ setLoading(true);
+ setError(null);
+ try {
+ const response = await fetchWithAuth(`${baseUrl}/v1/process`);
+ if (!response.ok) {
+ throw new Error(`Failed to fetch processes: ${response.status}`);
+ }
+ const data: ProcessListResponse = await response.json();
+ setProcesses(data.processes);
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Failed to fetch processes");
+ } finally {
+ setLoading(false);
+ }
+ }, [baseUrl, fetchWithAuth]);
+
+ const fetchLogs = useCallback(async (id: string) => {
+ setLogsLoading(prev => ({ ...prev, [id]: true }));
+ try {
+ const params = new URLSearchParams({
+ stream: logStream,
+ tail: "100"
+ });
+ if (stripTimestamps) {
+ params.set("strip_timestamps", "true");
+ }
+ const response = await fetchWithAuth(`${baseUrl}/v1/process/${id}/logs?${params}`);
+ if (!response.ok) {
+ throw new Error(`Failed to fetch logs: ${response.status}`);
+ }
+ const data: LogsResponse = await response.json();
+ setLogs(prev => ({ ...prev, [id]: data.content }));
+ } catch (err) {
+ setLogs(prev => ({ ...prev, [id]: `Error: ${err instanceof Error ? err.message : "Failed to fetch logs"}` }));
+ } finally {
+ setLogsLoading(prev => ({ ...prev, [id]: false }));
+ }
+ }, [baseUrl, fetchWithAuth, logStream, stripTimestamps]);
+
+ const stopProcess = useCallback(async (id: string) => {
+ try {
+ const response = await fetchWithAuth(`${baseUrl}/v1/process/${id}/stop`, { method: "POST" });
+ if (!response.ok) {
+ throw new Error(`Failed to stop process: ${response.status}`);
+ }
+ await fetchProcesses();
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Failed to stop process");
+ }
+ }, [baseUrl, fetchWithAuth, fetchProcesses]);
+
+ const killProcess = useCallback(async (id: string) => {
+ try {
+ const response = await fetchWithAuth(`${baseUrl}/v1/process/${id}/kill`, { method: "POST" });
+ if (!response.ok) {
+ throw new Error(`Failed to kill process: ${response.status}`);
+ }
+ await fetchProcesses();
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Failed to kill process");
+ }
+ }, [baseUrl, fetchWithAuth, fetchProcesses]);
+
+ const deleteProcess = useCallback(async (id: string) => {
+ try {
+ const response = await fetchWithAuth(`${baseUrl}/v1/process/${id}`, { method: "DELETE" });
+ if (!response.ok) {
+ throw new Error(`Failed to delete process: ${response.status}`);
+ }
+ if (expandedId === id) {
+ setExpandedId(null);
+ }
+ await fetchProcesses();
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Failed to delete process");
+ }
+ }, [baseUrl, fetchWithAuth, fetchProcesses, expandedId]);
+
+ const toggleExpand = useCallback((id: string) => {
+ if (expandedId === id) {
+ setExpandedId(null);
+ } else {
+ setExpandedId(id);
+ fetchLogs(id);
+ }
+ }, [expandedId, fetchLogs]);
+
+ // Initial fetch and auto-refresh
+ useEffect(() => {
+ fetchProcesses();
+
+ // Auto-refresh every 5 seconds
+ refreshTimerRef.current = window.setInterval(fetchProcesses, 5000);
+
+ return () => {
+ if (refreshTimerRef.current) {
+ window.clearInterval(refreshTimerRef.current);
+ }
+ };
+ }, [fetchProcesses]);
+
+ // Refresh logs when options change
+ useEffect(() => {
+ if (expandedId) {
+ fetchLogs(expandedId);
+ }
+ }, [stripTimestamps, logStream]);
+
+ const runningCount = processes.filter(p => p.status === "running").length;
+
+ return (
+
+
+
+
Processes
+ {runningCount > 0 && (
+
+ {runningCount} running
+
+ )}
+
+
+
+
+ {error && (
+
+ {error}
+
+ )}
+
+ {processes.length === 0 && !loading && (
+
+
+
No processes found
+
Start a process using the API
+
+ )}
+
+
+ {processes.map(process => (
+
+
toggleExpand(process.id)}
+ >
+ {expandedId === process.id ? (
+
+ ) : (
+
+ )}
+
+
+
+
+ {process.command} {process.args.join(" ")}
+
+
+
+ ID: {process.id} • Started: {formatTimestamp(process.startedAt)} • Duration: {formatDuration(process.startedAt, process.stoppedAt)}
+
+
+
+
+
+
e.stopPropagation()}>
+ {(process.status === "running" || process.status === "starting") && (
+ <>
+
+
+ >
+ )}
+ {(process.status === "stopped" || process.status === "killed") && (
+
+ )}
+
+
+
+ {expandedId === process.id && (
+
+
+
+
+
+
+
+ {logsLoading[process.id] ? "Loading..." : (logs[process.id] || "(no logs)")}
+
+
+ )}
+
+ ))}
+
+
+
+
+ );
+};
+
+export default ProcessesTab;
diff --git a/server/packages/sandbox-agent/src/process_manager.rs b/server/packages/sandbox-agent/src/process_manager.rs
index 590ce67..63a5aca 100644
--- a/server/packages/sandbox-agent/src/process_manager.rs
+++ b/server/packages/sandbox-agent/src/process_manager.rs
@@ -11,6 +11,8 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
use tokio::io::{AsyncBufReadExt, BufReader as TokioBufReader};
use tokio::process::{Child, Command};
use tokio::sync::{broadcast, Mutex, RwLock};
@@ -103,6 +105,9 @@ pub struct LogsQuery {
/// Which log stream to read: "stdout", "stderr", or "combined" (default)
#[serde(skip_serializing_if = "Option::is_none")]
     pub stream: Option<String>,
+ /// Strip timestamp prefixes from log lines
+ #[serde(default)]
+ pub strip_timestamps: bool,
}
/// Response with log content
@@ -356,13 +361,14 @@ impl ProcessManager {
};
while let Ok(Some(line)) = lines.next_line().await {
- let log_line = format!("[stdout] {}\n", line);
- let _ = file.write_all(line.as_bytes());
- let _ = file.write_all(b"\n");
+ let timestamp = format_timestamp();
+ let timestamped_line = format!("[{}] {}\n", timestamp, line);
+ let combined_line = format!("[{}] [stdout] {}\n", timestamp, line);
+ let _ = file.write_all(timestamped_line.as_bytes());
if let Ok(mut combined) = combined.lock() {
- let _ = combined.write_all(log_line.as_bytes());
+ let _ = combined.write_all(combined_line.as_bytes());
}
- let _ = log_tx.send(log_line);
+ let _ = log_tx.send(combined_line);
}
});
}
@@ -380,13 +386,14 @@ impl ProcessManager {
};
while let Ok(Some(line)) = lines.next_line().await {
- let log_line = format!("[stderr] {}\n", line);
- let _ = file.write_all(line.as_bytes());
- let _ = file.write_all(b"\n");
+ let timestamp = format_timestamp();
+ let timestamped_line = format!("[{}] {}\n", timestamp, line);
+ let combined_line = format!("[{}] [stderr] {}\n", timestamp, line);
+ let _ = file.write_all(timestamped_line.as_bytes());
if let Ok(mut combined) = combined.lock() {
- let _ = combined.write_all(log_line.as_bytes());
+ let _ = combined.write_all(combined_line.as_bytes());
}
- let _ = log_tx.send(log_line);
+ let _ = log_tx.send(combined_line);
}
});
}
@@ -588,7 +595,7 @@ impl ProcessManager {
let content = fs::read_to_string(log_path).unwrap_or_default();
let lines: Vec<&str> = content.lines().collect();
- let (content, line_count) = if let Some(tail) = query.tail {
+ let (mut content, line_count) = if let Some(tail) = query.tail {
let start = lines.len().saturating_sub(tail);
let tail_lines: Vec<&str> = lines[start..].to_vec();
(tail_lines.join("\n"), tail_lines.len())
@@ -596,6 +603,11 @@ impl ProcessManager {
(content.clone(), lines.len())
};
+ // Strip timestamps if requested
+ if query.strip_timestamps {
+ content = strip_timestamps(&content);
+ }
+
Ok(LogsResponse {
content,
lines: line_count,
@@ -628,6 +640,35 @@ fn process_data_dir() -> PathBuf {
.join("processes")
}
+/// Format the current time as an ISO 8601 timestamp
+fn format_timestamp() -> String {
+ OffsetDateTime::now_utc()
+ .format(&Rfc3339)
+ .unwrap_or_else(|_| "unknown".to_string())
+}
+
+/// Strip timestamp prefixes from log lines
+/// Timestamps are in format: [2026-01-30T12:32:45.123Z] or [2026-01-30T12:32:45Z]
+fn strip_timestamps(content: &str) -> String {
+ content
+ .lines()
+ .map(|line| {
+ // Match pattern: [YYYY-MM-DDTHH:MM:SS...Z] at start of line
+ if line.starts_with('[') {
+ if let Some(end) = line.find("] ") {
+ // Check if it looks like a timestamp (starts with digit after [)
+ let potential_ts = &line[1..end];
+ if potential_ts.len() >= 19 && potential_ts.chars().next().map(|c| c.is_ascii_digit()).unwrap_or(false) {
+ return &line[end + 2..];
+ }
+ }
+ }
+ line
+ })
+        .collect::<Vec<_>>()
+ .join("\n")
+}
+
/// Helper to save state from within a spawned task (simplified version)
async fn save_state_to_file(base_dir: &PathBuf) -> Result<(), std::io::Error> {
// This is a no-op for now - the state will be saved on the next explicit save_state call
diff --git a/server/packages/sandbox-agent/src/router.rs b/server/packages/sandbox-agent/src/router.rs
index 8e25074..8755a9b 100644
--- a/server/packages/sandbox-agent/src/router.rs
+++ b/server/packages/sandbox-agent/src/router.rs
@@ -4040,7 +4040,8 @@ async fn delete_process(
("id" = String, Path, description = "Process ID"),
("tail" = Option, Query, description = "Number of lines from end"),
("follow" = Option, Query, description = "Stream logs via SSE"),
- ("stream" = Option, Query, description = "Log stream: stdout, stderr, or combined")
+        ("stream" = Option<String>, Query, description = "Log stream: stdout, stderr, or combined"),
+        ("strip_timestamps" = Option<bool>, Query, description = "Strip timestamp prefixes from log lines")
),
responses(
(status = 200, body = LogsResponse, description = "Log content"),
@@ -4059,6 +4060,7 @@ async fn get_process_logs(
tail: query.tail,
follow: false,
stream: query.stream.clone(),
+ strip_timestamps: query.strip_timestamps,
}).await?;
let receiver = state.process_manager.subscribe_logs(&id).await?;