Integrate OpenHandoff factory workspace (#212)

This commit is contained in:
Nathan Flurry 2026-03-09 14:00:20 -07:00 committed by GitHub
parent 3d9476ed0b
commit bf282199b5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
251 changed files with 42824 additions and 692 deletions

View file

@@ -0,0 +1,45 @@
import { describe, expect, it } from "vitest";
import {
deriveFallbackTitle,
resolveCreateFlowDecision,
sanitizeBranchName
} from "../src/services/create-flow.js";
// Covers the create-flow helpers: fallback title derivation, branch-name
// sanitization, and branch-conflict resolution.
describe("create flow decision", () => {
  it("derives a conventional-style fallback title from task text", () => {
    expect(deriveFallbackTitle("Fix OAuth callback bug in handler")).toBe(
      "fix: Fix OAuth callback bug in handler"
    );
  });

  it("preserves an explicit conventional prefix without duplicating it", () => {
    const explicit = "feat: Browser UI Flow";
    expect(deriveFallbackTitle("Reply with exactly: READY", explicit)).toBe(explicit);
  });

  it("sanitizes generated branch names", () => {
    const cases: Array<[string, string]> = [
      ["feat: Add @mentions & #hashtags", "feat-add-mentions-hashtags"],
      [" spaces everywhere ", "spaces-everywhere"],
    ];
    for (const [input, expected] of cases) {
      expect(sanitizeBranchName(input)).toBe(expected);
    }
  });

  it("auto-increments generated branch names for conflicts", () => {
    // Both the -1 (implicit) and -2 variants are taken, so -3 is chosen.
    const resolved = resolveCreateFlowDecision({
      task: "Add auth",
      localBranches: ["feat-add-auth"],
      handoffBranches: ["feat-add-auth-2"]
    });
    expect(resolved.branchName).toBe("feat-add-auth-3");
    expect(resolved.title).toBe("feat: Add auth");
  });

  it("fails when explicit branch already exists", () => {
    const attempt = () =>
      resolveCreateFlowDecision({
        task: "new task",
        explicitBranchName: "existing-branch",
        localBranches: ["existing-branch"],
        handoffBranches: []
      });
    expect(attempt).toThrow("already exists");
  });
});

View file

@@ -0,0 +1,182 @@
import { describe, expect, it } from "vitest";
import type { DaytonaClientLike, DaytonaDriver } from "../src/driver.js";
import type { DaytonaCreateSandboxOptions } from "../src/integrations/daytona/client.js";
import { DaytonaProvider } from "../src/providers/daytona/index.js";
/**
 * DaytonaClientLike stub that records every createSandbox call and every
 * executed command so tests can assert on the provider's outgoing traffic.
 */
class RecordingDaytonaClient implements DaytonaClientLike {
  createSandboxCalls: DaytonaCreateSandboxOptions[] = [];
  executedCommands: string[] = [];

  // Canned sandbox payload shared by createSandbox and getSandbox.
  private sandboxInfo(id: string) {
    return {
      id,
      state: "started",
      snapshot: "snapshot-openhandoff",
      labels: {},
    };
  }

  async createSandbox(options: DaytonaCreateSandboxOptions) {
    this.createSandboxCalls.push(options);
    return this.sandboxInfo("sandbox-1");
  }

  async getSandbox(sandboxId: string) {
    return this.sandboxInfo(sandboxId);
  }

  async startSandbox(_sandboxId: string, _timeoutSeconds?: number) {}
  async stopSandbox(_sandboxId: string, _timeoutSeconds?: number) {}
  async deleteSandbox(_sandboxId: string) {}

  async executeCommand(_sandboxId: string, command: string) {
    this.executedCommands.push(command);
    return { exitCode: 0, result: "" };
  }

  async getPreviewEndpoint(sandboxId: string, port: number) {
    return {
      url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
      token: "preview-token",
    };
  }
}
/** Builds a DaytonaProvider wired to the given fake client via a stub driver. */
function createProviderWithClient(client: DaytonaClientLike): DaytonaProvider {
  const driver: DaytonaDriver = { createClient: () => client };
  const config = { apiKey: "test-key", image: "ubuntu:24.04" };
  return new DaytonaProvider(config, driver);
}
// Exercises DaytonaProvider against the recording fake client: image-recipe
// contents, sandbox-agent bootstrap, request timeouts, and backend-managed
// command execution.
describe("daytona provider snapshot image behavior", () => {
  it("creates sandboxes using a snapshot-capable image recipe", async () => {
    const client = new RecordingDaytonaClient();
    const provider = createProviderWithClient(client);
    const handle = await provider.createSandbox({
      workspaceId: "default",
      repoId: "repo-1",
      repoRemote: "https://github.com/acme/repo.git",
      branchName: "feature/test",
      handoffId: "handoff-1",
    });
    // Exactly one sandbox-creation request should reach the client.
    expect(client.createSandboxCalls).toHaveLength(1);
    const createCall = client.createSandboxCalls[0];
    if (!createCall) {
      throw new Error("expected create sandbox call");
    }
    // The image must be a dockerfile-based recipe object, not a raw tag string.
    expect(typeof createCall.image).not.toBe("string");
    if (typeof createCall.image === "string") {
      throw new Error("expected daytona image recipe object");
    }
    const dockerfile = createCall.image.dockerfile;
    expect(dockerfile).toContain("apt-get install -y curl ca-certificates git openssh-client nodejs npm");
    expect(dockerfile).toContain("sandbox-agent/0.3.0/install.sh");
    // The recipe pre-installs at least two agents via `sandbox-agent install-agent`.
    const installAgentLines = dockerfile.match(/sandbox-agent install-agent [a-z0-9-]+/gi) ?? [];
    expect(installAgentLines.length).toBeGreaterThanOrEqual(2);
    const commands = client.executedCommands.join("\n");
    // Git inside the sandbox must be non-interactive (no credential prompts).
    expect(commands).toContain("GIT_TERMINAL_PROMPT=0");
    expect(commands).toContain("GIT_ASKPASS=/bin/echo");
    // Handle metadata reflects the fake client's sandbox and the configured image.
    expect(handle.metadata.snapshot).toBe("snapshot-openhandoff");
    expect(handle.metadata.image).toBe("ubuntu:24.04");
    expect(handle.metadata.cwd).toBe("/home/daytona/openhandoff/default/repo-1/handoff-1/repo");
    expect(client.executedCommands.length).toBeGreaterThan(0);
  });
  it("starts sandbox-agent with ACP timeout env override", async () => {
    // Save and restore the env var so the override cannot leak across tests.
    const previous = process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS;
    process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = "240000";
    try {
      const client = new RecordingDaytonaClient();
      const provider = createProviderWithClient(client);
      await provider.ensureSandboxAgent({
        workspaceId: "default",
        sandboxId: "sandbox-1",
      });
      // Agent should be launched detached with the timeout override applied.
      const startCommand = client.executedCommands.find((command) =>
        command.includes("nohup env SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=240000 sandbox-agent server")
      );
      const joined = client.executedCommands.join("\n");
      expect(joined).toContain("sandbox-agent/0.3.0/install.sh");
      expect(joined).toContain("SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=240000");
      expect(joined).toContain("apt-get install -y nodejs npm");
      expect(joined).toContain("sandbox-agent server --no-token --host 0.0.0.0 --port 2468");
      expect(startCommand).toBeTruthy();
    } finally {
      if (previous === undefined) {
        delete process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS;
      } else {
        process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = previous;
      }
    }
  });
  it("fails with explicit timeout when daytona createSandbox hangs", async () => {
    const previous = process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS;
    process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = "120";
    // createSandbox returns a promise that never settles, forcing the
    // provider's own request timeout (120ms here) to fire.
    const hangingClient: DaytonaClientLike = {
      createSandbox: async () => await new Promise(() => {}),
      getSandbox: async (sandboxId) => ({ id: sandboxId, state: "started" }),
      startSandbox: async () => {},
      stopSandbox: async () => {},
      deleteSandbox: async () => {},
      executeCommand: async () => ({ exitCode: 0, result: "" }),
      getPreviewEndpoint: async (sandboxId, port) => ({
        url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
        token: "preview-token",
      }),
    };
    try {
      const provider = createProviderWithClient(hangingClient);
      await expect(provider.createSandbox({
        workspaceId: "default",
        repoId: "repo-1",
        repoRemote: "https://github.com/acme/repo.git",
        branchName: "feature/test",
        handoffId: "handoff-timeout",
      })).rejects.toThrow("daytona create sandbox timed out after 120ms");
    } finally {
      if (previous === undefined) {
        delete process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS;
      } else {
        process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = previous;
      }
    }
  });
  it("executes backend-managed sandbox commands through provider API", async () => {
    const client = new RecordingDaytonaClient();
    const provider = createProviderWithClient(client);
    const result = await provider.executeCommand({
      workspaceId: "default",
      sandboxId: "sandbox-1",
      command: "echo backend-push",
      label: "manual push"
    });
    // The command is forwarded verbatim to the underlying client.
    expect(result.exitCode).toBe(0);
    expect(client.executedCommands).toContain("echo backend-push");
  });
});

View file

@@ -0,0 +1,136 @@
import { chmodSync, mkdtempSync, writeFileSync, readFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, expect, it } from "vitest";
import {
gitSpiceAvailable,
gitSpiceListStack,
gitSpiceRestackSubtree
} from "../src/integrations/git-spice/index.js";
/** Creates a fresh, uniquely-named temp directory under the OS tmp root. */
function makeTempDir(prefix: string): string {
  const root = tmpdir();
  return mkdtempSync(join(root, prefix));
}
/** Writes `body` to `path` and marks the file executable (rwxr-xr-x). */
function writeScript(path: string, body: string): void {
  const EXECUTABLE = 0o755;
  writeFileSync(path, body, "utf8");
  chmodSync(path, EXECUTABLE);
}
/**
 * Runs `fn` with process.env temporarily patched by `updates`, then restores
 * the prior values — deleting keys that were previously unset — even when
 * `fn` throws.
 */
async function withEnv<T>(
  updates: Record<string, string | undefined>,
  fn: () => Promise<T>
): Promise<T> {
  const snapshot: Array<[string, string | undefined]> = Object.keys(updates).map(
    (key) => [key, process.env[key]]
  );
  const apply = (key: string, value: string | undefined): void => {
    if (value == null) {
      delete process.env[key];
    } else {
      process.env[key] = value;
    }
  };
  for (const [key, value] of Object.entries(updates)) {
    apply(key, value);
  }
  try {
    return await fn();
  } finally {
    for (const [key, value] of snapshot) {
      apply(key, value);
    }
  }
}
// Drives the git-spice wrapper against fake `gs` shell scripts so parsing,
// command fallback, and availability detection can be tested hermetically.
describe("git-spice integration", () => {
  it("parses stack rows from mixed/malformed json output", async () => {
    const repoPath = makeTempDir("hf-git-spice-parse-");
    const scriptPath = join(repoPath, "fake-git-spice.sh");
    // Fake binary: answers the --help availability probe; for `log` it emits
    // noise, valid rows in two schema variants (branch/parent and
    // name/parentBranch), broken JSON, and a duplicate of feature/a.
    writeScript(
      scriptPath,
      [
        "#!/bin/sh",
        'if [ \"$1\" = \"--help\" ]; then',
        " exit 0",
        "fi",
        'if [ \"$1\" = \"log\" ]; then',
        " echo 'noise line'",
        " echo '{\"branch\":\"feature/a\",\"parent\":\"main\"}'",
        " echo '{bad json'",
        " echo '{\"name\":\"feature/b\",\"parentBranch\":\"feature/a\"}'",
        " echo '{\"name\":\"feature/a\",\"parent\":\"main\"}'",
        " exit 0",
        "fi",
        "exit 1"
      ].join("\n")
    );
    // Noise and broken JSON are skipped, both schema variants are accepted,
    // and the duplicate feature/a row is collapsed to one entry.
    await withEnv({ HF_GIT_SPICE_BIN: scriptPath }, async () => {
      const rows = await gitSpiceListStack(repoPath);
      expect(rows).toEqual([
        { branchName: "feature/a", parentBranch: "main" },
        { branchName: "feature/b", parentBranch: "feature/a" }
      ]);
    });
  });
  it("falls back across versioned subtree restack command variants", async () => {
    const repoPath = makeTempDir("hf-git-spice-fallback-");
    const scriptPath = join(repoPath, "fake-git-spice.sh");
    const logPath = join(repoPath, "calls.log");
    // Fake binary logs every invocation, rejects all `upstack restack`
    // variants, and only accepts `branch restack ... --no-prompt`.
    writeScript(
      scriptPath,
      [
        "#!/bin/sh",
        'echo \"$*\" >> \"$SPICE_LOG_PATH\"',
        'if [ \"$1\" = \"--help\" ]; then',
        " exit 0",
        "fi",
        'if [ \"$1\" = \"upstack\" ] && [ \"$2\" = \"restack\" ]; then',
        " exit 1",
        "fi",
        'if [ \"$1\" = \"branch\" ] && [ \"$2\" = \"restack\" ] && [ \"$5\" = \"--no-prompt\" ]; then',
        " exit 0",
        "fi",
        "exit 1"
      ].join("\n")
    );
    await withEnv(
      {
        HF_GIT_SPICE_BIN: scriptPath,
        SPICE_LOG_PATH: logPath
      },
      async () => {
        await gitSpiceRestackSubtree(repoPath, "feature/a");
      }
    );
    const lines = readFileSync(logPath, "utf8")
      .trim()
      .split("\n")
      .filter((line) => line.trim().length > 0);
    // Fallback cascade: both upstack variants are attempted, then the first
    // accepted branch-restack variant stops the cascade (so the bare
    // `branch restack` variant is never reached).
    expect(lines).toContain("upstack restack --branch feature/a --no-prompt");
    expect(lines).toContain("upstack restack --branch feature/a");
    expect(lines).toContain("branch restack --branch feature/a --no-prompt");
    expect(lines).not.toContain("branch restack --branch feature/a");
  });
  it("reports unavailable when explicit binary and PATH are missing", async () => {
    const repoPath = makeTempDir("hf-git-spice-missing-");
    // Point both the explicit binary override and PATH at nonexistent
    // locations so no git-spice executable can possibly be found.
    await withEnv(
      {
        HF_GIT_SPICE_BIN: "/non-existent/hf-git-spice-binary",
        PATH: "/non-existent/bin"
      },
      async () => {
        const available = await gitSpiceAvailable(repoPath);
        expect(available).toBe(false);
      }
    );
  });
});

View file

@@ -0,0 +1,40 @@
import { afterEach, beforeEach, describe, expect, test } from "vitest";
import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { execFile } from "node:child_process";
import { validateRemote } from "../src/integrations/git/index.js";
const execFileAsync = promisify(execFile);

// validateRemote should inspect the remote path argument, not the process
// cwd — even when the cwd is a git worktree whose gitdir pointer is dangling.
describe("validateRemote", () => {
  const originalCwd = process.cwd();
  beforeEach(() => {
    process.chdir(originalCwd);
  });
  afterEach(() => {
    // Restore cwd so the broken-worktree chdir does not leak into other suites.
    process.chdir(originalCwd);
  });
  test("ignores broken worktree gitdir in current directory", async () => {
    const sandboxDir = mkdtempSync(join(tmpdir(), "validate-remote-cwd-"));
    const brokenRepoDir = resolve(sandboxDir, "broken-worktree");
    const remoteRepoDir = resolve(sandboxDir, "remote");
    mkdirSync(brokenRepoDir, { recursive: true });
    // A ".git" file pointing at a missing gitdir simulates a broken worktree.
    writeFileSync(resolve(brokenRepoDir, ".git"), "gitdir: /definitely/missing/worktree\n", "utf8");
    // Build a real repo with one commit to act as the remote under test.
    await execFileAsync("git", ["init", remoteRepoDir]);
    await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.name", "OpenHandoff Test"]);
    await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.email", "test@example.com"]);
    writeFileSync(resolve(remoteRepoDir, "README.md"), "# test\n", "utf8");
    await execFileAsync("git", ["-C", remoteRepoDir, "add", "README.md"]);
    await execFileAsync("git", ["-C", remoteRepoDir, "commit", "-m", "init"]);
    // Run validation from inside the broken worktree; it must still succeed.
    process.chdir(brokenRepoDir);
    await expect(validateRemote(remoteRepoDir)).resolves.toBeUndefined();
  });
});

View file

@@ -0,0 +1,40 @@
import { tmpdir } from "node:os";
import { join } from "node:path";
import { ConfigSchema, type AppConfig } from "@openhandoff/shared";
import type { BackendDriver } from "../../src/driver.js";
import { initActorRuntimeContext } from "../../src/actors/context.js";
import { createProviderRegistry } from "../../src/providers/index.js";
/**
 * Parses a fully-populated AppConfig for tests. `overrides` is shallow-merged
 * before parsing, so overriding e.g. `backend` replaces that whole section.
 */
export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
  // Unique per-call db path so parallel tests never share a database file.
  const uniqueSuffix = `${Date.now()}-${Math.random().toString(16).slice(2)}`;
  const base = {
    auto_submit: true,
    notify: ["terminal" as const],
    workspace: { default: "default" },
    backend: {
      host: "127.0.0.1",
      port: 7741,
      dbPath: join(tmpdir(), `hf-test-${uniqueSuffix}.db`),
      opencode_poll_interval: 2,
      github_poll_interval: 30,
      backup_interval_secs: 3600,
      backup_retention_days: 7,
    },
    providers: {
      daytona: { image: "ubuntu:24.04" },
    },
  };
  return ConfigSchema.parse({ ...base, ...overrides });
}
/**
 * Builds a test config plus provider registry and installs them as the
 * actor runtime context. Returns the parsed config for assertions.
 */
export function createTestRuntimeContext(
  driver: BackendDriver,
  configOverrides?: Partial<AppConfig>
): { config: AppConfig } {
  const config = createTestConfig(configOverrides);
  initActorRuntimeContext(config, createProviderRegistry(config, driver), undefined, driver);
  return { config };
}

View file

@@ -0,0 +1,127 @@
import type {
BackendDriver,
DaytonaClientLike,
DaytonaDriver,
GitDriver,
GithubDriver,
StackDriver,
SandboxAgentDriver,
SandboxAgentClientLike,
TmuxDriver,
} from "../../src/driver.js";
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionRecord } from "sandbox-agent";
/** Assembles a full BackendDriver from per-subsystem test stubs. */
export function createTestDriver(overrides?: Partial<BackendDriver>): BackendDriver {
  const o = overrides ?? {};
  return {
    git: o.git ?? createTestGitDriver(),
    stack: o.stack ?? createTestStackDriver(),
    github: o.github ?? createTestGithubDriver(),
    sandboxAgent: o.sandboxAgent ?? createTestSandboxAgentDriver(),
    daytona: o.daytona ?? createTestDaytonaDriver(),
    tmux: o.tmux ?? createTestTmuxDriver(),
  };
}
/** GitDriver stub: every operation succeeds with neutral defaults. */
export function createTestGitDriver(overrides?: Partial<GitDriver>): GitDriver {
  const defaults: GitDriver = {
    validateRemote: async () => {},
    ensureCloned: async () => {},
    fetch: async () => {},
    listRemoteBranches: async () => [],
    remoteDefaultBaseRef: async () => "origin/main",
    revParse: async () => "abc1234567890",
    ensureRemoteBranch: async () => {},
    diffStatForBranch: async () => "+0/-0",
    conflictsWithMain: async () => false,
  };
  return { ...defaults, ...overrides };
}
/** StackDriver stub: reports git-spice unavailable and no-ops everything. */
export function createTestStackDriver(overrides?: Partial<StackDriver>): StackDriver {
  const defaults: StackDriver = {
    available: async () => false,
    listStack: async () => [],
    syncRepo: async () => {},
    restackRepo: async () => {},
    restackSubtree: async () => {},
    rebaseBranch: async () => {},
    reparentBranch: async () => {},
    trackBranch: async () => {},
  };
  return { ...defaults, ...overrides };
}
/** GithubDriver stub: no open PRs; createPr always yields PR #1. */
export function createTestGithubDriver(overrides?: Partial<GithubDriver>): GithubDriver {
  const defaults: GithubDriver = {
    listPullRequests: async () => [],
    createPr: async (_repoPath, _headBranch, _title) => ({
      number: 1,
      url: "https://github.com/test/repo/pull/1",
    }),
  };
  return { ...defaults, ...overrides };
}
/** SandboxAgentDriver stub whose clients are always the test client below. */
export function createTestSandboxAgentDriver(
  overrides?: Partial<SandboxAgentDriver>
): SandboxAgentDriver {
  const defaults: SandboxAgentDriver = {
    createClient: (_opts) => createTestSandboxAgentClient(),
  };
  return { ...defaults, ...overrides };
}
/** SandboxAgentClientLike stub: one running session, empty list endpoints. */
export function createTestSandboxAgentClient(
  overrides?: Partial<SandboxAgentClientLike>
): SandboxAgentClientLike {
  // Shared empty-page shape for both paginated list endpoints.
  const emptyPage = <T>(): ListPage<T> => ({ items: [], nextCursor: undefined });
  const defaults: SandboxAgentClientLike = {
    createSession: async (_prompt) => ({ id: "test-session-1", status: "running" }),
    sessionStatus: async (sessionId) => ({ id: sessionId, status: "running" }),
    listSessions: async (_request?: ListPageRequest): Promise<ListPage<SessionRecord>> =>
      emptyPage<SessionRecord>(),
    listEvents: async (_request: ListEventsRequest): Promise<ListPage<SessionEvent>> =>
      emptyPage<SessionEvent>(),
    sendPrompt: async (_request) => {},
    cancelSession: async (_sessionId) => {},
    destroySession: async (_sessionId) => {},
  };
  return { ...defaults, ...overrides };
}
/** DaytonaDriver stub whose clients are always the test client below. */
export function createTestDaytonaDriver(
  overrides?: Partial<DaytonaDriver>
): DaytonaDriver {
  const defaults: DaytonaDriver = {
    createClient: (_opts) => createTestDaytonaClient(),
  };
  return { ...defaults, ...overrides };
}
/** DaytonaClientLike stub: one started sandbox, successful no-op commands. */
export function createTestDaytonaClient(
  overrides?: Partial<DaytonaClientLike>
): DaytonaClientLike {
  const defaults: DaytonaClientLike = {
    createSandbox: async () => ({ id: "sandbox-test-1", state: "started" }),
    getSandbox: async (sandboxId) => ({ id: sandboxId, state: "started" }),
    startSandbox: async () => {},
    stopSandbox: async () => {},
    deleteSandbox: async () => {},
    executeCommand: async () => ({ exitCode: 0, result: "" }),
    getPreviewEndpoint: async (sandboxId, port) => ({
      url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
      token: "preview-token",
    }),
  };
  return { ...defaults, ...overrides };
}
/** TmuxDriver stub: setting a window status always reports success (0). */
export function createTestTmuxDriver(overrides?: Partial<TmuxDriver>): TmuxDriver {
  const defaults: TmuxDriver = {
    setWindowStatus: () => 0,
  };
  return { ...defaults, ...overrides };
}

View file

@@ -0,0 +1,31 @@
import { describe, expect, it } from "vitest";
import {
handoffKey,
handoffStatusSyncKey,
historyKey,
projectBranchSyncKey,
projectKey,
projectPrSyncKey,
sandboxInstanceKey,
workspaceKey
} from "../src/actors/keys.js";
// Every actor key must be namespaced as ["ws", <workspaceId>, ...] so actors
// from different workspaces can never collide.
describe("actor keys", () => {
  it("prefixes every key with workspace namespace", () => {
    const keys = [
      workspaceKey("default"),
      projectKey("default", "repo"),
      handoffKey("default", "repo", "handoff"),
      sandboxInstanceKey("default", "daytona", "sbx"),
      historyKey("default", "repo"),
      projectPrSyncKey("default", "repo"),
      projectBranchSyncKey("default", "repo"),
      handoffStatusSyncKey("default", "repo", "handoff", "sandbox-1", "session-1"),
    ];
    for (const key of keys) {
      const [namespace, workspaceId] = key;
      expect(namespace).toBe("ws");
      expect(workspaceId).toBe("default");
    }
  });
});

View file

@@ -0,0 +1,83 @@
import { describe, expect, it } from "vitest";
// Tests for the safeFetch wrapper pattern from backend/src/index.ts, which
// converts URIError (malformed percent-encoding in a request path) into a
// 400 response instead of crashing the server.
describe("malformed URI handling", () => {
  // The wrapper under test, previously duplicated in each case: delegate to
  // the app's fetch and map URIError to a 400 response; rethrow anything else.
  const wrapSafeFetch =
    (app: { fetch: (req: Request) => Promise<Response> }) =>
    async (req: Request): Promise<Response> => {
      try {
        return await app.fetch(req);
      } catch (err) {
        if (err instanceof URIError) {
          return new Response("Bad Request: Malformed URI", { status: 400 });
        }
        throw err;
      }
    };

  it("safeFetch wrapper returns 400 on URIError", async () => {
    // Simulate what happens when rivetkit's router encounters a malformed URI.
    const safeFetch = wrapSafeFetch({
      fetch: async () => {
        throw new URIError("URI malformed");
      },
    });
    const response = await safeFetch(new Request("http://localhost/%ZZ"));
    expect(response.status).toBe(400);
    expect(await response.text()).toBe("Bad Request: Malformed URI");
  });

  it("safeFetch wrapper re-throws non-URI errors", async () => {
    const safeFetch = wrapSafeFetch({
      fetch: async () => {
        throw new TypeError("some other error");
      },
    });
    await expect(safeFetch(new Request("http://localhost/test"))).rejects.toThrow(TypeError);
  });

  it("safeFetch wrapper passes through valid requests", async () => {
    const safeFetch = wrapSafeFetch({
      fetch: async () => new Response("OK", { status: 200 }),
    });
    const response = await safeFetch(new Request("http://localhost/valid/path"));
    expect(response.status).toBe(200);
    expect(await response.text()).toBe("OK");
  });

  it("decodeURIComponent throws on malformed percent-encoding", () => {
    // Validates the core issue: decodeURIComponent throws URIError on malformed input
    expect(() => decodeURIComponent("%ZZ")).toThrow(URIError);
    expect(() => decodeURIComponent("%")).toThrow(URIError);
    expect(() => decodeURIComponent("%E0%A4%A")).toThrow(URIError);
    // Valid encoding should not throw
    expect(decodeURIComponent("%20")).toBe(" ");
    expect(decodeURIComponent("hello%20world")).toBe("hello world");
  });
});

View file

@@ -0,0 +1,52 @@
import { describe, expect, it } from "vitest";
import { ConfigSchema, type AppConfig } from "@openhandoff/shared";
import { createProviderRegistry } from "../src/providers/index.js";
/** Minimal valid AppConfig with both `local` and `daytona` providers declared. */
function makeConfig(): AppConfig {
  const backend = {
    host: "127.0.0.1",
    port: 7741,
    dbPath: "~/.local/share/openhandoff/handoff.db",
    opencode_poll_interval: 2,
    github_poll_interval: 30,
    backup_interval_secs: 3600,
    backup_retention_days: 7,
  };
  return ConfigSchema.parse({
    auto_submit: true,
    notify: ["terminal"],
    workspace: { default: "default" },
    backend,
    providers: {
      local: {},
      daytona: { image: "ubuntu:24.04" },
    },
  });
}
// Provider selection: local is the default, daytona wins once an API key
// is present, and built-in providers are retrievable by id.
describe("provider registry", () => {
  it("defaults to local when daytona is not configured", () => {
    expect(createProviderRegistry(makeConfig()).defaultProviderId()).toBe("local");
  });
  it("prefers daytona when an api key is configured", () => {
    const base = makeConfig();
    const withKey = ConfigSchema.parse({
      ...base,
      providers: {
        ...base.providers,
        daytona: { ...base.providers.daytona, apiKey: "test-token" },
      },
    });
    expect(createProviderRegistry(withKey).defaultProviderId()).toBe("daytona");
  });
  it("returns the built-in provider", () => {
    expect(createProviderRegistry(makeConfig()).get("daytona").id()).toBe("daytona");
  });
});

View file

@@ -0,0 +1,44 @@
import { describe, expect, test } from "vitest";
import { normalizeRemoteUrl, repoIdFromRemote } from "../src/services/repo.js";
// All equivalent GitHub spellings normalize to the canonical clone URL;
// scp-style ssh remotes are left untouched.
describe("normalizeRemoteUrl", () => {
  const canonical = "https://github.com/rivet-dev/openhandoff.git";
  test("accepts GitHub shorthand owner/repo", () => {
    expect(normalizeRemoteUrl("rivet-dev/openhandoff")).toBe(canonical);
  });
  test("accepts github.com/owner/repo without scheme", () => {
    expect(normalizeRemoteUrl("github.com/rivet-dev/openhandoff")).toBe(canonical);
  });
  test("canonicalizes GitHub repo URLs without .git", () => {
    expect(normalizeRemoteUrl("https://github.com/rivet-dev/openhandoff")).toBe(canonical);
  });
  test("canonicalizes GitHub non-clone URLs (e.g. /tree/main)", () => {
    expect(normalizeRemoteUrl("https://github.com/rivet-dev/openhandoff/tree/main")).toBe(canonical);
  });
  test("does not rewrite scp-style ssh remotes", () => {
    const sshRemote = "git@github.com:rivet-dev/openhandoff.git";
    expect(normalizeRemoteUrl(sshRemote)).toBe(sshRemote);
  });
});
describe("repoIdFromRemote", () => {
  test("repoId is stable across equivalent GitHub inputs", () => {
    // Shorthand, canonical clone URL, and a /tree/ URL all map to one id.
    const ids = [
      repoIdFromRemote("rivet-dev/openhandoff"),
      repoIdFromRemote("https://github.com/rivet-dev/openhandoff.git"),
      repoIdFromRemote("https://github.com/rivet-dev/openhandoff/tree/main"),
    ];
    expect(ids[1]).toBe(ids[0]);
    expect(ids[2]).toBe(ids[1]);
  });
});

View file

@@ -0,0 +1,21 @@
import { describe, expect, it } from "vitest";
import { resolveEventListOffset } from "../src/actors/sandbox-instance/persist.js";
// resolveEventListOffset picks the starting row for an event page:
// newest tail by default, an explicit (sanitized) cursor when supplied.
describe("sandbox-instance persist event offset", () => {
  const total = 180;
  const limit = 50;
  it("returns newest tail when cursor is omitted", () => {
    expect(resolveEventListOffset({ total, limit })).toBe(130);
  });
  it("returns zero when total rows are below page size", () => {
    expect(resolveEventListOffset({ total: 20, limit })).toBe(0);
  });
  it("uses explicit cursor when provided", () => {
    expect(resolveEventListOffset({ cursor: "7", total, limit })).toBe(7);
  });
  it("normalizes invalid cursors to zero", () => {
    for (const cursor of ["-3", "not-a-number"]) {
      expect(resolveEventListOffset({ cursor, total, limit })).toBe(0);
    }
  });
});

View file

@@ -0,0 +1,9 @@
// Suppress RivetKit traces-driver flush errors during test cleanup: the
// traces driver may try to write after actor state has been unloaded.
process.on("unhandledRejection", (reason) => {
  const suppressible =
    reason instanceof Error && reason.message.includes("state not loaded");
  if (!suppressible) {
    // Anything else is a real failure — rethrow so the test run surfaces it.
    throw reason;
  }
});

View file

@@ -0,0 +1,44 @@
import { describe, expect, it } from "vitest";
import {
normalizeParentBranch,
parentLookupFromStack,
sortBranchesForOverview,
} from "../src/actors/project/stack-model.js";
// Branch-graph helpers: parent normalization, lookup construction, and
// depth-ordered sorting for the stack overview.
describe("stack-model", () => {
  it("normalizes self-parent references to null", () => {
    expect(normalizeParentBranch("feature/a", "feature/a")).toBeNull();
    expect(normalizeParentBranch("feature/a", null)).toBeNull();
    expect(normalizeParentBranch("feature/a", "main")).toBe("main");
  });
  it("builds parent lookup with sanitized entries", () => {
    const lookup = parentLookupFromStack([
      { branchName: "feature/a", parentBranch: "main" },
      { branchName: "feature/b", parentBranch: "feature/b" },
      { branchName: " ", parentBranch: "main" },
    ]);
    // Self-parent collapses to null; blank branch names are dropped entirely.
    expect(lookup.get("feature/a")).toBe("main");
    expect(lookup.get("feature/b")).toBeNull();
    expect(lookup.has(" ")).toBe(false);
  });
  it("orders branches by graph depth and handles cycles safely", () => {
    const input = [
      { branchName: "feature/b", parentBranch: "feature/a", updatedAt: 200 },
      { branchName: "feature/a", parentBranch: "main", updatedAt: 100 },
      { branchName: "main", parentBranch: null, updatedAt: 50 },
      { branchName: "cycle-a", parentBranch: "cycle-b", updatedAt: 300 },
      { branchName: "cycle-b", parentBranch: "cycle-a", updatedAt: 250 },
    ];
    const ordered = sortBranchesForOverview(input).map((row) => row.branchName);
    expect(ordered).toEqual(["main", "feature/a", "feature/b", "cycle-a", "cycle-b"]);
  });
});

View file

@@ -0,0 +1,16 @@
import { describe, expect, it } from "vitest";
import { shouldMarkSessionUnreadForStatus } from "../src/actors/handoff/workbench.js";
// Unread is only set on the running -> idle transition, keyed off whether a
// thinking timestamp was still recorded when the status flipped.
describe("workbench unread status transitions", () => {
  const recentlyThinking = () => ({ thinkingSinceMs: Date.now() - 1_000 });
  it("marks unread when a running session first becomes idle", () => {
    expect(shouldMarkSessionUnreadForStatus(recentlyThinking(), "idle")).toBe(true);
  });
  it("does not re-mark unread on repeated idle polls after thinking has cleared", () => {
    expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: null }, "idle")).toBe(false);
  });
  it("does not mark unread while the session is still running", () => {
    expect(shouldMarkSessionUnreadForStatus(recentlyThinking(), "running")).toBe(false);
  });
});

View file

@ -0,0 +1,89 @@
// @ts-nocheck
import { mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { execFileSync } from "node:child_process";
import { setTimeout as delay } from "node:timers/promises";
import { describe, expect, it } from "vitest";
import { setupTest } from "rivetkit/test";
import { workspaceKey } from "../src/actors/keys.js";
import { registry } from "../src/actors/index.js";
import { createTestDriver } from "./helpers/test-driver.js";
import { createTestRuntimeContext } from "./helpers/test-context.js";
// Actor integration tests are opt-in via env; they spin up real actor runtimes.
const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1";

/** Initializes a throwaway git repo on disk with a single commit. */
function createRepo(): { repoPath: string } {
  const repoPath = mkdtempSync(join(tmpdir(), "hf-isolation-repo-"));
  const git = (...args: string[]) => execFileSync("git", args, { cwd: repoPath });
  git("init");
  git("config", "user.email", "test@example.com");
  git("config", "user.name", "OpenHandoff Test");
  writeFileSync(join(repoPath, "README.md"), "hello\n", "utf8");
  git("add", "README.md");
  git("commit", "-m", "init");
  return { repoPath };
}
/**
 * Polls `ws.listHandoffs` until at least `expectedCount` rows appear
 * (40 attempts x 50ms ≈ 2s max), returning the final row set either way.
 */
async function waitForWorkspaceRows(
  ws: any,
  workspaceId: string,
  expectedCount: number
) {
  const maxAttempts = 40;
  const pollIntervalMs = 50;
  for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
    const rows = await ws.listHandoffs({ workspaceId });
    if (rows.length >= expectedCount) {
      return rows;
    }
    await delay(pollIntervalMs);
  }
  return ws.listHandoffs({ workspaceId });
}
// Integration test: two workspace actors sharing one underlying repo must
// keep their handoff lists fully isolated from each other.
describe("workspace isolation", () => {
  // Skipped unless actor integration tests are explicitly enabled via env.
  it.skipIf(!runActorIntegration)(
    "keeps handoff lists isolated by workspace",
    async (t) => {
      const testDriver = createTestDriver();
      createTestRuntimeContext(testDriver);
      const { client } = await setupTest(t, registry);
      // Two workspace actors ("alpha" and "beta") pointed at the same repo.
      const wsA = await client.workspace.getOrCreate(workspaceKey("alpha"), {
        createWithInput: "alpha"
      });
      const wsB = await client.workspace.getOrCreate(workspaceKey("beta"), {
        createWithInput: "beta"
      });
      const { repoPath } = createRepo();
      const repoA = await wsA.addRepo({ workspaceId: "alpha", remoteUrl: repoPath });
      const repoB = await wsB.addRepo({ workspaceId: "beta", remoteUrl: repoPath });
      // Create one handoff in each workspace.
      await wsA.createHandoff({
        workspaceId: "alpha",
        repoId: repoA.repoId,
        task: "task A",
        providerId: "daytona",
        explicitBranchName: "feature/a",
        explicitTitle: "A"
      });
      await wsB.createHandoff({
        workspaceId: "beta",
        repoId: repoB.repoId,
        task: "task B",
        providerId: "daytona",
        explicitBranchName: "feature/b",
        explicitTitle: "B"
      });
      // Poll until each workspace reports its single handoff, then assert
      // neither workspace sees the other's row.
      const aRows = await waitForWorkspaceRows(wsA, "alpha", 1);
      const bRows = await waitForWorkspaceRows(wsB, "beta", 1);
      expect(aRows.length).toBe(1);
      expect(bRows.length).toBe(1);
      expect(aRows[0]?.workspaceId).toBe("alpha");
      expect(bRows[0]?.workspaceId).toBe("beta");
      expect(aRows[0]?.handoffId).not.toBe(bRows[0]?.handoffId);
    }
  );
});