chore: rebrand companion-os to clanker-agent

- Rename all package names from companion-* to clanker-*
- Update npm scopes from @mariozechner to @harivansh-afk
- Rename config directories .companion -> .clanker
- Rename environment variables COMPANION_* -> CLANKER_*
- Update all documentation, README files, and install scripts
- Rename package directories (companion-channels, companion-grind, companion-teams)
- Update GitHub URLs to harivansh-afk/clanker-agent
- Preserve full git history from companion-cloud monorepo
This commit is contained in:
Harivansh Rathi 2026-03-26 16:22:52 -04:00
parent f93fe7d1a0
commit 67168d8289
356 changed files with 2249 additions and 10223 deletions

View file

@@ -6,22 +6,22 @@ import { streamOpenAICodexResponses } from "../src/providers/openai-codex-respon
import type { Context, Model } from "../src/types.js";
const originalFetch = global.fetch;
const originalAgentDir = process.env.COMPANION_CODING_AGENT_DIR;
const originalAgentDir = process.env.CLANKER_CODING_AGENT_DIR;
afterEach(() => {
global.fetch = originalFetch;
if (originalAgentDir === undefined) {
delete process.env.COMPANION_CODING_AGENT_DIR;
delete process.env.CLANKER_CODING_AGENT_DIR;
} else {
process.env.COMPANION_CODING_AGENT_DIR = originalAgentDir;
process.env.CLANKER_CODING_AGENT_DIR = originalAgentDir;
}
vi.restoreAllMocks();
});
describe("openai-codex streaming", () => {
it("streams SSE responses into AssistantMessageEventStream", async () => {
const tempDir = mkdtempSync(join(tmpdir(), "companion-codex-stream-"));
process.env.COMPANION_CODING_AGENT_DIR = tempDir;
const tempDir = mkdtempSync(join(tmpdir(), "clanker-codex-stream-"));
process.env.CLANKER_CODING_AGENT_DIR = tempDir;
const payload = Buffer.from(
JSON.stringify({
@@ -95,7 +95,7 @@ describe("openai-codex streaming", () => {
expect(headers?.get("Authorization")).toBe(`Bearer ${token}`);
expect(headers?.get("chatgpt-account-id")).toBe("acc_test");
expect(headers?.get("OpenAI-Beta")).toBe("responses=experimental");
expect(headers?.get("originator")).toBe("companion");
expect(headers?.get("originator")).toBe("clanker");
expect(headers?.get("accept")).toBe("text/event-stream");
expect(headers?.has("x-api-key")).toBe(false);
return new Response(stream, {
@@ -149,8 +149,8 @@ describe("openai-codex streaming", () => {
});
it("sets conversation_id/session_id headers and prompt_cache_key when sessionId is provided", async () => {
const tempDir = mkdtempSync(join(tmpdir(), "companion-codex-stream-"));
process.env.COMPANION_CODING_AGENT_DIR = tempDir;
const tempDir = mkdtempSync(join(tmpdir(), "clanker-codex-stream-"));
process.env.CLANKER_CODING_AGENT_DIR = tempDir;
const payload = Buffer.from(
JSON.stringify({
@@ -272,8 +272,8 @@ describe("openai-codex streaming", () => {
it.each(["gpt-5.3-codex", "gpt-5.4"])(
"clamps %s minimal reasoning effort to low",
async (modelId) => {
const tempDir = mkdtempSync(join(tmpdir(), "companion-codex-stream-"));
process.env.COMPANION_CODING_AGENT_DIR = tempDir;
const tempDir = mkdtempSync(join(tmpdir(), "clanker-codex-stream-"));
process.env.CLANKER_CODING_AGENT_DIR = tempDir;
const payload = Buffer.from(
JSON.stringify({
@@ -393,8 +393,8 @@ describe("openai-codex streaming", () => {
);
it("does not set conversation_id/session_id headers when sessionId is not provided", async () => {
const tempDir = mkdtempSync(join(tmpdir(), "companion-codex-stream-"));
process.env.COMPANION_CODING_AGENT_DIR = tempDir;
const tempDir = mkdtempSync(join(tmpdir(), "clanker-codex-stream-"));
process.env.CLANKER_CODING_AGENT_DIR = tempDir;
const payload = Buffer.from(
JSON.stringify({