mirror of
https://github.com/getcompanion-ai/co-mono.git
synced 2026-04-17 05:00:16 +00:00
feat(ai): add OpenAI Codex OAuth + responses provider
This commit is contained in:
parent
6ddfd1be13
commit
1650041a63
22 changed files with 2705 additions and 5 deletions
132
packages/ai/test/openai-codex-stream.test.ts
Normal file
132
packages/ai/test/openai-codex-stream.test.ts
Normal file
|
|
@@ -0,0 +1,132 @@
|
|||
import { mkdtempSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { streamOpenAICodexResponses } from "../src/providers/openai-codex-responses.js";
|
||||
import type { Context, Model } from "../src/types.js";
|
||||
|
||||
const originalFetch = global.fetch;
|
||||
const originalAgentDir = process.env.PI_CODING_AGENT_DIR;
|
||||
|
||||
afterEach(() => {
|
||||
global.fetch = originalFetch;
|
||||
if (originalAgentDir === undefined) {
|
||||
delete process.env.PI_CODING_AGENT_DIR;
|
||||
} else {
|
||||
process.env.PI_CODING_AGENT_DIR = originalAgentDir;
|
||||
}
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe("openai-codex streaming", () => {
|
||||
it("streams SSE responses into AssistantMessageEventStream", async () => {
|
||||
const tempDir = mkdtempSync(join(tmpdir(), "pi-codex-stream-"));
|
||||
process.env.PI_CODING_AGENT_DIR = tempDir;
|
||||
|
||||
const payload = Buffer.from(
|
||||
JSON.stringify({ "https://api.openai.com/auth": { chatgpt_account_id: "acc_test" } }),
|
||||
"utf8",
|
||||
).toString("base64");
|
||||
const token = `aaa.${payload}.bbb`;
|
||||
|
||||
const sse = `${[
|
||||
`data: ${JSON.stringify({
|
||||
type: "response.output_item.added",
|
||||
item: { type: "message", id: "msg_1", role: "assistant", status: "in_progress", content: [] },
|
||||
})}`,
|
||||
`data: ${JSON.stringify({ type: "response.content_part.added", part: { type: "output_text", text: "" } })}`,
|
||||
`data: ${JSON.stringify({ type: "response.output_text.delta", delta: "Hello" })}`,
|
||||
`data: ${JSON.stringify({
|
||||
type: "response.output_item.done",
|
||||
item: {
|
||||
type: "message",
|
||||
id: "msg_1",
|
||||
role: "assistant",
|
||||
status: "completed",
|
||||
content: [{ type: "output_text", text: "Hello" }],
|
||||
},
|
||||
})}`,
|
||||
`data: ${JSON.stringify({
|
||||
type: "response.completed",
|
||||
response: {
|
||||
status: "completed",
|
||||
usage: {
|
||||
input_tokens: 5,
|
||||
output_tokens: 3,
|
||||
total_tokens: 8,
|
||||
input_tokens_details: { cached_tokens: 0 },
|
||||
},
|
||||
},
|
||||
})}`,
|
||||
].join("\n\n")}\n\n`;
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
const stream = new ReadableStream<Uint8Array>({
|
||||
start(controller) {
|
||||
controller.enqueue(encoder.encode(sse));
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
|
||||
const fetchMock = vi.fn(async (input: string | URL, init?: RequestInit) => {
|
||||
const url = typeof input === "string" ? input : input.toString();
|
||||
if (url === "https://api.github.com/repos/openai/codex/releases/latest") {
|
||||
return new Response(JSON.stringify({ tag_name: "rust-v0.0.0" }), { status: 200 });
|
||||
}
|
||||
if (url.startsWith("https://raw.githubusercontent.com/openai/codex/")) {
|
||||
return new Response("PROMPT", { status: 200, headers: { etag: '"etag"' } });
|
||||
}
|
||||
if (url === "https://chatgpt.com/backend-api/codex/responses") {
|
||||
const headers = init?.headers instanceof Headers ? init.headers : undefined;
|
||||
expect(headers?.get("Authorization")).toBe(`Bearer ${token}`);
|
||||
expect(headers?.get("chatgpt-account-id")).toBe("acc_test");
|
||||
expect(headers?.get("OpenAI-Beta")).toBe("responses=experimental");
|
||||
expect(headers?.get("originator")).toBe("codex_cli_rs");
|
||||
expect(headers?.get("accept")).toBe("text/event-stream");
|
||||
expect(headers?.has("x-api-key")).toBe(false);
|
||||
return new Response(stream, {
|
||||
status: 200,
|
||||
headers: { "content-type": "text/event-stream" },
|
||||
});
|
||||
}
|
||||
return new Response("not found", { status: 404 });
|
||||
});
|
||||
|
||||
global.fetch = fetchMock as typeof fetch;
|
||||
|
||||
const model: Model<"openai-codex-responses"> = {
|
||||
id: "gpt-5.1-codex",
|
||||
name: "GPT-5.1 Codex",
|
||||
api: "openai-codex-responses",
|
||||
provider: "openai-codex",
|
||||
baseUrl: "https://chatgpt.com/backend-api",
|
||||
reasoning: true,
|
||||
input: ["text"],
|
||||
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
||||
contextWindow: 400000,
|
||||
maxTokens: 128000,
|
||||
};
|
||||
|
||||
const context: Context = {
|
||||
systemPrompt: "You are a helpful assistant.",
|
||||
messages: [{ role: "user", content: "Say hello", timestamp: Date.now() }],
|
||||
};
|
||||
|
||||
const streamResult = streamOpenAICodexResponses(model, context, { apiKey: token });
|
||||
let sawTextDelta = false;
|
||||
let sawDone = false;
|
||||
|
||||
for await (const event of streamResult) {
|
||||
if (event.type === "text_delta") {
|
||||
sawTextDelta = true;
|
||||
}
|
||||
if (event.type === "done") {
|
||||
sawDone = true;
|
||||
expect(event.message.content.find((c) => c.type === "text")?.text).toBe("Hello");
|
||||
}
|
||||
}
|
||||
|
||||
expect(sawTextDelta).toBe(true);
|
||||
expect(sawDone).toBe(true);
|
||||
});
|
||||
});
|
||||
165
packages/ai/test/openai-codex.test.ts
Normal file
165
packages/ai/test/openai-codex.test.ts
Normal file
|
|
@@ -0,0 +1,165 @@
|
|||
import { mkdtempSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { getCodexInstructions } from "../src/providers/openai-codex/prompts/codex.js";
|
||||
import { CODEX_PI_BRIDGE } from "../src/providers/openai-codex/prompts/pi-codex-bridge.js";
|
||||
import {
|
||||
normalizeModel,
|
||||
type RequestBody,
|
||||
transformRequestBody,
|
||||
} from "../src/providers/openai-codex/request-transformer.js";
|
||||
import { parseCodexError } from "../src/providers/openai-codex/response-handler.js";
|
||||
|
||||
const DEFAULT_PROMPT_PREFIX =
|
||||
"You are an expert coding assistant. You help users with coding tasks by reading files, executing commands";
|
||||
const FALLBACK_PROMPT = readFileSync(
|
||||
new URL("../src/providers/openai-codex/prompts/codex-instructions.md", import.meta.url),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
describe("openai-codex request transformer", () => {
|
||||
it("filters item_reference, strips ids, and inserts bridge message", async () => {
|
||||
const body: RequestBody = {
|
||||
model: "gpt-5.1-codex",
|
||||
input: [
|
||||
{
|
||||
type: "message",
|
||||
role: "developer",
|
||||
id: "sys-1",
|
||||
content: [{ type: "input_text", text: `${DEFAULT_PROMPT_PREFIX}...` }],
|
||||
},
|
||||
{
|
||||
type: "message",
|
||||
role: "user",
|
||||
id: "user-1",
|
||||
content: [{ type: "input_text", text: "hello" }],
|
||||
},
|
||||
{ type: "item_reference", id: "ref-1" },
|
||||
{ type: "function_call_output", call_id: "missing", name: "tool", output: "result" },
|
||||
],
|
||||
tools: [{ type: "function", name: "tool", description: "", parameters: {} }],
|
||||
};
|
||||
|
||||
const transformed = await transformRequestBody(body, "CODEX_INSTRUCTIONS", {}, true);
|
||||
|
||||
expect(transformed.store).toBe(false);
|
||||
expect(transformed.stream).toBe(true);
|
||||
expect(transformed.instructions).toBe("CODEX_INSTRUCTIONS");
|
||||
expect(transformed.include).toEqual(["reasoning.encrypted_content"]);
|
||||
|
||||
const input = transformed.input || [];
|
||||
expect(input.some((item) => item.type === "item_reference")).toBe(false);
|
||||
expect(input.some((item) => "id" in item)).toBe(false);
|
||||
expect(input[0]?.type).toBe("message");
|
||||
expect(input[0]?.content).toEqual([{ type: "input_text", text: CODEX_PI_BRIDGE }]);
|
||||
|
||||
const orphaned = input.find((item) => item.type === "message" && item.role === "assistant");
|
||||
expect(orphaned?.content).toMatch(/Previous tool result/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("openai-codex model normalization", () => {
|
||||
it("maps space-separated codex-mini names to codex-mini-latest", () => {
|
||||
expect(normalizeModel("gpt 5 codex mini")).toBe("codex-mini-latest");
|
||||
});
|
||||
});
|
||||
|
||||
describe("openai-codex error parsing", () => {
|
||||
it("produces friendly usage-limit messages and rate limits", async () => {
|
||||
const resetAt = Math.floor(Date.now() / 1000) + 600;
|
||||
const response = new Response(
|
||||
JSON.stringify({
|
||||
error: { code: "usage_limit_reached", plan_type: "Plus", resets_at: resetAt },
|
||||
}),
|
||||
{
|
||||
status: 429,
|
||||
headers: {
|
||||
"x-codex-primary-used-percent": "99",
|
||||
"x-codex-primary-window-minutes": "60",
|
||||
"x-codex-primary-reset-at": String(resetAt),
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const info = await parseCodexError(response);
|
||||
expect(info.friendlyMessage?.toLowerCase()).toContain("usage limit");
|
||||
expect(info.rateLimits?.primary?.used_percent).toBe(99);
|
||||
});
|
||||
});
|
||||
|
||||
describe("openai-codex prompt caching", () => {
|
||||
const originalFetch = global.fetch;
|
||||
const originalAgentDir = process.env.PI_CODING_AGENT_DIR;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
global.fetch = originalFetch;
|
||||
if (originalAgentDir === undefined) {
|
||||
delete process.env.PI_CODING_AGENT_DIR;
|
||||
} else {
|
||||
process.env.PI_CODING_AGENT_DIR = originalAgentDir;
|
||||
}
|
||||
});
|
||||
|
||||
it("caches prompts with etag and reuses cache", async () => {
|
||||
const tempDir = mkdtempSync(join(tmpdir(), "pi-codex-"));
|
||||
process.env.PI_CODING_AGENT_DIR = tempDir;
|
||||
|
||||
const tag = "rust-v0.0.0";
|
||||
const promptText = "PROMPT_CONTENT";
|
||||
const etag = '"etag-123"';
|
||||
|
||||
const fetchMock = vi.fn(async (input: string | URL, init?: RequestInit) => {
|
||||
const url = typeof input === "string" ? input : input.toString();
|
||||
if (url === "https://api.github.com/repos/openai/codex/releases/latest") {
|
||||
return new Response(JSON.stringify({ tag_name: tag }), { status: 200 });
|
||||
}
|
||||
if (url.startsWith("https://raw.githubusercontent.com/openai/codex/")) {
|
||||
const headerValue =
|
||||
init?.headers && typeof init.headers === "object" && "If-None-Match" in init.headers
|
||||
? String((init.headers as Record<string, string>)["If-None-Match"])
|
||||
: undefined;
|
||||
if (headerValue === etag) {
|
||||
return new Response("", { status: 304, headers: { etag } });
|
||||
}
|
||||
return new Response(promptText, { status: 200, headers: { etag } });
|
||||
}
|
||||
return new Response("not found", { status: 404 });
|
||||
});
|
||||
|
||||
global.fetch = fetchMock as typeof fetch;
|
||||
|
||||
const first = await getCodexInstructions("gpt-5.1-codex");
|
||||
expect(first).toBe(promptText);
|
||||
|
||||
const metaPath = join(tempDir, "cache", "openai-codex", "codex-instructions-meta.json");
|
||||
const meta = JSON.parse(readFileSync(metaPath, "utf8")) as { etag: string; tag: string; lastChecked: number };
|
||||
writeFileSync(metaPath, JSON.stringify({ ...meta, lastChecked: 0 }), "utf8");
|
||||
|
||||
const second = await getCodexInstructions("gpt-5.1-codex");
|
||||
expect(second).toBe(promptText);
|
||||
expect(fetchMock).toHaveBeenCalled();
|
||||
const rawCalls = fetchMock.mock.calls.filter((call) =>
|
||||
String(call[0]).startsWith("https://raw.githubusercontent.com/openai/codex/"),
|
||||
);
|
||||
expect(rawCalls.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("falls back to bundled instructions when cache and network are unavailable", async () => {
|
||||
const tempDir = mkdtempSync(join(tmpdir(), "pi-codex-"));
|
||||
process.env.PI_CODING_AGENT_DIR = tempDir;
|
||||
|
||||
const fetchMock = vi.fn(async () => {
|
||||
throw new Error("network down");
|
||||
});
|
||||
|
||||
global.fetch = fetchMock as typeof fetch;
|
||||
|
||||
const instructions = await getCodexInstructions("gpt-5.1-codex");
|
||||
expect(instructions).toBe(FALLBACK_PROMPT);
|
||||
});
|
||||
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue