pi-mono/packages/ai/test/copilot-initiator.test.ts
Mario Zechner 4894fa411c Release v0.23.2
Fixed Claude models via GitHub Copilot re-answering all previous prompts.

fixes #209
2025-12-17 17:56:00 +01:00

336 lines
9.8 KiB
TypeScript

import { beforeEach, describe, expect, it, vi } from "vitest";
import { streamOpenAICompletions } from "../src/providers/openai-completions.js";
import { streamOpenAIResponses } from "../src/providers/openai-responses.js";
import type { Context, Model } from "../src/types.js";
// Shape of the config object the (mocked) OpenAI client constructor receives.
// Only the default headers matter to these tests.
interface OpenAIConstructorConfig {
defaultHeaders?: Record<string, string>;
}
// Captured by the OpenAI mock on every construction so tests can inspect which
// headers the provider set; reset to undefined in beforeEach.
let lastOpenAIConfig: OpenAIConstructorConfig | undefined;
// Mock the OpenAI SDK. The mock records the constructor config (so tests can
// assert on defaultHeaders) and streams a fixed, minimal response for both the
// chat-completions endpoint and the responses endpoint.
vi.mock("openai", () => {
  class MockOpenAI {
    // chat.completions.create: two text deltas, then a "stop" finish reason.
    public chat = {
      completions: {
        create: async function* () {
          const chunk = (content: string, finish_reason: string | null) => ({
            choices: [{ delta: { content }, finish_reason }],
          });
          yield chunk("Hello", null);
          yield chunk(" world", "stop");
        },
      },
    };

    // responses.create: a single completed event with zeroed-out usage.
    public responses = {
      create: async function* () {
        yield {
          type: "response.completed" as const,
          response: {
            status: "completed" as const,
            usage: {
              input_tokens: 0,
              output_tokens: 0,
              total_tokens: 0,
              input_tokens_details: { cached_tokens: 0 },
            },
          },
        };
      },
    };

    constructor(config: OpenAIConstructorConfig) {
      // Expose the config to the test via module-level state.
      lastOpenAIConfig = config;
    }
  }
  return { default: MockOpenAI };
});
/** Drains an async iterable to completion, discarding every yielded value. */
async function consumeStream(stream: AsyncIterable<unknown>): Promise<void> {
  const iterator = stream[Symbol.asyncIterator]();
  let result = await iterator.next();
  while (!result.done) {
    result = await iterator.next();
  }
}
describe("GitHub Copilot Headers", () => {
  beforeEach(() => {
    lastOpenAIConfig = undefined;
  });

  // github-copilot model for the chat-completions API; this provider/baseUrl
  // combination is what should trigger the Copilot-specific headers.
  const copilotCompletionsModel: Model<"openai-completions"> = {
    id: "gpt-4",
    name: "GPT-4",
    api: "openai-completions",
    provider: "github-copilot",
    baseUrl: "https://api.individual.githubcopilot.com",
    reasoning: false,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 8192,
    maxTokens: 4096,
    headers: { Authorization: "Bearer token" },
  };
  // Identical model but a non-Copilot provider: must NOT get the headers.
  const otherCompletionsModel: Model<"openai-completions"> = {
    ...copilotCompletionsModel,
    provider: "openai",
  };
  // github-copilot model for the responses API.
  const copilotResponsesModel: Model<"openai-responses"> = {
    id: "gpt-5.1-codex",
    name: "GPT-5.1-Codex",
    api: "openai-responses",
    provider: "github-copilot",
    baseUrl: "https://api.individual.githubcopilot.com",
    reasoning: true,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 128000,
    maxTokens: 128000,
    headers: { Authorization: "Bearer token" },
  };
  const otherResponsesModel: Model<"openai-responses"> = {
    ...copilotResponsesModel,
    provider: "openai",
  };

  // Minimal assistant message (completions-flavored); the responses suite
  // overrides api/model below. Its presence in history should flip the
  // initiator from "user" to "agent".
  const assistantMessage = {
    role: "assistant" as const,
    content: [],
    api: "openai-completions" as const,
    provider: "github-copilot" as const,
    model: "gpt-4",
    usage: {
      input: 0,
      output: 0,
      cacheRead: 0,
      cacheWrite: 0,
      totalTokens: 0,
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
    },
    stopReason: "stop" as const,
    timestamp: Date.now(),
  };
  // Minimal tool-result message; like an assistant message, it marks the
  // conversation as agent-initiated.
  const toolResultMessage = {
    role: "toolResult" as const,
    content: [],
    toolCallId: "1",
    toolName: "test",
    isError: false,
    timestamp: Date.now(),
  };

  /** Builds a user message for a Context. */
  const user = (content: string) => ({ role: "user" as const, content, timestamp: Date.now() });

  /**
   * One entry per provider API. Both APIs must apply identical Copilot header
   * rules, so the same test bodies are run against each entry — this keeps the
   * two sub-suites from drifting apart.
   */
  interface HeaderSuite {
    name: string;
    /** Streams the given context with the github-copilot model. */
    copilot: (context: Context) => AsyncIterable<unknown>;
    /** Streams the given context with the otherwise-identical non-Copilot model. */
    other: (context: Context) => AsyncIterable<unknown>;
    /** Assistant message shaped for this API. */
    assistant: NonNullable<Context["messages"]>[number];
  }

  const suites: HeaderSuite[] = [
    {
      name: "completions API",
      copilot: (context) => streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" }),
      other: (context) => streamOpenAICompletions(otherCompletionsModel, context, { apiKey: "test-key" }),
      assistant: assistantMessage,
    },
    {
      name: "responses API",
      copilot: (context) => streamOpenAIResponses(copilotResponsesModel, context, { apiKey: "test-key" }),
      other: (context) => streamOpenAIResponses(otherResponsesModel, context, { apiKey: "test-key" }),
      assistant: { ...assistantMessage, api: "openai-responses" as const, model: "gpt-5.1-codex" },
    },
  ];

  for (const suite of suites) {
    describe(suite.name, () => {
      it("sets X-Initiator: user for first message (no history)", async () => {
        await consumeStream(suite.copilot({ messages: [user("Hello")] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
      });

      it("sets X-Initiator: agent when assistant message exists in history", async () => {
        await consumeStream(suite.copilot({ messages: [user("Hello"), suite.assistant] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
      });

      it("sets X-Initiator: agent when toolResult exists in history", async () => {
        await consumeStream(suite.copilot({ messages: [user("Hello"), toolResultMessage] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
      });

      it("sets X-Initiator: agent for multi-turn conversation (last is user, but assistant in history)", async () => {
        await consumeStream(
          suite.copilot({ messages: [user("Hello"), suite.assistant, user("Tell me a joke")] }),
        );
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
      });

      it("sets X-Initiator: user when there are no messages", async () => {
        await consumeStream(suite.copilot({ messages: [] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
      });

      it("sets Openai-Intent: conversation-edits", async () => {
        await consumeStream(suite.copilot({ messages: [user("Hello")] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["Openai-Intent"]).toBe("conversation-edits");
      });

      it("does NOT set Copilot headers for non-Copilot providers", async () => {
        await consumeStream(suite.other({ messages: [user("Hello")] }));
        expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBeUndefined();
        expect(lastOpenAIConfig?.defaultHeaders?.["Openai-Intent"]).toBeUndefined();
      });
    });
  }
});