+ // no prior message, default to user-initiated.
+ // no prior message, default to user-initiated.
+ defaultHeaders?: Record<string, string>;
+ input_tokens_details: { cached_tokens: 0 },
+ },
+ },
+ };
+ },
+ };
+ }
+ }
+
+ return { default: MockOpenAI };
+});
+
+async function consumeStream(stream: AsyncIterable<unknown>): Promise<void> {
expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user"); + }); + + it("completions: sets X-Initiator: agent when last message is from assistant (Copilot)", async () => { + const context: Context = { + messages: [ + { role: "user", content: "Hello", timestamp: Date.now() }, + { + role: "assistant", + content: [], + api: "openai-completions", + provider: "github-copilot", + model: "gpt-4", + usage: { + input: 0, + output: 0, + cacheRead: 0, + cacheWrite: 0, + totalTokens: 0, + cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 }, + }, + stopReason: "stop", + timestamp: Date.now(), + }, + ], + }; + + const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" }); + await consumeStream(stream); + + expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent"); + }); + + it("completions: sets X-Initiator: agent when last message is from toolResult (Copilot)", async () => { + const context: Context = { + messages: [ + { role: "user", content: "Hello", timestamp: Date.now() }, + { + role: "toolResult", + content: [], + toolCallId: "1", + toolName: "test", + isError: false, + timestamp: Date.now(), + }, + ], + }; + + const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" }); + await consumeStream(stream); + + expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent"); + }); + + it("completions: defaults to X-Initiator: user when there are no messages (Copilot)", async () => { + const context: Context = { + messages: [], + }; + + const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" }); + await consumeStream(stream); + + expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user"); + }); + + it("completions: does NOT set X-Initiator for non-Copilot providers", async () => { + const context: Context = { + messages: [{ role: "user", content: "Hello", timestamp: Date.now() }], + }; + + const stream = 
+ it("responses: defaults to X-Initiator: user when there are no messages (Copilot)", async () => {