Add X-Initiator header for GitHub Copilot (#200)

This commit is contained in:
Ahmed Kamal 2025-12-16 15:05:22 +02:00 committed by GitHub
parent 92577316e0
commit c2dea0ce8b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 338 additions and 6 deletions

View file

@@ -98,7 +98,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (
};
try {
const client = createClient(model, options?.apiKey);
const client = createClient(model, context, options?.apiKey);
const params = buildParams(model, context, options);
const openaiStream = await client.chat.completions.create(params, { signal: options?.signal });
stream.push({ type: "start", partial: output });
@@ -289,7 +289,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (
return stream;
};
function createClient(model: Model<"openai-completions">, apiKey?: string) {
function createClient(model: Model<"openai-completions">, context: Context, apiKey?: string) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@@ -298,11 +298,24 @@ function createClient(model: Model<"openai-completions">, apiKey?: string) {
}
apiKey = process.env.OPENAI_API_KEY;
}
const headers = { ...model.headers };
if (model.provider === "github-copilot") {
// Copilot expects X-Initiator to indicate whether the request is user-initiated
// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is
// no prior message, default to user-initiated.
const messages = context.messages || [];
const lastMessage = messages[messages.length - 1];
const isAgentCall = lastMessage ? lastMessage.role !== "user" : false;
const initiatorValue = isAgentCall ? "agent" : "user";
headers["X-Initiator"] = initiatorValue;
}
return new OpenAI({
apiKey,
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: model.headers,
defaultHeaders: headers,
});
}

View file

@@ -68,7 +68,7 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
try {
// Create OpenAI client
const client = createClient(model, options?.apiKey);
const client = createClient(model, context, options?.apiKey);
const params = buildParams(model, context, options);
const openaiStream = await client.responses.create(params, { signal: options?.signal });
stream.push({ type: "start", partial: output });
@@ -297,7 +297,7 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
return stream;
};
function createClient(model: Model<"openai-responses">, apiKey?: string) {
function createClient(model: Model<"openai-responses">, context: Context, apiKey?: string) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@@ -306,11 +306,24 @@ function createClient(model: Model<"openai-responses">, apiKey?: string) {
}
apiKey = process.env.OPENAI_API_KEY;
}
const headers = { ...model.headers };
if (model.provider === "github-copilot") {
// Copilot expects X-Initiator to indicate whether the request is user-initiated
// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is
// no prior message, default to user-initiated.
const messages = context.messages || [];
const lastMessage = messages[messages.length - 1];
const isAgentCall = lastMessage ? lastMessage.role !== "user" : false;
const initiatorValue = isAgentCall ? "agent" : "user";
headers["X-Initiator"] = initiatorValue;
}
return new OpenAI({
apiKey,
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: model.headers,
defaultHeaders: headers,
});
}

View file

@@ -0,0 +1,306 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { streamOpenAICompletions } from "../src/providers/openai-completions.js";
import { streamOpenAIResponses } from "../src/providers/openai-responses.js";
import type { Context, Model } from "../src/types.js";
// Shape of the config object passed to the (mocked) OpenAI constructor.
// Only `defaultHeaders` is inspected by the assertions in this file.
interface OpenAIConstructorConfig {
  defaultHeaders?: Record<string, string>;
}
// Captures the config from the most recent MockOpenAI construction;
// reset to undefined before each test (see beforeEach below).
let lastOpenAIConfig: OpenAIConstructorConfig | undefined;
// Mock OpenAI
// Stub out the real OpenAI SDK: record the constructor config (so tests can
// inspect the defaultHeaders that would be sent) and emit a minimal,
// well-formed stream for both the chat-completions and responses APIs.
vi.mock("openai", () => {
  type CompletionChunk = {
    choices: Array<{ delta: { content?: string }; finish_reason: string | null }>;
  };
  type ResponseEvent = {
    type: "response.completed";
    response: {
      status: "completed";
      usage: {
        input_tokens: number;
        output_tokens: number;
        total_tokens: number;
        input_tokens_details?: { cached_tokens?: number };
      };
    };
  };

  class MockOpenAI {
    public chat: {
      completions: {
        create: (_body: unknown, _options?: unknown) => AsyncGenerator<CompletionChunk>;
      };
    };
    public responses: {
      create: (_body: unknown, _options?: unknown) => AsyncGenerator<ResponseEvent>;
    };

    constructor(config: OpenAIConstructorConfig) {
      // Expose the construction config to the test assertions.
      lastOpenAIConfig = config;

      // Two-chunk completion stream: "Hello" then " world" with a stop reason.
      const completionChunks: CompletionChunk[] = [
        { choices: [{ delta: { content: "Hello" }, finish_reason: null }] },
        { choices: [{ delta: { content: " world" }, finish_reason: "stop" }] },
      ];
      this.chat = {
        completions: {
          create: async function* () {
            yield* completionChunks;
          },
        },
      };

      // Single terminal event for the responses API with zeroed usage.
      this.responses = {
        create: async function* () {
          yield {
            type: "response.completed",
            response: {
              status: "completed",
              usage: {
                input_tokens: 0,
                output_tokens: 0,
                total_tokens: 0,
                input_tokens_details: { cached_tokens: 0 },
              },
            },
          };
        },
      };
    }
  }

  return { default: MockOpenAI };
});
/**
 * Drain an async iterable to completion, discarding every item.
 * Used so the provider stream runs far enough to construct the client.
 */
async function consumeStream(stream: AsyncIterable<unknown>): Promise<void> {
  const iterator = stream[Symbol.asyncIterator]();
  // Pull until the iterator reports completion; items are ignored.
  while (!(await iterator.next()).done) {
    // no-op
  }
}
// Verifies that both OpenAI-style providers attach the X-Initiator header
// ("user" vs "agent") to the client's defaultHeaders for the github-copilot
// provider only, based on the role of the last message in the context.
describe("GitHub Copilot X-Initiator Header", () => {
  beforeEach(() => {
    // Clear the captured config so each test observes only its own client.
    lastOpenAIConfig = undefined;
  });

  // Copilot model fixture for the chat-completions API.
  const copilotCompletionsModel: Model<"openai-completions"> = {
    id: "gpt-4",
    name: "GPT-4",
    api: "openai-completions",
    provider: "github-copilot",
    baseUrl: "https://api.individual.githubcopilot.com",
    reasoning: false,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 8192,
    maxTokens: 4096,
    headers: { Authorization: "Bearer token" },
  };
  // Same model but a non-Copilot provider: the header must NOT be added.
  const otherCompletionsModel: Model<"openai-completions"> = {
    ...copilotCompletionsModel,
    provider: "openai",
  };

  // Copilot model fixture for the responses API.
  const copilotResponsesModel: Model<"openai-responses"> = {
    id: "gpt-5.1-codex",
    name: "GPT-5.1-Codex",
    api: "openai-responses",
    provider: "github-copilot",
    baseUrl: "https://api.individual.githubcopilot.com",
    reasoning: true,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 128000,
    maxTokens: 128000,
    headers: { Authorization: "Bearer token" },
  };
  // Non-Copilot counterpart for the responses API.
  const otherResponsesModel: Model<"openai-responses"> = {
    ...copilotResponsesModel,
    provider: "openai",
  };

  it("completions: sets X-Initiator: user when last message is from user (Copilot)", async () => {
    const context: Context = {
      messages: [{ role: "user", content: "Hello", timestamp: Date.now() }],
    };
    const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
  });

  it("completions: sets X-Initiator: agent when last message is from assistant (Copilot)", async () => {
    const context: Context = {
      messages: [
        { role: "user", content: "Hello", timestamp: Date.now() },
        {
          role: "assistant",
          content: [],
          api: "openai-completions",
          provider: "github-copilot",
          model: "gpt-4",
          usage: {
            input: 0,
            output: 0,
            cacheRead: 0,
            cacheWrite: 0,
            totalTokens: 0,
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
          },
          stopReason: "stop",
          timestamp: Date.now(),
        },
      ],
    };
    const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
  });

  it("completions: sets X-Initiator: agent when last message is from toolResult (Copilot)", async () => {
    const context: Context = {
      messages: [
        { role: "user", content: "Hello", timestamp: Date.now() },
        {
          role: "toolResult",
          content: [],
          toolCallId: "1",
          toolName: "test",
          isError: false,
          timestamp: Date.now(),
        },
      ],
    };
    const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
  });

  it("completions: defaults to X-Initiator: user when there are no messages (Copilot)", async () => {
    const context: Context = {
      messages: [],
    };
    const stream = streamOpenAICompletions(copilotCompletionsModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
  });

  it("completions: does NOT set X-Initiator for non-Copilot providers", async () => {
    const context: Context = {
      messages: [{ role: "user", content: "Hello", timestamp: Date.now() }],
    };
    const stream = streamOpenAICompletions(otherCompletionsModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBeUndefined();
  });

  // The same five scenarios, exercised against the responses API provider.
  it("responses: sets X-Initiator: user when last message is from user (Copilot)", async () => {
    const context: Context = {
      messages: [{ role: "user", content: "Hello", timestamp: Date.now() }],
    };
    const stream = streamOpenAIResponses(copilotResponsesModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
  });

  it("responses: sets X-Initiator: agent when last message is from assistant (Copilot)", async () => {
    const context: Context = {
      messages: [
        { role: "user", content: "Hello", timestamp: Date.now() },
        {
          role: "assistant",
          content: [],
          api: "openai-responses",
          provider: "github-copilot",
          model: "gpt-5.1-codex",
          usage: {
            input: 0,
            output: 0,
            cacheRead: 0,
            cacheWrite: 0,
            totalTokens: 0,
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
          },
          stopReason: "stop",
          timestamp: Date.now(),
        },
      ],
    };
    const stream = streamOpenAIResponses(copilotResponsesModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
  });

  it("responses: sets X-Initiator: agent when last message is from toolResult (Copilot)", async () => {
    const context: Context = {
      messages: [
        { role: "user", content: "Hello", timestamp: Date.now() },
        {
          role: "toolResult",
          content: [],
          toolCallId: "1",
          toolName: "test",
          isError: false,
          timestamp: Date.now(),
        },
      ],
    };
    const stream = streamOpenAIResponses(copilotResponsesModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("agent");
  });

  it("responses: defaults to X-Initiator: user when there are no messages (Copilot)", async () => {
    const context: Context = {
      messages: [],
    };
    const stream = streamOpenAIResponses(copilotResponsesModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBe("user");
  });

  it("responses: does NOT set X-Initiator for non-Copilot providers", async () => {
    const context: Context = {
      messages: [{ role: "user", content: "Hello", timestamp: Date.now() }],
    };
    const stream = streamOpenAIResponses(otherResponsesModel, context, { apiKey: "test-key" });
    await consumeStream(stream);
    expect(lastOpenAIConfig?.defaultHeaders?.["X-Initiator"]).toBeUndefined();
  });
});