diff --git a/packages/ai/CHANGELOG.md b/packages/ai/CHANGELOG.md index c7c8cd93..94553145 100644 --- a/packages/ai/CHANGELOG.md +++ b/packages/ai/CHANGELOG.md @@ -2,6 +2,10 @@ ## [Unreleased] +### Added + +- Added `headers` option to `StreamOptions` for custom HTTP headers in API requests. Supported by all providers except Amazon Bedrock (which uses AWS SDK auth). Headers are merged with provider defaults and `model.headers`, with `options.headers` taking precedence. + ## [0.49.2] - 2026-01-19 ### Added diff --git a/packages/ai/src/providers/anthropic.ts b/packages/ai/src/providers/anthropic.ts index 3ec88b11..62a756c1 100644 --- a/packages/ai/src/providers/anthropic.ts +++ b/packages/ai/src/providers/anthropic.ts @@ -126,6 +126,16 @@ export interface AnthropicOptions extends StreamOptions { toolChoice?: "auto" | "any" | "none" | { type: "tool"; name: string }; } +function mergeHeaders(...headerSources: (Record<string, string> | undefined)[]): Record<string, string> { + const merged: Record<string, string> = {}; + for (const headers of headerSources) { + if (headers) { + Object.assign(merged, headers); + } + } + return merged; +} + export const streamAnthropic: StreamFunction<"anthropic-messages"> = ( model: Model<"anthropic-messages">, context: Context, @@ -154,7 +164,12 @@ try { const apiKey = options?.apiKey ?? getEnvApiKey(model.provider) ?? ""; - const { client, isOAuthToken } = createClient(model, apiKey, options?.interleavedThinking ?? true); + const { client, isOAuthToken } = createClient( + model, + apiKey, + options?.interleavedThinking ?? 
true, + options?.headers, ); const params = buildParams(model, context, isOAuthToken, options); options?.onPayload?.(params); const anthropicStream = client.messages.stream({ ...params, stream: true }, { signal: options?.signal }); @@ -328,6 +343,7 @@ function createClient( model: Model<"anthropic-messages">, apiKey: string, interleavedThinking: boolean, + optionsHeaders?: Record<string, string>, ): { client: Anthropic; isOAuthToken: boolean } { const betaFeatures = ["fine-grained-tool-streaming-2025-05-14"]; if (interleavedThinking) { @@ -337,14 +353,17 @@ const oauthToken = isOAuthToken(apiKey); if (oauthToken) { // Stealth mode: Mimic Claude Code's headers exactly - const defaultHeaders = { - accept: "application/json", - "anthropic-dangerous-direct-browser-access": "true", - "anthropic-beta": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(",")}`, - "user-agent": `claude-cli/${claudeCodeVersion} (external, cli)`, - "x-app": "cli", - ...(model.headers || {}), - }; + const defaultHeaders = mergeHeaders( + { + accept: "application/json", + "anthropic-dangerous-direct-browser-access": "true", + "anthropic-beta": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(",")}`, + "user-agent": `claude-cli/${claudeCodeVersion} (external, cli)`, + "x-app": "cli", + }, + model.headers, + optionsHeaders, + ); const client = new Anthropic({ apiKey: null, @@ -357,12 +376,15 @@ return { client, isOAuthToken: true }; } - const defaultHeaders = { - accept: "application/json", - "anthropic-dangerous-direct-browser-access": "true", - "anthropic-beta": betaFeatures.join(","), - ...(model.headers || {}), - }; + const defaultHeaders = mergeHeaders( + { + accept: "application/json", + "anthropic-dangerous-direct-browser-access": "true", + "anthropic-beta": betaFeatures.join(","), + }, + model.headers, + optionsHeaders, + ); const client = new Anthropic({ apiKey, diff --git a/packages/ai/src/providers/google-gemini-cli.ts 
b/packages/ai/src/providers/google-gemini-cli.ts index 0957f904..fee0012c 100644 --- a/packages/ai/src/providers/google-gemini-cli.ts +++ b/packages/ai/src/providers/google-gemini-cli.ts @@ -434,6 +434,7 @@ export const streamGoogleGeminiCli: StreamFunction<"google-gemini-cli"> = ( Accept: "text/event-stream", ...headers, ...(isClaudeThinkingModel(model.id) ? { "anthropic-beta": CLAUDE_THINKING_BETA_HEADER } : {}), + ...options?.headers, }; const requestBodyJson = JSON.stringify(requestBody); diff --git a/packages/ai/src/providers/google-vertex.ts b/packages/ai/src/providers/google-vertex.ts index 6346db47..184f21ca 100644 --- a/packages/ai/src/providers/google-vertex.ts +++ b/packages/ai/src/providers/google-vertex.ts @@ -82,7 +82,7 @@ export const streamGoogleVertex: StreamFunction<"google-vertex"> = ( try { const project = resolveProject(options); const location = resolveLocation(options); - const client = createClient(model, project, location); + const client = createClient(model, project, location, options?.headers); const params = buildParams(model, context, options); options?.onPayload?.(params); const googleStream = await client.models.generateContentStream(params); @@ -276,11 +276,16 @@ export const streamGoogleVertex: StreamFunction<"google-vertex"> = ( return stream; }; -function createClient(model: Model<"google-vertex">, project: string, location: string): GoogleGenAI { +function createClient( + model: Model<"google-vertex">, + project: string, + location: string, + optionsHeaders?: Record<string, string>, +): GoogleGenAI { const httpOptions: { headers?: Record<string, string> } = {}; - if (model.headers) { - httpOptions.headers = { ...model.headers }; + if (model.headers || optionsHeaders) { + httpOptions.headers = { ...model.headers, ...optionsHeaders }; } const hasHttpOptions = Object.values(httpOptions).some(Boolean); diff --git a/packages/ai/src/providers/google.ts b/packages/ai/src/providers/google.ts index 2ad8c00e..dc534f11 100644 --- a/packages/ai/src/providers/google.ts +++ 
b/packages/ai/src/providers/google.ts @@ -69,7 +69,7 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = ( try { const apiKey = options?.apiKey || getEnvApiKey(model.provider) || ""; - const client = createClient(model, apiKey); + const client = createClient(model, apiKey, options?.headers); const params = buildParams(model, context, options); options?.onPayload?.(params); const googleStream = await client.models.generateContentStream(params); @@ -264,14 +264,18 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = ( return stream; }; -function createClient(model: Model<"google-generative-ai">, apiKey?: string): GoogleGenAI { +function createClient( + model: Model<"google-generative-ai">, + apiKey?: string, + optionsHeaders?: Record<string, string>, +): GoogleGenAI { const httpOptions: { baseUrl?: string; apiVersion?: string; headers?: Record<string, string> } = {}; if (model.baseUrl) { httpOptions.baseUrl = model.baseUrl; httpOptions.apiVersion = ""; // baseUrl already includes version path, don't append } - if (model.headers) { - httpOptions.headers = model.headers; + if (model.headers || optionsHeaders) { + httpOptions.headers = { ...model.headers, ...optionsHeaders }; } return new GoogleGenAI({ diff --git a/packages/ai/src/providers/openai-codex-responses.ts b/packages/ai/src/providers/openai-codex-responses.ts index fcfed917..14f07b42 100644 --- a/packages/ai/src/providers/openai-codex-responses.ts +++ b/packages/ai/src/providers/openai-codex-responses.ts @@ -123,7 +123,7 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses" const accountId = extractAccountId(apiKey); const body = buildRequestBody(model, context, options); options?.onPayload?.(body); - const headers = buildHeaders(model.headers, accountId, apiKey, options?.sessionId); + const headers = buildHeaders(model.headers, options?.headers, accountId, apiKey, options?.sessionId); const bodyJson = JSON.stringify(body); // Fetch with retry logic for rate limits and 
transient errors @@ -697,6 +697,7 @@ function extractAccountId(token: string): string { function buildHeaders( initHeaders: Record<string, string> | undefined, + additionalHeaders: Record<string, string> | undefined, accountId: string, token: string, sessionId?: string, @@ -709,6 +710,9 @@ function buildHeaders( headers.set("User-Agent", `pi (${os.platform()} ${os.release()}; ${os.arch()})`); headers.set("accept", "text/event-stream"); headers.set("content-type", "application/json"); + for (const [key, value] of Object.entries(additionalHeaders || {})) { + headers.set(key, value); + } if (sessionId) { headers.set("session_id", sessionId); diff --git a/packages/ai/src/providers/openai-completions.ts b/packages/ai/src/providers/openai-completions.ts index 18abbe42..42576f0f 100644 --- a/packages/ai/src/providers/openai-completions.ts +++ b/packages/ai/src/providers/openai-completions.ts @@ -99,7 +99,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = ( try { const apiKey = options?.apiKey || getEnvApiKey(model.provider) || ""; - const client = createClient(model, context, apiKey); + const client = createClient(model, context, apiKey, options?.headers); const params = buildParams(model, context, options); options?.onPayload?.(params); const openaiStream = await client.chat.completions.create(params, { signal: options?.signal }); @@ -318,7 +318,12 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = ( return stream; }; -function createClient(model: Model<"openai-completions">, context: Context, apiKey?: string) { +function createClient( + model: Model<"openai-completions">, + context: Context, + apiKey?: string, + optionsHeaders?: Record<string, string>, +) { if (!apiKey) { if (!process.env.OPENAI_API_KEY) { throw new Error( @@ -354,6 +359,11 @@ function createClient(model: Model<"openai-completions">, context: Context, apiK } } + // Merge options headers last so they can override defaults + if (optionsHeaders) { + Object.assign(headers, optionsHeaders); + } + 
return new OpenAI({ apiKey, baseURL: model.baseUrl, diff --git a/packages/ai/src/providers/openai-responses.ts b/packages/ai/src/providers/openai-responses.ts index 2cbb9dc5..5e9ce8bf 100644 --- a/packages/ai/src/providers/openai-responses.ts +++ b/packages/ai/src/providers/openai-responses.ts @@ -85,7 +85,7 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = ( try { // Create OpenAI client const apiKey = options?.apiKey || getEnvApiKey(model.provider) || ""; - const client = createClient(model, context, apiKey); + const client = createClient(model, context, apiKey, options?.headers); const params = buildParams(model, context, options); options?.onPayload?.(params); const openaiStream = await client.responses.create( @@ -319,7 +319,12 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = ( return stream; }; -function createClient(model: Model<"openai-responses">, context: Context, apiKey?: string) { +function createClient( + model: Model<"openai-responses">, + context: Context, + apiKey?: string, + optionsHeaders?: Record<string, string>, +) { if (!apiKey) { if (!process.env.OPENAI_API_KEY) { throw new Error( @@ -355,6 +360,11 @@ function createClient(model: Model<"openai-responses">, context: Context, apiKey } } + // Merge options headers last so they can override defaults + if (optionsHeaders) { + Object.assign(headers, optionsHeaders); + } + return new OpenAI({ apiKey, baseURL: model.baseUrl, diff --git a/packages/ai/src/stream.ts b/packages/ai/src/stream.ts index 4164edcb..a390567d 100644 --- a/packages/ai/src/stream.ts +++ b/packages/ai/src/stream.ts @@ -218,6 +218,8 @@ function mapOptionsForApi( signal: options?.signal, apiKey: apiKey || options?.apiKey, sessionId: options?.sessionId, + headers: options?.headers, + onPayload: options?.onPayload, }; // Helper to clamp xhigh to high for providers that don't support it diff --git a/packages/ai/src/types.ts b/packages/ai/src/types.ts index d64f3fd1..4463ec5f 100644 --- 
a/packages/ai/src/types.ts +++ b/packages/ai/src/types.ts @@ -90,6 +90,12 @@ export interface StreamOptions { * Optional callback for inspecting provider payloads before sending. */ onPayload?: (payload: unknown) => void; + /** + * Optional custom HTTP headers to include in API requests. + * Merged with provider defaults; can override default headers. + * Not supported by all providers (e.g., AWS Bedrock uses SDK auth). + */ + headers?: Record<string, string>; } // Unified options with reasoning passed to streamSimple() and completeSimple()