feat(ai): add headers option to StreamOptions for custom HTTP headers

- Added headers field to base StreamOptions interface
- Updated all providers to merge options.headers with defaults
- Forward headers and onPayload through streamSimple/completeSimple
- Amazon Bedrock is not supported, since it authenticates via the AWS SDK rather than HTTP headers
This commit is contained in:
Mario Zechner 2026-01-20 01:08:24 +01:00
parent 20c7b5fed4
commit d2be6486a4
10 changed files with 96 additions and 28 deletions

View file

@ -2,6 +2,10 @@
## [Unreleased]
### Added
- Added `headers` option to `StreamOptions` for custom HTTP headers in API requests. Supported by all providers except Amazon Bedrock (which uses AWS SDK auth). Headers are merged with provider defaults and `model.headers`, with `options.headers` taking precedence.
## [0.49.2] - 2026-01-19
### Added

View file

@ -126,6 +126,16 @@ export interface AnthropicOptions extends StreamOptions {
toolChoice?: "auto" | "any" | "none" | { type: "tool"; name: string };
}
/**
 * Combines any number of header maps into a single record.
 * Sources are applied left to right, so a key present in a later
 * source overrides the same key from an earlier one; `undefined`
 * sources are skipped entirely.
 */
function mergeHeaders(...headerSources: (Record<string, string> | undefined)[]): Record<string, string> {
	return headerSources.reduce<Record<string, string>>(
		(combined, source) => (source ? { ...combined, ...source } : combined),
		{},
	);
}
export const streamAnthropic: StreamFunction<"anthropic-messages"> = (
model: Model<"anthropic-messages">,
context: Context,
@ -154,7 +164,12 @@ export const streamAnthropic: StreamFunction<"anthropic-messages"> = (
try {
const apiKey = options?.apiKey ?? getEnvApiKey(model.provider) ?? "";
const { client, isOAuthToken } = createClient(model, apiKey, options?.interleavedThinking ?? true);
const { client, isOAuthToken } = createClient(
model,
apiKey,
options?.interleavedThinking ?? true,
options?.headers,
);
const params = buildParams(model, context, isOAuthToken, options);
options?.onPayload?.(params);
const anthropicStream = client.messages.stream({ ...params, stream: true }, { signal: options?.signal });
@ -328,6 +343,7 @@ function createClient(
model: Model<"anthropic-messages">,
apiKey: string,
interleavedThinking: boolean,
optionsHeaders?: Record<string, string>,
): { client: Anthropic; isOAuthToken: boolean } {
const betaFeatures = ["fine-grained-tool-streaming-2025-05-14"];
if (interleavedThinking) {
@ -337,14 +353,17 @@ function createClient(
const oauthToken = isOAuthToken(apiKey);
if (oauthToken) {
// Stealth mode: Mimic Claude Code's headers exactly
const defaultHeaders = {
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(",")}`,
"user-agent": `claude-cli/${claudeCodeVersion} (external, cli)`,
"x-app": "cli",
...(model.headers || {}),
};
const defaultHeaders = mergeHeaders(
{
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(",")}`,
"user-agent": `claude-cli/${claudeCodeVersion} (external, cli)`,
"x-app": "cli",
},
model.headers,
optionsHeaders,
);
const client = new Anthropic({
apiKey: null,
@ -357,12 +376,15 @@ function createClient(
return { client, isOAuthToken: true };
}
const defaultHeaders = {
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": betaFeatures.join(","),
...(model.headers || {}),
};
const defaultHeaders = mergeHeaders(
{
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": betaFeatures.join(","),
},
model.headers,
optionsHeaders,
);
const client = new Anthropic({
apiKey,

View file

@ -434,6 +434,7 @@ export const streamGoogleGeminiCli: StreamFunction<"google-gemini-cli"> = (
Accept: "text/event-stream",
...headers,
...(isClaudeThinkingModel(model.id) ? { "anthropic-beta": CLAUDE_THINKING_BETA_HEADER } : {}),
...options?.headers,
};
const requestBodyJson = JSON.stringify(requestBody);

View file

@ -82,7 +82,7 @@ export const streamGoogleVertex: StreamFunction<"google-vertex"> = (
try {
const project = resolveProject(options);
const location = resolveLocation(options);
const client = createClient(model, project, location);
const client = createClient(model, project, location, options?.headers);
const params = buildParams(model, context, options);
options?.onPayload?.(params);
const googleStream = await client.models.generateContentStream(params);
@ -276,11 +276,16 @@ export const streamGoogleVertex: StreamFunction<"google-vertex"> = (
return stream;
};
function createClient(model: Model<"google-vertex">, project: string, location: string): GoogleGenAI {
function createClient(
model: Model<"google-vertex">,
project: string,
location: string,
optionsHeaders?: Record<string, string>,
): GoogleGenAI {
const httpOptions: { headers?: Record<string, string> } = {};
if (model.headers) {
httpOptions.headers = { ...model.headers };
if (model.headers || optionsHeaders) {
httpOptions.headers = { ...model.headers, ...optionsHeaders };
}
const hasHttpOptions = Object.values(httpOptions).some(Boolean);

View file

@ -69,7 +69,7 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = (
try {
const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
const client = createClient(model, apiKey);
const client = createClient(model, apiKey, options?.headers);
const params = buildParams(model, context, options);
options?.onPayload?.(params);
const googleStream = await client.models.generateContentStream(params);
@ -264,14 +264,18 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = (
return stream;
};
function createClient(model: Model<"google-generative-ai">, apiKey?: string): GoogleGenAI {
function createClient(
model: Model<"google-generative-ai">,
apiKey?: string,
optionsHeaders?: Record<string, string>,
): GoogleGenAI {
const httpOptions: { baseUrl?: string; apiVersion?: string; headers?: Record<string, string> } = {};
if (model.baseUrl) {
httpOptions.baseUrl = model.baseUrl;
httpOptions.apiVersion = ""; // baseUrl already includes version path, don't append
}
if (model.headers) {
httpOptions.headers = model.headers;
if (model.headers || optionsHeaders) {
httpOptions.headers = { ...model.headers, ...optionsHeaders };
}
return new GoogleGenAI({

View file

@ -123,7 +123,7 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"
const accountId = extractAccountId(apiKey);
const body = buildRequestBody(model, context, options);
options?.onPayload?.(body);
const headers = buildHeaders(model.headers, accountId, apiKey, options?.sessionId);
const headers = buildHeaders(model.headers, options?.headers, accountId, apiKey, options?.sessionId);
const bodyJson = JSON.stringify(body);
// Fetch with retry logic for rate limits and transient errors
@ -697,6 +697,7 @@ function extractAccountId(token: string): string {
function buildHeaders(
initHeaders: Record<string, string> | undefined,
additionalHeaders: Record<string, string> | undefined,
accountId: string,
token: string,
sessionId?: string,
@ -709,6 +710,9 @@ function buildHeaders(
headers.set("User-Agent", `pi (${os.platform()} ${os.release()}; ${os.arch()})`);
headers.set("accept", "text/event-stream");
headers.set("content-type", "application/json");
for (const [key, value] of Object.entries(additionalHeaders || {})) {
headers.set(key, value);
}
if (sessionId) {
headers.set("session_id", sessionId);

View file

@ -99,7 +99,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (
try {
const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
const client = createClient(model, context, apiKey);
const client = createClient(model, context, apiKey, options?.headers);
const params = buildParams(model, context, options);
options?.onPayload?.(params);
const openaiStream = await client.chat.completions.create(params, { signal: options?.signal });
@ -318,7 +318,12 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (
return stream;
};
function createClient(model: Model<"openai-completions">, context: Context, apiKey?: string) {
function createClient(
model: Model<"openai-completions">,
context: Context,
apiKey?: string,
optionsHeaders?: Record<string, string>,
) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@ -354,6 +359,11 @@ function createClient(model: Model<"openai-completions">, context: Context, apiK
}
}
// Merge options headers last so they can override defaults
if (optionsHeaders) {
Object.assign(headers, optionsHeaders);
}
return new OpenAI({
apiKey,
baseURL: model.baseUrl,

View file

@ -85,7 +85,7 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
try {
// Create OpenAI client
const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
const client = createClient(model, context, apiKey);
const client = createClient(model, context, apiKey, options?.headers);
const params = buildParams(model, context, options);
options?.onPayload?.(params);
const openaiStream = await client.responses.create(
@ -319,7 +319,12 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
return stream;
};
function createClient(model: Model<"openai-responses">, context: Context, apiKey?: string) {
function createClient(
model: Model<"openai-responses">,
context: Context,
apiKey?: string,
optionsHeaders?: Record<string, string>,
) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@ -355,6 +360,11 @@ function createClient(model: Model<"openai-responses">, context: Context, apiKey
}
}
// Merge options headers last so they can override defaults
if (optionsHeaders) {
Object.assign(headers, optionsHeaders);
}
return new OpenAI({
apiKey,
baseURL: model.baseUrl,

View file

@ -218,6 +218,8 @@ function mapOptionsForApi<TApi extends Api>(
signal: options?.signal,
apiKey: apiKey || options?.apiKey,
sessionId: options?.sessionId,
headers: options?.headers,
onPayload: options?.onPayload,
};
// Helper to clamp xhigh to high for providers that don't support it

View file

@ -90,6 +90,12 @@ export interface StreamOptions {
* Optional callback for inspecting provider payloads before sending.
*/
onPayload?: (payload: unknown) => void;
/**
* Optional custom HTTP headers to include in API requests.
* Merged with provider defaults; can override default headers.
* Not supported by all providers (e.g., AWS Bedrock uses SDK auth).
*/
headers?: Record<string, string>;
}
// Unified options with reasoning passed to streamSimple() and completeSimple()