Add custom headers support for models.json

Fixes #39

- Added headers field to Model type (provider and model level)
- Model headers override provider headers when merged
- Supported in all APIs:
  - Anthropic: defaultHeaders
  - OpenAI (completions/responses): defaultHeaders
  - Google: httpOptions.headers
- Enables bypassing Cloudflare bot detection for proxied endpoints
- Updated documentation with examples

Also fixed:
- Mistral/Chutes syntax error (iif -> if)
- process.env.ANTHROPIC_API_KEY bug: use `delete` instead of `= undefined` (Node coerces `process.env` assignments to strings, so `= undefined` stored the literal string "undefined" and the SDK still saw a value)
This commit is contained in:
Mario Zechner 2025-11-20 17:05:31 +01:00
parent 425890e674
commit de39f1f493
9 changed files with 95 additions and 7 deletions

View file

@ -576,6 +576,24 @@ const ollamaModel: Model<'openai-completions'> = {
maxTokens: 32000
};
// Example: routing Claude through a custom proxy endpoint. Model-level
// `headers` are attached to every request, which is what allows a
// Cloudflare-fronted proxy's bot detection to be satisfied.
const proxyModel: Model<"anthropic-messages"> = {
    id: "claude-sonnet-4",
    name: "Claude Sonnet 4 (Proxied)",
    api: "anthropic-messages",
    provider: "custom-proxy",
    baseUrl: "https://proxy.example.com/v1",
    reasoning: true,
    // Accepts both text and image input.
    input: ["text", "image"],
    cost: { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
    contextWindow: 200000,
    maxTokens: 8192,
    // Extra HTTP headers sent with every request to `baseUrl`.
    headers: {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36",
        "X-Custom-Auth": "bearer-token-here",
    },
};
// Use the custom model
const response = await stream(ollamaModel, context, {
apiKey: 'dummy' // Ollama doesn't need a real key

View file

@ -288,11 +288,12 @@ function createClient(
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": "oauth-2025-04-20,fine-grained-tool-streaming-2025-05-14",
...(model.headers || {}),
};
// Clear the env var if we're in Node.js to prevent SDK from using it
if (typeof process !== "undefined" && process.env) {
process.env.ANTHROPIC_API_KEY = undefined;
delete process.env.ANTHROPIC_API_KEY;
}
const client = new Anthropic({
@ -309,6 +310,7 @@ function createClient(
accept: "application/json",
"anthropic-dangerous-direct-browser-access": "true",
"anthropic-beta": "fine-grained-tool-streaming-2025-05-14",
...(model.headers || {}),
};
const client = new Anthropic({

View file

@ -63,7 +63,7 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = (
};
try {
const client = createClient(options?.apiKey);
const client = createClient(model, options?.apiKey);
const params = buildParams(model, context, options);
const googleStream = await client.models.generateContentStream(params);
@ -252,7 +252,7 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = (
return stream;
};
function createClient(apiKey?: string): GoogleGenAI {
function createClient(model: Model<"google-generative-ai">, apiKey?: string): GoogleGenAI {
if (!apiKey) {
if (!process.env.GEMINI_API_KEY) {
throw new Error(
@ -261,7 +261,10 @@ function createClient(apiKey?: string): GoogleGenAI {
}
apiKey = process.env.GEMINI_API_KEY;
}
return new GoogleGenAI({ apiKey });
return new GoogleGenAI({
apiKey,
httpOptions: model.headers ? { headers: model.headers } : undefined,
});
}
function buildParams(

View file

@ -260,7 +260,12 @@ function createClient(model: Model<"openai-completions">, apiKey?: string) {
}
apiKey = process.env.OPENAI_API_KEY;
}
return new OpenAI({ apiKey, baseURL: model.baseUrl, dangerouslyAllowBrowser: true });
return new OpenAI({
apiKey,
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: model.headers,
});
}
function buildParams(model: Model<"openai-completions">, context: Context, options?: OpenAICompletionsOptions) {
@ -285,7 +290,7 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
if (options?.maxTokens) {
// Mistral/Chutes uses max_tokens instead of max_completion_tokens
iif (model.baseUrl.includes("mistral.ai") || model.baseUrl.includes("chutes.ai")) {
if (model.baseUrl.includes("mistral.ai") || model.baseUrl.includes("chutes.ai")) {
(params as any).max_tokens = options?.maxTokens;
} else {
params.max_completion_tokens = options?.maxTokens;

View file

@ -307,7 +307,12 @@ function createClient(model: Model<"openai-responses">, apiKey?: string) {
}
apiKey = process.env.OPENAI_API_KEY;
}
return new OpenAI({ apiKey, baseURL: model.baseUrl, dangerouslyAllowBrowser: true });
return new OpenAI({
apiKey,
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: model.headers,
});
}
function buildParams(model: Model<"openai-responses">, context: Context, options?: OpenAIResponsesOptions) {

View file

@ -168,4 +168,5 @@ export interface Model<TApi extends Api> {
};
contextWindow: number;
maxTokens: number;
headers?: Record<string, string>;
}