fix(ai): Deduplicate models and add Anthropic aliases

- Add proper Anthropic model aliases (claude-opus-4-1, claude-sonnet-4-0, etc.)
- Deduplicate models when same ID appears in both models.dev and OpenRouter
- models.dev takes priority over OpenRouter for duplicate IDs
- Fix test to use correct claude-3-5-haiku-latest alias
- Reduces Anthropic models from 11 to 10 (removed duplicate)
This commit is contained in:
Mario Zechner 2025-08-29 23:34:01 +02:00
parent 9c3f32b91e
commit d61d09b88d
3 changed files with 72 additions and 40 deletions

View file

@@ -65,7 +65,22 @@ async function fetchOpenRouterModels(): Promise<NormalizedModel[]> {
modelKey = model.id.replace("openai/", "");
} else if (model.id.startsWith("anthropic/")) {
provider = "anthropic";
modelKey = model.id.replace("anthropic/", "");
const fullKey = model.id.replace("anthropic/", "");
// Map to Anthropic's preferred aliases
const anthropicAliases: Record<string, string> = {
"claude-opus-4.1": "claude-opus-4-1",
"claude-opus-4": "claude-opus-4-0",
"claude-sonnet-4": "claude-sonnet-4-0",
"claude-3.7-sonnet": "claude-3-7-sonnet-latest",
"claude-3.7-sonnet:thinking": "claude-3-7-sonnet-latest:thinking",
"claude-3.5-haiku": "claude-3-5-haiku-latest",
"claude-3.5-haiku-20241022": "claude-3-5-haiku-latest",
"claude-3-haiku": "claude-3-haiku-20240307",
"claude-3-sonnet": "claude-3-sonnet-20240229",
"claude-3-opus": "claude-3-opus-20240229",
"claude-3.5-sonnet": "claude-3-5-sonnet-latest"
};
modelKey = anthropicAliases[fullKey] || fullKey;
} else if (model.id.startsWith("x-ai/")) {
provider = "xai";
modelKey = model.id.replace("x-ai/", "");
@@ -188,13 +203,17 @@ async function generateModels() {
// Combine models (models.dev takes priority for Groq/Cerebras)
const allModels = [...modelsDevModels, ...openRouterModels];
// Group by provider
const providers: Record<string, NormalizedModel[]> = {};
// Group by provider and deduplicate by model ID
const providers: Record<string, Record<string, NormalizedModel>> = {};
for (const model of allModels) {
if (!providers[model.provider]) {
providers[model.provider] = [];
providers[model.provider] = {};
}
// Use model ID as key to automatically deduplicate
// Only add if not already present (models.dev takes priority over OpenRouter)
if (!providers[model.provider][model.id]) {
providers[model.provider][model.id] = model;
}
providers[model.provider].push(model);
}
// Generate TypeScript file
@@ -211,7 +230,7 @@ export const PROVIDERS = {
output += `\t${providerId}: {\n`;
output += `\t\tmodels: {\n`;
for (const model of models) {
for (const model of Object.values(models)) {
output += `\t\t\t"${model.id}": {\n`;
output += `\t\t\t\tid: "${model.id}",\n`;
output += `\t\t\t\tname: "${model.name}",\n`;
@@ -254,7 +273,7 @@ export type ProviderModels = {
console.log(` Reasoning-capable models: ${reasoningModels}`);
for (const [provider, models] of Object.entries(providers)) {
console.log(` ${provider}: ${models.length} models`);
console.log(` ${provider}: ${Object.keys(models).length} models`);
}
}

View file

@@ -2381,8 +2381,8 @@ export const PROVIDERS = {
},
anthropic: {
models: {
"claude-opus-4.1": {
id: "claude-opus-4.1",
"claude-opus-4-1": {
id: "claude-opus-4-1",
name: "Anthropic: Claude Opus 4.1",
provider: "anthropic",
reasoning: true,
@@ -2396,8 +2396,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 32000,
} satisfies Model,
"claude-opus-4": {
id: "claude-opus-4",
"claude-opus-4-0": {
id: "claude-opus-4-0",
name: "Anthropic: Claude Opus 4",
provider: "anthropic",
reasoning: true,
@@ -2411,8 +2411,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 32000,
} satisfies Model,
"claude-sonnet-4": {
id: "claude-sonnet-4",
"claude-sonnet-4-0": {
id: "claude-sonnet-4-0",
name: "Anthropic: Claude Sonnet 4",
provider: "anthropic",
reasoning: true,
@@ -2426,8 +2426,8 @@ export const PROVIDERS = {
contextWindow: 1000000,
maxTokens: 64000,
} satisfies Model,
"claude-3.7-sonnet": {
id: "claude-3.7-sonnet",
"claude-3-7-sonnet-latest": {
id: "claude-3-7-sonnet-latest",
name: "Anthropic: Claude 3.7 Sonnet",
provider: "anthropic",
reasoning: true,
@@ -2441,8 +2441,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 64000,
} satisfies Model,
"claude-3.7-sonnet:thinking": {
id: "claude-3.7-sonnet:thinking",
"claude-3-7-sonnet-latest:thinking": {
id: "claude-3-7-sonnet-latest:thinking",
name: "Anthropic: Claude 3.7 Sonnet (thinking)",
provider: "anthropic",
reasoning: true,
@@ -2456,8 +2456,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 64000,
} satisfies Model,
"claude-3.5-haiku-20241022": {
id: "claude-3.5-haiku-20241022",
"claude-3-5-haiku-latest": {
id: "claude-3-5-haiku-latest",
name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
provider: "anthropic",
reasoning: false,
@@ -2471,23 +2471,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model,
"claude-3.5-haiku": {
id: "claude-3.5-haiku",
name: "Anthropic: Claude 3.5 Haiku",
provider: "anthropic",
reasoning: false,
input: ["text", "image"],
cost: {
input: 0.7999999999999999,
output: 4,
cacheRead: 0.08,
cacheWrite: 1,
},
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model,
"claude-3.5-sonnet": {
id: "claude-3.5-sonnet",
"claude-3-5-sonnet-latest": {
id: "claude-3-5-sonnet-latest",
name: "Anthropic: Claude 3.5 Sonnet",
provider: "anthropic",
reasoning: false,
@@ -2516,8 +2501,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model,
"claude-3-haiku": {
id: "claude-3-haiku",
"claude-3-haiku-20240307": {
id: "claude-3-haiku-20240307",
name: "Anthropic: Claude 3 Haiku",
provider: "anthropic",
reasoning: false,
@@ -2531,8 +2516,8 @@ export const PROVIDERS = {
contextWindow: 200000,
maxTokens: 4096,
} satisfies Model,
"claude-3-opus": {
id: "claude-3-opus",
"claude-3-opus-20240229": {
id: "claude-3-opus-20240229",
name: "Anthropic: Claude 3 Opus",
provider: "anthropic",
reasoning: false,

View file

@@ -538,4 +538,32 @@ describe("AI Providers E2E Tests", () => {
await multiTurn(llm, {reasoningEffort: "medium"});
});
});
describe.skipIf(!process.env.ANTHROPIC_API_KEY)("Anthropic Provider (Haiku 3.5)", () => {
let llm: AnthropicLLM;
beforeAll(() => {
llm = createLLM("anthropic", "claude-3-5-haiku-latest");
});
it("should complete basic text generation", async () => {
await basicTextGeneration(llm);
});
it("should handle tool calling", async () => {
await handleToolCall(llm);
});
it("should handle streaming", async () => {
await handleStreaming(llm);
});
it("should handle thinking mode", async () => {
await handleThinking(llm, {thinking: {enabled: true}}, false);
});
it("should handle multi-turn with thinking and tools", async () => {
await multiTurn(llm, {thinking: {enabled: true}});
});
});
});