feat(ai): Fetch Anthropic, Google, and OpenAI models from models.dev instead of OpenRouter

- Updated generate-models.ts to fetch these providers directly from models.dev API
- OpenRouter now only used for xAI and other third-party providers
- Fixed test model IDs to match new model names from models.dev
- Removed unused import from google.ts
This commit is contained in:
Mario Zechner 2025-09-02 01:18:59 +02:00
parent f1c3d44602
commit efaa5cdb39
8 changed files with 772 additions and 910 deletions

View file

@ -62,48 +62,11 @@ async function fetchOpenRouterModels(): Promise<NormalizedModel[]> {
let provider = "";
let modelKey = model.id;
// Map provider prefixes to our provider names
if (model.id.startsWith("google/")) {
provider = "google";
modelKey = model.id.replace("google/", "");
} else if (model.id.startsWith("openai/")) {
provider = "openai";
modelKey = model.id.replace("openai/", "");
} else if (model.id.startsWith("anthropic/")) {
provider = "anthropic";
modelKey = model.id.replace("anthropic/", "");
// Fix dot notation to dash notation for ALL Anthropic models
modelKey = modelKey.replace(/\./g, "-");
// Map version-less models to -latest aliases
if (modelKey === "claude-3-5-haiku") {
modelKey = "claude-3-5-haiku-latest";
} else if (modelKey === "claude-3-5-sonnet") {
modelKey = "claude-3-5-sonnet-latest";
} else if (modelKey === "claude-3-7-sonnet") {
modelKey = "claude-3-7-sonnet-latest";
} else if (modelKey === "claude-3-7-sonnet:thinking") {
modelKey = "claude-3-7-sonnet-latest:thinking";
}
// Map numbered versions to proper format
else if (modelKey === "claude-opus-4-1") {
modelKey = "claude-opus-4-1";
} else if (modelKey === "claude-opus-4") {
modelKey = "claude-opus-4-0";
} else if (modelKey === "claude-sonnet-4") {
modelKey = "claude-sonnet-4-0";
}
// Map old 3.x models to their specific dates
else if (modelKey === "claude-3-haiku") {
modelKey = "claude-3-haiku-20240307";
} else if (modelKey === "claude-3-sonnet") {
modelKey = "claude-3-sonnet-20240229";
} else if (modelKey === "claude-3-opus") {
modelKey = "claude-3-opus-20240229";
} else {
modelKey = modelKey.replace("\.", "-");
}
// Skip models that we get from models.dev (Anthropic, Google, OpenAI)
if (model.id.startsWith("google/") ||
model.id.startsWith("openai/") ||
model.id.startsWith("anthropic/")) {
continue;
} else if (model.id.startsWith("x-ai/")) {
provider = "xai";
modelKey = model.id.replace("x-ai/", "");
@ -113,8 +76,8 @@ async function fetchOpenRouterModels(): Promise<NormalizedModel[]> {
modelKey = model.id; // Keep full ID for OpenRouter
}
// Skip if not one of our supported providers
if (!["google", "openai", "anthropic", "xai", "openrouter"].includes(provider)) {
// Skip if not one of our supported providers from OpenRouter
if (!["xai", "openrouter"].includes(provider)) {
continue;
}
@ -172,6 +135,78 @@ async function loadModelsDevData(): Promise<NormalizedModel[]> {
const models: NormalizedModel[] = [];
// Process Anthropic models
if (data.anthropic?.models) {
for (const [modelId, model] of Object.entries(data.anthropic.models)) {
const m = model as ModelsDevModel;
if (m.tool_call !== true) continue;
models.push({
id: modelId,
name: m.name || modelId,
provider: "anthropic",
reasoning: m.reasoning === true,
input: m.modalities?.input?.includes("image") ? ["text", "image"] : ["text"],
cost: {
input: m.cost?.input || 0,
output: m.cost?.output || 0,
cacheRead: m.cost?.cache_read || 0,
cacheWrite: m.cost?.cache_write || 0,
},
contextWindow: m.limit?.context || 4096,
maxTokens: m.limit?.output || 4096,
});
}
}
// Process Google models
if (data.google?.models) {
for (const [modelId, model] of Object.entries(data.google.models)) {
const m = model as ModelsDevModel;
if (m.tool_call !== true) continue;
models.push({
id: modelId,
name: m.name || modelId,
provider: "google",
reasoning: m.reasoning === true,
input: m.modalities?.input?.includes("image") ? ["text", "image"] : ["text"],
cost: {
input: m.cost?.input || 0,
output: m.cost?.output || 0,
cacheRead: m.cost?.cache_read || 0,
cacheWrite: m.cost?.cache_write || 0,
},
contextWindow: m.limit?.context || 4096,
maxTokens: m.limit?.output || 4096,
});
}
}
// Process OpenAI models
if (data.openai?.models) {
for (const [modelId, model] of Object.entries(data.openai.models)) {
const m = model as ModelsDevModel;
if (m.tool_call !== true) continue;
models.push({
id: modelId,
name: m.name || modelId,
provider: "openai",
reasoning: m.reasoning === true,
input: m.modalities?.input?.includes("image") ? ["text", "image"] : ["text"],
cost: {
input: m.cost?.input || 0,
output: m.cost?.output || 0,
cacheRead: m.cost?.cache_read || 0,
cacheWrite: m.cost?.cache_write || 0,
},
contextWindow: m.limit?.context || 4096,
maxTokens: m.limit?.output || 4096,
});
}
}
// Process Groq models
if (data.groq?.models) {
for (const [modelId, model] of Object.entries(data.groq.models)) {
@ -231,11 +266,13 @@ async function loadModelsDevData(): Promise<NormalizedModel[]> {
}
async function generateModels() {
// Fetch all models
const openRouterModels = await fetchOpenRouterModels();
// Fetch models from both sources
// models.dev: Anthropic, Google, OpenAI, Groq, Cerebras
// OpenRouter: xAI and other providers (excluding Anthropic, Google, OpenAI)
const modelsDevModels = await loadModelsDevData();
const openRouterModels = await fetchOpenRouterModels();
// Combine models (models.dev takes priority for Groq/Cerebras)
// Combine models (models.dev has priority)
const allModels = [...modelsDevModels, ...openRouterModels];
// Group by provider and deduplicate by model ID

File diff suppressed because it is too large Load diff

View file

@ -6,7 +6,6 @@ import {
type GenerateContentParameters,
GoogleGenAI,
type Part,
setDefaultBaseUrls,
} from "@google/genai";
import { calculateCost } from "../models.js";
import type {

View file

@ -93,12 +93,32 @@ export class OpenAIResponsesLLM implements LLM<OpenAIResponsesLLMOptions> {
}
// Add reasoning options for models that support it
if (this.modelInfo?.reasoning && (options?.reasoningEffort || options?.reasoningSummary)) {
params.reasoning = {
effort: options?.reasoningEffort || "medium",
summary: options?.reasoningSummary || "auto",
};
params.include = ["reasoning.encrypted_content"];
if (this.modelInfo?.reasoning) {
if (options?.reasoningEffort || options?.reasoningSummary) {
params.reasoning = {
effort: options?.reasoningEffort || "medium",
summary: options?.reasoningSummary || "auto",
};
params.include = ["reasoning.encrypted_content"];
} else {
params.reasoning = {
effort: this.modelInfo.name.startsWith("gpt-5") ? "minimal" : null,
summary: null,
};
if (this.modelInfo.name.startsWith("gpt-5")) {
// Jesus Christ, see https://community.openai.com/t/need-reasoning-false-option-for-gpt-5/1351588/7
input.push({
role: "developer",
content: [
{
type: "input_text",
text: "# Juice: 0 !important",
},
],
});
}
}
}
const stream = await this.client.responses.create(params, {

View file

@ -114,7 +114,7 @@ describe("AI Providers Abort Tests", () => {
let llm: AnthropicLLM;
beforeAll(() => {
llm = new AnthropicLLM(getModel("anthropic", "claude-opus-4-1")!, process.env.ANTHROPIC_OAUTH_TOKEN!);
llm = new AnthropicLLM(getModel("anthropic", "claude-opus-4-1-20250805")!, process.env.ANTHROPIC_OAUTH_TOKEN!);
});
it("should abort mid-stream", async () => {

View file

@ -4,7 +4,7 @@ import { OpenAICompletionsLLM } from "../src/providers/openai-completions.js";
import { OpenAIResponsesLLM } from "../src/providers/openai-responses.js";
import { AnthropicLLM } from "../src/providers/anthropic.js";
import type { LLM, Context, AssistantMessage, Tool, Message } from "../src/types.js";
import { getModel } from "../src/models.js";
import { createLLM, getModel } from "../src/models.js";
// Tool for testing
const weatherTool: Tool = {

View file

@ -0,0 +1,31 @@
import { GoogleGenAI } from "@google/genai";
import OpenAI from "openai";

// Ad-hoc script: dump model metadata from provider APIs to stdout.
const ai = new GoogleGenAI({});

async function main() {
	/* Google listing kept for reference:
	let pager = await ai.models.list();
	do {
		for (const model of pager.page) {
			console.log(JSON.stringify(model, null, 2));
			console.log("---");
		}
		if (!pager.hasNextPage()) break;
		await pager.nextPage();
	} while (true);*/
	const openai = new OpenAI();
	// Walk every page of the model list. NOTE: the SDK's getNextPage()
	// returns a NEW page object rather than mutating the current one, so
	// `page` must be reassigned each iteration — discarding the result
	// (as the original code did) loops forever over page 1.
	let page = await openai.models.list();
	while (true) {
		for (const model of page.data) {
			// The list endpoint returns sparse entries; retrieve() fetches
			// full details — log those, not the sparse list entry.
			const info = await openai.models.retrieve(model.id);
			console.log(JSON.stringify(info, null, 2));
			console.log("---");
		}
		if (!page.hasNextPage()) break;
		page = await page.getNextPage();
	}
}

await main();

View file

@ -340,11 +340,11 @@ describe("AI Providers E2E Tests", () => {
});
});
describe.skipIf(!process.env.ANTHROPIC_OAUTH_TOKEN)("Anthropic Provider (claude-sonnet-4-0)", () => {
describe.skipIf(!process.env.ANTHROPIC_OAUTH_TOKEN)("Anthropic Provider (claude-sonnet-4-20250514)", () => {
let llm: AnthropicLLM;
beforeAll(() => {
llm = new AnthropicLLM(getModel("anthropic", "claude-sonnet-4-0")!, process.env.ANTHROPIC_OAUTH_TOKEN!);
llm = new AnthropicLLM(getModel("anthropic", "claude-sonnet-4-20250514")!, process.env.ANTHROPIC_OAUTH_TOKEN!);
});
it("should complete basic text generation", async () => {