refactor(ai): Implement unified model system with type-safe createLLM

- Add Model interface to types.ts with normalized structure
- Create type-safe generic createLLM function with provider-specific model constraints
- Generate models from OpenRouter API and models.dev data
- Strip provider prefixes for direct providers (google, openai, anthropic, xai)
- Keep full model IDs for OpenRouter-proxied models
- Clean separation: types.ts (Model interface), models.ts (factory logic), models.generated.ts (data)
- Remove old model scripts and unused dependencies
- Rename GeminiLLM to GoogleLLM for consistency
- Add tests for new providers (xAI, Groq, Cerebras, OpenRouter)
- Support 181 tool-capable models across 7 providers with full type safety
This commit is contained in:
Mario Zechner 2025-08-29 23:19:47 +02:00
parent 3f36051bc6
commit c7618db3f7
8 changed files with 409 additions and 418 deletions

View file

@@ -1,133 +1,97 @@
import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import { PROVIDERS } from "./models.generated.js";
import { AnthropicLLM } from "./providers/anthropic.js";
import { GoogleLLM } from "./providers/gemini.js";
import { OpenAICompletionsLLM } from "./providers/openai-completions.js";
import { OpenAIResponsesLLM } from "./providers/openai-responses.js";
import type { Model } from "./types.js";
// Content types a model can accept as input.
export type ModalityInput = "text" | "image" | "audio" | "video" | "pdf";
// Content types a model can produce as output.
export type ModalityOutput = "text" | "image" | "audio";
// Provider configuration with factory functions
// Maps each supported provider id to the env var holding its API key and a
// factory that builds the matching client class. xAI, Groq, Cerebras and
// OpenRouter all speak the OpenAI chat-completions protocol, differing only
// in base URL, so they share OpenAICompletionsLLM.
export const PROVIDER_CONFIG = {
google: {
envKey: "GEMINI_API_KEY",
create: (model: string, apiKey: string) => new GoogleLLM(model, apiKey),
},
openai: {
envKey: "OPENAI_API_KEY",
// OpenAI uses the newer Responses API rather than chat completions.
create: (model: string, apiKey: string) => new OpenAIResponsesLLM(model, apiKey),
},
anthropic: {
envKey: "ANTHROPIC_API_KEY",
create: (model: string, apiKey: string) => new AnthropicLLM(model, apiKey),
},
xai: {
envKey: "XAI_API_KEY",
create: (model: string, apiKey: string) => new OpenAICompletionsLLM(model, apiKey, "https://api.x.ai/v1"),
},
groq: {
envKey: "GROQ_API_KEY",
create: (model: string, apiKey: string) =>
new OpenAICompletionsLLM(model, apiKey, "https://api.groq.com/openai/v1"),
},
cerebras: {
envKey: "CEREBRAS_API_KEY",
create: (model: string, apiKey: string) => new OpenAICompletionsLLM(model, apiKey, "https://api.cerebras.ai/v1"),
},
openrouter: {
envKey: "OPENROUTER_API_KEY",
create: (model: string, apiKey: string) =>
new OpenAICompletionsLLM(model, apiKey, "https://openrouter.ai/api/v1"),
},
} as const;
export interface ModelInfo {
id: string;
name: string;
attachment: boolean;
reasoning: boolean;
temperature: boolean;
tool_call: boolean;
release_date: string;
last_updated: string;
modalities: {
input: ModalityInput[];
output: ModalityOutput[];
};
open_weights: boolean;
limit: {
context: number;
output: number;
};
knowledge?: string; // Optional - knowledge cutoff date
cost?: {
input: number;
output: number;
cache_read?: number;
cache_write?: number;
};
// Type mapping from provider to LLM implementation
// Used as the return type of createLLM so callers get the concrete client
// class for the provider they asked for.
export type ProviderToLLM = {
google: GoogleLLM;
openai: OpenAIResponsesLLM;
anthropic: AnthropicLLM;
xai: OpenAICompletionsLLM;
groq: OpenAICompletionsLLM;
cerebras: OpenAICompletionsLLM;
openrouter: OpenAICompletionsLLM;
};
// Extract model types for each provider
// Each alias is the union of model-id string literals declared for that
// provider in the generated PROVIDERS table (keyof typeof derivation keeps
// these in sync with models.generated.ts automatically).
export type GoogleModel = keyof typeof PROVIDERS.google.models;
export type OpenAIModel = keyof typeof PROVIDERS.openai.models;
export type AnthropicModel = keyof typeof PROVIDERS.anthropic.models;
export type XAIModel = keyof typeof PROVIDERS.xai.models;
export type GroqModel = keyof typeof PROVIDERS.groq.models;
export type CerebrasModel = keyof typeof PROVIDERS.cerebras.models;
export type OpenRouterModel = keyof typeof PROVIDERS.openrouter.models;
// Map providers to their model types
// Lookup table from provider id to its valid model-id union.
export type ProviderModels = {
google: GoogleModel;
openai: OpenAIModel;
anthropic: AnthropicModel;
xai: XAIModel;
groq: GroqModel;
cerebras: CerebrasModel;
openrouter: OpenRouterModel;
};
/**
 * Single generic factory for LLM clients.
 *
 * The generic constraint ties `model` to the set of model ids declared for
 * `provider` in the generated PROVIDERS table, so invalid combinations are
 * rejected at compile time; the return type is the provider's concrete
 * client class.
 *
 * @param provider - Provider id (e.g. "google", "openrouter").
 * @param model - A model id valid for that provider.
 * @param apiKey - Optional key; falls back to the provider's env variable.
 * @throws Error when the provider or model is unknown, or no API key is found.
 */
export function createLLM<P extends keyof typeof PROVIDERS, M extends keyof (typeof PROVIDERS)[P]["models"]>(
    provider: P,
    model: M,
    apiKey?: string,
): ProviderToLLM[P] {
    // Cast needed: TS cannot prove PROVIDER_CONFIG and PROVIDERS share keys.
    const factory = PROVIDER_CONFIG[provider as keyof typeof PROVIDER_CONFIG];
    if (!factory) throw new Error(`Unknown provider: ${provider}`);
    const entry = PROVIDERS[provider];
    if (!entry) throw new Error(`Unknown provider: ${provider}`);
    // Reject model ids that are not in the generated table at runtime too.
    const table = entry.models as Record<string, Model>;
    const found = table[model as string];
    if (!found) throw new Error(`Unknown model: ${String(model)} for provider ${provider}`);
    // An explicitly passed key takes precedence over the environment.
    const resolvedKey = apiKey || process.env[factory.envKey];
    if (!resolvedKey) throw new Error(`No API key provided for ${provider}. Set ${factory.envKey} or pass apiKey.`);
    return factory.create(model as string, resolvedKey) as ProviderToLLM[P];
}
// Provider metadata as loaded from models.json.
export interface ProviderInfo {
id: string;
// Env variable name(s) that may hold this provider's API key.
env?: string[];
npm?: string;
// Base API URL, when known.
api?: string;
name: string;
// Documentation URL, when known.
doc?: string;
// Model-id -> model metadata for every model this provider serves.
models: Record<string, ModelInfo>;
}
// Provider-id -> provider metadata, mirroring the top level of models.json.
export type ModelsData = Record<string, ProviderInfo>;
// Module-level cache so models.json is read from disk at most once per process.
let cachedModels: ModelsData | null = null;
/**
 * Load models data from models.json.
 * The file is loaded relative to this module's location and cached for the
 * lifetime of the process. On any failure an empty object is returned (and
 * deliberately not cached, so a later call can retry).
 */
export function loadModels(): ModelsData {
    if (cachedModels) {
        return cachedModels;
    }
    try {
        // Resolve models.json next to this module, not the CWD.
        const currentDir = dirname(fileURLToPath(import.meta.url));
        const modelsPath = join(currentDir, "models.json");
        const data = readFileSync(modelsPath, "utf-8");
        // NOTE(review): JSON.parse output is trusted to match ModelsData
        // without validation — consider a schema check if the file can be
        // edited by hand. Assigning to a local first avoids the original's
        // non-null assertion on cachedModels.
        const parsed = JSON.parse(data) as ModelsData;
        cachedModels = parsed;
        return parsed;
    } catch (error) {
        console.error("Failed to load models.json:", error);
        // Return empty providers object as fallback
        return {};
    }
}
/**
 * Get information about a specific model.
 * Scans every provider's model table and returns the first match,
 * or undefined when no provider serves the given id.
 */
export function getModelInfo(modelId: string): ModelInfo | undefined {
    const providers = Object.values(loadModels());
    const owner = providers.find((p) => Boolean(p.models && p.models[modelId]));
    return owner?.models[modelId];
}
/**
 * Get all models for a specific provider.
 * Returns an empty array when the provider is unknown or has no models.
 */
export function getProviderModels(providerId: string): ModelInfo[] {
    const models = loadModels()[providerId]?.models;
    return models ? Object.values(models) : [];
}
/**
 * Get provider information by id, or undefined when unknown.
 */
export function getProviderInfo(providerId: string): ProviderInfo | undefined {
    return loadModels()[providerId];
}
/**
 * Check if a model supports thinking/reasoning.
 * Unknown model ids report false.
 */
export function supportsThinking(modelId: string): boolean {
    return getModelInfo(modelId)?.reasoning === true;
}
/**
 * Check if a model supports tool calling.
 * Unknown model ids report false.
 */
export function supportsTools(modelId: string): boolean {
    return getModelInfo(modelId)?.tool_call === true;
}
/**
 * Get all available providers loaded from models.json.
 */
export function getAllProviders(): ProviderInfo[] {
    return Object.values(loadModels());
}
// Re-export Model type for convenience
export type { Model };