diff --git a/packages/ai/CHANGELOG.md b/packages/ai/CHANGELOG.md index cd869ace..6eff993c 100644 --- a/packages/ai/CHANGELOG.md +++ b/packages/ai/CHANGELOG.md @@ -5,6 +5,7 @@ ### Added - Added Claude Opus 4.6 model to the generated model catalog +- Added GPT-5.3 Codex model to the generated model catalog (OpenAI Codex provider only) ## [0.51.6] - 2026-02-04 diff --git a/packages/ai/scripts/generate-models.ts b/packages/ai/scripts/generate-models.ts index 2a62810a..3771361c 100644 --- a/packages/ai/scripts/generate-models.ts +++ b/packages/ai/scripts/generate-models.ts @@ -791,6 +791,18 @@ async function generateModels() { contextWindow: CODEX_CONTEXT, maxTokens: CODEX_MAX_TOKENS, }, + { + id: "gpt-5.3-codex", + name: "GPT-5.3 Codex", + api: "openai-codex-responses", + provider: "openai-codex", + baseUrl: CODEX_BASE_URL, + reasoning: true, + input: ["text", "image"], + cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 }, + contextWindow: CODEX_CONTEXT, + maxTokens: CODEX_MAX_TOKENS, + }, ]; allModels.push(...codexModels); diff --git a/packages/ai/src/models.generated.ts b/packages/ai/src/models.generated.ts index d614d4dd..b242cbc5 100644 --- a/packages/ai/src/models.generated.ts +++ b/packages/ai/src/models.generated.ts @@ -5,6 +5,23 @@ import type { Model } from "./types.js"; export const MODELS = { "amazon-bedrock": { + "amazon.nova-2-lite-v1:0": { + id: "amazon.nova-2-lite-v1:0", + name: "Nova 2 Lite", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 0.33, + output: 2.75, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, "amazon.nova-lite-v1:0": { id: "amazon.nova-lite-v1:0", name: "Nova Lite", @@ -39,6 +56,23 @@ export const MODELS = { contextWindow: 128000, maxTokens: 8192, } satisfies Model<"bedrock-converse-stream">, + 
"amazon.nova-premier-v1:0": { + id: "amazon.nova-premier-v1:0", + name: "Nova Premier", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 2.5, + output: 12.5, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 1000000, + maxTokens: 16384, + } satisfies Model<"bedrock-converse-stream">, "amazon.nova-pro-v1:0": { id: "amazon.nova-pro-v1:0", name: "Nova Pro", @@ -56,6 +90,40 @@ export const MODELS = { contextWindow: 300000, maxTokens: 8192, } satisfies Model<"bedrock-converse-stream">, + "amazon.titan-text-express-v1": { + id: "amazon.titan-text-express-v1", + name: "Titan Text G1 - Express", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text"], + cost: { + input: 0.2, + output: 0.6, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "amazon.titan-text-express-v1:0:8k": { + id: "amazon.titan-text-express-v1:0:8k", + name: "Titan Text G1 - Express", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text"], + cost: { + input: 0.2, + output: 0.6, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, "anthropic.claude-3-5-haiku-20241022-v1:0": { id: "anthropic.claude-3-5-haiku-20241022-v1:0", name: "Claude Haiku 3.5", @@ -107,6 +175,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 8192, } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-3-7-sonnet-20250219-v1:0": { + id: "anthropic.claude-3-7-sonnet-20250219-v1:0", + name: "Claude Sonnet 3.7", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: 
"https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 3, + output: 15, + cacheRead: 0.3, + cacheWrite: 3.75, + }, + contextWindow: 200000, + maxTokens: 8192, + } satisfies Model<"bedrock-converse-stream">, "anthropic.claude-3-haiku-20240307-v1:0": { id: "anthropic.claude-3-haiku-20240307-v1:0", name: "Claude Haiku 3", @@ -158,6 +243,125 @@ export const MODELS = { contextWindow: 200000, maxTokens: 4096, } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-haiku-4-5-20251001-v1:0": { + id: "anthropic.claude-haiku-4-5-20251001-v1:0", + name: "Claude Haiku 4.5", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 1, + output: 5, + cacheRead: 0.1, + cacheWrite: 1.25, + }, + contextWindow: 200000, + maxTokens: 64000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-opus-4-1-20250805-v1:0": { + id: "anthropic.claude-opus-4-1-20250805-v1:0", + name: "Claude Opus 4.1", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 15, + output: 75, + cacheRead: 1.5, + cacheWrite: 18.75, + }, + contextWindow: 200000, + maxTokens: 32000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-opus-4-20250514-v1:0": { + id: "anthropic.claude-opus-4-20250514-v1:0", + name: "Claude Opus 4", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 15, + output: 75, + cacheRead: 1.5, + cacheWrite: 18.75, + }, + contextWindow: 200000, + maxTokens: 32000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-opus-4-5-20251101-v1:0": { + id: 
"anthropic.claude-opus-4-5-20251101-v1:0", + name: "Claude Opus 4.5", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 64000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-opus-4-6-v1:0": { + id: "anthropic.claude-opus-4-6-v1:0", + name: "Claude Opus 4.6", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 128000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-sonnet-4-20250514-v1:0": { + id: "anthropic.claude-sonnet-4-20250514-v1:0", + name: "Claude Sonnet 4", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 3, + output: 15, + cacheRead: 0.3, + cacheWrite: 3.75, + }, + contextWindow: 200000, + maxTokens: 64000, + } satisfies Model<"bedrock-converse-stream">, + "anthropic.claude-sonnet-4-5-20250929-v1:0": { + id: "anthropic.claude-sonnet-4-5-20250929-v1:0", + name: "Claude Sonnet 4.5", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 3, + output: 15, + cacheRead: 0.3, + cacheWrite: 3.75, + }, + contextWindow: 200000, + maxTokens: 64000, + } satisfies Model<"bedrock-converse-stream">, "cohere.command-r-plus-v1:0": { id: "cohere.command-r-plus-v1:0", name: "Command R+", @@ -192,6 +396,23 @@ export const MODELS = { contextWindow: 128000, maxTokens: 4096, } satisfies 
Model<"bedrock-converse-stream">, + "deepseek.r1-v1:0": { + id: "deepseek.r1-v1:0", + name: "DeepSeek-R1", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text"], + cost: { + input: 1.35, + output: 5.4, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 32768, + } satisfies Model<"bedrock-converse-stream">, "deepseek.v3-v1:0": { id: "deepseek.v3-v1:0", name: "DeepSeek-V3.1", @@ -243,6 +464,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"bedrock-converse-stream">, + "eu.anthropic.claude-opus-4-6-v1:0": { + id: "eu.anthropic.claude-opus-4-6-v1:0", + name: "Claude Opus 4.6 (EU)", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 128000, + } satisfies Model<"bedrock-converse-stream">, "eu.anthropic.claude-sonnet-4-20250514-v1:0": { id: "eu.anthropic.claude-sonnet-4-20250514-v1:0", name: "Claude Sonnet 4 (EU)", @@ -311,6 +549,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"bedrock-converse-stream">, + "global.anthropic.claude-opus-4-6-v1:0": { + id: "global.anthropic.claude-opus-4-6-v1:0", + name: "Claude Opus 4.6 (Global)", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 128000, + } satisfies Model<"bedrock-converse-stream">, "global.anthropic.claude-sonnet-4-20250514-v1:0": { id: "global.anthropic.claude-sonnet-4-20250514-v1:0", name: "Claude Sonnet 4 (Global)", @@ -413,6 +668,125
@@ export const MODELS = { contextWindow: 128000, maxTokens: 4096, } satisfies Model<"bedrock-converse-stream">, + "meta.llama3-2-11b-instruct-v1:0": { + id: "meta.llama3-2-11b-instruct-v1:0", + name: "Llama 3.2 11B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 0.16, + output: 0.16, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama3-2-1b-instruct-v1:0": { + id: "meta.llama3-2-1b-instruct-v1:0", + name: "Llama 3.2 1B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text"], + cost: { + input: 0.1, + output: 0.1, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 131000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama3-2-3b-instruct-v1:0": { + id: "meta.llama3-2-3b-instruct-v1:0", + name: "Llama 3.2 3B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text"], + cost: { + input: 0.15, + output: 0.15, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 131000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama3-2-90b-instruct-v1:0": { + id: "meta.llama3-2-90b-instruct-v1:0", + name: "Llama 3.2 90B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 0.72, + output: 0.72, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama3-3-70b-instruct-v1:0": { + id: 
"meta.llama3-3-70b-instruct-v1:0", + name: "Llama 3.3 70B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text"], + cost: { + input: 0.72, + output: 0.72, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 4096, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama4-maverick-17b-instruct-v1:0": { + id: "meta.llama4-maverick-17b-instruct-v1:0", + name: "Llama 4 Maverick 17B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 0.24, + output: 0.97, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 1000000, + maxTokens: 16384, + } satisfies Model<"bedrock-converse-stream">, + "meta.llama4-scout-17b-instruct-v1:0": { + id: "meta.llama4-scout-17b-instruct-v1:0", + name: "Llama 4 Scout 17B Instruct", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: false, + input: ["text", "image"], + cost: { + input: 0.17, + output: 0.66, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 3500000, + maxTokens: 16384, + } satisfies Model<"bedrock-converse-stream">, "minimax.minimax-m2": { id: "minimax.minimax-m2", name: "MiniMax M2", @@ -736,57 +1110,6 @@ export const MODELS = { contextWindow: 262000, maxTokens: 262000, } satisfies Model<"bedrock-converse-stream">, - "us.amazon.nova-2-lite-v1:0": { - id: "us.amazon.nova-2-lite-v1:0", - name: "Nova 2 Lite (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 0.33, - output: 2.75, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 128000, - maxTokens: 4096, - } satisfies 
Model<"bedrock-converse-stream">, - "us.amazon.nova-premier-v1:0": { - id: "us.amazon.nova-premier-v1:0", - name: "Nova Premier (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: true, - input: ["text", "image"], - cost: { - input: 2.5, - output: 12.5, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 1000000, - maxTokens: 16384, - } satisfies Model<"bedrock-converse-stream">, - "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { - id: "us.anthropic.claude-3-7-sonnet-20250219-v1:0", - name: "Claude Sonnet 3.7 (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 3, - output: 15, - cacheRead: 0.3, - cacheWrite: 3.75, - }, - contextWindow: 200000, - maxTokens: 8192, - } satisfies Model<"bedrock-converse-stream">, "us.anthropic.claude-haiku-4-5-20251001-v1:0": { id: "us.anthropic.claude-haiku-4-5-20251001-v1:0", name: "Claude Haiku 4.5 (US)", @@ -855,6 +1178,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"bedrock-converse-stream">, + "us.anthropic.claude-opus-4-6-v1:0": { + id: "us.anthropic.claude-opus-4-6-v1:0", + name: "Claude Opus 4.6 (US)", + api: "bedrock-converse-stream", + provider: "amazon-bedrock", + baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 128000, + } satisfies Model<"bedrock-converse-stream">, "us.anthropic.claude-sonnet-4-20250514-v1:0": { id: "us.anthropic.claude-sonnet-4-20250514-v1:0", name: "Claude Sonnet 4 (US)", @@ -889,142 +1229,6 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"bedrock-converse-stream">, - "us.deepseek.r1-v1:0": { - id:
"us.deepseek.r1-v1:0", - name: "DeepSeek-R1 (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: true, - input: ["text"], - cost: { - input: 1.35, - output: 5.4, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 128000, - maxTokens: 32768, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama3-2-11b-instruct-v1:0": { - id: "us.meta.llama3-2-11b-instruct-v1:0", - name: "Llama 3.2 11B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 0.16, - output: 0.16, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 128000, - maxTokens: 4096, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama3-2-1b-instruct-v1:0": { - id: "us.meta.llama3-2-1b-instruct-v1:0", - name: "Llama 3.2 1B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text"], - cost: { - input: 0.1, - output: 0.1, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 131000, - maxTokens: 4096, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama3-2-3b-instruct-v1:0": { - id: "us.meta.llama3-2-3b-instruct-v1:0", - name: "Llama 3.2 3B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text"], - cost: { - input: 0.15, - output: 0.15, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 131000, - maxTokens: 4096, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama3-2-90b-instruct-v1:0": { - id: "us.meta.llama3-2-90b-instruct-v1:0", - name: "Llama 3.2 90B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: 
"https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 0.72, - output: 0.72, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 128000, - maxTokens: 4096, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama3-3-70b-instruct-v1:0": { - id: "us.meta.llama3-3-70b-instruct-v1:0", - name: "Llama 3.3 70B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text"], - cost: { - input: 0.72, - output: 0.72, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 128000, - maxTokens: 4096, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama4-maverick-17b-instruct-v1:0": { - id: "us.meta.llama4-maverick-17b-instruct-v1:0", - name: "Llama 4 Maverick 17B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 0.24, - output: 0.97, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 1000000, - maxTokens: 16384, - } satisfies Model<"bedrock-converse-stream">, - "us.meta.llama4-scout-17b-instruct-v1:0": { - id: "us.meta.llama4-scout-17b-instruct-v1:0", - name: "Llama 4 Scout 17B Instruct (US)", - api: "bedrock-converse-stream", - provider: "amazon-bedrock", - baseUrl: "https://bedrock-runtime.us-east-1.amazonaws.com", - reasoning: false, - input: ["text", "image"], - cost: { - input: 0.17, - output: 0.66, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 3500000, - maxTokens: 16384, - } satisfies Model<"bedrock-converse-stream">, }, "anthropic": { "claude-3-5-haiku-20241022": { @@ -1316,6 +1520,40 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"anthropic-messages">, + "claude-opus-4-6": { + id: "claude-opus-4-6", + name: "Claude Opus 4.6", + api: "anthropic-messages", + 
provider: "anthropic", + baseUrl: "https://api.anthropic.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 1000000, + maxTokens: 128000, + } satisfies Model<"anthropic-messages">, + "claude-opus-4-6-20260205": { + id: "claude-opus-4-6-20260205", + name: "Claude Opus 4.6", + api: "anthropic-messages", + provider: "anthropic", + baseUrl: "https://api.anthropic.com", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 200000, + maxTokens: 128000, + } satisfies Model<"anthropic-messages">, "claude-sonnet-4-0": { id: "claude-sonnet-4-0", name: "Claude Sonnet 4 (latest)", @@ -1828,6 +2066,23 @@ export const MODELS = { contextWindow: 400000, maxTokens: 128000, } satisfies Model<"azure-openai-responses">, + "gpt-5.3-codex": { + id: "gpt-5.3-codex", + name: "GPT-5.3 Codex", + api: "azure-openai-responses", + provider: "azure-openai-responses", + baseUrl: "", + reasoning: true, + input: ["text", "image"], + cost: { + input: 1.75, + output: 14, + cacheRead: 0.175, + cacheWrite: 0, + }, + contextWindow: 400000, + maxTokens: 128000, + } satisfies Model<"azure-openai-responses">, "o1": { id: "o1", name: "o1", @@ -2057,6 +2312,25 @@ export const MODELS = { contextWindow: 128000, maxTokens: 16000, } satisfies Model<"openai-completions">, + "claude-opus-4.6": { + id: "claude-opus-4.6", + name: "Claude Opus 4.6", + api: "openai-completions", + provider: "github-copilot", + baseUrl: "https://api.individual.githubcopilot.com", + headers: {"User-Agent":"GitHubCopilotChat/0.35.0","Editor-Version":"vscode/1.107.0","Editor-Plugin-Version":"copilot-chat/0.35.0","Copilot-Integration-Id":"vscode-chat"}, + compat: {"supportsStore":false,"supportsDeveloperRole":false,"supportsReasoningEffort":false}, + reasoning: true, + input: ["text", "image"], + cost: { + input: 0, + output: 0, + cacheRead: 0, + 
cacheWrite: 0, + }, + contextWindow: 128000, + maxTokens: 16000, + } satisfies Model<"openai-completions">, "claude-sonnet-4": { id: "claude-sonnet-4", name: "Claude Sonnet 4", @@ -4599,6 +4873,23 @@ export const MODELS = { contextWindow: 400000, maxTokens: 128000, } satisfies Model<"openai-responses">, + "gpt-5.3-codex": { + id: "gpt-5.3-codex", + name: "GPT-5.3 Codex", + api: "openai-responses", + provider: "openai", + baseUrl: "https://api.openai.com/v1", + reasoning: true, + input: ["text", "image"], + cost: { + input: 1.75, + output: 14, + cacheRead: 0.175, + cacheWrite: 0, + }, + contextWindow: 400000, + maxTokens: 128000, + } satisfies Model<"openai-responses">, "o1": { id: "o1", name: "o1", @@ -4822,6 +5113,23 @@ export const MODELS = { contextWindow: 272000, maxTokens: 128000, } satisfies Model<"openai-codex-responses">, + "gpt-5.3-codex": { + id: "gpt-5.3-codex", + name: "GPT-5.3 Codex", + api: "openai-codex-responses", + provider: "openai-codex", + baseUrl: "https://chatgpt.com/backend-api", + reasoning: true, + input: ["text", "image"], + cost: { + input: 1.75, + output: 14, + cacheRead: 0.175, + cacheWrite: 0, + }, + contextWindow: 272000, + maxTokens: 128000, + } satisfies Model<"openai-codex-responses">, }, "opencode": { "big-pickle": { @@ -4909,6 +5217,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"anthropic-messages">, + "claude-opus-4-6": { + id: "claude-opus-4-6", + name: "Claude Opus 4.6", + api: "anthropic-messages", + provider: "opencode", + baseUrl: "https://opencode.ai/zen", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 1000000, + maxTokens: 128000, + } satisfies Model<"anthropic-messages">, "claude-sonnet-4": { id: "claude-sonnet-4", name: "Claude Sonnet 4", @@ -5625,6 +5950,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"openai-completions">, + 
"anthropic/claude-opus-4.6": { + id: "anthropic/claude-opus-4.6", + name: "Anthropic: Claude Opus 4.6", + api: "openai-completions", + provider: "openrouter", + baseUrl: "https://openrouter.ai/api/v1", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 1000000, + maxTokens: 128000, + } satisfies Model<"openai-completions">, "anthropic/claude-sonnet-4": { id: "anthropic/claude-sonnet-4", name: "Anthropic: Claude Sonnet 4", @@ -5829,57 +6171,6 @@ export const MODELS = { contextWindow: 128000, maxTokens: 4000, } satisfies Model<"openai-completions">, - "deepcogito/cogito-v2-preview-llama-109b-moe": { - id: "deepcogito/cogito-v2-preview-llama-109b-moe", - name: "Cogito V2 Preview Llama 109B", - api: "openai-completions", - provider: "openrouter", - baseUrl: "https://openrouter.ai/api/v1", - reasoning: true, - input: ["text", "image"], - cost: { - input: 0.18, - output: 0.59, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 32767, - maxTokens: 4096, - } satisfies Model<"openai-completions">, - "deepcogito/cogito-v2-preview-llama-405b": { - id: "deepcogito/cogito-v2-preview-llama-405b", - name: "Deep Cogito: Cogito V2 Preview Llama 405B", - api: "openai-completions", - provider: "openrouter", - baseUrl: "https://openrouter.ai/api/v1", - reasoning: true, - input: ["text"], - cost: { - input: 3.5, - output: 3.5, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 32768, - maxTokens: 4096, - } satisfies Model<"openai-completions">, - "deepcogito/cogito-v2-preview-llama-70b": { - id: "deepcogito/cogito-v2-preview-llama-70b", - name: "Deep Cogito: Cogito V2 Preview Llama 70B", - api: "openai-completions", - provider: "openrouter", - baseUrl: "https://openrouter.ai/api/v1", - reasoning: true, - input: ["text"], - cost: { - input: 0.88, - output: 0.88, - cacheRead: 0, - cacheWrite: 0, - }, - contextWindow: 32768, - maxTokens: 4096, - } satisfies Model<"openai-completions">, 
"deepseek/deepseek-chat": { id: "deepseek/deepseek-chat", name: "DeepSeek: DeepSeek V3", @@ -5976,7 +6267,7 @@ export const MODELS = { cost: { input: 0.21, output: 0.7899999999999999, - cacheRead: 0.16799999999999998, + cacheRead: 0.1300000002, cacheWrite: 0, }, contextWindow: 163840, @@ -6234,8 +6525,8 @@ export const MODELS = { cacheRead: 0, cacheWrite: 0, }, - contextWindow: 96000, - maxTokens: 96000, + contextWindow: 128000, + maxTokens: 65536, } satisfies Model<"openai-completions">, "google/gemma-3-27b-it:free": { id: "google/gemma-3-27b-it:free", @@ -6485,12 +6776,12 @@ export const MODELS = { input: ["text"], cost: { input: 0.27, - output: 1.1, - cacheRead: 0, + output: 0.95, + cacheRead: 0.0299999997, cacheWrite: 0, }, contextWindow: 196608, - maxTokens: 196608, + maxTokens: 4096, } satisfies Model<"openai-completions">, "mistralai/codestral-2508": { id: "mistralai/codestral-2508", @@ -8304,7 +8595,7 @@ export const MODELS = { input: ["text"], cost: { input: 0.071, - output: 0.463, + output: 0.09999999999999999, cacheRead: 0, cacheWrite: 0, }, @@ -8490,8 +8781,8 @@ export const MODELS = { reasoning: false, input: ["text"], cost: { - input: 0.19999999999999998, - output: 1.5, + input: 0.07, + output: 0.3, cacheRead: 0, cacheWrite: 0, }, @@ -8627,8 +8918,8 @@ export const MODELS = { input: ["text", "image"], cost: { input: 0.19999999999999998, - output: 1.2, - cacheRead: 0, + output: 0.88, + cacheRead: 0.11, cacheWrite: 0, }, contextWindow: 262144, @@ -9267,7 +9558,7 @@ export const MODELS = { } satisfies Model<"anthropic-messages">, "alibaba/qwen-3-235b": { id: "alibaba/qwen-3-235b", - name: "Qwen3 235B A22b Instruct 2507", + name: "Qwen3-235B-A22B", api: "anthropic-messages", provider: "vercel-ai-gateway", baseUrl: "https://ai-gateway.vercel.sh", @@ -9367,6 +9658,23 @@ export const MODELS = { contextWindow: 160000, maxTokens: 32768, } satisfies Model<"anthropic-messages">, + "alibaba/qwen3-coder-next": { + id: "alibaba/qwen3-coder-next", + name: "Qwen3 
Coder Next", + api: "anthropic-messages", + provider: "vercel-ai-gateway", + baseUrl: "https://ai-gateway.vercel.sh", + reasoning: true, + input: ["text"], + cost: { + input: 0.5, + output: 1.2, + cacheRead: 0, + cacheWrite: 0, + }, + contextWindow: 256000, + maxTokens: 256000, + } satisfies Model<"anthropic-messages">, "alibaba/qwen3-coder-plus": { id: "alibaba/qwen3-coder-plus", name: "Qwen3 Coder Plus", @@ -9588,6 +9896,23 @@ export const MODELS = { contextWindow: 200000, maxTokens: 64000, } satisfies Model<"anthropic-messages">, + "anthropic/claude-opus-4.6": { + id: "anthropic/claude-opus-4.6", + name: "Claude Opus 4.6", + api: "anthropic-messages", + provider: "vercel-ai-gateway", + baseUrl: "https://ai-gateway.vercel.sh", + reasoning: true, + input: ["text", "image"], + cost: { + input: 5, + output: 25, + cacheRead: 0.5, + cacheWrite: 6.25, + }, + contextWindow: 1000000, + maxTokens: 128000, + } satisfies Model<"anthropic-messages">, "anthropic/claude-sonnet-4": { id: "anthropic/claude-sonnet-4", name: "Claude Sonnet 4", @@ -10056,13 +10381,13 @@ export const MODELS = { reasoning: true, input: ["text"], cost: { - input: 0.27, - output: 1.15, - cacheRead: 0, - cacheWrite: 0, + input: 0.3, + output: 1.2, + cacheRead: 0.03, + cacheWrite: 0.375, }, - contextWindow: 262114, - maxTokens: 262114, + contextWindow: 205000, + maxTokens: 205000, } satisfies Model<"anthropic-messages">, "minimax/minimax-m2.1": { id: "minimax/minimax-m2.1", @@ -10345,13 +10670,13 @@ export const MODELS = { reasoning: true, input: ["text", "image"], cost: { - input: 0.44999999999999996, + input: 0.5, output: 2.8, cacheRead: 0, cacheWrite: 0, }, - contextWindow: 262144, - maxTokens: 252144, + contextWindow: 256000, + maxTokens: 256000, } satisfies Model<"anthropic-messages">, "nvidia/nemotron-nano-12b-v2-vl": { id: "nvidia/nemotron-nano-12b-v2-vl", diff --git a/packages/coding-agent/CHANGELOG.md b/packages/coding-agent/CHANGELOG.md index e21b14f5..1628a8c5 100644 --- 
a/packages/coding-agent/CHANGELOG.md +++ b/packages/coding-agent/CHANGELOG.md @@ -13,6 +13,7 @@ - API keys in `auth.json` now support shell command resolution (`!command`) and environment variable lookup, matching the behavior in `models.json` - Added `minimal-mode.ts` example extension demonstrating how to override built-in tool rendering for a minimal display mode - Added Claude Opus 4.6 model to the model catalog +- Added GPT-5.3 Codex model to the model catalog (OpenAI Codex provider only) - Added SSH URL support for git packages ([#1287](https://github.com/badlogic/pi-mono/pull/1287) by [@markusn](https://github.com/markusn)) - Model selectors now display the selected model name ([#1275](https://github.com/badlogic/pi-mono/pull/1275) by [@haoqixu](https://github.com/haoqixu))