fix(ai): filter deprecated OpenCode models from generation (#970)

Add status === 'deprecated' check for OpenCode Zen models, matching
the existing pattern used for GitHub Copilot models. This removes
deprecated models such as glm-4.7-free, grok-code, and
minimax-m2.1-free from the generated model catalog.
This commit is contained in:
Daniel Tatarkin 2026-01-26 17:56:13 -05:00 committed by GitHub
parent a5f603d704
commit 9f3eef65f8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 4 additions and 54 deletions

View file

@ -482,8 +482,9 @@ async function loadModelsDevData(): Promise<Model<any>[]> {
// - null/undefined/@ai-sdk/openai-compatible → openai-completions
if (data.opencode?.models) {
for (const [modelId, model] of Object.entries(data.opencode.models)) {
const m = model as ModelsDevModel;
const m = model as ModelsDevModel & { status?: string };
if (m.tool_call !== true) continue;
if (m.status === "deprecated") continue;
const npm = m.provider?.npm;
let api: Api;

View file

@ -4636,23 +4636,6 @@ export const MODELS = {
contextWindow: 204800,
maxTokens: 131072,
} satisfies Model<"openai-completions">,
"glm-4.7-free": {
id: "glm-4.7-free",
name: "GLM-4.7",
api: "openai-completions",
provider: "opencode",
baseUrl: "https://opencode.ai/zen/v1",
reasoning: true,
input: ["text"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 204800,
maxTokens: 131072,
} satisfies Model<"openai-completions">,
"gpt-5": {
id: "gpt-5",
name: "GPT-5",
@ -4806,23 +4789,6 @@ export const MODELS = {
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"grok-code": {
id: "grok-code",
name: "Grok Code Fast 1",
api: "openai-completions",
provider: "opencode",
baseUrl: "https://opencode.ai/zen/v1",
reasoning: true,
input: ["text"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 256000,
maxTokens: 256000,
} satisfies Model<"openai-completions">,
"kimi-k2": {
id: "kimi-k2",
name: "Kimi K2",
@ -4857,23 +4823,6 @@ export const MODELS = {
contextWindow: 262144,
maxTokens: 262144,
} satisfies Model<"openai-completions">,
"minimax-m2.1-free": {
id: "minimax-m2.1-free",
name: "MiniMax M2.1",
api: "anthropic-messages",
provider: "opencode",
baseUrl: "https://opencode.ai/zen",
reasoning: true,
input: ["text"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 204800,
maxTokens: 131072,
} satisfies Model<"anthropic-messages">,
"qwen3-coder": {
id: "qwen3-coder",
name: "Qwen3 Coder",
@ -5074,8 +5023,8 @@ export const MODELS = {
cost: {
input: 0.7999999999999999,
output: 4,
cacheRead: 0,
cacheWrite: 0,
cacheRead: 0.08,
cacheWrite: 1,
},
contextWindow: 200000,
maxTokens: 8192,