feat(ai,coding-agent): add OpenCode Go provider support closes #1757

This commit is contained in:
Mario Zechner 2026-03-03 16:02:29 +01:00
parent 1912f0336b
commit 42579dd923
11 changed files with 107 additions and 22 deletions

View file

@@ -63,6 +63,8 @@ Unified LLM API with automatic model discovery, provider configuration, token an
- **Google Gemini CLI** (requires OAuth, see below)
- **Antigravity** (requires OAuth, see below)
- **Amazon Bedrock**
- **OpenCode Zen**
- **OpenCode Go**
- **Kimi For Coding** (Moonshot AI, uses Anthropic-compatible API)
- **Any OpenAI-compatible API**: Ollama, vLLM, LM Studio, etc.
@@ -905,6 +907,7 @@ In Node.js environments, you can set environment variables to avoid passing API
| Vercel AI Gateway | `AI_GATEWAY_API_KEY` |
| zAI | `ZAI_API_KEY` |
| MiniMax | `MINIMAX_API_KEY` |
| OpenCode Zen / OpenCode Go | `OPENCODE_API_KEY` |
| Kimi For Coding | `KIMI_API_KEY` |
| GitHub Copilot | `COPILOT_GITHUB_TOKEN` or `GH_TOKEN` or `GITHUB_TOKEN` |

View file

@@ -460,14 +460,21 @@ async function loadModelsDevData(): Promise<Model<any>[]> {
}
}
// Process OpenCode Zen models
// Process OpenCode models (Zen and Go)
// API mapping based on provider.npm field:
// - @ai-sdk/openai → openai-responses
// - @ai-sdk/anthropic → anthropic-messages
// - @ai-sdk/google → google-generative-ai
// - null/undefined/@ai-sdk/openai-compatible → openai-completions
if (data.opencode?.models) {
for (const [modelId, model] of Object.entries(data.opencode.models)) {
const opencodeVariants = [
{ key: "opencode", provider: "opencode", basePath: "https://opencode.ai/zen" },
{ key: "opencode-go", provider: "opencode-go", basePath: "https://opencode.ai/zen/go" },
] as const;
for (const variant of opencodeVariants) {
if (!data[variant.key]?.models) continue;
for (const [modelId, model] of Object.entries(data[variant.key].models)) {
const m = model as ModelsDevModel & { status?: string };
if (m.tool_call !== true) continue;
if (m.status === "deprecated") continue;
@@ -478,25 +485,25 @@ async function loadModelsDevData(): Promise<Model<any>[]> {
if (npm === "@ai-sdk/openai") {
api = "openai-responses";
baseUrl = "https://opencode.ai/zen/v1";
baseUrl = `${variant.basePath}/v1`;
} else if (npm === "@ai-sdk/anthropic") {
api = "anthropic-messages";
// Anthropic SDK appends /v1/messages to baseURL
baseUrl = "https://opencode.ai/zen";
baseUrl = variant.basePath;
} else if (npm === "@ai-sdk/google") {
api = "google-generative-ai";
baseUrl = "https://opencode.ai/zen/v1";
baseUrl = `${variant.basePath}/v1`;
} else {
// null, undefined, or @ai-sdk/openai-compatible
api = "openai-completions";
baseUrl = "https://opencode.ai/zen/v1";
baseUrl = `${variant.basePath}/v1`;
}
models.push({
id: modelId,
name: m.name || modelId,
api,
provider: "opencode",
provider: variant.provider,
baseUrl,
reasoning: m.reasoning === true,
input: m.modalities?.input?.includes("image") ? ["text", "image"] : ["text"],
@@ -657,11 +664,17 @@ async function generateModels() {
candidate.cost.cacheWrite = 6.25;
candidate.contextWindow = 200000;
}
if ((candidate.provider === "anthropic" || candidate.provider === "opencode") && candidate.id === "claude-opus-4-6") {
if (
(candidate.provider === "anthropic" || candidate.provider === "opencode" || candidate.provider === "opencode-go") &&
candidate.id === "claude-opus-4-6"
) {
candidate.contextWindow = 200000;
}
// opencode lists Claude Sonnet 4/4.5 with 1M context, actual limit is 200K
if (candidate.provider === "opencode" && (candidate.id === "claude-sonnet-4-5" || candidate.id === "claude-sonnet-4")) {
// OpenCode variants list Claude Sonnet 4/4.5 with 1M context, actual limit is 200K
if (
(candidate.provider === "opencode" || candidate.provider === "opencode-go") &&
(candidate.id === "claude-sonnet-4-5" || candidate.id === "claude-sonnet-4")
) {
candidate.contextWindow = 200000;
}
}

View file

@@ -113,6 +113,7 @@ export function getEnvApiKey(provider: any): string | undefined {
"minimax-cn": "MINIMAX_CN_API_KEY",
huggingface: "HF_TOKEN",
opencode: "OPENCODE_API_KEY",
"opencode-go": "OPENCODE_API_KEY",
"kimi-coding": "KIMI_API_KEY",
};

View file

@@ -6155,6 +6155,59 @@ export const MODELS = {
maxTokens: 131072,
} satisfies Model<"openai-completions">,
},
"opencode-go": {
"glm-5": {
id: "glm-5",
name: "GLM-5",
api: "openai-completions",
provider: "opencode-go",
baseUrl: "https://opencode.ai/zen/go/v1",
reasoning: true,
input: ["text"],
cost: {
input: 1,
output: 3.2,
cacheRead: 0.2,
cacheWrite: 0,
},
contextWindow: 204800,
maxTokens: 131072,
} satisfies Model<"openai-completions">,
"kimi-k2.5": {
id: "kimi-k2.5",
name: "Kimi K2.5",
api: "openai-completions",
provider: "opencode-go",
baseUrl: "https://opencode.ai/zen/go/v1",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0.6,
output: 3,
cacheRead: 0.1,
cacheWrite: 0,
},
contextWindow: 262144,
maxTokens: 65536,
} satisfies Model<"openai-completions">,
"minimax-m2.5": {
id: "minimax-m2.5",
name: "MiniMax M2.5",
api: "anthropic-messages",
provider: "opencode-go",
baseUrl: "https://opencode.ai/zen/go",
reasoning: true,
input: ["text"],
cost: {
input: 0.3,
output: 1.2,
cacheRead: 0.03,
cacheWrite: 0,
},
contextWindow: 204800,
maxTokens: 131072,
} satisfies Model<"anthropic-messages">,
},
"openrouter": {
"ai21/jamba-large-1.7": {
id: "ai21/jamba-large-1.7",

View file

@@ -37,6 +37,7 @@ export type KnownProvider =
| "minimax-cn"
| "huggingface"
| "opencode"
| "opencode-go"
| "kimi-coding";
export type Provider = KnownProvider | string;

View file

@@ -101,6 +101,9 @@ const PROVIDER_MODEL_PAIRS: ProviderModelPair[] = [
{ provider: "opencode", model: "glm-4.7-free", label: "zen-glm-4.7-free" },
{ provider: "opencode", model: "gpt-5.2-codex", label: "zen-gpt-5.2-codex" },
{ provider: "opencode", model: "minimax-m2.1-free", label: "zen-minimax-m2.1-free" },
// OpenCode Go
{ provider: "opencode-go", model: "kimi-k2.5", label: "go-kimi-k2.5" },
{ provider: "opencode-go", model: "minimax-m2.5", label: "go-minimax-m2.5" },
];
// Cached context structure

View file

@@ -3,17 +3,23 @@ import { MODELS } from "../src/models.generated.js";
import { complete } from "../src/stream.js";
import type { Model } from "../src/types.js";
describe.skipIf(!process.env.OPENCODE_API_KEY)("OpenCode Zen Models Smoke Test", () => {
const zenModels = Object.values(MODELS.opencode);
describe.skipIf(!process.env.OPENCODE_API_KEY)("OpenCode Models Smoke Test", () => {
const providers = [
{ key: "opencode", label: "OpenCode Zen" },
{ key: "opencode-go", label: "OpenCode Go" },
] as const;
zenModels.forEach((model) => {
it(`${model.id}`, async () => {
const response = await complete(model as Model<any>, {
messages: [{ role: "user", content: "Say hello.", timestamp: Date.now() }],
});
providers.forEach(({ key, label }) => {
const providerModels = Object.values(MODELS[key]);
providerModels.forEach((model) => {
it(`${label}: ${model.id}`, async () => {
const response = await complete(model as Model<any>, {
messages: [{ role: "user", content: "Say hello.", timestamp: Date.now() }],
});
expect(response.content).toBeTruthy();
expect(response.stopReason).toBe("stop");
}, 60000);
expect(response.content).toBeTruthy();
expect(response.stopReason).toBe("stop");
}, 60000);
});
});
});

View file

@@ -98,6 +98,7 @@ For each built-in provider, pi maintains a list of tool-capable models, updated
- Vercel AI Gateway
- ZAI
- OpenCode Zen
- OpenCode Go
- Hugging Face
- Kimi For Coding
- MiniMax

View file

@@ -65,6 +65,7 @@ pi
| Vercel AI Gateway | `AI_GATEWAY_API_KEY` | `vercel-ai-gateway` |
| ZAI | `ZAI_API_KEY` | `zai` |
| OpenCode Zen | `OPENCODE_API_KEY` | `opencode` |
| OpenCode Go | `OPENCODE_API_KEY` | `opencode-go` |
| Hugging Face | `HF_TOKEN` | `huggingface` |
| Kimi For Coding | `KIMI_API_KEY` | `kimi-coding` |
| MiniMax | `MINIMAX_API_KEY` | `minimax` |
@@ -81,7 +82,8 @@ Store credentials in `~/.pi/agent/auth.json`:
"anthropic": { "type": "api_key", "key": "sk-ant-..." },
"openai": { "type": "api_key", "key": "sk-..." },
"google": { "type": "api_key", "key": "..." },
"opencode": { "type": "api_key", "key": "..." }
"opencode": { "type": "api_key", "key": "..." },
"opencode-go": { "type": "api_key", "key": "..." }
}
```

View file

@@ -291,6 +291,7 @@ ${chalk.bold("Environment Variables:")}
ZAI_API_KEY - ZAI API key
MISTRAL_API_KEY - Mistral API key
MINIMAX_API_KEY - MiniMax API key
OPENCODE_API_KEY - OpenCode Zen/OpenCode Go API key
KIMI_API_KEY - Kimi For Coding API key
AWS_PROFILE - AWS profile for Amazon Bedrock
AWS_ACCESS_KEY_ID - AWS access key for Amazon Bedrock

View file

@@ -33,6 +33,7 @@ export const defaultModelPerProvider: Record<KnownProvider, string> = {
"minimax-cn": "MiniMax-M2.1",
huggingface: "moonshotai/Kimi-K2.5",
opencode: "claude-opus-4-6",
"opencode-go": "kimi-k2.5",
"kimi-coding": "kimi-k2-thinking",
};