Add MiniMax provider support (#656 by @dannote)

- Add minimax to KnownProvider and Api types
- Add MINIMAX_API_KEY to getEnvApiKey()
- Generate MiniMax-M2 and MiniMax-M2.1 models
- Add context overflow detection pattern
- Add tests to all required test files
- Update README and CHANGELOG with attribution

Also fixes:
- Bedrock duplicate toolResult ID when content has multiple blocks
- Sandbox extension unused parameter lint warning
This commit is contained in:
Mario Zechner 2026-01-13 02:27:09 +01:00
parent edc576024f
commit 8af8d0d672
20 changed files with 233 additions and 31 deletions

View file

@ -160,6 +160,18 @@ describe("AI Providers Abort Tests", () => {
});
});
// Live abort tests against MiniMax; skipped unless MINIMAX_API_KEY is set.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider Abort", () => {
	const model = getModel("minimax", "MiniMax-M2.1");
	// Network-dependent, so allow a few retries before failing.
	const flaky = { retry: 3 };

	it("should abort mid-stream", flaky, async () => {
		await testAbortSignal(model);
	});

	it("should handle immediate abort", flaky, async () => {
		await testImmediateAbort(model);
	});
});
// Google Gemini CLI / Antigravity share the same provider, so one test covers both
describe("Google Gemini CLI Provider Abort", () => {
it.skipIf(!geminiCliToken)("should abort mid-stream", { retry: 3 }, async () => {

View file

@ -396,6 +396,22 @@ describe("Context overflow error handling", () => {
}, 120000);
});
// =============================================================================
// MiniMax
// Expected error pattern: not yet pinned down — TODO(review): capture the
// provider's actual overflow error message; for now detection relies on the
// generic isContextOverflow() check against the model's contextWindow.
// =============================================================================
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax", () => {
// Forces a context overflow and asserts it surfaces as an "error" stop reason
// that isContextOverflow() classifies as an overflow. 120s timeout because the
// oversized request must round-trip to the live API.
it("MiniMax-M2.1 - should detect overflow via isContextOverflow", async () => {
const model = getModel("minimax", "MiniMax-M2.1");
const result = await testContextOverflow(model, process.env.MINIMAX_API_KEY!);
logResult(result);
expect(result.stopReason).toBe("error");
expect(isContextOverflow(result.response, model.contextWindow)).toBe(true);
}, 120000);
});
// =============================================================================
// OpenRouter - Multiple backend providers
// Expected pattern: "maximum context length is X tokens"

View file

@ -322,6 +322,26 @@ describe("AI Providers Empty Message Tests", () => {
});
});
// Degenerate message shapes (empty/whitespace content) must not break the
// MiniMax provider; live tests, skipped without MINIMAX_API_KEY.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider Empty Messages", () => {
	const model = getModel("minimax", "MiniMax-M2.1");
	const flaky = { retry: 3, timeout: 30000 };

	it("should handle empty content array", flaky, async () => {
		await testEmptyMessage(model);
	});
	it("should handle empty string content", flaky, async () => {
		await testEmptyStringMessage(model);
	});
	it("should handle whitespace-only content", flaky, async () => {
		await testWhitespaceOnlyMessage(model);
	});
	it("should handle empty assistant message in conversation", flaky, async () => {
		await testEmptyAssistantMessage(model);
	});
});
describe.skipIf(!hasBedrockCredentials())("Amazon Bedrock Provider Empty Messages", () => {
const llm = getModel("amazon-bedrock", "global.anthropic.claude-sonnet-4-5-20250929-v1:0");

View file

@ -699,6 +699,30 @@ describe("Generate E2E Tests", () => {
});
});
// End-to-end generation coverage for MiniMax (served through the Anthropic
// Messages-compatible endpoint); skipped unless MINIMAX_API_KEY is present.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider (MiniMax-M2.1 via Anthropic Messages)", () => {
	const model = getModel("minimax", "MiniMax-M2.1");
	const flaky = { retry: 3 };

	it("should complete basic text generation", flaky, async () => {
		await basicTextGeneration(model);
	});
	it("should handle tool calling", flaky, async () => {
		await handleToolCall(model);
	});
	it("should handle streaming", flaky, async () => {
		await handleStreaming(model);
	});
	it("should handle thinking mode", flaky, async () => {
		await handleThinking(model, { thinkingEnabled: true, thinkingBudgetTokens: 2048 });
	});
	it("should handle multi-turn with thinking and tools", flaky, async () => {
		await multiTurn(model, { thinkingEnabled: true, thinkingBudgetTokens: 2048 });
	});
});
// =========================================================================
// OAuth-based providers (credentials from ~/.pi/agent/oauth.json)
// Tokens are resolved at module level (see oauthTokens above)

View file

@ -46,7 +46,8 @@ async function testTokensOnAbort<TApi extends Api>(llm: Model<TApi>, options: Op
expect(msg.stopReason).toBe("aborted");
// OpenAI providers, OpenAI Codex, Gemini CLI, zai, Amazon Bedrock, and the GPT-OSS model on Antigravity only send usage in the final chunk,
// so when aborted they have no token stats Anthropic and Google send usage information early in the stream
// so when aborted they have no token stats. Anthropic and Google send usage information early in the stream.
// MiniMax reports input tokens but not output tokens when aborted.
if (
llm.api === "openai-completions" ||
llm.api === "openai-responses" ||
@ -58,6 +59,10 @@ async function testTokensOnAbort<TApi extends Api>(llm: Model<TApi>, options: Op
) {
expect(msg.usage.input).toBe(0);
expect(msg.usage.output).toBe(0);
} else if (llm.provider === "minimax") {
// MiniMax reports input tokens early but output tokens only in final chunk
expect(msg.usage.input).toBeGreaterThan(0);
expect(msg.usage.output).toBe(0);
} else {
expect(msg.usage.input).toBeGreaterThan(0);
expect(msg.usage.output).toBeGreaterThan(0);
@ -146,6 +151,14 @@ describe("Token Statistics on Abort", () => {
});
});
// Token accounting when a MiniMax stream is aborted; expectations live in
// testTokensOnAbort. Skipped without MINIMAX_API_KEY.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider", () => {
	const model = getModel("minimax", "MiniMax-M2.1");
	const flaky = { retry: 3, timeout: 30000 };

	it("should include token stats when aborted mid-stream", flaky, async () => {
		await testTokensOnAbort(model);
	});
});
// =========================================================================
// OAuth-based providers (credentials from ~/.pi/agent/oauth.json)
// =========================================================================

View file

@ -171,6 +171,14 @@ describe("Tool Call Without Result Tests", () => {
});
});
// Dangling tool calls (no matching tool result) must be filtered before being
// sent to MiniMax; live test, skipped without MINIMAX_API_KEY.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider", () => {
	const llm = getModel("minimax", "MiniMax-M2.1");
	const flaky = { retry: 3, timeout: 30000 };

	it("should filter out tool calls without corresponding tool results", flaky, async () => {
		await testToolCallWithoutResult(llm);
	});
});
describe.skipIf(!hasBedrockCredentials())("Amazon Bedrock Provider", () => {
const model = getModel("amazon-bedrock", "global.anthropic.claude-sonnet-4-5-20250929-v1:0");

View file

@ -325,6 +325,29 @@ describe("totalTokens field", () => {
);
});
// =========================================================================
// MiniMax
// =========================================================================
// Verifies usage.totalTokens equals the sum of its component counts across a
// cache-priming request pair. Skipped without MINIMAX_API_KEY.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax", () => {
	const flaky = { retry: 3, timeout: 60000 };

	it("MiniMax-M2.1 - should return totalTokens equal to sum of components", flaky, async () => {
		const llm = getModel("minimax", "MiniMax-M2.1");
		console.log(`\nMiniMax / ${llm.id}:`);
		const apiKey = process.env.MINIMAX_API_KEY;
		const pair = await testTotalTokensWithCache(llm, { apiKey });
		logUsage("First request", pair.first);
		logUsage("Second request", pair.second);
		assertTotalTokensEqualsComponents(pair.first);
		assertTotalTokensEqualsComponents(pair.second);
	});
});
// =========================================================================
// OpenRouter - Multiple backend providers
// =========================================================================

View file

@ -618,6 +618,22 @@ describe("AI Providers Unicode Surrogate Pair Tests", () => {
});
});
// Unicode edge cases — emoji and unpaired surrogates in tool results — must
// survive the MiniMax round-trip; skipped unless MINIMAX_API_KEY is set.
describe.skipIf(!process.env.MINIMAX_API_KEY)("MiniMax Provider Unicode Handling", () => {
	const model = getModel("minimax", "MiniMax-M2.1");
	const flaky = { retry: 3, timeout: 30000 };

	it("should handle emoji in tool results", flaky, async () => {
		await testEmojiInToolResults(model);
	});
	it("should handle real-world LinkedIn comment data with emoji", flaky, async () => {
		await testRealWorldLinkedInData(model);
	});
	it("should handle unpaired high surrogate (0xD83D) in tool results", flaky, async () => {
		await testUnpairedHighSurrogate(model);
	});
});
describe.skipIf(!hasBedrockCredentials())("Amazon Bedrock Provider Unicode Handling", () => {
const llm = getModel("amazon-bedrock", "global.anthropic.claude-sonnet-4-5-20250929-v1:0");