feat(ai): Add Amazon Bedrock provider (#494)

Adds support for Amazon Bedrock with Claude models including:
- Full streaming support via Converse API
- Reasoning/thinking support for Claude models
- Cross-region inference model ID handling
- Multiple AWS credential sources (profile, IAM keys, API keys)
- Image support in messages and tool results
- Unicode surrogate sanitization

Also adds 'Adding a New Provider' documentation to AGENTS.md and README.

Co-authored-by: nickchan2 <nickchan2@users.noreply.github.com>
Authored by: Mario Zechner, 2026-01-13 00:32:59 +01:00
parent 4f216d318f
commit fd268479a4
31 changed files with 3550 additions and 2593 deletions

View file

@@ -2,6 +2,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { stream } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi } from "../src/types.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
// Resolve OAuth tokens at module level (async, runs before tests)
@@ -44,7 +45,7 @@ async function testTokensOnAbort<TApi extends Api>(llm: Model<TApi>, options: Op
expect(msg.stopReason).toBe("aborted");
// OpenAI providers, OpenAI Codex, Gemini CLI, zai, and the GPT-OSS model on Antigravity only send usage in the final chunk,
// OpenAI providers, OpenAI Codex, Gemini CLI, zai, Amazon Bedrock, and the GPT-OSS model on Antigravity only send usage in the final chunk,
// so when aborted they have no token stats. Anthropic and Google send usage information early in the stream.
if (
llm.api === "openai-completions" ||
@@ -52,6 +53,7 @@ async function testTokensOnAbort<TApi extends Api>(llm: Model<TApi>, options: Op
llm.api === "openai-codex-responses" ||
llm.provider === "google-gemini-cli" ||
llm.provider === "zai" ||
llm.provider === "amazon-bedrock" ||
(llm.provider === "google-antigravity" && llm.id.includes("gpt-oss"))
) {
expect(msg.usage.input).toBe(0);
@@ -230,4 +232,12 @@ describe("Token Statistics on Abort", () => {
},
);
});
// Entire suite is skipped when hasBedrockCredentials() reports no usable credentials,
// so CI without AWS access does not fail on these network-bound tests.
describe.skipIf(!hasBedrockCredentials())("Amazon Bedrock Provider", () => {
// Cross-region inference model ID ("global." prefix) — NOTE(review): assumes this
// Claude Sonnet model is enabled for the test account; verify against Bedrock console.
const llm = getModel("amazon-bedrock", "global.anthropic.claude-sonnet-4-5-20250929-v1:0");
// retry: 3 — presumably because aborting mid-stream is timing-sensitive and can flake;
// 30s timeout caps each attempt.
it("should include token stats when aborted mid-stream", { retry: 3, timeout: 30000 }, async () => {
await testTokensOnAbort(llm);
});
});
});