feat(ai): Add Amazon Bedrock provider (#494)

Adds support for Amazon Bedrock with Claude models including:
- Full streaming support via Converse API
- Reasoning/thinking support for Claude models
- Cross-region inference model ID handling
- Multiple AWS credential sources (profile, IAM keys, API keys)
- Image support in messages and tool results
- Unicode surrogate sanitization

Also adds 'Adding a New Provider' documentation to AGENTS.md and README.

Co-authored-by: nickchan2 <nickchan2@users.noreply.github.com>
This commit is contained in:
Mario Zechner 2026-01-13 00:32:59 +01:00
parent 4f216d318f
commit fd268479a4
31 changed files with 3550 additions and 2593 deletions

View file

@@ -2,6 +2,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete, stream } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi } from "../src/types.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
// Resolve OAuth tokens at module level (async, runs before tests)
@@ -66,6 +67,35 @@ async function testImmediateAbort<TApi extends Api>(llm: Model<TApi>, options: O
expect(response.stopReason).toBe("aborted");
}
/**
 * Verifies a provider can continue a conversation after an aborted turn:
 * an immediately-aborted request leaves an empty assistant message in the
 * context, and a follow-up request must still succeed with it present.
 */
async function testAbortThenNewMessage<TApi extends Api>(llm: Model<TApi>, options: OptionsForApi<TApi> = {}) {
    // Pre-abort the signal so the first request is cancelled before any content streams in.
    const preAborted = new AbortController();
    preAborted.abort();
    const context: Context = {
        messages: [{ role: "user", content: "Hello, how are you?", timestamp: Date.now() }],
    };
    const firstResponse = await complete(llm, context, { ...options, signal: preAborted.signal });
    expect(firstResponse.stopReason).toBe("aborted");
    // Nothing arrived before the abort, so the assistant message carries no content.
    expect(firstResponse.content.length).toBe(0);
    // Mirror the real coding agent: keep the aborted assistant turn in the
    // transcript, then append a fresh user message on top of it.
    context.messages.push(firstResponse, {
        role: "user",
        content: "What is 2 + 2?",
        timestamp: Date.now(),
    });
    // The provider must tolerate the dangling aborted message and answer normally.
    const secondResponse = await complete(llm, context, options);
    expect(secondResponse.stopReason).toBe("stop");
    expect(secondResponse.content.length).toBeGreaterThan(0);
}
describe("AI Providers Abort Tests", () => {
describe.skipIf(!process.env.GEMINI_API_KEY)("Google Provider Abort", () => {
const llm = getModel("google", "gemini-2.5-flash");
@@ -154,4 +184,20 @@ describe("AI Providers Abort Tests", () => {
await testImmediateAbort(llm, { apiKey: openaiCodexToken });
});
});
// Runs only when AWS credentials are available; the "global." prefix on the
// model id exercises the cross-region inference model-id handling.
describe.skipIf(!hasBedrockCredentials())("Amazon Bedrock Provider Abort", () => {
    const model = getModel("amazon-bedrock", "global.anthropic.claude-sonnet-4-5-20250929-v1:0");

    // Each case retries up to 3 times, matching the other provider suites.
    it("should abort mid-stream", { retry: 3 }, async () => {
        await testAbortSignal(model, { reasoning: "medium" });
    });

    it("should handle immediate abort", { retry: 3 }, async () => {
        await testImmediateAbort(model);
    });

    it("should handle abort then new message", { retry: 3 }, async () => {
        await testAbortThenNewMessage(model);
    });
});
});