fix(ai): Use API type instead of model for message compatibility checks

- Add getApi() method to all providers to identify the API type
- Add api field to AssistantMessage to track which API generated it
- Update transformMessages to check API compatibility instead of model
- Fixes issue where OpenAI Responses API failed when switching models
- Preserves thinking blocks and signatures when staying within same API
This commit is contained in:
Mario Zechner 2025-09-02 00:20:06 +02:00
parent 3007b7a5ac
commit 2cfd8ff3c3
6 changed files with 46 additions and 11 deletions

View file

@@ -77,10 +77,15 @@ export class AnthropicLLM implements LLM<AnthropicLLMOptions> {
return this.modelInfo;
}
/**
 * Identifies which wire API this provider speaks, so message
 * transformation can check API compatibility rather than model identity.
 *
 * @returns the constant API identifier `"anthropic-messages"`
 */
getApi(): string {
    const apiIdentifier = "anthropic-messages";
    return apiIdentifier;
}
async generate(context: Context, options?: AnthropicLLMOptions): Promise<AssistantMessage> {
const output: AssistantMessage = {
role: "assistant",
content: [],
api: this.getApi(),
provider: this.modelInfo.provider,
model: this.modelInfo.id,
usage: {
@@ -260,7 +265,7 @@ export class AnthropicLLM implements LLM<AnthropicLLMOptions> {
const params: MessageParam[] = [];
// Transform messages for cross-provider compatibility
const transformedMessages = transformMessages(messages, this.modelInfo);
const transformedMessages = transformMessages(messages, this.modelInfo, this.getApi());
for (const msg of transformedMessages) {
if (msg.role === "user") {
@@ -290,9 +295,12 @@ export class AnthropicLLM implements LLM<AnthropicLLMOptions> {
};
}
});
const filteredBlocks = !this.modelInfo?.input.includes("image")
? blocks.filter((b) => b.type !== "image")
: blocks;
params.push({
role: "user",
content: blocks,
content: filteredBlocks,
});
}
} else if (msg.role === "assistant") {