Merge branch 'main' into fix/chutes-ai-provider-400-error

This commit is contained in:
butelo 2025-11-20 15:04:29 +01:00 committed by GitHub
commit b76f7a0f88
63 changed files with 4781 additions and 3540 deletions

View file

@@ -460,11 +460,20 @@ function convertMessages(messages: Message[], model: Model<"anthropic-messages">
});
} else if (block.type === "thinking") {
if (block.thinking.trim().length === 0) continue;
blocks.push({
type: "thinking",
thinking: sanitizeSurrogates(block.thinking),
signature: block.thinkingSignature || "",
});
// If thinking signature is missing/empty (e.g., from aborted stream),
// convert to text block to avoid API rejection
if (!block.thinkingSignature || block.thinkingSignature.trim().length === 0) {
blocks.push({
type: "text",
text: sanitizeSurrogates(`<thinking>\n${block.thinking}\n</thinking>`),
});
} else {
blocks.push({
type: "thinking",
thinking: sanitizeSurrogates(block.thinking),
signature: block.thinkingSignature,
});
}
} else if (block.type === "toolCall") {
blocks.push({
type: "tool_use",

View file

@@ -162,6 +162,7 @@ export const streamGoogle: StreamFunction<"google-generative-ai"> = (
id: toolCallId,
name: part.functionCall.name || "",
arguments: part.functionCall.args as Record<string, any>,
...(part.thoughtSignature && { thoughtSignature: part.thoughtSignature }),
};
// Validate tool arguments if tool definition is available
@@ -361,13 +362,17 @@ function convertMessages(model: Model<"google-generative-ai">, context: Context)
};
parts.push(thinkingPart);
} else if (block.type === "toolCall") {
parts.push({
const part: Part = {
functionCall: {
id: block.id,
name: block.name,
args: block.arguments,
},
});
};
if (block.thoughtSignature) {
part.thoughtSignature = block.thoughtSignature;
}
parts.push(part);
}
}

View file

@@ -273,7 +273,7 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
stream_options: { include_usage: true },
};
// Cerebras/xAI/Mistral/Chutes don't like the "store" field
// Cerebras/xAI/Mistral don't like the "store" field
if (
!model.baseUrl.includes("cerebras.ai") &&
!model.baseUrl.includes("api.x.ai") &&
@@ -284,8 +284,8 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
}
if (options?.maxTokens) {
// Mistral/Chutes use max_tokens instead of max_completion_tokens
if (model.baseUrl.includes("mistral.ai") || model.baseUrl.includes("chutes.ai")) {
// Mistral/Chutes use max_tokens instead of max_completion_tokens
if (model.baseUrl.includes("mistral.ai") || model.baseUrl.includes("chutes.ai")) {
(params as any).max_tokens = options?.maxTokens;
} else {
params.max_completion_tokens = options?.maxTokens;