mirror of
https://github.com/getcompanion-ai/co-mono.git
synced 2026-04-19 14:01:15 +00:00
fix 400 error when trying to use the Chutes AI provider
This commit is contained in:
parent
147a850de2
commit
bd1731c9ba
1 changed files with 19 additions and 5 deletions
|
|
@ -273,13 +273,23 @@ function buildParams(model: Model<"openai-completions">, context: Context, optio
|
||||||
stream_options: { include_usage: true },
|
stream_options: { include_usage: true },
|
||||||
};
|
};
|
||||||
|
|
||||||
// Cerebras/xAI dont like the "store" field
|
// Cerebras/xAI/Mistral/Chutes dont like the "store" field
|
||||||
if (!model.baseUrl.includes("cerebras.ai") && !model.baseUrl.includes("api.x.ai")) {
|
if (
|
||||||
|
!model.baseUrl.includes("cerebras.ai") &&
|
||||||
|
!model.baseUrl.includes("api.x.ai") &&
|
||||||
|
!model.baseUrl.includes("mistral.ai") &&
|
||||||
|
!model.baseUrl.includes("chutes.ai")
|
||||||
|
) {
|
||||||
params.store = false;
|
params.store = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (options?.maxTokens) {
|
if (options?.maxTokens) {
|
||||||
params.max_completion_tokens = options?.maxTokens;
|
// Mistral/Chutes use max_tokens instead of max_completion_tokens
|
||||||
|
if (model.baseUrl.includes("mistral.ai") || model.baseUrl.includes("chutes.ai")) {
|
||||||
|
(params as any).max_tokens = options?.maxTokens;
|
||||||
|
} else {
|
||||||
|
params.max_completion_tokens = options?.maxTokens;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (options?.temperature !== undefined) {
|
if (options?.temperature !== undefined) {
|
||||||
|
|
@ -308,9 +318,13 @@ function convertMessages(model: Model<"openai-completions">, context: Context):
|
||||||
const transformedMessages = transformMessages(context.messages, model);
|
const transformedMessages = transformMessages(context.messages, model);
|
||||||
|
|
||||||
if (context.systemPrompt) {
|
if (context.systemPrompt) {
|
||||||
// Cerebras/xAi don't like the "developer" role
|
// Cerebras/xAi/Mistral/Chutes don't like the "developer" role
|
||||||
const useDeveloperRole =
|
const useDeveloperRole =
|
||||||
model.reasoning && !model.baseUrl.includes("cerebras.ai") && !model.baseUrl.includes("api.x.ai");
|
model.reasoning &&
|
||||||
|
!model.baseUrl.includes("cerebras.ai") &&
|
||||||
|
!model.baseUrl.includes("api.x.ai") &&
|
||||||
|
!model.baseUrl.includes("mistral.ai") &&
|
||||||
|
!model.baseUrl.includes("chutes.ai");
|
||||||
const role = useDeveloperRole ? "developer" : "system";
|
const role = useDeveloperRole ? "developer" : "system";
|
||||||
params.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });
|
params.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue