fix(ai): omit strict for unsupported openai completions

This commit is contained in:
Mario Zechner 2026-02-02 00:44:55 +01:00
parent 5d6a7d6c34
commit ff0eb3ecd4
6 changed files with 94 additions and 29 deletions

View file

@@ -1,38 +1,40 @@
import { Type } from "@sinclair/typebox";
import { describe, expect, it, vi } from "vitest";
import { getModel } from "../src/models.js";
import { streamSimple } from "../src/stream.js";
import type { Tool } from "../src/types.js";
let lastParams: unknown;
const mockState = vi.hoisted(() => ({ lastParams: undefined as unknown }));
class FakeOpenAI {
chat = {
completions: {
create: async (params: unknown) => {
lastParams = params;
return {
async *[Symbol.asyncIterator]() {
yield {
choices: [{ delta: {}, finish_reason: "stop" }],
usage: {
prompt_tokens: 1,
completion_tokens: 1,
prompt_tokens_details: { cached_tokens: 0 },
completion_tokens_details: { reasoning_tokens: 0 },
},
};
},
};
vi.mock("openai", () => {
class FakeOpenAI {
chat = {
completions: {
create: async (params: unknown) => {
mockState.lastParams = params;
return {
async *[Symbol.asyncIterator]() {
yield {
choices: [{ delta: {}, finish_reason: "stop" }],
usage: {
prompt_tokens: 1,
completion_tokens: 1,
prompt_tokens_details: { cached_tokens: 0 },
completion_tokens_details: { reasoning_tokens: 0 },
},
};
},
};
},
},
},
};
}
};
}
vi.mock("openai", () => ({ default: FakeOpenAI }));
return { default: FakeOpenAI };
});
describe("openai-completions tool_choice", () => {
it("forwards toolChoice from simple options to payload", async () => {
const { streamSimple } = await import("../src/stream.js");
const { getModel } = await import("../src/models.js");
const { compat: _compat, ...baseModel } = getModel("openai", "gpt-4o-mini")!;
const model = { ...baseModel, api: "openai-completions" } as const;
const tools: Tool[] = [
@@ -67,9 +69,54 @@ describe("openai-completions tool_choice", () => {
} as unknown as Parameters<typeof streamSimple>[2],
).result();
const params = (payload ?? lastParams) as { tool_choice?: string; tools?: unknown[] };
const params = (payload ?? mockState.lastParams) as { tool_choice?: string; tools?: unknown[] };
expect(params.tool_choice).toBe("required");
expect(Array.isArray(params.tools)).toBe(true);
expect(params.tools?.length ?? 0).toBeGreaterThan(0);
});
it("omits strict when compat disables strict mode", async () => {
const { compat: _compat, ...baseModel } = getModel("openai", "gpt-4o-mini")!;
const model = {
...baseModel,
api: "openai-completions",
compat: { supportsStrictMode: false },
} as const;
const tools: Tool[] = [
{
name: "ping",
description: "Ping tool",
parameters: Type.Object({
ok: Type.Boolean(),
}),
},
];
let payload: unknown;
await streamSimple(
model,
{
messages: [
{
role: "user",
content: "Call ping with ok=true",
timestamp: Date.now(),
},
],
tools,
},
{
apiKey: "test",
onPayload: (params: unknown) => {
payload = params;
},
} as unknown as Parameters<typeof streamSimple>[2],
).result();
const params = (payload ?? mockState.lastParams) as { tools?: Array<{ function?: Record<string, unknown> }> };
const tool = params.tools?.[0]?.function;
expect(tool).toBeTruthy();
expect(tool?.strict).toBeUndefined();
expect("strict" in (tool ?? {})).toBe(false);
});
});

View file

@@ -32,6 +32,7 @@ const compat: Required<OpenAICompletionsCompat> = {
thinkingFormat: "openai",
openRouterRouting: {},
vercelGatewayRouting: {},
supportsStrictMode: true,
};
function buildToolResult(toolCallId: string, timestamp: number): ToolResultMessage {