add Azure deployment name map and refresh generated models

This commit is contained in:
Markus Ylisiurunen 2026-01-21 22:28:34 +02:00 committed by Mario Zechner
parent 0789bcbab0
commit 085c378d34
13 changed files with 84 additions and 44 deletions

View file

@ -2,7 +2,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete, stream } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -142,7 +142,7 @@ describe("AI Providers Abort Tests", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider Abort", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should abort mid-stream", { retry: 3 }, async () => {

View file

@ -2,8 +2,27 @@
* Utility functions for Azure OpenAI tests
*/
/**
 * Parses a comma-separated "modelId=deploymentName" mapping (the format of
 * the AZURE_OPENAI_DEPLOYMENT_NAME_MAP env var) into a lookup Map.
 *
 * Malformed entries — missing "=", empty model id, or empty deployment
 * name — are silently skipped so a partially malformed value still yields
 * every valid mapping.
 *
 * @param value - Raw env var value, e.g. "gpt-4o-mini=my-deploy,o3=o3-prod".
 * @returns Map from model id to deployment name (empty when value is unset).
 */
function parseDeploymentNameMap(value: string | undefined): Map<string, string> {
  const map = new Map<string, string>();
  if (!value) return map;
  for (const entry of value.split(",")) {
    const trimmed = entry.trim();
    if (!trimmed) continue;
    // Split at the FIRST "=" only. Note: `trimmed.split("=", 2)` would be
    // wrong here — in JS the limit argument truncates the result array, so
    // "model=dep=v2" would silently lose "=v2" from the deployment name.
    const separatorIndex = trimmed.indexOf("=");
    if (separatorIndex === -1) continue;
    const modelId = trimmed.slice(0, separatorIndex).trim();
    const deploymentName = trimmed.slice(separatorIndex + 1).trim();
    if (!modelId || !deploymentName) continue;
    map.set(modelId, deploymentName);
  }
  return map;
}
/**
 * Reports whether the environment provides what Azure OpenAI tests need:
 * an API key plus either an explicit base URL or a resource name from
 * which the endpoint can be derived.
 */
export function hasAzureOpenAICredentials(): boolean {
  const env = process.env;
  if (!env.AZURE_OPENAI_API_KEY) return false;
  return Boolean(env.AZURE_OPENAI_BASE_URL || env.AZURE_OPENAI_RESOURCE_NAME);
}
/**
 * Looks up the Azure deployment name for a model id via the
 * AZURE_OPENAI_DEPLOYMENT_NAME_MAP environment variable.
 *
 * @param modelId - Model identifier, e.g. "gpt-4o-mini".
 * @returns The mapped deployment name, or undefined when the env var is
 *          unset or contains no entry for this model.
 */
export function resolveAzureDeploymentName(modelId: string): string | undefined {
  const rawMap = process.env.AZURE_OPENAI_DEPLOYMENT_NAME_MAP;
  if (!rawMap) return undefined;
  const deployments = parseDeploymentNameMap(rawMap);
  return deployments.get(modelId);
}

View file

@ -2,7 +2,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete } from "../src/stream.js";
import type { Api, AssistantMessage, Context, Model, OptionsForApi, UserMessage } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -205,7 +205,7 @@ describe("AI Providers Empty Message Tests", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider Empty Messages", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should handle empty content array", { retry: 3, timeout: 30000 }, async () => {

View file

@ -5,7 +5,7 @@ import { describe, expect, it } from "vitest";
import type { Api, Context, Model, Tool, ToolResultMessage } from "../src/index.js";
import { complete, getModel } from "../src/index.js";
import type { OptionsForApi } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -246,7 +246,7 @@ describe("Tool Results with Images", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider (gpt-4o-mini)", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should handle tool result with only image", { retry: 3, timeout: 30000 }, async () => {

View file

@ -8,7 +8,7 @@ import { getModel } from "../src/models.js";
import { complete, stream } from "../src/stream.js";
import type { Api, Context, ImageContent, Model, OptionsForApi, Tool, ToolResultMessage } from "../src/types.js";
import { StringEnum } from "../src/utils/typebox-helpers.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -509,7 +509,7 @@ describe("Generate E2E Tests", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider (gpt-4o-mini)", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should complete basic text generation", { retry: 3 }, async () => {

View file

@ -2,7 +2,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { stream } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -111,7 +111,7 @@ describe("Token Statistics on Abort", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should include token stats when aborted mid-stream", { retry: 3, timeout: 30000 }, async () => {

View file

@ -3,7 +3,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi, Tool } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -128,7 +128,7 @@ describe("Tool Call Without Result Tests", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider", () => {
const model = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(model.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should filter out tool calls without corresponding tool results", { retry: 3, timeout: 30000 }, async () => {

View file

@ -16,7 +16,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi, Usage } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -196,7 +196,7 @@ describe("totalTokens field", () => {
{ retry: 3, timeout: 60000 },
async () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
console.log(`\nAzure OpenAI Responses / ${llm.id}:`);

View file

@ -3,7 +3,7 @@ import { describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { complete } from "../src/stream.js";
import type { Api, Context, Model, OptionsForApi, ToolResultMessage } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { hasAzureOpenAICredentials, resolveAzureDeploymentName } from "./azure-utils.js";
import { hasBedrockCredentials } from "./bedrock-utils.js";
import { resolveApiKey } from "./oauth.js";
@ -332,7 +332,7 @@ describe("AI Providers Unicode Surrogate Pair Tests", () => {
describe.skipIf(!hasAzureOpenAICredentials())("Azure OpenAI Responses Provider Unicode Handling", () => {
const llm = getModel("azure-openai-responses", "gpt-4o-mini");
const azureDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME;
const azureDeploymentName = resolveAzureDeploymentName(llm.id);
const azureOptions = azureDeploymentName ? { azureDeploymentName } : {};
it("should handle emoji in tool results", { retry: 3, timeout: 30000 }, async () => {