switch azure responses to base url config and v1 api

This commit is contained in:
Markus Ylisiurunen 2026-01-21 22:04:43 +02:00 committed by Mario Zechner
parent 01f559efc0
commit 391c93800c
5 changed files with 26 additions and 33 deletions

View file

@ -875,7 +875,7 @@ In Node.js environments, you can set environment variables to avoid passing API
| Provider | Environment Variable(s) |
|----------|------------------------|
| OpenAI | `OPENAI_API_KEY` |
| Azure OpenAI | `AZURE_OPENAI_API_KEY` + `AZURE_OPENAI_ENDPOINT` or `AZURE_OPENAI_RESOURCE_NAME` (optional `AZURE_OPENAI_API_VERSION`, `AZURE_OPENAI_DEPLOYMENT_NAME`) |
| Azure OpenAI | `AZURE_OPENAI_API_KEY` + `AZURE_OPENAI_BASE_URL` or `AZURE_OPENAI_RESOURCE_NAME` (optional `AZURE_OPENAI_API_VERSION`, `AZURE_OPENAI_DEPLOYMENT_NAME`) |
| Anthropic | `ANTHROPIC_API_KEY` or `ANTHROPIC_OAUTH_TOKEN` |
| Google | `GEMINI_API_KEY` |
| Vertex AI | `GOOGLE_CLOUD_PROJECT` (or `GCLOUD_PROJECT`) + `GOOGLE_CLOUD_LOCATION` + ADC |
@ -1048,7 +1048,7 @@ const response = await complete(model, {
**OpenAI Codex**: Requires a ChatGPT Plus or Pro subscription. Provides access to GPT-5.x Codex models with extended context windows and reasoning capabilities. The library automatically handles session-based prompt caching when `sessionId` is provided in stream options.
**Azure OpenAI (Responses)**: Uses the Responses API only. Set `AZURE_OPENAI_API_KEY` and either `AZURE_OPENAI_ENDPOINT` or `AZURE_OPENAI_RESOURCE_NAME`. Deployment names are treated as model IDs by default, override with `azureDeploymentName` or `AZURE_OPENAI_DEPLOYMENT_NAME`. Legacy deployment-based URLs are intentionally unsupported.
**Azure OpenAI (Responses)**: Uses the Responses API only. Set `AZURE_OPENAI_API_KEY` and either `AZURE_OPENAI_BASE_URL` (should end with `/openai/v1`) or `AZURE_OPENAI_RESOURCE_NAME`. Deployment names are treated as model IDs by default; override with `azureDeploymentName` or `AZURE_OPENAI_DEPLOYMENT_NAME`. Legacy deployment-based URLs are intentionally unsupported.
**GitHub Copilot**: If you get "The requested model is not supported" error, enable the model manually in VS Code: open Copilot Chat, click the model selector, select the model (warning icon), and click "Enable".

View file

@ -45,14 +45,13 @@ function shortHash(str: string): string {
return (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);
}
const DEFAULT_AZURE_API_VERSION = "2025-04-01-preview";
const DEFAULT_AZURE_API_VERSION = "v1";
// Azure OpenAI Responses-specific options
export interface AzureOpenAIResponsesOptions extends StreamOptions {
reasoningEffort?: "minimal" | "low" | "medium" | "high" | "xhigh";
reasoningSummary?: "auto" | "detailed" | "concise" | null;
azureApiVersion?: string;
azureEndpoint?: string;
azureResourceName?: string;
azureBaseUrl?: string;
azureDeploymentName?: string;
@ -332,47 +331,41 @@ export const streamAzureOpenAIResponses: StreamFunction<"azure-openai-responses"
return stream;
};
function normalizeAzureEndpoint(endpoint: string): string {
return endpoint.replace(/\/+$/, "");
// Strips every trailing "/" from the configured base URL so the SDK can
// append request paths without producing double slashes.
function normalizeAzureBaseUrl(baseUrl: string): string {
  let end = baseUrl.length;
  while (end > 0 && baseUrl.charAt(end - 1) === "/") {
    end -= 1;
  }
  return baseUrl.slice(0, end);
}
function getAzureEndpoint(options?: AzureOpenAIResponsesOptions): string | undefined {
const endpoint =
options?.azureEndpoint ||
(options?.azureResourceName ? `https://${options.azureResourceName}.openai.azure.com` : undefined) ||
process.env.AZURE_OPENAI_ENDPOINT ||
(process.env.AZURE_OPENAI_RESOURCE_NAME
? `https://${process.env.AZURE_OPENAI_RESOURCE_NAME}.openai.azure.com`
: undefined);
return endpoint ? normalizeAzureEndpoint(endpoint) : undefined;
// Derives the conventional Azure OpenAI v1 base URL for a resource name.
function buildDefaultBaseUrl(resourceName: string): string {
  const host = `${resourceName}.openai.azure.com`;
  return `https://${host}/openai/v1`;
}
// NOTE(review): this span is a rendered diff with removed (pre-change) and added
// (post-change) lines interleaved — it is not compilable as-is. Lines still
// referencing `endpoint` appear to be the removed version; labels below mark
// which side each ambiguous line belongs to (inferred from the commit message —
// confirm against the actual diff markers in the repository).
// Purpose (post-change): resolve the Azure OpenAI base URL and API version.
// Precedence: azureBaseUrl option > AZURE_OPENAI_BASE_URL env var > URL derived
// from the resource name > model.baseUrl; throws when none yields a base URL.
function resolveAzureConfig(
model: Model<"azure-openai-responses">,
options?: AzureOpenAIResponsesOptions,
// [removed] old return type carried an optional endpoint alongside baseUrl
): { baseUrl?: string; endpoint?: string; apiVersion: string } {
// [added] new return type: the base URL is always resolved (or an error thrown)
): { baseUrl: string; apiVersion: string } {
const apiVersion = options?.azureApiVersion || process.env.AZURE_OPENAI_API_VERSION || DEFAULT_AZURE_API_VERSION;
// [removed] old lookup did not consult the AZURE_OPENAI_BASE_URL env var
const baseUrl = options?.azureBaseUrl?.trim() || undefined;
const endpoint = getAzureEndpoint(options);
// [added] option value wins over the environment; blank strings fall through via ||
const baseUrl = options?.azureBaseUrl?.trim() || process.env.AZURE_OPENAI_BASE_URL?.trim() || undefined;
const resourceName = options?.azureResourceName || process.env.AZURE_OPENAI_RESOURCE_NAME;
let resolvedBaseUrl = baseUrl;
// [removed] endpoint-based state from the old code path
const resolvedEndpoint = endpoint;
if (!resolvedBaseUrl && !resolvedEndpoint && model.baseUrl) {
// [added] derive the default v1 base URL from the resource name when no URL is set
if (!resolvedBaseUrl && resourceName) {
resolvedBaseUrl = buildDefaultBaseUrl(resourceName);
}
// [added] final fallback: a base URL baked into the model definition
if (!resolvedBaseUrl && model.baseUrl) {
resolvedBaseUrl = model.baseUrl;
}
// [removed] old guard also accepted a bare endpoint
if (!resolvedBaseUrl && !resolvedEndpoint) {
if (!resolvedBaseUrl) {
throw new Error(
// [removed] old error text referenced the endpoint-style configuration
"Azure OpenAI endpoint is required. Set AZURE_OPENAI_ENDPOINT or AZURE_OPENAI_RESOURCE_NAME, or pass azureEndpoint, azureResourceName, azureBaseUrl, or model.baseUrl.",
"Azure OpenAI base URL is required. Set AZURE_OPENAI_BASE_URL or AZURE_OPENAI_RESOURCE_NAME, or pass azureBaseUrl, azureResourceName, or model.baseUrl.",
);
}
return {
baseUrl: resolvedBaseUrl,
// [removed] endpoint is no longer part of the returned shape
endpoint: resolvedEndpoint,
// [added] trailing slashes are stripped so callers can append request paths
baseUrl: normalizeAzureBaseUrl(resolvedBaseUrl),
apiVersion,
};
}
@ -393,14 +386,14 @@ function createClient(model: Model<"azure-openai-responses">, apiKey: string, op
Object.assign(headers, options.headers);
}
const { baseUrl, endpoint, apiVersion } = resolveAzureConfig(model, options);
const { baseUrl, apiVersion } = resolveAzureConfig(model, options);
return new AzureOpenAI({
apiKey,
apiVersion,
dangerouslyAllowBrowser: true,
defaultHeaders: headers,
...(baseUrl ? { baseURL: baseUrl } : { endpoint }),
baseURL: baseUrl,
});
}

View file

@ -4,6 +4,6 @@
// NOTE(review): rendered diff — both the removed (endpoint) and added (base URL)
// variants of this check appear interleaved; the first `return` ends the old
// version and the two lines after it are the replacement (confirm against the
// repository's actual diff markers).
// True when the environment has enough configuration for Azure OpenAI: an API
// key plus either an explicit URL or a resource name to derive one from.
export function hasAzureOpenAICredentials(): boolean {
const hasKey = !!process.env.AZURE_OPENAI_API_KEY;
// [removed] old check accepted AZURE_OPENAI_ENDPOINT
const hasEndpoint = !!(process.env.AZURE_OPENAI_ENDPOINT || process.env.AZURE_OPENAI_RESOURCE_NAME);
return hasKey && hasEndpoint;
// [added] new check looks for AZURE_OPENAI_BASE_URL instead
const hasBaseUrl = !!(process.env.AZURE_OPENAI_BASE_URL || process.env.AZURE_OPENAI_RESOURCE_NAME);
return hasKey && hasBaseUrl;
}

View file

@ -210,7 +210,7 @@ Add API keys to `~/.pi/agent/auth.json`:
| MiniMax | `minimax` | `MINIMAX_API_KEY` |
| MiniMax (China) | `minimax-cn` | `MINIMAX_CN_API_KEY` |
Azure OpenAI also requires `AZURE_OPENAI_ENDPOINT` or `AZURE_OPENAI_RESOURCE_NAME`. Optional: `AZURE_OPENAI_API_VERSION` (defaults to `2025-04-01-preview`) and `AZURE_OPENAI_DEPLOYMENT_NAME` to override the deployment name.
Azure OpenAI also requires `AZURE_OPENAI_BASE_URL` or `AZURE_OPENAI_RESOURCE_NAME`. Optional: `AZURE_OPENAI_API_VERSION` (defaults to `v1`) and `AZURE_OPENAI_DEPLOYMENT_NAME` to override the deployment name.
Auth file keys take priority over environment variables.

View file

@ -266,9 +266,9 @@ ${chalk.bold("Environment Variables:")}
ANTHROPIC_OAUTH_TOKEN - Anthropic OAuth token (alternative to API key)
OPENAI_API_KEY - OpenAI GPT API key
AZURE_OPENAI_API_KEY - Azure OpenAI API key
AZURE_OPENAI_ENDPOINT - Azure OpenAI endpoint (https://{resource}.openai.azure.com)
AZURE_OPENAI_RESOURCE_NAME - Azure OpenAI resource name (alternative to endpoint)
AZURE_OPENAI_API_VERSION - Azure OpenAI API version (default: 2025-04-01-preview)
AZURE_OPENAI_BASE_URL - Azure OpenAI base URL (https://{resource}.openai.azure.com/openai/v1)
AZURE_OPENAI_RESOURCE_NAME - Azure OpenAI resource name (alternative to base URL)
AZURE_OPENAI_API_VERSION - Azure OpenAI API version (default: v1)
AZURE_OPENAI_DEPLOYMENT_NAME - Azure OpenAI deployment name override
GEMINI_API_KEY - Google Gemini API key
GROQ_API_KEY - Groq API key