remove service tier from azure-openai-responses; add link to changelog entry

This commit is contained in:
Markus Ylisiurunen 2026-01-21 20:31:02 +02:00 committed by Mario Zechner
parent 856012296b
commit 3112526051
2 changed files with 1 addition and 30 deletions

View file

@ -13,7 +13,7 @@
- Added `headers` option to `StreamOptions` for custom HTTP headers in API requests. Supported by all providers except Amazon Bedrock (which uses AWS SDK auth). Headers are merged with provider defaults and `model.headers`, with `options.headers` taking precedence.
- Added `originator` option to `loginOpenAICodex()` for custom OAuth client identification
- Browser compatibility for pi-ai: replaced top-level Node.js imports with dynamic imports for browser environments ([#873](https://github.com/badlogic/pi-mono/issues/873))
- Added `azure-openai-responses` provider support for Azure OpenAI Responses API.
- Added `azure-openai-responses` provider support for Azure OpenAI Responses API. ([#890](https://github.com/badlogic/pi-mono/pull/890) by [@markusylisiurunen](https://github.com/markusylisiurunen))
### Fixed

View file

@ -25,7 +25,6 @@ import type {
ThinkingContent,
Tool,
ToolCall,
Usage,
} from "../types.js";
import { AssistantMessageEventStream } from "../utils/event-stream.js";
import { parseStreamingJson } from "../utils/json-parse.js";
@ -52,7 +51,6 @@ const DEFAULT_AZURE_API_VERSION = "2025-04-01-preview";
export interface AzureOpenAIResponsesOptions extends StreamOptions {
reasoningEffort?: "minimal" | "low" | "medium" | "high" | "xhigh";
reasoningSummary?: "auto" | "detailed" | "concise" | null;
serviceTier?: ResponseCreateParamsStreaming["service_tier"];
azureApiVersion?: string;
azureEndpoint?: string;
azureResourceName?: string;
@ -292,7 +290,6 @@ export const streamAzureOpenAIResponses: StreamFunction<"azure-openai-responses"
};
}
calculateCost(model, output.usage);
applyServiceTierPricing(output.usage, response?.service_tier ?? options?.serviceTier);
// Map status to stop reason
output.stopReason = mapStopReason(response?.status);
if (output.content.some((b) => b.type === "toolCall") && output.stopReason === "stop") {
@ -424,10 +421,6 @@ function buildParams(
params.temperature = options?.temperature;
}
if (options?.serviceTier !== undefined) {
params.service_tier = options.serviceTier;
}
if (context.tools) {
params.tools = convertTools(context.tools);
}
@ -616,28 +609,6 @@ function convertTools(tools: Tool[]): OpenAITool[] {
}));
}
/**
 * Returns the pricing multiplier for an OpenAI service tier.
 *
 * Flex processing is billed at half the standard rate and priority at double;
 * any other value (including `undefined`/`null`, e.g. "auto" or "default")
 * uses the standard rate of 1.
 */
function getServiceTierCostMultiplier(serviceTier: ResponseCreateParamsStreaming["service_tier"] | undefined): number {
  if (serviceTier === "flex") return 0.5;
  if (serviceTier === "priority") return 2;
  return 1;
}
/**
 * Scales the cost fields of `usage` in place to reflect service-tier pricing.
 *
 * Flex is billed at half the standard rate and priority at double; all other
 * tier values (including `undefined`/`null`) leave the usage untouched. After
 * scaling the individual components, the total is recomputed as their sum.
 */
function applyServiceTierPricing(usage: Usage, serviceTier: ResponseCreateParamsStreaming["service_tier"] | undefined) {
  const multiplier = serviceTier === "flex" ? 0.5 : serviceTier === "priority" ? 2 : 1;
  if (multiplier === 1) return;
  const cost = usage.cost;
  cost.input *= multiplier;
  cost.output *= multiplier;
  cost.cacheRead *= multiplier;
  cost.cacheWrite *= multiplier;
  cost.total = cost.input + cost.output + cost.cacheRead + cost.cacheWrite;
}
function mapStopReason(status: OpenAI.Responses.ResponseStatus | undefined): StopReason {
if (!status) return "stop";
switch (status) {