adjust azure responses metadata and handoff gating

This commit is contained in:
Markus Ylisiurunen 2026-01-22 21:00:18 +02:00 committed by Mario Zechner
parent bd7049b7d1
commit c6e966bd1c
3 changed files with 10 additions and 3 deletions

View file

@ -2,6 +2,10 @@
## [Unreleased]
### Added
- Added `azure-openai-responses` provider support for Azure OpenAI Responses API. ([#890](https://github.com/badlogic/pi-mono/pull/890) by [@markusylisiurunen](https://github.com/markusylisiurunen))
### Fixed
- Fixed Bun runtime detection for dynamic imports in browser-compatible modules (stream.ts, openai-codex-responses.ts, openai-codex.ts) ([#922](https://github.com/badlogic/pi-mono/pull/922) by [@dannote](https://github.com/dannote))
@ -13,7 +17,6 @@
- Added `headers` option to `StreamOptions` for custom HTTP headers in API requests. Supported by all providers except Amazon Bedrock (which uses AWS SDK auth). Headers are merged with provider defaults and `model.headers`, with `options.headers` taking precedence.
- Added `originator` option to `loginOpenAICodex()` for custom OAuth client identification
- Browser compatibility for pi-ai: replaced top-level Node.js imports with dynamic imports for browser environments ([#873](https://github.com/badlogic/pi-mono/issues/873))
- Added `azure-openai-responses` provider support for Azure OpenAI Responses API. ([#890](https://github.com/badlogic/pi-mono/pull/890) by [@markusylisiurunen](https://github.com/markusylisiurunen))
### Fixed

View file

@ -58,7 +58,7 @@ export const streamAzureOpenAIResponses: StreamFunction<"azure-openai-responses"
content: [],
api: "azure-openai-responses" as Api,
provider: model.provider,
model: deploymentName,
model: model.id,
usage: {
input: 0,
output: 0,
@ -208,7 +208,7 @@ function buildParams(
};
params.include = ["reasoning.encrypted_content"];
} else {
if (model.name.startsWith("gpt-5")) {
if (model.name.toLowerCase().startsWith("gpt-5")) {
// Jesus Christ, see https://community.openai.com/t/need-reasoning-false-option-for-gpt-5/1351588/7
messages.push({
role: "developer",

View file

@ -28,6 +28,7 @@ import { beforeAll, describe, expect, it } from "vitest";
import { getModel } from "../src/models.js";
import { completeSimple, getEnvApiKey } from "../src/stream.js";
import type { Api, AssistantMessage, Message, Model, Tool, ToolResultMessage } from "../src/types.js";
import { hasAzureOpenAICredentials } from "./azure-utils.js";
import { resolveApiKey } from "./oauth.js";
// Simple tool for testing
@ -121,6 +122,9 @@ async function getApiKey(provider: string): Promise<string | undefined> {
* Synchronous check for API key availability (env vars only, for skipIf)
*/
/**
 * Synchronous check for API key availability (env vars only, for skipIf).
 * Azure is special-cased: its credentials are validated by the shared
 * helper rather than a single env API key lookup.
 */
function hasApiKey(provider: string): boolean {
	return provider === "azure-openai-responses"
		? hasAzureOpenAICredentials()
		: !!getEnvApiKey(provider);
}