diff --git a/package-lock.json b/package-lock.json index e4bc0fb5..262a4618 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6745,9 +6745,9 @@ } }, "node_modules/openai": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/openai/-/openai-6.10.0.tgz", - "integrity": "sha512-ITxOGo7rO3XRMiKA5l7tQ43iNNu+iXGFAcf2t+aWVzzqRaS0i7m1K2BhxNdaveB+5eENhO0VY1FkiZzhBk4v3A==", + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-6.26.0.tgz", + "integrity": "sha512-zd23dbWTjiJ6sSAX6s0HrCZi41JwTA1bQVs0wLQPZ2/5o2gxOJA5wh7yOAUgwYybfhDXyhwlpeQf7Mlgx8EOCA==", "license": "Apache-2.0", "bin": { "openai": "bin/cli" @@ -8571,7 +8571,7 @@ "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "chalk": "^5.6.2", - "openai": "6.10.0", + "openai": "6.26.0", "partial-json": "^0.1.7", "proxy-agent": "^6.5.0", "undici": "^7.19.1", diff --git a/packages/ai/CHANGELOG.md b/packages/ai/CHANGELOG.md index c2b4feb7..fddab649 100644 --- a/packages/ai/CHANGELOG.md +++ b/packages/ai/CHANGELOG.md @@ -2,6 +2,10 @@ ## [Unreleased] +### Fixed + +- Preserved OpenAI Responses assistant `phase` metadata (`commentary`, `final_answer`) across turns by encoding `id` and `phase` in `textSignature` for session persistence and replay, with backward compatibility for legacy plain signatures ([#1819](https://github.com/badlogic/pi-mono/issues/1819)). 
+ ## [0.56.1] - 2026-03-05 ## [0.56.0] - 2026-03-04 diff --git a/packages/ai/package.json b/packages/ai/package.json index df7d4567..11d6a1b4 100644 --- a/packages/ai/package.json +++ b/packages/ai/package.json @@ -48,7 +48,7 @@ "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "chalk": "^5.6.2", - "openai": "6.10.0", + "openai": "6.26.0", "partial-json": "^0.1.7", "proxy-agent": "^6.5.0", "undici": "^7.19.1", diff --git a/packages/ai/src/providers/openai-responses-shared.ts b/packages/ai/src/providers/openai-responses-shared.ts index 552bc606..c8e98fce 100644 --- a/packages/ai/src/providers/openai-responses-shared.ts +++ b/packages/ai/src/providers/openai-responses-shared.ts @@ -20,6 +20,7 @@ import type { Model, StopReason, TextContent, + TextSignatureV1, ThinkingContent, Tool, ToolCall, @@ -48,6 +49,32 @@ function shortHash(str: string): string { return (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36); } +function encodeTextSignatureV1(id: string, phase?: TextSignatureV1["phase"]): string { + const payload: TextSignatureV1 = { v: 1, id }; + if (phase) payload.phase = phase; + return JSON.stringify(payload); +} + +function parseTextSignature( + signature: string | undefined, +): { id: string; phase?: TextSignatureV1["phase"] } | undefined { + if (!signature) return undefined; + if (signature.startsWith("{")) { + try { + const parsed = JSON.parse(signature) as Partial<TextSignatureV1>; + if (parsed.v === 1 && typeof parsed.id === "string") { + if (parsed.phase === "commentary" || parsed.phase === "final_answer") { + return { id: parsed.id, phase: parsed.phase }; + } + return { id: parsed.id }; + } + } catch { + // Fall through to legacy plain-string handling. 
+ } + } + return { id: signature }; +} + export interface OpenAIResponsesStreamOptions { serviceTier?: ResponseCreateParamsStreaming["service_tier"]; applyServiceTierPricing?: ( @@ -152,8 +179,9 @@ export function convertResponsesMessages( } } else if (block.type === "text") { const textBlock = block as TextContent; + const parsedSignature = parseTextSignature(textBlock.textSignature); // OpenAI requires id to be max 64 characters - let msgId = textBlock.textSignature; + let msgId = parsedSignature?.id; if (!msgId) { msgId = `msg_${msgIndex}`; } else if (msgId.length > 64) { @@ -165,6 +193,7 @@ export function convertResponsesMessages( content: [{ type: "output_text", text: sanitizeSurrogates(textBlock.text), annotations: [] }], status: "completed", id: msgId, + phase: parsedSignature?.phase, } satisfies ResponseOutputMessage); } else if (block.type === "toolCall") { const toolCall = block as ToolCall; @@ -403,7 +432,7 @@ export async function processResponsesStream( currentBlock = null; } else if (item.type === "message" && currentBlock?.type === "text") { currentBlock.text = item.content.map((c) => (c.type === "output_text" ? c.text : c.refusal)).join(""); - currentBlock.textSignature = item.id; + currentBlock.textSignature = encodeTextSignatureV1(item.id, item.phase ?? 
undefined); stream.push({ type: "text_end", contentIndex: blockIndex(), diff --git a/packages/ai/src/types.ts b/packages/ai/src/types.ts index 2bc11de1..4f327987 100644 --- a/packages/ai/src/types.ts +++ b/packages/ai/src/types.ts @@ -119,10 +119,16 @@ export type StreamFunction AssistantMessageEventStream; +export interface TextSignatureV1 { + v: 1; + id: string; + phase?: "commentary" | "final_answer"; +} + export interface TextContent { type: "text"; text: string; - textSignature?: string; // e.g., for OpenAI responses, the message ID + textSignature?: string; // e.g., for OpenAI responses, message metadata (legacy id string or TextSignatureV1 JSON) } export interface ThinkingContent { diff --git a/packages/coding-agent/CHANGELOG.md b/packages/coding-agent/CHANGELOG.md index 58df46e6..11326e98 100644 --- a/packages/coding-agent/CHANGELOG.md +++ b/packages/coding-agent/CHANGELOG.md @@ -4,6 +4,7 @@ ### Fixed +- Fixed GPT-5.3 Codex follow-up turns dropping OpenAI Responses assistant `phase` metadata by preserving replayable signatures in session history and forwarding `phase` back to the Responses API ([#1819](https://github.com/badlogic/pi-mono/issues/1819)). - Fixed editor/footer visibility drift during terminal resize by forcing full redraws when terminal width or height changes ([#1844](https://github.com/badlogic/pi-mono/pull/1844) by [@ghoulr](https://github.com/ghoulr)). - Fixed footer width truncation for wide Unicode text (session name, model, provider) to prevent TUI crashes from rendered lines exceeding terminal width ([#1833](https://github.com/badlogic/pi-mono/issues/1833)).