Fix Vercel AI SDK v6 protocol compliance

- Add messageId to start chunk (required by useChat)
- Remove undocumented tool-output-error wire type, use
  tool-output-available for all tool results
- Pass structured tool output through instead of JSON-stringifying
This commit is contained in:
Harivansh Rathi 2026-03-06 01:36:19 -08:00
parent 998945afe5
commit ca0861400d
2 changed files with 13 additions and 18 deletions

View file

@@ -1,3 +1,4 @@
import { randomUUID } from "node:crypto";
import type { ServerResponse } from "node:http"; import type { ServerResponse } from "node:http";
import type { AgentSessionEvent } from "./agent-session.js"; import type { AgentSessionEvent } from "./agent-session.js";
@@ -55,8 +56,10 @@ export function extractUserText(body: Record<string, unknown>): string | null {
*/ */
export function createVercelStreamListener( export function createVercelStreamListener(
response: ServerResponse, response: ServerResponse,
messageId?: string,
): (event: AgentSessionEvent) => void { ): (event: AgentSessionEvent) => void {
let started = false; let started = false;
const msgId = messageId ?? randomUUID();
return (event: AgentSessionEvent) => { return (event: AgentSessionEvent) => {
if (response.writableEnded) return; if (response.writableEnded) return;
@@ -64,7 +67,7 @@ export function createVercelStreamListener(
switch (event.type) { switch (event.type) {
case "agent_start": case "agent_start":
if (!started) { if (!started) {
writeChunk(response, { type: "start" }); writeChunk(response, { type: "start", messageId: msgId });
started = true; started = true;
} }
return; return;
@@ -153,19 +156,11 @@ export function createVercelStreamListener(
return; return;
case "tool_execution_end": case "tool_execution_end":
if (event.isError) { writeChunk(response, {
writeChunk(response, { type: "tool-output-available",
type: "tool-output-error", toolCallId: event.toolCallId,
toolCallId: event.toolCallId, output: event.result,
errorText: typeof event.result === "string" ? event.result : JSON.stringify(event.result), });
});
} else {
writeChunk(response, {
type: "tool-output-available",
toolCallId: event.toolCallId,
output: typeof event.result === "string" ? event.result : JSON.stringify(event.result),
});
}
return; return;
} }
}; };

View file

@@ -86,7 +86,7 @@ describe("createVercelStreamListener", () => {
it("translates text streaming events", () => { it("translates text streaming events", () => {
const response = createMockResponse(); const response = createMockResponse();
const listener = createVercelStreamListener(response); const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent); listener({ type: "agent_start" } as AgentSessionEvent);
listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent); listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent);
@@ -109,7 +109,7 @@ describe("createVercelStreamListener", () => {
const parsed = parseChunks(response.chunks); const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([ expect(parsed).toEqual([
{ type: "start" }, { type: "start", messageId: "test-msg-id" },
{ type: "start-step" }, { type: "start-step" },
{ type: "text-start", id: "text_0" }, { type: "text-start", id: "text_0" },
{ type: "text-delta", id: "text_0", delta: "hello" }, { type: "text-delta", id: "text_0", delta: "hello" },
@@ -120,13 +120,13 @@ describe("createVercelStreamListener", () => {
it("does not write after response has ended", () => { it("does not write after response has ended", () => {
const response = createMockResponse(); const response = createMockResponse();
const listener = createVercelStreamListener(response); const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent); listener({ type: "agent_start" } as AgentSessionEvent);
response.end(); response.end();
listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent); listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent);
const parsed = parseChunks(response.chunks); const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([{ type: "start" }]); expect(parsed).toEqual([{ type: "start", messageId: "test-msg-id" }]);
}); });
}); });