mirror of
https://github.com/getcompanion-ai/co-mono.git
synced 2026-04-15 05:02:07 +00:00
Fix Vercel AI SDK v6 protocol compliance
- Add messageId to start chunk (required by useChat)
- Remove undocumented tool-output-error wire type; use tool-output-available for all tool results
- Pass structured tool output through instead of JSON-stringifying it
This commit is contained in:
parent
998945afe5
commit
ca0861400d
2 changed files with 13 additions and 18 deletions
|
|
@@ -1,3 +1,4 @@
|
|||
import { randomUUID } from "node:crypto";
|
||||
import type { ServerResponse } from "node:http";
|
||||
import type { AgentSessionEvent } from "./agent-session.js";
|
||||
|
||||
|
|
@@ -55,8 +56,10 @@ export function extractUserText(body: Record<string, unknown>): string | null {
|
|||
*/
|
||||
export function createVercelStreamListener(
|
||||
response: ServerResponse,
|
||||
messageId?: string,
|
||||
): (event: AgentSessionEvent) => void {
|
||||
let started = false;
|
||||
const msgId = messageId ?? randomUUID();
|
||||
|
||||
return (event: AgentSessionEvent) => {
|
||||
if (response.writableEnded) return;
|
||||
|
|
@@ -64,7 +67,7 @@ export function createVercelStreamListener(
|
|||
switch (event.type) {
|
||||
case "agent_start":
|
||||
if (!started) {
|
||||
writeChunk(response, { type: "start" });
|
||||
writeChunk(response, { type: "start", messageId: msgId });
|
||||
started = true;
|
||||
}
|
||||
return;
|
||||
|
|
@@ -153,19 +156,11 @@ export function createVercelStreamListener(
|
|||
return;
|
||||
|
||||
case "tool_execution_end":
|
||||
if (event.isError) {
|
||||
writeChunk(response, {
|
||||
type: "tool-output-error",
|
||||
toolCallId: event.toolCallId,
|
||||
errorText: typeof event.result === "string" ? event.result : JSON.stringify(event.result),
|
||||
});
|
||||
} else {
|
||||
writeChunk(response, {
|
||||
type: "tool-output-available",
|
||||
toolCallId: event.toolCallId,
|
||||
output: typeof event.result === "string" ? event.result : JSON.stringify(event.result),
|
||||
});
|
||||
}
|
||||
writeChunk(response, {
|
||||
type: "tool-output-available",
|
||||
toolCallId: event.toolCallId,
|
||||
output: event.result,
|
||||
});
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@@ -86,7 +86,7 @@ describe("createVercelStreamListener", () => {
|
|||
|
||||
it("translates text streaming events", () => {
|
||||
const response = createMockResponse();
|
||||
const listener = createVercelStreamListener(response);
|
||||
const listener = createVercelStreamListener(response, "test-msg-id");
|
||||
|
||||
listener({ type: "agent_start" } as AgentSessionEvent);
|
||||
listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent);
|
||||
|
|
@@ -109,7 +109,7 @@ describe("createVercelStreamListener", () => {
|
|||
|
||||
const parsed = parseChunks(response.chunks);
|
||||
expect(parsed).toEqual([
|
||||
{ type: "start" },
|
||||
{ type: "start", messageId: "test-msg-id" },
|
||||
{ type: "start-step" },
|
||||
{ type: "text-start", id: "text_0" },
|
||||
{ type: "text-delta", id: "text_0", delta: "hello" },
|
||||
|
|
@@ -120,13 +120,13 @@ describe("createVercelStreamListener", () => {
|
|||
|
||||
it("does not write after response has ended", () => {
|
||||
const response = createMockResponse();
|
||||
const listener = createVercelStreamListener(response);
|
||||
const listener = createVercelStreamListener(response, "test-msg-id");
|
||||
|
||||
listener({ type: "agent_start" } as AgentSessionEvent);
|
||||
response.end();
|
||||
listener({ type: "turn_start", turnIndex: 0, timestamp: Date.now() } as AgentSessionEvent);
|
||||
|
||||
const parsed = parseChunks(response.chunks);
|
||||
expect(parsed).toEqual([{ type: "start" }]);
|
||||
expect(parsed).toEqual([{ type: "start", messageId: "test-msg-id" }]);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue