Merge pull request #273 from getcompanion-ai/grind

fix chat
This commit is contained in:
Hari 2026-03-09 16:02:14 -04:00 committed by GitHub
commit d880d2740e
5 changed files with 386 additions and 49 deletions

View file

@@ -1,3 +1,5 @@
import type { IncomingMessage, ServerResponse } from "node:http";
import type { AssistantMessage } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import type { AgentSessionEvent } from "../src/core/agent-session.js";
import {
@@ -5,6 +7,24 @@ import {
extractUserText,
} from "../src/core/gateway/vercel-ai-stream.js";
// Narrowed aliases for the specific AgentSessionEvent variants these tests emit.
type MessageUpdateSessionEvent = Extract<
AgentSessionEvent,
{ type: "message_update" }
>;
type MessageEndSessionEvent = Extract<
AgentSessionEvent,
{ type: "message_end" }
>;
type TurnEndSessionEvent = Extract<AgentSessionEvent, { type: "turn_end" }>;
// Only the text streaming sub-events (start/delta/end) are relevant to these tests.
type TextAssistantMessageEvent = Extract<
MessageUpdateSessionEvent["assistantMessageEvent"],
{ type: "text_start" | "text_delta" | "text_end" }
>;
// ServerResponse test double that additionally records every written chunk
// and whether end() was called.
type MockResponse = ServerResponse<IncomingMessage> & {
chunks: string[];
ended: boolean;
};
describe("extractUserText", () => {
it("extracts text from useChat v5+ format with parts", () => {
const body = {
@@ -61,24 +81,73 @@ describe("extractUserText", () => {
});
describe("createVercelStreamListener", () => {
function createMockResponse() {
/**
 * Builds a minimal assistant message fixture containing a single text part.
 * Usage and cost are zeroed out; the timestamp is the current time.
 */
function createAssistantMessage(text: string): AssistantMessage {
  const zeroCost = {
    input: 0,
    output: 0,
    cacheRead: 0,
    cacheWrite: 0,
    total: 0,
  };
  const zeroUsage = {
    input: 0,
    output: 0,
    cacheRead: 0,
    cacheWrite: 0,
    totalTokens: 0,
    cost: zeroCost,
  };
  const message: AssistantMessage = {
    role: "assistant",
    content: [{ type: "text", text }],
    api: "anthropic-messages",
    provider: "anthropic",
    model: "mock",
    usage: zeroUsage,
    stopReason: "stop",
    timestamp: Date.now(),
  };
  return message;
}
function createMockResponse(): MockResponse {
const chunks: string[] = [];
let ended = false;
return {
writableEnded: false,
const response = {
get writableEnded() {
return ended;
},
write(data: string) {
chunks.push(data);
return true;
},
end() {
ended = true;
this.writableEnded = true;
},
chunks,
get ended() {
return ended;
},
} as any;
} as unknown as MockResponse;
return response;
}
/** Wraps a text streaming sub-event into a message_update session event. */
function createMessageUpdateEvent(
  assistantMessageEvent: TextAssistantMessageEvent,
): MessageUpdateSessionEvent {
  const event: MessageUpdateSessionEvent = {
    type: "message_update",
    message: createAssistantMessage(""),
    assistantMessageEvent,
  };
  return event;
}
/** Builds a message_end session event whose final assistant text is `text`. */
function createAssistantMessageEndEvent(
  text: string,
): MessageEndSessionEvent {
  const event: MessageEndSessionEvent = {
    type: "message_end",
    message: createAssistantMessage(text),
  };
  return event;
}
/** Builds a turn_end session event with an empty message and no tool results. */
function createTurnEndEvent(): TurnEndSessionEvent {
  const event: TurnEndSessionEvent = {
    type: "turn_end",
    message: createAssistantMessage(""),
    toolResults: [],
  };
  return event;
}
function parseChunks(chunks: string[]): Array<object | string> {
@@ -104,41 +173,172 @@ describe("createVercelStreamListener", () => {
turnIndex: 0,
timestamp: Date.now(),
} as AgentSessionEvent);
listener({
type: "message_update",
message: {} as any,
assistantMessageEvent: {
listener(
createMessageUpdateEvent({
type: "text_start",
contentIndex: 0,
partial: {} as any,
},
} as AgentSessionEvent);
listener({
type: "message_update",
message: {} as any,
assistantMessageEvent: {
partial: createAssistantMessage(""),
}),
);
listener(
createMessageUpdateEvent({
type: "text_delta",
contentIndex: 0,
delta: "hello",
partial: {} as any,
},
} as AgentSessionEvent);
listener({
type: "message_update",
message: {} as any,
assistantMessageEvent: {
partial: createAssistantMessage("hello"),
}),
);
listener(
createMessageUpdateEvent({
type: "text_end",
contentIndex: 0,
content: "hello",
partial: {} as any,
},
} as AgentSessionEvent);
partial: createAssistantMessage("hello"),
}),
);
listener(createTurnEndEvent());
const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([
{ type: "start", messageId: "test-msg-id" },
{ type: "start-step" },
{ type: "text-start", id: "text_0" },
{ type: "text-delta", id: "text_0", delta: "hello" },
{ type: "text-end", id: "text_0" },
{ type: "finish-step" },
]);
});
it("flushes final assistant text from message_end when no deltas streamed", () => {
const response = createMockResponse();
const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent);
listener({
type: "turn_end",
type: "turn_start",
turnIndex: 0,
message: {} as any,
toolResults: [],
timestamp: Date.now(),
} as AgentSessionEvent);
listener(createAssistantMessageEndEvent("final answer"));
listener(createTurnEndEvent());
const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([
{ type: "start", messageId: "test-msg-id" },
{ type: "start-step" },
{ type: "text-start", id: "text_0" },
{ type: "text-delta", id: "text_0", delta: "final answer" },
{ type: "text-end", id: "text_0" },
{ type: "finish-step" },
]);
});
// message_end carries the full final text; when only a prefix was streamed
// via text_delta ("hel" of "hello"), the listener must emit the missing
// suffix ("lo") as one more delta before closing the text block.
it("flushes the missing text suffix on message_end", () => {
const response = createMockResponse();
const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent);
listener({
type: "turn_start",
turnIndex: 0,
timestamp: Date.now(),
} as AgentSessionEvent);
listener(
createMessageUpdateEvent({
type: "text_start",
contentIndex: 0,
partial: createAssistantMessage(""),
}),
);
// Stream only the first three characters of the eventual final text.
listener(
createMessageUpdateEvent({
type: "text_delta",
contentIndex: 0,
delta: "hel",
partial: createAssistantMessage("hel"),
}),
);
// Final message text is "hello" — two characters longer than what streamed.
listener(createAssistantMessageEndEvent("hello"));
listener(createTurnEndEvent());
const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([
{ type: "start", messageId: "test-msg-id" },
{ type: "start-step" },
{ type: "text-start", id: "text_0" },
{ type: "text-delta", id: "text_0", delta: "hel" },
// Suffix flushed from message_end to complete "hello".
{ type: "text-delta", id: "text_0", delta: "lo" },
{ type: "text-end", id: "text_0" },
{ type: "finish-step" },
]);
});
// text_end arrives carrying full content but no text_delta was ever streamed;
// the listener must flush that content as a single delta before emitting
// text-end, so the client still receives the full text.
it("flushes text_end content before closing the block", () => {
const response = createMockResponse();
const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent);
listener({
type: "turn_start",
turnIndex: 0,
timestamp: Date.now(),
} as AgentSessionEvent);
listener(
createMessageUpdateEvent({
type: "text_start",
contentIndex: 0,
partial: createAssistantMessage(""),
}),
);
// No text_delta events: content appears only on text_end.
listener(
createMessageUpdateEvent({
type: "text_end",
contentIndex: 0,
content: "hello",
partial: createAssistantMessage("hello"),
}),
);
listener(createAssistantMessageEndEvent("hello"));
listener(createTurnEndEvent());
const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([
{ type: "start", messageId: "test-msg-id" },
{ type: "start-step" },
{ type: "text-start", id: "text_0" },
// The text_end content surfaced as a delta before the block closed.
{ type: "text-delta", id: "text_0", delta: "hello" },
{ type: "text-end", id: "text_0" },
{ type: "finish-step" },
]);
});
it("closes an open text block when final text mismatches the streamed prefix", () => {
const response = createMockResponse();
const listener = createVercelStreamListener(response, "test-msg-id");
listener({ type: "agent_start" } as AgentSessionEvent);
listener({
type: "turn_start",
turnIndex: 0,
timestamp: Date.now(),
} as AgentSessionEvent);
listener(
createMessageUpdateEvent({
type: "text_start",
contentIndex: 0,
partial: createAssistantMessage(""),
}),
);
listener(
createMessageUpdateEvent({
type: "text_delta",
contentIndex: 0,
delta: "hello",
partial: createAssistantMessage("hello"),
}),
);
listener(createAssistantMessageEndEvent("goodbye"));
listener(createTurnEndEvent());
const parsed = parseChunks(response.chunks);
expect(parsed).toEqual([