refactor(ai): Add completion signal to onText/onThinking callbacks

- Update LLMOptions interface to include completion boolean parameter
- Modify all providers to signal when text/thinking blocks are complete
- Update examples to handle the completion parameter
- Move documentation files to docs/ directory

This commit is contained in:
Mario Zechner 2025-08-24 20:33:26 +02:00
parent a42c54e6fe
commit cb4c32faaa
11 changed files with 45 additions and 13 deletions

View file

@@ -114,15 +114,33 @@ export class AnthropicLLM implements LLM<AnthropicLLMOptions> {
},
);
let blockType: "text" | "thinking" | "other" = "other";
for await (const event of stream) {
if (event.type === "content_block_start") {
if (event.content_block.type === "text") {
blockType = "text";
} else if (event.content_block.type === "thinking") {
blockType = "thinking";
} else {
blockType = "other";
}
}
if (event.type === "content_block_delta") {
if (event.delta.type === "text_delta") {
-                    options?.onText?.(event.delta.text);
+                    options?.onText?.(event.delta.text, false);
}
if (event.delta.type === "thinking_delta") {
-                    options?.onThinking?.(event.delta.thinking);
+                    options?.onThinking?.(event.delta.thinking, false);
}
}
if (event.type === "content_block_stop") {
if (blockType === "text") {
options?.onText?.("", true);
} else if (blockType === "thinking") {
options?.onThinking?.("", true);
}
blockType = "other";
}
}
const msg = await stream.finalMessage();
const thinking = msg.content.some((block) => block.type === "thinking")