refactor(ai): Add completion signal to onText/onThinking callbacks

- Update LLMOptions interface to include a completion boolean parameter
- Modify all providers to signal when text/thinking blocks are complete
- Update examples to handle the completion parameter
- Move documentation files to docs/ directory
This commit is contained in:
Mario Zechner 2025-08-24 20:33:26 +02:00
parent a42c54e6fe
commit cb4c32faaa
11 changed files with 45 additions and 13 deletions

View file

@@ -114,15 +114,33 @@ export class AnthropicLLM implements LLM<AnthropicLLMOptions> {
}, },
); );
let blockType: "text" | "thinking" | "other" = "other";
for await (const event of stream) { for await (const event of stream) {
if (event.type === "content_block_start") {
if (event.content_block.type === "text") {
blockType = "text";
} else if (event.content_block.type === "thinking") {
blockType = "thinking";
} else {
blockType = "other";
}
}
if (event.type === "content_block_delta") { if (event.type === "content_block_delta") {
if (event.delta.type === "text_delta") { if (event.delta.type === "text_delta") {
options?.onText?.(event.delta.text); options?.onText?.(event.delta.text, false);
} }
if (event.delta.type === "thinking_delta") { if (event.delta.type === "thinking_delta") {
options?.onThinking?.(event.delta.thinking); options?.onThinking?.(event.delta.thinking, false);
} }
} }
if (event.type === "content_block_stop") {
if (blockType === "text") {
options?.onText?.("", true);
} else if (blockType === "thinking") {
options?.onThinking?.("", true);
}
blockType = "other";
}
} }
const msg = await stream.finalMessage(); const msg = await stream.finalMessage();
const thinking = msg.content.some((block) => block.type === "thinking") const thinking = msg.content.some((block) => block.type === "thinking")

View file

@@ -87,17 +87,24 @@ export class OpenAICompletionsLLM implements LLM<OpenAICompletionsLLMOptions> {
}; };
let finishReason: ChatCompletionChunk.Choice["finish_reason"] | null = null; let finishReason: ChatCompletionChunk.Choice["finish_reason"] | null = null;
let inTextBlock = false;
for await (const chunk of stream) { for await (const chunk of stream) {
const choice = chunk.choices[0]; const choice = chunk.choices[0];
// Handle text content // Handle text content
if (choice?.delta?.content) { if (choice?.delta?.content) {
content += choice.delta.content; content += choice.delta.content;
options?.onText?.(choice.delta.content); options?.onText?.(choice.delta.content, false);
inTextBlock = true;
} }
// Handle tool calls // Handle tool calls
if (choice?.delta?.tool_calls) { if (choice?.delta?.tool_calls) {
if (inTextBlock) {
// If we were in a text block, signal its end
options?.onText?.("", true);
inTextBlock = false;
}
for (const toolCall of choice.delta.tool_calls) { for (const toolCall of choice.delta.tool_calls) {
const index = toolCall.index; const index = toolCall.index;
@@ -120,6 +127,11 @@ export class OpenAICompletionsLLM implements LLM<OpenAICompletionsLLMOptions> {
// Capture finish reason // Capture finish reason
if (choice?.finish_reason) { if (choice?.finish_reason) {
if (inTextBlock) {
// If we were in a text block, signal its end
options?.onText?.("", true);
inTextBlock = false;
}
finishReason = choice.finish_reason; finishReason = choice.finish_reason;
} }

View file

@@ -91,21 +91,23 @@ export class OpenAIResponsesLLM implements LLM<OpenAIResponsesLLMOptions> {
if (event.type === "response.reasoning_summary_text.delta") { if (event.type === "response.reasoning_summary_text.delta") {
const delta = event.delta; const delta = event.delta;
thinking += delta; thinking += delta;
options?.onThinking?.(delta); options?.onThinking?.(delta, false);
} else if (event.type === "response.reasoning_summary_text.done") { } else if (event.type === "response.reasoning_summary_text.done") {
if (event.text) { if (event.text) {
thinking = event.text; thinking = event.text;
} }
options?.onThinking?.("", true);
} }
// Handle main text output // Handle main text output
else if (event.type === "response.output_text.delta") { else if (event.type === "response.output_text.delta") {
const delta = event.delta; const delta = event.delta;
content += delta; content += delta;
options?.onText?.(delta); options?.onText?.(delta, false);
} else if (event.type === "response.output_text.done") { } else if (event.type === "response.output_text.done") {
if (event.text) { if (event.text) {
content = event.text; content = event.text;
} }
options?.onText?.("", true);
} }
// Handle function calls // Handle function calls
else if (event.type === "response.output_item.done") { else if (event.type === "response.output_item.done") {

View file

@@ -1,8 +1,8 @@
export interface LLMOptions { export interface LLMOptions {
temperature?: number; temperature?: number;
maxTokens?: number; maxTokens?: number;
onText?: (text: string) => void; onText?: (text: string, complete: boolean) => void;
onThinking?: (thinking: string) => void; onThinking?: (thinking: string, complete: boolean) => void;
signal?: AbortSignal; signal?: AbortSignal;
} }

View file

@@ -24,8 +24,8 @@ const tools: Tool[] = [
]; ];
const options: AnthropicLLMOptions = { const options: AnthropicLLMOptions = {
onText: (t) => process.stdout.write(t), onText: (t, complete) => process.stdout.write(t + (complete ? "\n" : "")),
onThinking: (t) => process.stdout.write(chalk.dim(t)), onThinking: (t, complete) => process.stdout.write(chalk.dim(t + (complete ? "\n" : ""))),
thinking: { enabled: true } thinking: { enabled: true }
}; };
const ai = new AnthropicLLM("claude-sonnet-4-0", process.env.ANTHROPIC_OAUTH_TOKEN ?? process.env.ANTHROPIC_API_KEY); const ai = new AnthropicLLM("claude-sonnet-4-0", process.env.ANTHROPIC_OAUTH_TOKEN ?? process.env.ANTHROPIC_API_KEY);

View file

@@ -21,8 +21,8 @@ const tools: Tool[] = [
]; ];
const options: OpenAICompletionsLLMOptions = { const options: OpenAICompletionsLLMOptions = {
onText: (t) => process.stdout.write(t), onText: (t, complete) => process.stdout.write(t + (complete ? "\n" : "")),
onThinking: (t) => process.stdout.write(chalk.dim(t)), onThinking: (t, complete) => process.stdout.write(chalk.dim(t + (complete ? "\n" : ""))),
reasoningEffort: "medium", reasoningEffort: "medium",
toolChoice: "auto" toolChoice: "auto"
}; };

View file

@@ -32,8 +32,8 @@ const context: Context = {
} }
const options: OpenAIResponsesLLMOptions = { const options: OpenAIResponsesLLMOptions = {
onText: (t) => process.stdout.write(t), onText: (t, complete) => process.stdout.write(t + (complete ? "\n" : "")),
onThinking: (t) => process.stdout.write(chalk.dim(t)), onThinking: (t, complete) => process.stdout.write(chalk.dim(t + (complete ? "\n" : ""))),
reasoningEffort: "low", reasoningEffort: "low",
reasoningSummary: "auto" reasoningSummary: "auto"
}; };