Fix slash commands and hook commands during streaming

- Hook commands now execute immediately during streaming (they manage their own LLM interaction via pi.sendMessage())
- File-based slash commands are expanded and queued via steer/followUp during streaming
- prompt() accepts a new streamingBehavior option ('steer' or 'followUp') for explicit queueing during streaming
- steer() and followUp() now expand file-based slash commands and error on hook commands
- RPC prompt command accepts optional streamingBehavior field
- Updated docs: rpc.md, sdk.md, CHANGELOG.md

fixes #420
This commit is contained in:
Mario Zechner 2026-01-03 15:36:54 +01:00
parent 308c0e0ec0
commit e9cf3c1835
7 changed files with 207 additions and 52 deletions

View file

@ -83,6 +83,8 @@ export interface PromptOptions {
expandSlashCommands?: boolean;
/** Image attachments */
images?: ImageContent[];
/** When streaming, how to queue the message: "steer" (interrupt) or "followUp" (wait). Required if streaming. */
streamingBehavior?: "steer" | "followUp";
}
/** Result from cycleModel() */
@ -461,22 +463,18 @@ export class AgentSession {
/**
* Send a prompt to the agent.
* - Validates model and API key before sending
* - Handles hook commands (registered via pi.registerCommand)
* - Handles hook commands (registered via pi.registerCommand) immediately, even during streaming
* - Expands file-based slash commands by default
* @throws Error if no model selected or no API key available
* - During streaming, queues via steer() or followUp() based on streamingBehavior option
* - Validates model and API key before sending (when not streaming)
* @throws Error if streaming and no streamingBehavior specified
* @throws Error if no model selected or no API key available (when not streaming)
*/
async prompt(text: string, options?: PromptOptions): Promise<void> {
if (this.isStreaming) {
throw new Error("Agent is already processing. Use steer() or followUp() to queue messages during streaming.");
}
// Flush any pending bash messages before the new prompt
this._flushPendingBashMessages();
const expandCommands = options?.expandSlashCommands ?? true;
// Handle hook commands first (if enabled and text is a slash command)
// Handle hook commands first (execute immediately, even during streaming)
// Hook commands manage their own LLM interaction via pi.sendMessage()
if (expandCommands && text.startsWith("/")) {
const handled = await this._tryExecuteHookCommand(text);
if (handled) {
@ -485,6 +483,27 @@ export class AgentSession {
}
}
// Expand file-based slash commands if requested
const expandedText = expandCommands ? expandSlashCommand(text, [...this._fileCommands]) : text;
// If streaming, queue via steer() or followUp() based on option
if (this.isStreaming) {
if (!options?.streamingBehavior) {
throw new Error(
"Agent is already processing. Specify streamingBehavior ('steer' or 'followUp') to queue the message.",
);
}
if (options.streamingBehavior === "followUp") {
await this._queueFollowUp(expandedText);
} else {
await this._queueSteer(expandedText);
}
return;
}
// Flush any pending bash messages before the new prompt
this._flushPendingBashMessages();
// Validate model
if (!this.model) {
throw new Error(
@ -509,9 +528,6 @@ export class AgentSession {
await this._checkCompaction(lastAssistant, false);
}
// Expand file-based slash commands if requested
const expandedText = expandCommands ? expandSlashCommand(text, [...this._fileCommands]) : text;
// Build messages array (hook message if any, then user message)
const messages: AgentMessage[] = [];
@ -579,8 +595,43 @@ export class AgentSession {
/**
 * Queue a steering message that interrupts the agent mid-run.
 * It is delivered after the current tool execution and skips remaining tools.
 * File-based slash commands are expanded; hook commands are rejected.
 * @param text - Raw user text, possibly a slash command.
 * @throws Error if text names a registered hook command.
 */
async steer(text: string): Promise<void> {
  // Hook commands manage their own LLM interaction and cannot be queued.
  if (text.startsWith("/")) {
    this._throwIfHookCommand(text);
  }
  // Substitute the body of any matching file-based slash command.
  const expanded = expandSlashCommand(text, Array.from(this._fileCommands));
  await this._queueSteer(expanded);
}
/**
 * Queue a follow-up message delivered once the agent finishes its current
 * run (i.e. it has no more tool calls or steering messages pending).
 * File-based slash commands are expanded; hook commands are rejected.
 * @param text - Raw user text, possibly a slash command.
 * @throws Error if text names a registered hook command.
 */
async followUp(text: string): Promise<void> {
  // Hook commands manage their own LLM interaction and cannot be queued.
  if (text.startsWith("/")) {
    this._throwIfHookCommand(text);
  }
  // Substitute the body of any matching file-based slash command.
  const expanded = expandSlashCommand(text, Array.from(this._fileCommands));
  await this._queueFollowUp(expanded);
}
/**
* Internal: Queue a steering message (already expanded, no hook command check).
*/
private async _queueSteer(text: string): Promise<void> {
this._steeringMessages.push(text);
this.agent.steer({
role: "user",
@ -590,10 +641,9 @@ export class AgentSession {
}
/**
* Queue a follow-up message to be processed after the agent finishes.
* Delivered only when agent has no more tool calls or steering messages.
* Internal: Queue a follow-up message (already expanded, no hook command check).
*/
async followUp(text: string): Promise<void> {
private async _queueFollowUp(text: string): Promise<void> {
this._followUpMessages.push(text);
this.agent.followUp({
role: "user",
@ -602,6 +652,23 @@ export class AgentSession {
});
}
/**
* Throw an error if the text is a hook command.
*/
private _throwIfHookCommand(text: string): void {
if (!this._hookRunner) return;
const spaceIndex = text.indexOf(" ");
const commandName = spaceIndex === -1 ? text.slice(1) : text.slice(1, spaceIndex);
const command = this._hookRunner.getCommand(commandName);
if (command) {
throw new Error(
`Hook command "/${commandName}" cannot be queued. Use prompt() or execute the command when not streaming.`,
);
}
}
/**
* Send a hook message to the session. Creates a CustomMessageEntry.
*

View file

@ -915,26 +915,13 @@ export class InteractiveMode {
return;
}
// Hook commands always run immediately, even during streaming
// (if they need to interact with LLM, they use pi.sendMessage which handles queueing)
if (text.startsWith("/") && this.session.hookRunner) {
const spaceIndex = text.indexOf(" ");
const commandName = spaceIndex === -1 ? text.slice(1) : text.slice(1, spaceIndex);
const command = this.session.hookRunner.getCommand(commandName);
if (command) {
this.editor.addToHistory(text);
this.editor.setText("");
await this.session.prompt(text);
return;
}
}
// Queue steering message if agent is streaming (interrupts current work)
// If streaming, use prompt() with steer behavior
// This handles hook commands (execute immediately), slash command expansion, and queueing
if (this.session.isStreaming) {
await this.session.steer(text);
this.updatePendingMessagesDisplay();
this.editor.addToHistory(text);
this.editor.setText("");
await this.session.prompt(text, { streamingBehavior: "steer" });
this.updatePendingMessagesDisplay();
this.ui.requestRender();
return;
}
@ -1461,11 +1448,12 @@ export class InteractiveMode {
if (!text) return;
// Alt+Enter queues a follow-up message (waits until agent finishes)
// This handles hook commands (execute immediately), slash command expansion, and queueing
if (this.session.isStreaming) {
await this.session.followUp(text);
this.updatePendingMessagesDisplay();
this.editor.addToHistory(text);
this.editor.setText("");
await this.session.prompt(text, { streamingBehavior: "followUp" });
this.updatePendingMessagesDisplay();
this.ui.requestRender();
}
// If not streaming, Alt+Enter acts like regular Enter (trigger onSubmit)

View file

@ -244,10 +244,12 @@ export async function runRpcMode(session: AgentSession): Promise<never> {
case "prompt": {
// Don't await - events will stream
// Hook commands and file slash commands are handled in session.prompt()
// Hook commands are executed immediately, file slash commands are expanded
// If streaming and streamingBehavior specified, queues via steer/followUp
session
.prompt(command.message, {
images: command.images,
streamingBehavior: command.streamingBehavior,
})
.catch((e) => output(error(id, "prompt", e.message)));
return success(id, "prompt");

View file

@ -17,7 +17,7 @@ import type { CompactionResult } from "../../core/compaction/index.js";
export type RpcCommand =
// Prompting
| { id?: string; type: "prompt"; message: string; images?: ImageContent[] }
| { id?: string; type: "prompt"; message: string; images?: ImageContent[]; streamingBehavior?: "steer" | "followUp" }
| { id?: string; type: "steer"; message: string }
| { id?: string; type: "follow_up"; message: string }
| { id?: string; type: "abort" }