Add guard against concurrent prompt() calls

Agent.prompt() and Agent.continue() now throw if called while already
streaming, preventing race conditions and corrupted state. Use
queueMessage() to queue messages during streaming, or await the
previous call.

AgentSession.prompt() has the same guard with a message directing
users to queueMessage().

Ref #403
This commit is contained in:
Mario Zechner 2026-01-02 21:52:45 +01:00
parent 5c3c8e6f7e
commit 5ef3cc90d1
6 changed files with 327 additions and 2 deletions

View file

@@ -112,7 +112,6 @@ export interface SessionStats {
cost: number;
}
/** Internal marker for hook messages queued through the agent loop */
// ============================================================================
// Constants
// ============================================================================
@@ -456,6 +455,10 @@ export class AgentSession {
* @throws Error if no model selected or no API key available
*/
async prompt(text: string, options?: PromptOptions): Promise<void> {
if (this.isStreaming) {
throw new Error("Agent is already processing. Use queueMessage() to queue messages during streaming.");
}
// Flush any pending bash messages before the new prompt
this._flushPendingBashMessages();