mirror of
https://github.com/getcompanion-ai/co-mono.git
synced 2026-04-17 02:04:05 +00:00
Add guard against concurrent prompt() calls
Agent.prompt() and Agent.continue() now throw if called while already streaming, preventing race conditions and corrupted state. Use queueMessage() to queue messages during streaming, or await the previous call. AgentSession.prompt() has the same guard with a message directing users to queueMessage(). Ref #403
This commit is contained in:
parent
5c3c8e6f7e
commit
5ef3cc90d1
6 changed files with 327 additions and 2 deletions
|
|
@@ -171,6 +171,10 @@ export class Agent {
|
|||
async prompt(message: AgentMessage | AgentMessage[]): Promise<void>;
|
||||
async prompt(input: string, images?: ImageContent[]): Promise<void>;
|
||||
async prompt(input: string | AgentMessage | AgentMessage[], images?: ImageContent[]) {
|
||||
if (this._state.isStreaming) {
|
||||
throw new Error("Agent is already processing a prompt. Use queueMessage() or wait for completion.");
|
||||
}
|
||||
|
||||
const model = this._state.model;
|
||||
if (!model) throw new Error("No model configured");
|
||||
|
||||
|
|
@@ -199,6 +203,10 @@ export class Agent {
|
|||
|
||||
/** Continue from current context (for retry after overflow) */
|
||||
async continue() {
|
||||
if (this._state.isStreaming) {
|
||||
throw new Error("Agent is already processing. Wait for completion before continuing.");
|
||||
}
|
||||
|
||||
const messages = this._state.messages;
|
||||
if (messages.length === 0) {
|
||||
throw new Error("No messages to continue from");
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue