mirror of
https://github.com/getcompanion-ai/co-mono.git
synced 2026-04-16 06:02:42 +00:00
Add an Agent.prompt(AppMessage) overload for custom message types
Instead of using continue(), which validates roles, prompt() now accepts an AppMessage directly. This allows hook messages with role: 'hookMessage' to trigger the proper agent loop with message events. - Add overloads: prompt(AppMessage) and prompt(string, attachments?). - sendHookMessage now uses prompt(appMessage) instead of appendMessage + continue.
This commit is contained in:
parent
02f2c50155
commit
a6322fda59
3 changed files with 33 additions and 25 deletions
|
|
@ -584,10 +584,8 @@ export class AgentSession {
|
|||
// Queue for processing by agent loop
|
||||
await this.agent.queueMessage(appMessage);
|
||||
} else if (triggerTurn) {
|
||||
// Append to agent state and session, then trigger a turn
|
||||
this.agent.appendMessage(appMessage);
|
||||
// Start a new turn - emit message events for the hook message so TUI can render it
|
||||
await this.agent.continue(true);
|
||||
// Send as prompt - agent loop will emit message events
|
||||
await this.agent.prompt(appMessage);
|
||||
} else {
|
||||
// Just append to agent state and session, no turn
|
||||
this.agent.appendMessage(appMessage);
|
||||
|
|
|
|||
|
|
@ -115,7 +115,7 @@ export function messageTransformer(messages: AppMessage[]): Message[] {
|
|||
};
|
||||
}
|
||||
if (isHookAppMessage(m)) {
|
||||
// Convert hook message to user message
|
||||
// Convert hook message to user message for LLM
|
||||
return {
|
||||
role: "user",
|
||||
content: m.content,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue