Wire context event to preprocessor for per-LLM-call execution

- Change from contextTransform (runs once at agent start) to preprocessor
- preprocessor runs before EACH LLM call inside the agent loop
- ContextEvent now uses Message[] (pi-ai format) instead of AppMessage[]
- Deep copy handled by pi-ai preprocessor, not Agent

This enables:
- Pruning rules applied on every turn (not just agent start)
- /prune during long agent loop takes effect immediately
- Compaction can use the same transforms (future work)
This commit is contained in:
Mario Zechner 2025-12-27 19:57:14 +01:00
parent 77fe3f1a13
commit a2515cf43f
6 changed files with 22 additions and 28 deletions

View file

@@ -2,7 +2,7 @@
* Hook runner - executes hooks and manages their lifecycle.
*/
import type { AppMessage } from "@mariozechner/pi-agent-core";
import type { Message } from "@mariozechner/pi-ai";
import type { ModelRegistry } from "../model-registry.js";
import type { SessionManager } from "../session-manager.js";
import type { AppendEntryHandler, LoadedHook, SendMessageHandler } from "./loader.js";
@@ -311,12 +311,13 @@ export class HookRunner {
/**
* Emit a context event to all hooks.
* Handlers are chained - each gets the previous handler's output (if any).
* Returns the final modified messages, or undefined if no modifications.
* Returns the final modified messages, or the original if no modifications.
*
* Note: Messages are already deep-copied by the caller (pi-ai preprocessor).
*/
async emitContext(messages: AppMessage[]): Promise<AppMessage[] | undefined> {
async emitContext(messages: Message[]): Promise<Message[]> {
const ctx = this.createContext();
let currentMessages = messages;
let modified = false;
for (const hook of this.hooks) {
const handlers = hook.handlers.get("context");
@@ -331,7 +332,6 @@
if (handlerResult && (handlerResult as ContextEventResult).messages) {
currentMessages = (handlerResult as ContextEventResult).messages!;
modified = true;
}
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
@@ -344,6 +344,6 @@
}
}
return modified ? currentMessages : undefined;
return currentMessages;
}
}

View file

@@ -6,7 +6,7 @@
*/
import type { AppMessage } from "@mariozechner/pi-agent-core";
import type { ImageContent, Model, TextContent, ToolResultMessage } from "@mariozechner/pi-ai";
import type { ImageContent, Message, Model, TextContent, ToolResultMessage } from "@mariozechner/pi-ai";
import type { Component } from "@mariozechner/pi-tui";
import type { Theme } from "../../modes/interactive/theme/theme.js";
import type { CompactionPreparation, CompactionResult } from "../compaction.js";
@@ -148,13 +148,14 @@ export type SessionEvent =
/**
* Event data for context event.
* Fired before messages are sent to the LLM, allowing hooks to modify context non-destructively.
* Fired before each LLM call, allowing hooks to modify context non-destructively.
* Original session messages are NOT modified - only the messages sent to the LLM are affected.
* Messages are already in LLM format (Message[], not AppMessage[]).
*/
export interface ContextEvent {
type: "context";
/** Messages about to be sent to the LLM */
messages: AppMessage[];
/** Messages about to be sent to the LLM (deep copy, safe to modify) */
messages: Message[];
}
/**
@@ -330,7 +331,7 @@
*/
export interface ContextEventResult {
/** Modified messages to send instead of the original */
messages?: AppMessage[];
messages?: Message[];
}
/**

View file

@@ -589,7 +589,7 @@ export async function createAgentSession(options: CreateAgentSessionOptions = {}
tools: allToolsArray,
},
messageTransformer,
contextTransform: hookRunner
preprocessor: hookRunner
? async (messages) => {
return hookRunner.emitContext(messages);
}