Add /thinking command and improve TUI UX

- Add /thinking slash command with autocomplete for setting reasoning levels (off, minimal, low, medium, high)
- Fix Ctrl+C behavior: remove hardcoded exit in TUI, let focused component handle it
- Add empty lines before and after tool execution components for better visual separation
- Fix stats rendering: display stats AFTER tool executions complete (matches web-ui behavior)
- Remove "Press Ctrl+C again to exit" message, show "(esc to interrupt)" in loader instead
- Add bash tool abort signal support with immediate SIGKILL on interrupt
- Make Text and Markdown components return empty arrays when no actual text content
- Add setCustomBgRgb() method to Markdown for dynamic background colors
This commit is contained in:
Mario Zechner 2025-11-11 20:28:10 +01:00
parent c5083bb7cb
commit dc1e2f928b
7 changed files with 516 additions and 166 deletions

View file

@ -1840,7 +1840,7 @@ export const MODELS = {
openrouter: { openrouter: {
"kwaipilot/kat-coder-pro:free": { "kwaipilot/kat-coder-pro:free": {
id: "kwaipilot/kat-coder-pro:free", id: "kwaipilot/kat-coder-pro:free",
name: "Kwaipilot: Kat Coder (free)", name: "Kwaipilot: KAT-Coder-Pro V1 (free)",
api: "openai-completions", api: "openai-completions",
provider: "openrouter", provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1", baseUrl: "https://openrouter.ai/api/v1",
@ -4470,7 +4470,7 @@ export const MODELS = {
cacheRead: 0, cacheRead: 0,
cacheWrite: 0, cacheWrite: 0,
}, },
contextWindow: 262144, contextWindow: 256000,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"deepseek/deepseek-chat": { "deepseek/deepseek-chat": {
@ -4745,23 +4745,6 @@ export const MODELS = {
contextWindow: 200000, contextWindow: 200000,
maxTokens: 8192, maxTokens: 8192,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"mistralai/ministral-3b": {
id: "mistralai/ministral-3b",
name: "Mistral: Ministral 3B",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.04,
output: 0.04,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"mistralai/ministral-8b": { "mistralai/ministral-8b": {
id: "mistralai/ministral-8b", id: "mistralai/ministral-8b",
name: "Mistral: Ministral 8B", name: "Mistral: Ministral 8B",
@ -4779,6 +4762,23 @@ export const MODELS = {
contextWindow: 131072, contextWindow: 131072,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"mistralai/ministral-3b": {
id: "mistralai/ministral-3b",
name: "Mistral: Ministral 3B",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.04,
output: 0.04,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"qwen/qwen-2.5-7b-instruct": { "qwen/qwen-2.5-7b-instruct": {
id: "qwen/qwen-2.5-7b-instruct", id: "qwen/qwen-2.5-7b-instruct",
name: "Qwen: Qwen2.5 7B Instruct", name: "Qwen: Qwen2.5 7B Instruct",
@ -4983,23 +4983,6 @@ export const MODELS = {
contextWindow: 128000, contextWindow: 128000,
maxTokens: 16384, maxTokens: 16384,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-405b-instruct": {
id: "meta-llama/llama-3.1-405b-instruct",
name: "Meta: Llama 3.1 405B Instruct",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 3.5,
output: 3.5,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 130815,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-8b-instruct": { "meta-llama/llama-3.1-8b-instruct": {
id: "meta-llama/llama-3.1-8b-instruct", id: "meta-llama/llama-3.1-8b-instruct",
name: "Meta: Llama 3.1 8B Instruct", name: "Meta: Llama 3.1 8B Instruct",
@ -5017,6 +5000,23 @@ export const MODELS = {
contextWindow: 131072, contextWindow: 131072,
maxTokens: 16384, maxTokens: 16384,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-405b-instruct": {
id: "meta-llama/llama-3.1-405b-instruct",
name: "Meta: Llama 3.1 405B Instruct",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 3.5,
output: 3.5,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 130815,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-70b-instruct": { "meta-llama/llama-3.1-70b-instruct": {
id: "meta-llama/llama-3.1-70b-instruct", id: "meta-llama/llama-3.1-70b-instruct",
name: "Meta: Llama 3.1 70B Instruct", name: "Meta: Llama 3.1 70B Instruct",
@ -5187,6 +5187,23 @@ export const MODELS = {
contextWindow: 128000, contextWindow: 128000,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"openai/gpt-4o-2024-05-13": {
id: "openai/gpt-4o-2024-05-13",
name: "OpenAI: GPT-4o (2024-05-13)",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text", "image"],
cost: {
input: 5,
output: 15,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-4o": { "openai/gpt-4o": {
id: "openai/gpt-4o", id: "openai/gpt-4o",
name: "OpenAI: GPT-4o", name: "OpenAI: GPT-4o",
@ -5221,22 +5238,22 @@ export const MODELS = {
contextWindow: 128000, contextWindow: 128000,
maxTokens: 64000, maxTokens: 64000,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"openai/gpt-4o-2024-05-13": { "meta-llama/llama-3-70b-instruct": {
id: "openai/gpt-4o-2024-05-13", id: "meta-llama/llama-3-70b-instruct",
name: "OpenAI: GPT-4o (2024-05-13)", name: "Meta: Llama 3 70B Instruct",
api: "openai-completions", api: "openai-completions",
provider: "openrouter", provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1", baseUrl: "https://openrouter.ai/api/v1",
reasoning: false, reasoning: false,
input: ["text", "image"], input: ["text"],
cost: { cost: {
input: 5, input: 0.3,
output: 15, output: 0.39999999999999997,
cacheRead: 0, cacheRead: 0,
cacheWrite: 0, cacheWrite: 0,
}, },
contextWindow: 128000, contextWindow: 8192,
maxTokens: 4096, maxTokens: 16384,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"meta-llama/llama-3-8b-instruct": { "meta-llama/llama-3-8b-instruct": {
id: "meta-llama/llama-3-8b-instruct", id: "meta-llama/llama-3-8b-instruct",
@ -5255,23 +5272,6 @@ export const MODELS = {
contextWindow: 8192, contextWindow: 8192,
maxTokens: 16384, maxTokens: 16384,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"meta-llama/llama-3-70b-instruct": {
id: "meta-llama/llama-3-70b-instruct",
name: "Meta: Llama 3 70B Instruct",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.3,
output: 0.39999999999999997,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 8192,
maxTokens: 16384,
} satisfies Model<"openai-completions">,
"mistralai/mixtral-8x22b-instruct": { "mistralai/mixtral-8x22b-instruct": {
id: "mistralai/mixtral-8x22b-instruct", id: "mistralai/mixtral-8x22b-instruct",
name: "Mistral: Mixtral 8x22B Instruct", name: "Mistral: Mixtral 8x22B Instruct",
@ -5493,21 +5493,21 @@ export const MODELS = {
contextWindow: 16385, contextWindow: 16385,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"openai/gpt-3.5-turbo": { "openai/gpt-4-0314": {
id: "openai/gpt-3.5-turbo", id: "openai/gpt-4-0314",
name: "OpenAI: GPT-3.5 Turbo", name: "OpenAI: GPT-4 (older v0314)",
api: "openai-completions", api: "openai-completions",
provider: "openrouter", provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1", baseUrl: "https://openrouter.ai/api/v1",
reasoning: false, reasoning: false,
input: ["text"], input: ["text"],
cost: { cost: {
input: 0.5, input: 30,
output: 1.5, output: 60,
cacheRead: 0, cacheRead: 0,
cacheWrite: 0, cacheWrite: 0,
}, },
contextWindow: 16385, contextWindow: 8191,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"openai/gpt-4": { "openai/gpt-4": {
@ -5527,21 +5527,21 @@ export const MODELS = {
contextWindow: 8191, contextWindow: 8191,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
"openai/gpt-4-0314": { "openai/gpt-3.5-turbo": {
id: "openai/gpt-4-0314", id: "openai/gpt-3.5-turbo",
name: "OpenAI: GPT-4 (older v0314)", name: "OpenAI: GPT-3.5 Turbo",
api: "openai-completions", api: "openai-completions",
provider: "openrouter", provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1", baseUrl: "https://openrouter.ai/api/v1",
reasoning: false, reasoning: false,
input: ["text"], input: ["text"],
cost: { cost: {
input: 30, input: 0.5,
output: 60, output: 1.5,
cacheRead: 0, cacheRead: 0,
cacheWrite: 0, cacheWrite: 0,
}, },
contextWindow: 8191, contextWindow: 16385,
maxTokens: 4096, maxTokens: 4096,
} satisfies Model<"openai-completions">, } satisfies Model<"openai-completions">,
}, },

View file

@ -106,7 +106,7 @@ Guidelines:
Current directory: ${process.cwd()}`; Current directory: ${process.cwd()}`;
async function runInteractiveMode(agent: Agent, _sessionManager: SessionManager): Promise<void> { async function runInteractiveMode(agent: Agent, _sessionManager: SessionManager): Promise<void> {
const renderer = new TuiRenderer(); const renderer = new TuiRenderer(agent);
// Initialize TUI // Initialize TUI
await renderer.init(); await renderer.init();
@ -116,6 +116,9 @@ async function runInteractiveMode(agent: Agent, _sessionManager: SessionManager)
agent.abort(); agent.abort();
}); });
// Render any existing messages (from --continue mode)
renderer.renderInitialMessages(agent.state);
// Subscribe to agent events // Subscribe to agent events
agent.subscribe(async (event) => { agent.subscribe(async (event) => {
// Pass all events to the renderer // Pass all events to the renderer

View file

@ -1,9 +1,6 @@
import type { AgentTool } from "@mariozechner/pi-ai"; import type { AgentTool } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
import { exec } from "child_process"; import { exec } from "child_process";
import { promisify } from "util";
const execAsync = promisify(exec);
const bashSchema = Type.Object({ const bashSchema = Type.Object({
command: Type.String({ description: "Bash command to execute" }), command: Type.String({ description: "Bash command to execute" }),
@ -15,23 +12,54 @@ export const bashTool: AgentTool<typeof bashSchema> = {
description: description:
"Execute a bash command in the current working directory. Returns stdout and stderr. Commands run with a 30 second timeout.", "Execute a bash command in the current working directory. Returns stdout and stderr. Commands run with a 30 second timeout.",
parameters: bashSchema, parameters: bashSchema,
execute: async (_toolCallId: string, { command }: { command: string }) => { execute: async (_toolCallId: string, { command }: { command: string }, signal?: AbortSignal) => {
try { return new Promise((resolve) => {
const { stdout, stderr } = await execAsync(command, { const child = exec(
timeout: 30000, command,
maxBuffer: 10 * 1024 * 1024, // 10MB {
}); timeout: 30000,
maxBuffer: 10 * 1024 * 1024, // 10MB
},
(error, stdout, stderr) => {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
let output = ""; if (signal?.aborted) {
if (stdout) output += stdout; resolve({
if (stderr) output += stderr ? `\nSTDERR:\n${stderr}` : ""; output: `Command aborted by user\nSTDOUT: ${stdout}\nSTDERR: ${stderr}`,
details: undefined,
});
return;
}
return { output: output || "(no output)", details: undefined }; let output = "";
} catch (error: any) { if (stdout) output += stdout;
return { if (stderr) output += stderr ? `\nSTDERR:\n${stderr}` : "";
output: `Error executing command: ${error.message}\nSTDOUT: ${error.stdout || ""}\nSTDERR: ${error.stderr || ""}`,
details: undefined, if (error && !error.killed) {
resolve({
output: `Error executing command: ${error.message}\n${output}`,
details: undefined,
});
} else {
resolve({ output: output || "(no output)", details: undefined });
}
},
);
// Handle abort signal
const onAbort = () => {
child.kill("SIGKILL");
}; };
}
if (signal) {
if (signal.aborted) {
onAbort();
} else {
signal.addEventListener("abort", onAbort, { once: true });
}
}
});
}, },
}; };

View file

@ -1,5 +1,6 @@
import type { AgentState } from "@mariozechner/pi-agent"; import type { Agent, AgentState, ThinkingLevel } from "@mariozechner/pi-agent";
import type { AssistantMessage, Message } from "@mariozechner/pi-ai"; import type { AssistantMessage, Message } from "@mariozechner/pi-ai";
import type { SlashCommand } from "@mariozechner/pi-tui";
import { import {
CombinedAutocompleteProvider, CombinedAutocompleteProvider,
Container, Container,
@ -42,16 +43,20 @@ class CustomEditor extends Editor {
*/ */
class StreamingMessageComponent extends Container { class StreamingMessageComponent extends Container {
private markdown: Markdown; private markdown: Markdown;
private statsText: Text;
constructor() { constructor() {
super(); super();
this.markdown = new Markdown(""); this.markdown = new Markdown("");
this.statsText = new Text("", 1, 0);
this.addChild(this.markdown); this.addChild(this.markdown);
this.addChild(this.statsText);
} }
updateContent(message: Message | null) { updateContent(message: Message | null) {
if (!message) { if (!message) {
this.markdown.setText(""); this.markdown.setText("");
this.statsText.setText("");
return; return;
} }
@ -65,36 +70,74 @@ class StreamingMessageComponent extends Container {
.join(""); .join("");
this.markdown.setText(textContent); this.markdown.setText(textContent);
// Update usage stats
const usage = assistantMsg.usage;
if (usage) {
// Format token counts (similar to web-ui)
const formatTokens = (count: number): string => {
if (count < 1000) return count.toString();
if (count < 10000) return (count / 1000).toFixed(1) + "k";
return Math.round(count / 1000) + "k";
};
const statsParts = [];
if (usage.input) statsParts.push(`${formatTokens(usage.input)}`);
if (usage.output) statsParts.push(`${formatTokens(usage.output)}`);
if (usage.cacheRead) statsParts.push(`R${formatTokens(usage.cacheRead)}`);
if (usage.cacheWrite) statsParts.push(`W${formatTokens(usage.cacheWrite)}`);
if (usage.cost?.total) statsParts.push(`$${usage.cost.total.toFixed(3)}`);
this.statsText.setText(chalk.dim(statsParts.join(" ")));
} else {
this.statsText.setText("");
}
} }
} }
} }
/** /**
* Component that renders a tool call with its result * Component that renders a tool call with its result (updateable)
*/ */
class ToolExecutionComponent extends Container { class ToolExecutionComponent extends Container {
private markdown: Markdown; private markdown: Markdown;
private toolName: string;
private args: any;
private result?: { output: string; isError: boolean };
constructor(toolName: string, args: any, result?: { output: string; isError: boolean }) { constructor(toolName: string, args: any) {
super(); super();
const bgColor = result this.toolName = toolName;
? result.isError this.args = args;
this.markdown = new Markdown("", undefined, undefined, { r: 40, g: 40, b: 50 });
this.addChild(this.markdown);
this.updateDisplay();
}
updateResult(result: { output: string; isError: boolean }): void {
this.result = result;
this.updateDisplay();
}
private updateDisplay(): void {
const bgColor = this.result
? this.result.isError
? { r: 60, g: 40, b: 40 } ? { r: 60, g: 40, b: 40 }
: { r: 40, g: 50, b: 40 } : { r: 40, g: 50, b: 40 }
: { r: 40, g: 40, b: 50 }; : { r: 40, g: 40, b: 50 };
this.markdown = new Markdown(this.formatToolExecution(toolName, args, result), undefined, undefined, bgColor); this.markdown.setCustomBgRgb(bgColor);
this.addChild(this.markdown); this.markdown.setText(this.formatToolExecution());
} }
private formatToolExecution(toolName: string, args: any, result?: { output: string; isError: boolean }): string { private formatToolExecution(): string {
let text = ""; let text = "";
// Format based on tool type // Format based on tool type
if (toolName === "bash") { if (this.toolName === "bash") {
const command = args.command || ""; const command = this.args.command || "";
text = `**$ ${command}**`; text = `**$ ${command}**`;
if (result) { if (this.result) {
const lines = result.output.split("\n"); const lines = this.result.output.split("\n");
const maxLines = 5; const maxLines = 5;
const displayLines = lines.slice(0, maxLines); const displayLines = lines.slice(0, maxLines);
const remaining = lines.length - maxLines; const remaining = lines.length - maxLines;
@ -105,15 +148,15 @@ class ToolExecutionComponent extends Container {
} }
text += "\n```"; text += "\n```";
if (result.isError) { if (this.result.isError) {
text += " ❌"; text += " ❌";
} }
} }
} else if (toolName === "read") { } else if (this.toolName === "read") {
const path = args.path || ""; const path = this.args.path || "";
text = `**read** \`${path}\``; text = `**read** \`${path}\``;
if (result) { if (this.result) {
const lines = result.output.split("\n"); const lines = this.result.output.split("\n");
const maxLines = 5; const maxLines = 5;
const displayLines = lines.slice(0, maxLines); const displayLines = lines.slice(0, maxLines);
const remaining = lines.length - maxLines; const remaining = lines.length - maxLines;
@ -124,30 +167,30 @@ class ToolExecutionComponent extends Container {
} }
text += "\n```"; text += "\n```";
if (result.isError) { if (this.result.isError) {
text += " ❌"; text += " ❌";
} }
} }
} else if (toolName === "write") { } else if (this.toolName === "write") {
const path = args.path || ""; const path = this.args.path || "";
const content = args.content || ""; const content = this.args.content || "";
const lines = content.split("\n"); const lines = content.split("\n");
text = `**write** \`${path}\` (${lines.length} lines)`; text = `**write** \`${path}\` (${lines.length} lines)`;
if (result) { if (this.result) {
text += result.isError ? " ❌" : " ✓"; text += this.result.isError ? " ❌" : " ✓";
} }
} else if (toolName === "edit") { } else if (this.toolName === "edit") {
const path = args.path || ""; const path = this.args.path || "";
text = `**edit** \`${path}\``; text = `**edit** \`${path}\``;
if (result) { if (this.result) {
text += result.isError ? " ❌" : " ✓"; text += this.result.isError ? " ❌" : " ✓";
} }
} else { } else {
// Generic tool // Generic tool
text = `**${toolName}**\n\`\`\`json\n${JSON.stringify(args, null, 2)}\n\`\`\``; text = `**${this.toolName}**\n\`\`\`json\n${JSON.stringify(this.args, null, 2)}\n\`\`\``;
if (result) { if (this.result) {
text += `\n\`\`\`\n${result.output}\n\`\`\``; text += `\n\`\`\`\n${this.result.output}\n\`\`\``;
text += result.isError ? " ❌" : " ✓"; text += this.result.isError ? " ❌" : " ✓";
} }
} }
@ -155,6 +198,82 @@ class ToolExecutionComponent extends Container {
} }
} }
/**
* Footer component that shows pwd, token stats, and context usage
*/
/**
 * Footer component showing the working directory plus cumulative
 * token/cost totals and context-window usage for the whole session.
 *
 * Totals are recomputed on every render from the assistant messages in
 * the current AgentState, so the footer always reflects live usage.
 */
class FooterComponent {
	private state: AgentState;

	constructor(state: AgentState) {
		this.state = state;
	}

	/** Swap in a fresh agent state; the next render picks it up. */
	updateState(state: AgentState): void {
		this.state = state;
	}

	/** Render two dim lines: [pwd, stats]. `width` bounds the path length. */
	render(width: number): string[] {
		// Sum usage across every assistant message in the conversation.
		const totals = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, cost: 0 };
		for (const msg of this.state.messages) {
			if (msg.role !== "assistant") continue;
			const { usage } = msg as AssistantMessage;
			totals.input += usage.input;
			totals.output += usage.output;
			totals.cacheRead += usage.cacheRead;
			totals.cacheWrite += usage.cacheWrite;
			totals.cost += usage.cost.total;
		}

		// Percentage of the model's context window consumed by in/out tokens.
		const usedTokens = totals.input + totals.output;
		const windowSize = this.state.model.contextWindow;
		const contextPercent = windowSize > 0 ? ((usedTokens / windowSize) * 100).toFixed(1) : "0.0";

		// Compact token formatting (mirrors web-ui): 999 → "999", 9 999 → "9.9k", 12 345 → "12k".
		const formatTokens = (count: number): string =>
			count < 1000 ? count.toString() : count < 10000 ? (count / 1000).toFixed(1) + "k" : Math.round(count / 1000) + "k";

		// Abbreviate the home directory to "~".
		let pwd = process.cwd();
		const home = process.env.HOME || process.env.USERPROFILE;
		if (home && pwd.startsWith(home)) {
			pwd = "~" + pwd.slice(home.length);
		}

		// Middle-truncate overly long paths so the line fits within `width`.
		const maxPathLength = Math.max(20, width - 10); // leave some margin
		if (pwd.length > maxPathLength) {
			const half = Math.floor(maxPathLength / 2);
			pwd = `${pwd.slice(0, half - 2)}...${pwd.slice(-(half - 1))}`;
		}

		// Only non-zero counters appear; context percentage is always shown.
		const statsParts: string[] = [];
		if (totals.input) statsParts.push(`${formatTokens(totals.input)}`);
		if (totals.output) statsParts.push(`${formatTokens(totals.output)}`);
		if (totals.cacheRead) statsParts.push(`R${formatTokens(totals.cacheRead)}`);
		if (totals.cacheWrite) statsParts.push(`W${formatTokens(totals.cacheWrite)}`);
		if (totals.cost) statsParts.push(`$${totals.cost.toFixed(3)}`);
		statsParts.push(`${contextPercent}%`);

		return [chalk.dim(pwd), chalk.dim(statsParts.join(" "))];
	}
}
/** /**
* TUI renderer for the coding agent * TUI renderer for the coding agent
*/ */
@ -163,6 +282,8 @@ export class TuiRenderer {
private chatContainer: Container; private chatContainer: Container;
private statusContainer: Container; private statusContainer: Container;
private editor: CustomEditor; private editor: CustomEditor;
private footer: FooterComponent;
private agent: Agent;
private isInitialized = false; private isInitialized = false;
private onInputCallback?: (text: string) => void; private onInputCallback?: (text: string) => void;
private loadingAnimation: Loader | null = null; private loadingAnimation: Loader | null = null;
@ -172,17 +293,38 @@ export class TuiRenderer {
// Streaming message tracking // Streaming message tracking
private streamingComponent: StreamingMessageComponent | null = null; private streamingComponent: StreamingMessageComponent | null = null;
// Tool execution tracking: toolCallId -> { component, toolName, args } // Tool execution tracking: toolCallId -> component
private pendingTools = new Map<string, { component: ToolExecutionComponent; toolName: string; args: any }>(); private pendingTools = new Map<string, ToolExecutionComponent>();
constructor() { // Track assistant message with tool calls that needs stats shown after tools complete
private deferredStats: { usage: any; toolCallIds: Set<string> } | null = null;
constructor(agent: Agent) {
this.agent = agent;
this.ui = new TUI(new ProcessTerminal()); this.ui = new TUI(new ProcessTerminal());
this.chatContainer = new Container(); this.chatContainer = new Container();
this.statusContainer = new Container(); this.statusContainer = new Container();
this.editor = new CustomEditor(); this.editor = new CustomEditor();
this.footer = new FooterComponent(agent.state);
// Define slash commands
const thinkingCommand: SlashCommand = {
name: "thinking",
description: "Set reasoning level (off, minimal, low, medium, high)",
getArgumentCompletions: (argumentPrefix: string) => {
const levels = ["off", "minimal", "low", "medium", "high"];
return levels
.filter((level) => level.toLowerCase().startsWith(argumentPrefix.toLowerCase()))
.map((level) => ({
value: level,
label: level,
description: `Set thinking level to ${level}`,
}));
},
};
// Setup autocomplete for file paths and slash commands // Setup autocomplete for file paths and slash commands
const autocompleteProvider = new CombinedAutocompleteProvider([], process.cwd()); const autocompleteProvider = new CombinedAutocompleteProvider([thinkingCommand], process.cwd());
this.editor.setAutocompleteProvider(autocompleteProvider); this.editor.setAutocompleteProvider(autocompleteProvider);
} }
@ -193,7 +335,6 @@ export class TuiRenderer {
const header = new Text( const header = new Text(
">> coding-agent interactive <<\n" + ">> coding-agent interactive <<\n" +
"Press Escape to interrupt while processing\n" + "Press Escape to interrupt while processing\n" +
"Press CTRL+C to clear the text editor\n" +
"Press CTRL+C twice quickly to exit\n", "Press CTRL+C twice quickly to exit\n",
); );
@ -202,6 +343,7 @@ export class TuiRenderer {
this.ui.addChild(this.chatContainer); this.ui.addChild(this.chatContainer);
this.ui.addChild(this.statusContainer); this.ui.addChild(this.statusContainer);
this.ui.addChild(this.editor); this.ui.addChild(this.editor);
this.ui.addChild(this.footer);
this.ui.setFocus(this.editor); this.ui.setFocus(this.editor);
// Set up custom key handlers on the editor // Set up custom key handlers on the editor
@ -213,19 +355,7 @@ export class TuiRenderer {
}; };
this.editor.onCtrlC = () => { this.editor.onCtrlC = () => {
// Handle Ctrl+C (raw mode sends \x03) this.handleCtrlC();
const now = Date.now();
const timeSinceLastCtrlC = now - this.lastSigintTime;
if (timeSinceLastCtrlC < 500) {
// Second Ctrl+C within 500ms - exit
this.stop();
process.exit(0);
} else {
// First Ctrl+C - clear the editor
this.clearEditor();
this.lastSigintTime = now;
}
}; };
// Handle editor submission // Handle editor submission
@ -233,6 +363,32 @@ export class TuiRenderer {
text = text.trim(); text = text.trim();
if (!text) return; if (!text) return;
// Check for slash commands
if (text.startsWith("/thinking ")) {
const level = text.slice("/thinking ".length).trim() as ThinkingLevel;
const validLevels: ThinkingLevel[] = ["off", "minimal", "low", "medium", "high"];
if (validLevels.includes(level)) {
this.agent.setThinkingLevel(level);
// Show confirmation message
const confirmText = new Text(chalk.dim(`Thinking level set to: ${level}`), 1, 0);
this.chatContainer.addChild(confirmText);
this.ui.requestRender();
this.editor.setText("");
return;
} else {
// Show error message
const errorText = new Text(
chalk.red(`Invalid thinking level: ${level}. Use: off, minimal, low, medium, high`),
1,
0,
);
this.chatContainer.addChild(errorText);
this.ui.requestRender();
this.editor.setText("");
return;
}
}
if (this.onInputCallback) { if (this.onInputCallback) {
this.onInputCallback(text); this.onInputCallback(text);
} }
@ -243,11 +399,14 @@ export class TuiRenderer {
this.isInitialized = true; this.isInitialized = true;
} }
async handleEvent(event: import("@mariozechner/pi-agent").AgentEvent, _state: AgentState): Promise<void> { async handleEvent(event: import("@mariozechner/pi-agent").AgentEvent, state: AgentState): Promise<void> {
if (!this.isInitialized) { if (!this.isInitialized) {
await this.init(); await this.init();
} }
// Update footer with current stats
this.footer.updateState(state);
switch (event.type) { switch (event.type) {
case "agent_start": case "agent_start":
// Show loading animation // Show loading animation
@ -257,7 +416,7 @@ export class TuiRenderer {
this.loadingAnimation.stop(); this.loadingAnimation.stop();
} }
this.statusContainer.clear(); this.statusContainer.clear();
this.loadingAnimation = new Loader(this.ui, "Working..."); this.loadingAnimation = new Loader(this.ui, "Working... (esc to interrupt)");
this.statusContainer.addChild(this.loadingAnimation); this.statusContainer.addChild(this.loadingAnimation);
this.ui.requestRender(); this.ui.requestRender();
break; break;
@ -300,26 +459,39 @@ export class TuiRenderer {
break; break;
case "tool_execution_start": { case "tool_execution_start": {
// Add empty line before tool execution
this.chatContainer.addChild(new Text("", 0, 0));
// Create tool execution component and add it // Create tool execution component and add it
const component = new ToolExecutionComponent(event.toolName, event.args); const component = new ToolExecutionComponent(event.toolName, event.args);
this.chatContainer.addChild(component); this.chatContainer.addChild(component);
this.pendingTools.set(event.toolCallId, { component, toolName: event.toolName, args: event.args }); this.pendingTools.set(event.toolCallId, component);
this.ui.requestRender(); this.ui.requestRender();
break; break;
} }
case "tool_execution_end": { case "tool_execution_end": {
// Update the existing tool component with the result // Update the existing tool component with the result
const pending = this.pendingTools.get(event.toolCallId); const component = this.pendingTools.get(event.toolCallId);
if (pending) { if (component) {
// Re-render the component with result // Update the component with the result
this.chatContainer.removeChild(pending.component); component.updateResult({
const updatedComponent = new ToolExecutionComponent(pending.toolName, pending.args, {
output: typeof event.result === "string" ? event.result : event.result.output, output: typeof event.result === "string" ? event.result : event.result.output,
isError: event.isError, isError: event.isError,
}); });
this.chatContainer.addChild(updatedComponent); // Add empty line after tool execution
this.chatContainer.addChild(new Text("", 0, 0));
this.pendingTools.delete(event.toolCallId); this.pendingTools.delete(event.toolCallId);
// Check if this was part of deferred stats and all tools are complete
if (this.deferredStats) {
this.deferredStats.toolCallIds.delete(event.toolCallId);
if (this.deferredStats.toolCallIds.size === 0) {
// All tools complete - show stats now
this.addStatsComponent(this.deferredStats.usage);
this.deferredStats = null;
}
}
this.ui.requestRender(); this.ui.requestRender();
} }
break; break;
@ -337,6 +509,7 @@ export class TuiRenderer {
this.streamingComponent = null; this.streamingComponent = null;
} }
this.pendingTools.clear(); this.pendingTools.clear();
this.deferredStats = null; // Clear any deferred stats
this.editor.disableSubmit = false; this.editor.disableSubmit = false;
this.ui.requestRender(); this.ui.requestRender();
break; break;
@ -381,10 +554,133 @@ export class TuiRenderer {
this.chatContainer.addChild(errorText); this.chatContainer.addChild(errorText);
return; return;
} }
// Check if this message has tool calls
const hasToolCalls = assistantMsg.content.some((c) => c.type === "toolCall");
if (hasToolCalls) {
// Defer stats until after tool executions complete
const toolCallIds = new Set<string>();
for (const content of assistantMsg.content) {
if (content.type === "toolCall") {
toolCallIds.add(content.id);
}
}
this.deferredStats = { usage: assistantMsg.usage, toolCallIds };
} else {
// No tool calls - show stats immediately
this.addStatsComponent(assistantMsg.usage);
}
} }
// Note: tool calls and results are now handled via tool_execution_start/end events // Note: tool calls and results are now handled via tool_execution_start/end events
} }
private addStatsComponent(usage: any): void {
if (!usage) return;
// Format token counts (similar to web-ui)
const formatTokens = (count: number): string => {
if (count < 1000) return count.toString();
if (count < 10000) return (count / 1000).toFixed(1) + "k";
return Math.round(count / 1000) + "k";
};
const statsParts = [];
if (usage.input) statsParts.push(`${formatTokens(usage.input)}`);
if (usage.output) statsParts.push(`${formatTokens(usage.output)}`);
if (usage.cacheRead) statsParts.push(`R${formatTokens(usage.cacheRead)}`);
if (usage.cacheWrite) statsParts.push(`W${formatTokens(usage.cacheWrite)}`);
if (usage.cost?.total) statsParts.push(`$${usage.cost.total.toFixed(3)}`);
if (statsParts.length > 0) {
const statsText = new Text(chalk.dim(statsParts.join(" ")), 1, 0);
this.chatContainer.addChild(statsText);
// Add empty line after stats
this.chatContainer.addChild(new Text("", 0, 0));
}
}
/**
 * Renders the full message history into the chat container (used for
 * --continue mode, replaying a previous session's state).
 *
 * Stats for an assistant message that issued tool calls are shown only
 * after ALL of that message's tool results have been rendered, matching
 * the live-streaming behavior.
 */
renderInitialMessages(state: AgentState): void {
	// Render all existing messages (for --continue mode)
	// Track assistant messages with their tool calls to show stats after tools
	const assistantWithTools = new Map<
		number,
		{ usage: any; toolCallIds: Set<string>; remainingToolCallIds: Set<string> }
	>();

	// First pass: identify assistant messages with tool calls.
	// Keyed by message index; remainingToolCallIds shrinks in the second
	// pass as matching tool results are rendered.
	for (let i = 0; i < state.messages.length; i++) {
		const message = state.messages[i];
		if (message.role === "assistant") {
			const assistantMsg = message as AssistantMessage;
			const toolCallIds = new Set<string>();
			for (const content of assistantMsg.content) {
				if (content.type === "toolCall") {
					toolCallIds.add(content.id);
				}
			}
			if (toolCallIds.size > 0) {
				assistantWithTools.set(i, {
					usage: assistantMsg.usage,
					toolCallIds,
					remainingToolCallIds: new Set(toolCallIds),
				});
			}
		}
	}

	// Second pass: render messages
	for (let i = 0; i < state.messages.length; i++) {
		const message = state.messages[i];
		if (message.role === "user" || message.role === "assistant") {
			// Temporarily disable deferred stats for initial render —
			// this method schedules stats itself via assistantWithTools,
			// so any deferral set up by addMessageToChat is discarded.
			const savedDeferredStats = this.deferredStats;
			this.deferredStats = null;
			this.addMessageToChat(message);
			this.deferredStats = savedDeferredStats;
		} else if (message.role === "toolResult") {
			// Render tool calls that have already completed.
			// Locate the assistant message that issued this tool call.
			const toolResultMsg = message as any;
			const assistantMsgIndex = state.messages.findIndex(
				(m) =>
					m.role === "assistant" &&
					m.content.some((c: any) => c.type === "toolCall" && c.id === toolResultMsg.toolCallId),
			);
			if (assistantMsgIndex !== -1) {
				const assistantMsg = state.messages[assistantMsgIndex] as AssistantMessage;
				const toolCall = assistantMsg.content.find(
					(c) => c.type === "toolCall" && c.id === toolResultMsg.toolCallId,
				) as any;
				if (toolCall) {
					// Add empty line before tool execution
					this.chatContainer.addChild(new Text("", 0, 0));
					const component = new ToolExecutionComponent(toolCall.name, toolCall.arguments);
					component.updateResult({
						output: toolResultMsg.output,
						isError: toolResultMsg.isError,
					});
					this.chatContainer.addChild(component);
					// Add empty line after tool execution
					this.chatContainer.addChild(new Text("", 0, 0));

					// Check if this was the last tool call for this assistant message
					const assistantData = assistantWithTools.get(assistantMsgIndex);
					if (assistantData) {
						assistantData.remainingToolCallIds.delete(toolResultMsg.toolCallId);
						if (assistantData.remainingToolCallIds.size === 0) {
							// All tools for this assistant message are complete - show stats
							this.addStatsComponent(assistantData.usage);
						}
					}
				}
			}
		}
	}
	this.ui.requestRender();
}
async getUserInput(): Promise<string> { async getUserInput(): Promise<string> {
return new Promise((resolve) => { return new Promise((resolve) => {
this.onInputCallback = (text: string) => { this.onInputCallback = (text: string) => {
@ -398,17 +694,26 @@ export class TuiRenderer {
this.onInterruptCallback = callback; this.onInterruptCallback = callback;
} }
/**
 * Handles Ctrl+C with double-press semantics: a single press clears the
 * editor; a second press within 500ms stops the TUI and exits.
 */
private handleCtrlC(): void {
	const now = Date.now();
	const withinWindow = now - this.lastSigintTime < 500;

	if (!withinWindow) {
		// First press: wipe the input and open the double-press window.
		this.clearEditor();
		this.lastSigintTime = now;
		return;
	}

	// Second press inside the window: shut down and quit.
	this.stop();
	process.exit(0);
}
clearEditor(): void { clearEditor(): void {
this.editor.setText(""); this.editor.setText("");
this.statusContainer.clear(); this.statusContainer.clear();
const hint = new Text("Press Ctrl+C again to exit");
this.statusContainer.addChild(hint);
this.ui.requestRender(); this.ui.requestRender();
setTimeout(() => {
this.statusContainer.clear();
this.ui.requestRender();
}, 500);
} }
stop(): void { stop(): void {

View file

@ -76,6 +76,14 @@ export class Markdown implements Component {
this.cachedLines = undefined; this.cachedLines = undefined;
} }
/**
 * Sets a custom background RGB color for the rendered markdown, or clears
 * it when called with undefined. Drops the cached render output so the
 * next render() pass picks up the new color.
 */
setCustomBgRgb(customBgRgb?: { r: number; g: number; b: number }): void {
	this.customBgRgb = customBgRgb;
	// Invalidate cache when color changes
	this.cachedText = undefined;
	this.cachedWidth = undefined;
	this.cachedLines = undefined;
}
render(width: number): string[] { render(width: number): string[] {
// Check cache // Check cache
if (this.cachedLines && this.cachedText === this.text && this.cachedWidth === width) { if (this.cachedLines && this.cachedText === this.text && this.cachedWidth === width) {
@ -85,6 +93,16 @@ export class Markdown implements Component {
// Calculate available width for content (subtract horizontal padding) // Calculate available width for content (subtract horizontal padding)
const contentWidth = Math.max(1, width - this.paddingX * 2); const contentWidth = Math.max(1, width - this.paddingX * 2);
// Don't render anything if there's no actual text
if (!this.text || this.text.trim() === "") {
const result: string[] = [];
// Update cache
this.cachedText = this.text;
this.cachedWidth = width;
this.cachedLines = result;
return result;
}
// Parse markdown to HTML-like tokens // Parse markdown to HTML-like tokens
const tokens = marked.lexer(this.text); const tokens = marked.lexer(this.text);

View file

@ -37,8 +37,9 @@ export class Text implements Component {
// Calculate available width for content (subtract horizontal padding) // Calculate available width for content (subtract horizontal padding)
const contentWidth = Math.max(1, width - this.paddingX * 2); const contentWidth = Math.max(1, width - this.paddingX * 2);
if (!this.text) { // Don't render anything if there's no actual text
const result = [""]; if (!this.text || this.text.trim() === "") {
const result: string[] = [];
// Update cache // Update cache
this.cachedText = this.text; this.cachedText = this.text;
this.cachedWidth = width; this.cachedWidth = width;

View file

@ -98,13 +98,8 @@ export class TUI extends Container {
} }
private handleInput(data: string): void { private handleInput(data: string): void {
// Exit on Ctrl+C // Pass input to focused component (including Ctrl+C)
if (data === "\x03") { // The focused component can decide how to handle Ctrl+C
this.stop();
process.exit(0);
}
// Pass input to focused component
if (this.focusedComponent?.handleInput) { if (this.focusedComponent?.handleInput) {
this.focusedComponent.handleInput(data); this.focusedComponent.handleInput(data);
this.requestRender(); this.requestRender();