tui: Fix differential rendering to preserve scrollback buffer

- renderDifferential now correctly handles content that exceeds viewport
- When changes are above viewport, do full re-render with scrollback clear
- When changes are in viewport, do partial re-render from change point
- All tests pass, correctly preserves 100 items in scrollback
- Issue: Still re-renders too much (entire tail from first change)
This commit is contained in:
Mario Zechner 2025-08-11 00:57:59 +02:00
parent afa807b200
commit 0131b29b2c
22 changed files with 2878 additions and 80 deletions

6
package-lock.json generated
View file

@@ -792,7 +792,7 @@
},
"packages/agent": {
"name": "@mariozechner/pi-agent",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@mariozechner/pi-tui": "^0.5.7",
@@ -1236,7 +1236,7 @@
},
"packages/pods": {
"name": "@mariozechner/pi",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@mariozechner/pi-agent": "^0.5.7",
@@ -1252,7 +1252,7 @@
},
"packages/tui": {
"name": "@mariozechner/pi-tui",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@types/mime-types": "^2.1.4",

View file

@@ -1,12 +1,12 @@
{
"name": "@mariozechner/pi-agent",
"version": "0.5.7",
"version": "0.5.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/pi-agent",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@mariozechner/tui": "^0.1.1",

View file

@@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi-agent",
"version": "0.5.7",
"version": "0.5.8",
"description": "General-purpose agent with tool calling and session persistence",
"type": "module",
"bin": {
@@ -18,7 +18,7 @@
"prepublishOnly": "npm run clean && npm run build"
},
"dependencies": {
"@mariozechner/pi-tui": "^0.5.7",
"@mariozechner/pi-tui": "^0.5.8",
"@types/glob": "^8.1.0",
"chalk": "^5.5.0",
"glob": "^11.0.3",

771
packages/agent/src/agent.js Normal file
View file

@@ -0,0 +1,771 @@
"use strict";
// Compiled output (tsc ES5 downlevel). __assign is the TypeScript helper
// that emulates Object.assign for object-spread expressions; it falls back
// to a manual own-property copy when Object.assign is unavailable.
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
// TypeScript helper that downlevels async/await: wraps a generator-based
// body in a Promise, stepping the generator and adopting yielded values.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript helper that downlevels generator functions into an explicit
// state machine. The opcodes in `op[0]` are: 0=next, 1=throw, 2=return,
// 3=break(label), 4=yield, 5=yield*, 6=catch, 7=endfinally. Machine-generated;
// do not edit by hand.
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
// TypeScript helper for object rest destructuring: copies every own
// enumerable property of `s` except those listed in `e` (including
// symbol-keyed properties) into a fresh object.
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
// CommonJS module surface: the Agent class plus the two low-level API drivers.
Object.defineProperty(exports, "__esModule", { value: true });
exports.Agent = void 0;
exports.callModelResponsesApi = callModelResponsesApi;
exports.callModelChatCompletionsApi = callModelChatCompletionsApi;
var openai_1 = require("openai");
var tools_js_1 = require("./tools/tools.js");
// Cache for model reasoning support detection per API type
// (model name -> { responses?: boolean, completions?: boolean }; filled
// lazily by checkReasoningSupport so each model/API pair is probed once).
var modelReasoningSupport = new Map();
// Identify the upstream LLM provider from the configured base URL.
// Returns "openai" when no base URL is set, a known provider id when the
// URL contains that provider's host, and "other" for anything unrecognized.
function detectProvider(baseURL) {
    if (!baseURL) {
        return "openai";
    }
    // Ordered host-substring -> provider-id table; first match wins.
    var markers = [
        ["api.openai.com", "openai"],
        ["generativelanguage.googleapis.com", "gemini"],
        ["api.groq.com", "groq"],
        ["api.anthropic.com", "anthropic"],
        ["openrouter.ai", "openrouter"],
    ];
    for (var i = 0; i < markers.length; i++) {
        if (baseURL.includes(markers[i][0])) {
            return markers[i][1];
        }
    }
    return "other";
}
// Extract provider-specific reasoning ("thinking") text from a chat
// completions message.
//
// Returns { cleanContent, reasoningTexts }:
//   cleanContent   - message.content with inline reasoning markup removed
//                    ("" when the message has no content)
//   reasoningTexts - zero or more reasoning strings, in order of appearance
function parseReasoningFromMessage(message, baseURL) {
var provider = detectProvider(baseURL);
var reasoningTexts = [];
var cleanContent = message.content || "";
switch (provider) {
case "gemini":
// Gemini returns thinking in <thought> tags
if (cleanContent.includes("<thought>")) {
// FIX: String.prototype.matchAll returns an iterator, which has no
// .length — the previous index-based loop never executed, so Gemini
// thoughts were silently dropped. Materialize the matches first.
var thoughtMatches = Array.from(cleanContent.matchAll(/<thought>([\s\S]*?)<\/thought>/g));
for (var _i = 0; _i < thoughtMatches.length; _i++) {
reasoningTexts.push(thoughtMatches[_i][1].trim());
}
// Remove all thought tags from the response
cleanContent = cleanContent.replace(/<thought>[\s\S]*?<\/thought>/g, "").trim();
}
break;
case "groq":
// Groq returns reasoning in a separate field when reasoning_format is "parsed"
if (message.reasoning) {
reasoningTexts.push(message.reasoning);
}
break;
case "openrouter":
// OpenRouter returns reasoning in message.reasoning field
if (message.reasoning) {
reasoningTexts.push(message.reasoning);
}
break;
default:
// Other providers don't embed reasoning in message content
break;
}
return { cleanContent: cleanContent, reasoningTexts: reasoningTexts };
}
// Apply provider-specific request tweaks before dispatching to the API.
// Mutates and returns the same requestOptions object, accounting for each
// provider's quirks around reasoning configuration.
function adjustRequestForProvider(requestOptions, api, baseURL, supportsReasoning) {
    var provider = detectProvider(baseURL);
    // Common precondition shared by the completions-API reasoning tweaks.
    var wantsCompletionReasoning = api === "completions" && supportsReasoning && requestOptions.reasoning_effort;
    if (provider === "gemini") {
        if (wantsCompletionReasoning) {
            // Gemini takes a thinking budget via extra_body and cannot be
            // combined with reasoning_effort, so we translate and drop it.
            var budgets = { low: 1024, medium: 8192 };
            var budget = budgets[requestOptions.reasoning_effort] !== undefined
                ? budgets[requestOptions.reasoning_effort]
                : 24576;
            requestOptions.extra_body = {
                google: {
                    thinking_config: {
                        thinking_budget: budget,
                        include_thoughts: true,
                    },
                },
            };
            delete requestOptions.reasoning_effort;
        }
    }
    else if (provider === "groq") {
        if (api === "responses" && requestOptions.reasoning) {
            // Groq's responses API rejects reasoning.summary.
            delete requestOptions.reasoning.summary;
        }
        else if (wantsCompletionReasoning) {
            // Groq wants reasoning_format in addition to reasoning_effort.
            requestOptions.reasoning_format = "parsed";
        }
    }
    else if (provider === "openrouter") {
        if (wantsCompletionReasoning) {
            // OpenRouter uses a unified { effort } object instead of
            // reasoning_effort; anything above "medium" maps to "high".
            var effortMap = { low: "low", minimal: "low", medium: "medium" };
            requestOptions.reasoning = {
                effort: effortMap[requestOptions.reasoning_effort] || "high",
            };
            delete requestOptions.reasoning_effort;
        }
    }
    // "anthropic" (no thinking via the OpenAI compat layer), "openai" and
    // unknown providers need no adjustments.
    return requestOptions;
}
// Probe whether `model` accepts reasoning parameters on the given API
// ("responses" or "completions") by issuing a tiny test request; the result
// is memoized per model+API in modelReasoningSupport. Any request failure is
// treated as "no reasoning support". Compiled async state machine — the
// numbered cases correspond to sequential statements in the original source.
function checkReasoningSupport(client, model, api, baseURL) {
return __awaiter(this, void 0, void 0, function () {
var cacheKey, cached, supportsReasoning, provider, testRequest, error_1, testRequest, error_2, existing;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
cacheKey = model;
cached = modelReasoningSupport.get(cacheKey);
if (cached && cached[api] !== undefined) {
return [2 /*return*/, cached[api]];
}
supportsReasoning = false;
provider = detectProvider(baseURL);
if (!(api === "responses")) return [3 /*break*/, 5];
_a.label = 1;
case 1:
// Responses API probe: try a minimal request with reasoning enabled.
_a.trys.push([1, 3, , 4]);
testRequest = {
model: model,
input: "test",
max_output_tokens: 1024,
reasoning: {
effort: "low", // Use low instead of minimal to ensure we get summaries
},
};
return [4 /*yield*/, client.responses.create(testRequest)];
case 2:
_a.sent();
supportsReasoning = true;
return [3 /*break*/, 4];
case 3:
// Probe failed — assume reasoning is unsupported.
error_1 = _a.sent();
supportsReasoning = false;
return [3 /*break*/, 4];
case 4: return [3 /*break*/, 8];
case 5:
// Chat Completions probe with provider-appropriate reasoning params.
_a.trys.push([5, 7, , 8]);
testRequest = {
model: model,
messages: [{ role: "user", content: "test" }],
max_completion_tokens: 1024,
};
// Add provider-specific reasoning parameters
if (provider === "gemini") {
// Gemini uses extra_body for thinking
testRequest.extra_body = {
google: {
thinking_config: {
thinking_budget: 100, // Minimum viable budget for test
include_thoughts: true,
},
},
};
}
else if (provider === "groq") {
// Groq uses both reasoning_format and reasoning_effort
testRequest.reasoning_format = "parsed";
testRequest.reasoning_effort = "low";
}
else {
// Others use reasoning_effort
testRequest.reasoning_effort = "minimal";
}
return [4 /*yield*/, client.chat.completions.create(testRequest)];
case 6:
_a.sent();
supportsReasoning = true;
return [3 /*break*/, 8];
case 7:
// Probe failed — assume reasoning is unsupported.
error_2 = _a.sent();
supportsReasoning = false;
return [3 /*break*/, 8];
case 8:
// Merge the result into the per-model cache without clobbering the
// entry for the other API type.
existing = modelReasoningSupport.get(cacheKey) || {};
existing[api] = supportsReasoning;
modelReasoningSupport.set(cacheKey, existing);
return [2 /*return*/, supportsReasoning];
}
});
});
}
// Drive one conversation turn against the OpenAI *Responses* API.
// Loops until the model emits a final "message" output: reasoning items are
// surfaced as "reasoning" events, function calls are executed via executeTool
// and their outputs appended to `messages` for the next round trip.
//
// client            - OpenAI SDK instance
// model             - model name
// messages          - mutable conversation history (appended in place)
// signal            - optional AbortSignal; aborting throws Error("Interrupted")
// eventReceiver     - optional { on(event) } sink for UI/session events
// supportsReasoning - whether to request reasoning summaries
// baseURL           - used for provider-specific request adjustments
//
// Compiled async state machine — numbered cases correspond to sequential
// statements in the original source.
function callModelResponsesApi(client, model, messages, signal, eventReceiver, supportsReasoning, baseURL) {
return __awaiter(this, void 0, void 0, function () {
var conversationDone, requestOptions, response, usage, output, _i, output_1, item, type, message, _a, reasoningItems, _b, reasoningItems_1, content, _c, _d, content, result, toolResultMsg, e_1, errorMsg;
var _e, _f;
return __generator(this, function (_g) {
switch (_g.label) {
case 0:
conversationDone = false;
_g.label = 1;
case 1:
if (!!conversationDone) return [3 /*break*/, 31];
if (!(signal === null || signal === void 0 ? void 0 : signal.aborted)) return [3 /*break*/, 3];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "interrupted" }))];
case 2:
_g.sent();
throw new Error("Interrupted");
case 3:
requestOptions = __assign({ model: model, input: messages, tools: tools_js_1.toolsForResponses, tool_choice: "auto", parallel_tool_calls: true, max_output_tokens: 2000 }, (supportsReasoning && {
reasoning: {
effort: "minimal", // Use minimal effort for responses API
summary: "detailed", // Request detailed reasoning summaries
},
}));
// Apply provider-specific adjustments
requestOptions = adjustRequestForProvider(requestOptions, "responses", baseURL, supportsReasoning);
return [4 /*yield*/, client.responses.create(requestOptions, { signal: signal })];
case 4:
response = _g.sent();
// Report token usage if available (responses API format)
if (response.usage) {
usage = response.usage;
eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({
type: "token_usage",
inputTokens: usage.input_tokens || 0,
outputTokens: usage.output_tokens || 0,
totalTokens: usage.total_tokens || 0,
cacheReadTokens: ((_e = usage.input_tokens_details) === null || _e === void 0 ? void 0 : _e.cached_tokens) || 0,
cacheWriteTokens: 0, // Not available in API
reasoningTokens: ((_f = usage.output_tokens_details) === null || _f === void 0 ? void 0 : _f.reasoning_tokens) || 0,
});
}
output = response.output;
if (!output)
return [3 /*break*/, 31];
_i = 0, output_1 = output;
_g.label = 5;
case 5:
if (!(_i < output_1.length)) return [3 /*break*/, 30];
item = output_1[_i];
// gpt-oss vLLM quirk: need to remove type from "message" events
// NOTE(review): this compares item.id (not item.type) and both branches
// push the same `item`; the destructured `message` is never used, so the
// workaround appears inert — confirm the intended behavior.
if (item.id === "message") {
type = item.type, message = __rest(item, ["type"]);
messages.push(item);
}
else {
messages.push(item);
}
_a = item.type;
switch (_a) {
case "reasoning": return [3 /*break*/, 6];
case "message": return [3 /*break*/, 11];
case "function_call": return [3 /*break*/, 19];
}
return [3 /*break*/, 28];
case 6:
// Emit each reasoning/summary fragment as a "reasoning" event.
reasoningItems = item.content || item.summary || [];
_b = 0, reasoningItems_1 = reasoningItems;
_g.label = 7;
case 7:
if (!(_b < reasoningItems_1.length)) return [3 /*break*/, 10];
content = reasoningItems_1[_b];
if (!(content.type === "reasoning_text" || content.type === "summary_text")) return [3 /*break*/, 9];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "reasoning", text: content.text }))];
case 8:
_g.sent();
_g.label = 9;
case 9:
_b++;
return [3 /*break*/, 7];
case 10: return [3 /*break*/, 29];
case 11:
_c = 0, _d = item.content || [];
_g.label = 12;
case 12:
if (!(_c < _d.length)) return [3 /*break*/, 18];
content = _d[_c];
if (!(content.type === "output_text")) return [3 /*break*/, 14];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "assistant_message", text: content.text }))];
case 13:
_g.sent();
return [3 /*break*/, 16];
case 14:
if (!(content.type === "refusal")) return [3 /*break*/, 16];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "error", message: "Refusal: ".concat(content.refusal) }))];
case 15:
_g.sent();
_g.label = 16;
case 16:
// A final "message" output ends the outer conversation loop.
conversationDone = true;
_g.label = 17;
case 17:
_c++;
return [3 /*break*/, 12];
case 18: return [3 /*break*/, 29];
case 19:
if (!(signal === null || signal === void 0 ? void 0 : signal.aborted)) return [3 /*break*/, 21];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "interrupted" }))];
case 20:
_g.sent();
throw new Error("Interrupted");
case 21:
_g.trys.push([21, 25, , 27]);
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({
type: "tool_call",
toolCallId: item.call_id || "",
name: item.name,
args: item.arguments,
}))];
case 22:
_g.sent();
return [4 /*yield*/, (0, tools_js_1.executeTool)(item.name, item.arguments, signal)];
case 23:
result = _g.sent();
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({
type: "tool_result",
toolCallId: item.call_id || "",
result: result,
isError: false,
}))];
case 24:
_g.sent();
toolResultMsg = {
type: "function_call_output",
call_id: item.call_id,
output: result,
};
messages.push(toolResultMsg);
return [3 /*break*/, 27];
case 25:
e_1 = _g.sent();
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({
type: "tool_result",
toolCallId: item.call_id || "",
result: e_1.message,
isError: true,
}))];
case 26:
_g.sent();
errorMsg = {
type: "function_call_output",
// FIX: was `call_id: item.id` — the error output must reference the
// function call's call_id (as the success path and the emitted events
// already do) so the model can correlate it with the originating call.
call_id: item.call_id,
output: e_1.message,
isError: true,
};
messages.push(errorMsg);
return [3 /*break*/, 27];
case 27: return [3 /*break*/, 29];
case 28:
{
eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "error", message: "Unknown output type in LLM response: ".concat(item.type) });
return [3 /*break*/, 29];
}
_g.label = 29;
case 29:
_i++;
return [3 /*break*/, 5];
case 30: return [3 /*break*/, 1];
case 31: return [2 /*return*/];
}
});
});
}
// Drive one conversation turn against the Chat Completions API.
// Loops until the model replies with plain content (no tool calls):
// each tool call is executed via executeTool and its result appended to
// `messages` as a role:"tool" message for the next round trip. Reasoning
// embedded in the reply is split out via parseReasoningFromMessage.
// Compiled async state machine — numbered cases correspond to sequential
// statements in the original source.
function callModelChatCompletionsApi(client, model, messages, signal, eventReceiver, supportsReasoning, baseURL) {
return __awaiter(this, void 0, void 0, function () {
var assistantResponded, requestOptions, response, message, usage, assistantMsg, _i, _a, toolCall, funcName, funcArgs, result, toolMsg, e_2, errorMsg, _b, cleanContent, reasoningTexts, _c, reasoningTexts_1, reasoning, finalMsg;
var _d, _e;
return __generator(this, function (_f) {
switch (_f.label) {
case 0:
assistantResponded = false;
_f.label = 1;
case 1:
if (!!assistantResponded) return [3 /*break*/, 23];
if (!(signal === null || signal === void 0 ? void 0 : signal.aborted)) return [3 /*break*/, 3];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "interrupted" }))];
case 2:
_f.sent();
throw new Error("Interrupted");
case 3:
requestOptions = __assign({ model: model, messages: messages, tools: tools_js_1.toolsForChat, tool_choice: "auto", max_completion_tokens: 2000 }, (supportsReasoning && {
reasoning_effort: "low", // Use low effort for completions API
}));
// Apply provider-specific adjustments
requestOptions = adjustRequestForProvider(requestOptions, "completions", baseURL, supportsReasoning);
return [4 /*yield*/, client.chat.completions.create(requestOptions, { signal: signal })];
case 4:
response = _f.sent();
message = response.choices[0].message;
if (!response.usage) return [3 /*break*/, 6];
usage = response.usage;
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({
type: "token_usage",
inputTokens: usage.prompt_tokens || 0,
outputTokens: usage.completion_tokens || 0,
totalTokens: usage.total_tokens || 0,
cacheReadTokens: ((_d = usage.prompt_tokens_details) === null || _d === void 0 ? void 0 : _d.cached_tokens) || 0,
cacheWriteTokens: 0, // Not available in API
reasoningTokens: ((_e = usage.completion_tokens_details) === null || _e === void 0 ? void 0 : _e.reasoning_tokens) || 0,
}))];
case 5:
_f.sent();
_f.label = 6;
case 6:
// Tool-call branch: echo the assistant message, then run each tool.
if (!(message.tool_calls && message.tool_calls.length > 0)) return [3 /*break*/, 16];
assistantMsg = {
role: "assistant",
content: message.content || null,
tool_calls: message.tool_calls,
};
messages.push(assistantMsg);
_i = 0, _a = message.tool_calls;
_f.label = 7;
case 7:
if (!(_i < _a.length)) return [3 /*break*/, 15];
toolCall = _a[_i];
if (!(signal === null || signal === void 0 ? void 0 : signal.aborted)) return [3 /*break*/, 9];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "interrupted" }))];
case 8:
_f.sent();
throw new Error("Interrupted");
case 9:
_f.trys.push([9, 13, , 14]);
funcName = toolCall.type === "function" ? toolCall.function.name : toolCall.custom.name;
funcArgs = toolCall.type === "function" ? toolCall.function.arguments : toolCall.custom.input;
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "tool_call", toolCallId: toolCall.id, name: funcName, args: funcArgs }))];
case 10:
_f.sent();
return [4 /*yield*/, (0, tools_js_1.executeTool)(funcName, funcArgs, signal)];
case 11:
result = _f.sent();
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "tool_result", toolCallId: toolCall.id, result: result, isError: false }))];
case 12:
_f.sent();
toolMsg = {
role: "tool",
tool_call_id: toolCall.id,
content: result,
};
messages.push(toolMsg);
return [3 /*break*/, 14];
case 13:
e_2 = _f.sent();
// NOTE(review): unlike the success path, this tool_result event is not
// awaited (fire-and-forget) — confirm whether that is intentional.
eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "tool_result", toolCallId: toolCall.id, result: e_2.message, isError: true });
errorMsg = {
role: "tool",
tool_call_id: toolCall.id,
content: e_2.message,
};
messages.push(errorMsg);
return [3 /*break*/, 14];
case 14:
_i++;
return [3 /*break*/, 7];
case 15: return [3 /*break*/, 22];
case 16:
// Final-content branch: split reasoning out of the reply and finish.
if (!message.content) return [3 /*break*/, 22];
_b = parseReasoningFromMessage(message, baseURL), cleanContent = _b.cleanContent, reasoningTexts = _b.reasoningTexts;
_c = 0, reasoningTexts_1 = reasoningTexts;
_f.label = 17;
case 17:
if (!(_c < reasoningTexts_1.length)) return [3 /*break*/, 20];
reasoning = reasoningTexts_1[_c];
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "reasoning", text: reasoning }))];
case 18:
_f.sent();
_f.label = 19;
case 19:
_c++;
return [3 /*break*/, 17];
case 20:
// Emit the cleaned assistant message
return [4 /*yield*/, (eventReceiver === null || eventReceiver === void 0 ? void 0 : eventReceiver.on({ type: "assistant_message", text: cleanContent }))];
case 21:
// Emit the cleaned assistant message
_f.sent();
finalMsg = { role: "assistant", content: cleanContent };
messages.push(finalMsg);
assistantResponded = true;
_f.label = 22;
case 22: return [3 /*break*/, 1];
case 23: return [2 /*return*/];
}
});
});
}
// Agent: owns the conversation history, the OpenAI client, and the fan-out
// of events to an optional renderer and session manager.
var Agent = /** @class */ (function () {
// config: { apiKey, baseURL, model, api ("responses" | anything else =>
// chat completions), systemPrompt? }. renderer / sessionManager are
// optional event receivers, each exposing an async on(event) method.
function Agent(config, renderer, sessionManager) {
var _this = this;
this.messages = [];
this.abortController = null;
// null = not yet probed; checkReasoningSupport fills this on first ask().
this.supportsReasoning = null;
this.config = config;
this.client = new openai_1.default({
apiKey: config.apiKey,
baseURL: config.baseURL,
});
// Use provided renderer or default to console
this.renderer = renderer;
this.sessionManager = sessionManager;
// Fan each event out to the renderer first, then the session manager.
this.comboReceiver = {
on: function (event) { return __awaiter(_this, void 0, void 0, function () {
var _a, _b;
return __generator(this, function (_c) {
switch (_c.label) {
case 0: return [4 /*yield*/, ((_a = this.renderer) === null || _a === void 0 ? void 0 : _a.on(event))];
case 1:
_c.sent();
return [4 /*yield*/, ((_b = this.sessionManager) === null || _b === void 0 ? void 0 : _b.on(event))];
case 2:
_c.sent();
return [2 /*return*/];
}
});
}); },
};
// Initialize with system prompt if provided
if (config.systemPrompt) {
this.messages.push({ role: "system", content: config.systemPrompt });
}
// Start session logging if we have a session manager
if (sessionManager) {
sessionManager.startSession(this.config);
// Emit session_start event
// NOTE(review): this promise is not awaited (constructors cannot await),
// so session_start is fire-and-forget — confirm ordering is acceptable.
this.comboReceiver.on({
type: "session_start",
sessionId: sessionManager.getSessionId(),
model: config.model,
api: config.api,
baseURL: config.baseURL,
systemPrompt: config.systemPrompt,
});
}
}
// Run one user turn: appends the user message, lazily probes reasoning
// support, then delegates to the API driver matching config.api.
// Swallows "Interrupted"/aborted runs; rethrows any other error.
Agent.prototype.ask = function (userMessage) {
return __awaiter(this, void 0, void 0, function () {
var userMsg, _a, e_3, errorMessage;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
// Render user message through the event system
// NOTE(review): not awaited, unlike the assistant_start event below —
// confirm whether the fire-and-forget emission is intentional.
this.comboReceiver.on({ type: "user_message", text: userMessage });
userMsg = { role: "user", content: userMessage };
this.messages.push(userMsg);
// Create a new AbortController for this chat session
this.abortController = new AbortController();
_b.label = 1;
case 1:
_b.trys.push([1, 9, 10, 11]);
return [4 /*yield*/, this.comboReceiver.on({ type: "assistant_start" })];
case 2:
_b.sent();
if (!(this.supportsReasoning === null)) return [3 /*break*/, 4];
_a = this;
return [4 /*yield*/, checkReasoningSupport(this.client, this.config.model, this.config.api, this.config.baseURL)];
case 3:
_a.supportsReasoning = _b.sent();
_b.label = 4;
case 4:
if (!(this.config.api === "responses")) return [3 /*break*/, 6];
return [4 /*yield*/, callModelResponsesApi(this.client, this.config.model, this.messages, this.abortController.signal, this.comboReceiver, this.supportsReasoning, this.config.baseURL)];
case 5:
_b.sent();
return [3 /*break*/, 8];
case 6: return [4 /*yield*/, callModelChatCompletionsApi(this.client, this.config.model, this.messages, this.abortController.signal, this.comboReceiver, this.supportsReasoning, this.config.baseURL)];
case 7:
_b.sent();
_b.label = 8;
case 8: return [3 /*break*/, 11];
case 9:
e_3 = _b.sent();
errorMessage = e_3 instanceof Error ? e_3.message : String(e_3);
// Interruptions are expected; only rethrow genuine errors.
if (errorMessage === "Interrupted" || this.abortController.signal.aborted) {
return [2 /*return*/];
}
throw e_3;
case 10:
this.abortController = null;
return [7 /*endfinally*/];
case 11: return [2 /*return*/];
}
});
});
};
// Abort the in-flight ask(), if any.
Agent.prototype.interrupt = function () {
var _a;
(_a = this.abortController) === null || _a === void 0 ? void 0 : _a.abort();
};
// Rebuild the message history from a persisted event log, using the
// message schema appropriate for the configured API type.
Agent.prototype.setEvents = function (events) {
// Reconstruct messages from events based on API type
this.messages = [];
if (this.config.api === "responses") {
// Responses API format
if (this.config.systemPrompt) {
this.messages.push({
type: "system",
content: [{ type: "system_text", text: this.config.systemPrompt }],
});
}
for (var _i = 0, events_1 = events; _i < events_1.length; _i++) {
var event_1 = events_1[_i];
switch (event_1.type) {
case "user_message":
this.messages.push({
type: "user",
content: [{ type: "input_text", text: event_1.text }],
});
break;
case "reasoning":
// Add reasoning message
this.messages.push({
type: "reasoning",
content: [{ type: "reasoning_text", text: event_1.text }],
});
break;
case "tool_call":
// Add function call
this.messages.push({
type: "function_call",
id: event_1.toolCallId,
name: event_1.name,
arguments: event_1.args,
});
break;
case "tool_result":
// Add function result
this.messages.push({
type: "function_call_output",
call_id: event_1.toolCallId,
output: event_1.result,
});
break;
case "assistant_message":
// Add final message
this.messages.push({
type: "message",
content: [{ type: "output_text", text: event_1.text }],
});
break;
}
}
}
else {
// Chat Completions API format
if (this.config.systemPrompt) {
this.messages.push({ role: "system", content: this.config.systemPrompt });
}
// Track tool calls in progress
var pendingToolCalls = [];
for (var _a = 0, events_2 = events; _a < events_2.length; _a++) {
var event_2 = events_2[_a];
switch (event_2.type) {
case "user_message":
this.messages.push({ role: "user", content: event_2.text });
break;
case "assistant_start":
// Reset pending tool calls for new assistant response
pendingToolCalls = [];
break;
case "tool_call":
// Accumulate tool calls
pendingToolCalls.push({
id: event_2.toolCallId,
type: "function",
function: {
name: event_2.name,
arguments: event_2.args,
},
});
break;
case "tool_result":
// When we see the first tool result, add the assistant message with all tool calls
if (pendingToolCalls.length > 0) {
this.messages.push({
role: "assistant",
content: null,
tool_calls: pendingToolCalls,
});
pendingToolCalls = [];
}
// Add the tool result
this.messages.push({
role: "tool",
tool_call_id: event_2.toolCallId,
content: event_2.result,
});
break;
case "assistant_message":
// Final assistant response (no tool calls)
this.messages.push({ role: "assistant", content: event_2.text });
break;
// Skip other event types (thinking, error, interrupted, token_usage)
}
}
}
};
return Agent;
}());
exports.Agent = Agent;

View file

@@ -533,7 +533,10 @@ export class Agent {
// Initialize with system prompt if provided
if (config.systemPrompt) {
this.messages.push({ role: "system", content: config.systemPrompt });
this.messages.push({
role: "developer",
content: config.systemPrompt,
});
}
// Start session logging if we have a session manager
@@ -621,8 +624,8 @@ export class Agent {
// Responses API format
if (this.config.systemPrompt) {
this.messages.push({
type: "system",
content: [{ type: "system_text", text: this.config.systemPrompt }],
role: "developer",
content: this.config.systemPrompt,
});
}
@@ -630,7 +633,7 @@ export class Agent {
switch (event.type) {
case "user_message":
this.messages.push({
type: "user",
role: "user",
content: [{ type: "input_text", text: event.text }],
});
break;

164
packages/agent/src/args.js Normal file
View file

@@ -0,0 +1,164 @@
"use strict";
// Minimal dependency-free CLI argument parsing (compiled output):
// parseArgs turns argv into a value map, printHelp renders a usage listing.
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseArgs = parseArgs;
exports.printHelp = printHelp;
var os_1 = require("os");
var path_1 = require("path");
function parseArgs(defs, args) {
var result = { _: [] };
var aliasMap = {};
// Build alias map and set defaults
for (var _i = 0, _a = Object.entries(defs); _i < _a.length; _i++) {
var _b = _a[_i], key = _b[0], def = _b[1];
if (def.alias) {
aliasMap[def.alias] = key;
}
if (def.default !== undefined) {
result[key] = def.default;
}
else if (def.type === "flag" || def.type === "boolean") {
result[key] = false;
}
}
// Parse arguments
for (var i = 0; i < args.length; i++) {
var arg = args[i];
// Check if it's a flag
if (arg.startsWith("--")) {
var flagName = arg.slice(2);
var key = aliasMap[flagName] || flagName;
var def = defs[key];
if (!def) {
// Unknown flag, add to positional args
result._.push(arg);
continue;
}
if (def.type === "flag") {
// Simple on/off flag
result[key] = true;
}
else if (i + 1 < args.length) {
// Flag with value
var value = args[++i];
var parsedValue = void 0;
switch (def.type) {
case "boolean":
parsedValue = value === "true" || value === "1" || value === "yes";
break;
case "int":
parsedValue = parseInt(value, 10);
if (Number.isNaN(parsedValue)) {
throw new Error("Invalid integer value for --".concat(key, ": ").concat(value));
}
break;
case "float":
parsedValue = parseFloat(value);
if (Number.isNaN(parsedValue)) {
throw new Error("Invalid float value for --".concat(key, ": ").concat(value));
}
break;
case "string":
parsedValue = value;
break;
case "file": {
// Resolve ~ to home directory and make absolute
var path = value;
if (path.startsWith("~")) {
path = path.replace("~", (0, os_1.homedir)());
}
parsedValue = (0, path_1.resolve)(path);
break;
}
}
// Validate against choices if specified
if (def.choices) {
var validValues = def.choices.map(function (c) { return (typeof c === "string" ? c : c.value); });
if (!validValues.includes(parsedValue)) {
throw new Error("Invalid value for --".concat(key, ": \"").concat(parsedValue, "\". Valid choices: ").concat(validValues.join(", ")));
}
}
result[key] = parsedValue;
}
else {
throw new Error("Flag --".concat(key, " requires a value"));
}
}
else if (arg.startsWith("-") && arg.length === 2) {
// Short flag like -h
var flagChar = arg[1];
var key = aliasMap[flagChar] || flagChar;
var def = defs[key];
if (!def) {
result._.push(arg);
continue;
}
if (def.type === "flag") {
result[key] = true;
}
else {
throw new Error("Short flag -".concat(flagChar, " cannot have a value"));
}
}
else {
// Positional argument
result._.push(arg);
}
}
return result;
}
// Print a usage banner followed by a formatted option list derived from
// the flag definitions: `  --name, -a <type>   description (default: d)`,
// with choice descriptions on their own indented lines when present.
function printHelp(defs, usage) {
    console.log(usage);
    console.log("\nOptions:");
    for (const [key, def] of Object.entries(defs)) {
        let line = `  --${key}`;
        if (def.alias) {
            line += `, -${def.alias}`;
        }
        if (def.type !== "flag") {
            const typeStr = def.type === "file" ? "path" : def.type;
            if (def.choices) {
                const simpleChoices = def.choices.filter((c) => typeof c === "string");
                // Inline the choice list only when every entry is a bare string;
                // otherwise fall back to the type placeholder.
                line += simpleChoices.length === def.choices.length
                    ? ` <${simpleChoices.join("|")}>`
                    : ` <${typeStr}>`;
            }
            else {
                line += ` <${typeStr}>`;
            }
        }
        if (def.description) {
            // Pad so descriptions line up in a column.
            line = line.padEnd(30) + def.description;
        }
        if (def.default !== undefined && def.type !== "flag" && def.showDefault !== false) {
            // showDefault may override the displayed default text.
            const shown = typeof def.showDefault === "string" ? def.showDefault : def.default;
            line += ` (default: ${shown})`;
        }
        console.log(line);
        // Choices that carry descriptions get their own indented lines.
        if (def.choices && def.choices.some((c) => typeof c === "object" && c.description)) {
            for (const choice of def.choices) {
                if (typeof choice === "object") {
                    console.log(`    ${choice.value}`.padEnd(30) + (choice.description || ""));
                }
            }
        }
    }
}

View file

@ -0,0 +1,9 @@
#!/usr/bin/env node
"use strict";
// Thin CLI shim: delegates straight to main() with the user's arguments.
Object.defineProperty(exports, "__esModule", { value: true });
var main_js_1 = require("./main.js");
// Run as CLI - this file should always be executed, not imported
// Any rejection from main() is printed and mapped to a non-zero exit code
// so shell callers can detect failure.
(0, main_js_1.main)(process.argv.slice(2)).catch(function (err) {
    console.error(err);
    process.exit(1);
});

364
packages/agent/src/main.js Normal file
View file

@ -0,0 +1,364 @@
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = main;
var chalk_1 = require("chalk");
var readline_1 = require("readline");
var agent_js_1 = require("./agent.js");
var args_js_1 = require("./args.js");
var console_renderer_js_1 = require("./renderers/console-renderer.js");
var json_renderer_js_1 = require("./renderers/json-renderer.js");
var tui_renderer_js_1 = require("./renderers/tui-renderer.js");
var session_manager_js_1 = require("./session-manager.js");
// Define argument structure
// Declarative table of CLI flags consumed by parseArgs/printHelp.
var argDefs = {
    // OpenAI-compatible endpoint to send requests to.
    "base-url": {
        type: "string",
        default: "https://api.openai.com/v1",
        description: "API base URL",
    },
    // Defaults to the OPENAI_API_KEY environment variable captured at startup;
    // the help text shows "$OPENAI_API_KEY" instead of the secret itself.
    "api-key": {
        type: "string",
        default: process.env.OPENAI_API_KEY || "",
        description: "API key",
        showDefault: "$OPENAI_API_KEY",
    },
    model: {
        type: "string",
        default: "gpt-5-mini",
        description: "Model name",
    },
    // Which wire protocol to use against the endpoint.
    api: {
        type: "string",
        default: "completions",
        description: "API type",
        choices: [
            { value: "completions", description: "OpenAI Chat Completions API (most models)" },
            { value: "responses", description: "OpenAI Responses API (GPT-OSS models)" },
        ],
    },
    "system-prompt": {
        type: "string",
        default: "You are a helpful assistant.",
        description: "System prompt",
    },
    // Resume the most recently modified session in the current directory.
    continue: {
        type: "flag",
        alias: "c",
        description: "Continue previous session",
    },
    // Emit machine-readable JSONL events instead of human-readable output.
    json: {
        type: "flag",
        description: "Output as JSONL",
    },
    help: {
        type: "flag",
        alias: "h",
        description: "Show this help message",
    },
};
/**
 * Print the pi-agent usage banner (with worked provider examples) and the
 * option listing generated from argDefs via the shared args helper.
 */
function printHelp() {
    var usage = "Usage: pi-agent [options] [messages...]\n\nExamples:\n# Single message (default OpenAI, GPT-5 Mini, OPENAI_API_KEY env var)\npi-agent \"What is 2+2?\"\n\n# Multiple messages processed sequentially\npi-agent \"What is 2+2?\" \"What about 3+3?\"\n\n# Interactive chat mode (no messages = interactive)\npi-agent\n\n# Continue most recently modified session in current directory\npi-agent --continue \"Follow up question\"\n\n# GPT-OSS via Groq\npi-agent --base-url https://api.groq.com/openai/v1 --api-key $GROQ_API_KEY --model openai/gpt-oss-120b\n\n# GLM 4.5 via OpenRouter\npi-agent --base-url https://openrouter.ai/api/v1 --api-key $OPENROUTER_API_KEY --model z-ai/glm-4.5\n\n# Claude via Anthropic (no prompt caching support - see https://docs.anthropic.com/en/api/openai-sdk)\npi-agent --base-url https://api.anthropic.com/v1 --api-key $ANTHROPIC_API_KEY --model claude-opus-4-1-20250805";
    (0, args_js_1.printHelp)(argDefs, usage);
}
/**
 * Interactive mode driven by JSONL commands on stdin.
 *
 * Accepted commands (one JSON object per line):
 *   {type: "message", content}  - forward content to the agent
 *   {type: "interrupt"}         - interrupt the running agent
 * While a message is being processed, at most ONE follow-up message is held
 * in pendingMessage (a newer one overwrites it) and is processed when the
 * current one finishes. Errors are reported as {type:"error"} events through
 * the JSON renderer. Resolves when stdin closes.
 *
 * (Compiled TypeScript async state machine; numeric labels correspond to the
 * original awaits — do not reorder statements.)
 */
function runJsonInteractiveMode(config, sessionManager) {
    return __awaiter(this, void 0, void 0, function () {
        var rl, renderer, agent, isProcessing, pendingMessage, processMessage;
        var _this = this;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    rl = (0, readline_1.createInterface)({
                        input: process.stdin,
                        output: process.stdout,
                        terminal: false, // Don't interpret control characters
                    });
                    renderer = new json_renderer_js_1.JsonRenderer();
                    agent = new agent_js_1.Agent(config, renderer, sessionManager);
                    isProcessing = false;
                    pendingMessage = null;
                    // Ask the agent, report failures, then drain any queued message.
                    processMessage = function (content) { return __awaiter(_this, void 0, void 0, function () {
                        var e_1, msg;
                        return __generator(this, function (_a) {
                            switch (_a.label) {
                                case 0:
                                    isProcessing = true;
                                    _a.label = 1;
                                case 1:
                                    _a.trys.push([1, 3, 5, 8]);
                                    return [4 /*yield*/, agent.ask(content)];
                                case 2:
                                    _a.sent();
                                    return [3 /*break*/, 8];
                                case 3:
                                    e_1 = _a.sent();
                                    return [4 /*yield*/, renderer.on({ type: "error", message: e_1.message })];
                                case 4:
                                    _a.sent();
                                    return [3 /*break*/, 8];
                                case 5:
                                    // finally-block: clear the busy flag and, if a message
                                    // was queued meanwhile, recurse to process it.
                                    isProcessing = false;
                                    if (!pendingMessage) return [3 /*break*/, 7];
                                    msg = pendingMessage;
                                    pendingMessage = null;
                                    return [4 /*yield*/, processMessage(msg)];
                                case 6:
                                    _a.sent();
                                    _a.label = 7;
                                case 7: return [7 /*endfinally*/];
                                case 8: return [2 /*return*/];
                            }
                        });
                    }); };
                    // Listen for lines from stdin
                    rl.on("line", function (line) {
                        try {
                            var command = JSON.parse(line);
                            switch (command.type) {
                                case "interrupt":
                                    agent.interrupt();
                                    isProcessing = false;
                                    break;
                                case "message":
                                    if (!command.content) {
                                        renderer.on({ type: "error", message: "Message content is required" });
                                        return;
                                    }
                                    if (isProcessing) {
                                        // Queue the message for when the agent is done
                                        pendingMessage = command.content;
                                    }
                                    else {
                                        processMessage(command.content);
                                    }
                                    break;
                                default:
                                    renderer.on({ type: "error", message: "Unknown command type: ".concat(command.type) });
                            }
                        }
                        catch (e) {
                            renderer.on({ type: "error", message: "Invalid JSON: ".concat(e) });
                        }
                    });
                    // Wait for stdin to close
                    return [4 /*yield*/, new Promise(function (resolve) {
                            rl.on("close", function () {
                                resolve();
                            });
                        })];
                case 1:
                    // Wait for stdin to close
                    _a.sent();
                    return [2 /*return*/];
            }
        });
    });
}
/**
 * Full-screen TUI chat mode: initialize the TUI renderer, wire interrupts,
 * replay any persisted session events so history is visible, then loop
 * forever reading user input and forwarding it to the agent. Errors from
 * agent.ask are rendered as error events and the loop continues.
 *
 * (Compiled TypeScript async state machine; numeric labels correspond to the
 * original awaits — do not reorder statements.)
 */
function runTuiInteractiveMode(agentConfig, sessionManager) {
    return __awaiter(this, void 0, void 0, function () {
        var sessionData, renderer, agent, _i, _a, sessionEvent, event_1, userInput, e_2;
        return __generator(this, function (_b) {
            switch (_b.label) {
                case 0:
                    sessionData = sessionManager.getSessionData();
                    if (sessionData) {
                        console.log(chalk_1.default.dim("Resuming session with ".concat(sessionData.events.length, " events")));
                    }
                    renderer = new tui_renderer_js_1.TuiRenderer();
                    // Initialize TUI BEFORE creating the agent to prevent double init
                    return [4 /*yield*/, renderer.init()];
                case 1:
                    // Initialize TUI BEFORE creating the agent to prevent double init
                    _b.sent();
                    agent = new agent_js_1.Agent(agentConfig, renderer, sessionManager);
                    renderer.setInterruptCallback(function () {
                        agent.interrupt();
                    });
                    if (!sessionData) return [3 /*break*/, 6];
                    // NOTE(review): the ternary below is redundant — sessionData is
                    // known truthy on this path.
                    agent.setEvents(sessionData ? sessionData.events.map(function (e) { return e.event; }) : []);
                    // Replay each stored event through the renderer to rebuild history.
                    _i = 0, _a = sessionData.events;
                    _b.label = 2;
                case 2:
                    if (!(_i < _a.length)) return [3 /*break*/, 6];
                    sessionEvent = _a[_i];
                    event_1 = sessionEvent.event;
                    if (!(event_1.type === "assistant_start")) return [3 /*break*/, 3];
                    // During replay only the label is drawn for assistant_start,
                    // bypassing renderer.on's processing-state side effects.
                    renderer.renderAssistantLabel();
                    return [3 /*break*/, 5];
                case 3: return [4 /*yield*/, renderer.on(event_1)];
                case 4:
                    _b.sent();
                    _b.label = 5;
                case 5:
                    _i++;
                    return [3 /*break*/, 2];
                case 6:
                    // Main input loop: `if (!true)` is the compiled form of `while (true)`.
                    if (!true) return [3 /*break*/, 13];
                    return [4 /*yield*/, renderer.getUserInput()];
                case 7:
                    userInput = _b.sent();
                    _b.label = 8;
                case 8:
                    _b.trys.push([8, 10, , 12]);
                    return [4 /*yield*/, agent.ask(userInput)];
                case 9:
                    _b.sent();
                    return [3 /*break*/, 12];
                case 10:
                    e_2 = _b.sent();
                    return [4 /*yield*/, renderer.on({ type: "error", message: e_2.message })];
                case 11:
                    _b.sent();
                    return [3 /*break*/, 12];
                case 12: return [3 /*break*/, 6];
                case 13: return [2 /*return*/];
            }
        });
    });
}
/**
 * Non-interactive mode: send each positional message to the agent in order,
 * rendering with the JSONL renderer (--json) or the console renderer. An
 * error on one message is reported as an error event but does not stop the
 * remaining messages. Restores prior session events when a session exists.
 *
 * (Compiled TypeScript async state machine; numeric labels correspond to the
 * original awaits — do not reorder statements.)
 */
function runSingleShotMode(agentConfig, sessionManager, messages, jsonOutput) {
    return __awaiter(this, void 0, void 0, function () {
        var sessionData, renderer, agent, _i, messages_1, msg, e_3;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    sessionData = sessionManager.getSessionData();
                    renderer = jsonOutput ? new json_renderer_js_1.JsonRenderer() : new console_renderer_js_1.ConsoleRenderer();
                    agent = new agent_js_1.Agent(agentConfig, renderer, sessionManager);
                    if (sessionData) {
                        // The resume banner is human-facing; suppress it in JSONL mode.
                        if (!jsonOutput) {
                            console.log(chalk_1.default.dim("Resuming session with ".concat(sessionData.events.length, " events")));
                        }
                        agent.setEvents(sessionData ? sessionData.events.map(function (e) { return e.event; }) : []);
                    }
                    _i = 0, messages_1 = messages;
                    _a.label = 1;
                case 1:
                    // Sequential loop over the positional messages.
                    if (!(_i < messages_1.length)) return [3 /*break*/, 7];
                    msg = messages_1[_i];
                    _a.label = 2;
                case 2:
                    _a.trys.push([2, 4, , 6]);
                    return [4 /*yield*/, agent.ask(msg)];
                case 3:
                    _a.sent();
                    return [3 /*break*/, 6];
                case 4:
                    e_3 = _a.sent();
                    return [4 /*yield*/, renderer.on({ type: "error", message: e_3.message })];
                case 5:
                    _a.sent();
                    return [3 /*break*/, 6];
                case 6:
                    _i++;
                    return [3 /*break*/, 1];
                case 7: return [2 /*return*/];
            }
        });
    });
}
// Main function to use Agent as standalone CLI
/**
 * CLI entry point.
 *
 * Parses flags against argDefs, validates the API key, builds the agent
 * config (when --continue finds stored session data, its config is restored
 * but the freshly supplied API key wins), then dispatches:
 *   - no positional messages  -> interactive (JSONL mode with --json, TUI otherwise)
 *   - positional messages     -> single-shot mode
 *
 * @param args - raw argv (without the node/script prefix)
 * @throws Error when no API key is available
 *
 * (Compiled TypeScript async state machine; numeric labels correspond to the
 * original awaits — do not reorder statements.)
 */
function main(args) {
    return __awaiter(this, void 0, void 0, function () {
        var parsed, baseURL, apiKey, model, continueSession, api, systemPrompt, jsonOutput, messages, isInteractive, sessionManager, agentConfig, sessionData;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    parsed = (0, args_js_1.parseArgs)(argDefs, args);
                    // Show help if requested
                    if (parsed.help) {
                        printHelp();
                        return [2 /*return*/];
                    }
                    baseURL = parsed["base-url"];
                    apiKey = parsed["api-key"];
                    model = parsed.model;
                    continueSession = parsed.continue;
                    api = parsed.api;
                    systemPrompt = parsed["system-prompt"];
                    jsonOutput = parsed.json;
                    messages = parsed._;
                    if (!apiKey) {
                        throw new Error("API key required (use --api-key or set OPENAI_API_KEY)");
                    }
                    // No positional messages means interactive mode.
                    isInteractive = messages.length === 0;
                    sessionManager = new session_manager_js_1.SessionManager(continueSession);
                    agentConfig = {
                        apiKey: apiKey,
                        baseURL: baseURL,
                        model: model,
                        api: api,
                        systemPrompt: systemPrompt,
                    };
                    if (continueSession) {
                        sessionData = sessionManager.getSessionData();
                        if (sessionData) {
                            // Restore the persisted session config, but keep the
                            // freshly supplied API key rather than any stored one.
                            agentConfig = __assign(__assign({}, sessionData.config), { apiKey: apiKey });
                        }
                    }
                    if (!isInteractive) return [3 /*break*/, 5];
                    if (!jsonOutput) return [3 /*break*/, 2];
                    return [4 /*yield*/, runJsonInteractiveMode(agentConfig, sessionManager)];
                case 1:
                    _a.sent();
                    return [3 /*break*/, 4];
                case 2: return [4 /*yield*/, runTuiInteractiveMode(agentConfig, sessionManager)];
                case 3:
                    _a.sent();
                    _a.label = 4;
                case 4: return [3 /*break*/, 7];
                case 5: return [4 /*yield*/, runSingleShotMode(agentConfig, sessionManager, messages, jsonOutput)];
                case 6:
                    _a.sent();
                    _a.label = 7;
                case 7: return [2 /*return*/];
            }
        });
    });
}

View file

@ -0,0 +1,196 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ConsoleRenderer = void 0;
var chalk_1 = require("chalk");
var ConsoleRenderer = /** @class */ (function () {
    /**
     * Plain-console renderer for agent events.
     *
     * Streams events to stdout with chalk colouring, shows a braille spinner
     * while the agent is busy (TTY only), and accumulates token/cache/tool
     * metrics that are printed after each assistant message.
     *
     * Fix: the spinner line is now erased with the ANSI "erase line" sequence
     * (CSI 2K) instead of overwriting with animationLine.length spaces. The
     * stored line contains chalk's invisible escape sequences, so its .length
     * exceeds the visible width; space-padding overshot and could wrap onto
     * the next row on narrow terminals. The escape sequence is safe here
     * because this code path only runs when stdout is a TTY.
     */
    function ConsoleRenderer() {
        // Spinner frames cycled while waiting on the model or a tool.
        this.frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
        this.currentFrame = 0;
        this.animationInterval = null;
        this.isAnimating = false;
        this.animationLine = "";
        // The spinner is only drawn when stdout is a real terminal.
        this.isTTY = process.stdout.isTTY;
        this.toolCallCount = 0;
        // Usage figures from the most recent token_usage event.
        this.lastInputTokens = 0;
        this.lastOutputTokens = 0;
        this.lastCacheReadTokens = 0;
        this.lastCacheWriteTokens = 0;
        this.lastReasoningTokens = 0;
    }
    /**
     * Start the spinner with the given label.
     * No-op when already animating or when stdout is not a TTY.
     */
    ConsoleRenderer.prototype.startAnimation = function (text) {
        var _this = this;
        if (text === void 0) { text = "Thinking"; }
        if (this.isAnimating || !this.isTTY)
            return;
        this.isAnimating = true;
        this.currentFrame = 0;
        // Write initial frame
        this.animationLine = "".concat(chalk_1.default.cyan(this.frames[this.currentFrame]), " ").concat(chalk_1.default.dim(text));
        process.stdout.write(this.animationLine);
        this.animationInterval = setInterval(function () {
            // Erase the whole line (CR + CSI 2K); see class doc for why
            // space-padding by animationLine.length was wrong.
            process.stdout.write("\r\x1b[2K");
            // Update frame
            _this.currentFrame = (_this.currentFrame + 1) % _this.frames.length;
            _this.animationLine = "".concat(chalk_1.default.cyan(_this.frames[_this.currentFrame]), " ").concat(chalk_1.default.dim(text));
            process.stdout.write(_this.animationLine);
        }, 80);
    };
    /** Stop the spinner and erase its line. No-op when idle. */
    ConsoleRenderer.prototype.stopAnimation = function () {
        if (!this.isAnimating)
            return;
        if (this.animationInterval) {
            clearInterval(this.animationInterval);
            this.animationInterval = null;
        }
        // Clear the animation line (CR + ANSI erase-line).
        process.stdout.write("\r\x1b[2K");
        this.isAnimating = false;
        this.animationLine = "";
    };
    /** Print the accumulated token/cache/tool metrics line plus a blank line. */
    ConsoleRenderer.prototype.displayMetrics = function () {
        // Build metrics display: ↑input ↓output
        var metricsText = chalk_1.default.dim("\u2191".concat(this.lastInputTokens.toLocaleString(), " \u2193").concat(this.lastOutputTokens.toLocaleString()));
        // Add reasoning tokens if present
        if (this.lastReasoningTokens > 0) {
            metricsText += chalk_1.default.dim(" \u26A1".concat(this.lastReasoningTokens.toLocaleString()));
        }
        // Add cache info if available
        if (this.lastCacheReadTokens > 0 || this.lastCacheWriteTokens > 0) {
            var cacheText = [];
            if (this.lastCacheReadTokens > 0) {
                cacheText.push("\u27F2".concat(this.lastCacheReadTokens.toLocaleString()));
            }
            if (this.lastCacheWriteTokens > 0) {
                cacheText.push("\u27F3".concat(this.lastCacheWriteTokens.toLocaleString()));
            }
            metricsText += chalk_1.default.dim(" (".concat(cacheText.join(" "), ")"));
        }
        // Add tool call count
        if (this.toolCallCount > 0) {
            metricsText += chalk_1.default.dim(" \u2692 ".concat(this.toolCallCount));
        }
        console.log(metricsText);
        console.log();
    };
    /**
     * Handle a single agent event. The spinner is stopped for every event
     * type except token_usage, which only records metrics for later display.
     * @param event - agent event object
     */
    ConsoleRenderer.prototype.on = function (event) {
        return __awaiter(this, void 0, void 0, function () {
            var lines, maxLines, truncated, toShow, text;
            return __generator(this, function (_a) {
                // Stop animation for any new event except token_usage
                if (event.type !== "token_usage" && this.isAnimating) {
                    this.stopAnimation();
                }
                switch (event.type) {
                    case "session_start":
                        console.log(chalk_1.default.blue("[Session started] ID: ".concat(event.sessionId, ", Model: ").concat(event.model, ", API: ").concat(event.api, ", Base URL: ").concat(event.baseURL)));
                        console.log(chalk_1.default.dim("System Prompt: ".concat(event.systemPrompt, "\n")));
                        break;
                    case "assistant_start":
                        console.log(chalk_1.default.hex("#FFA500")("[assistant]"));
                        this.startAnimation();
                        break;
                    case "reasoning":
                        this.stopAnimation();
                        console.log(chalk_1.default.dim("[thinking]"));
                        console.log(chalk_1.default.dim(event.text));
                        console.log();
                        // Resume animation after showing thinking
                        this.startAnimation("Processing");
                        break;
                    case "tool_call":
                        this.stopAnimation();
                        this.toolCallCount++;
                        console.log(chalk_1.default.yellow("[tool] ".concat(event.name, "(").concat(event.args, ")")));
                        // Resume animation while tool executes
                        this.startAnimation("Running ".concat(event.name));
                        break;
                    case "tool_result": {
                        this.stopAnimation();
                        // Show at most 10 lines of tool output, then a truncation note.
                        lines = event.result.split("\n");
                        maxLines = 10;
                        truncated = lines.length > maxLines;
                        toShow = truncated ? lines.slice(0, maxLines) : lines;
                        text = toShow.join("\n");
                        console.log(event.isError ? chalk_1.default.red(text) : chalk_1.default.gray(text));
                        if (truncated) {
                            console.log(chalk_1.default.dim("... (".concat(lines.length - maxLines, " more lines)")));
                        }
                        console.log();
                        // Resume animation after tool result
                        this.startAnimation("Thinking");
                        break;
                    }
                    case "assistant_message":
                        this.stopAnimation();
                        console.log(event.text);
                        console.log();
                        // Display metrics after assistant message
                        this.displayMetrics();
                        break;
                    case "error":
                        this.stopAnimation();
                        console.error(chalk_1.default.red("[error] ".concat(event.message, "\n")));
                        break;
                    case "user_message":
                        console.log(chalk_1.default.green("[user]"));
                        console.log(event.text);
                        console.log();
                        break;
                    case "interrupted":
                        this.stopAnimation();
                        console.log(chalk_1.default.red("[Interrupted by user]\n"));
                        break;
                    case "token_usage":
                        // Store token usage for display after assistant message
                        this.lastInputTokens = event.inputTokens;
                        this.lastOutputTokens = event.outputTokens;
                        this.lastCacheReadTokens = event.cacheReadTokens;
                        this.lastCacheWriteTokens = event.cacheWriteTokens;
                        this.lastReasoningTokens = event.reasoningTokens;
                        // Don't stop animation for this event
                        break;
                }
                return [2 /*return*/];
            });
        });
    };
    return ConsoleRenderer;
}());
exports.ConsoleRenderer = ConsoleRenderer;

View file

@ -0,0 +1,53 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.JsonRenderer = void 0;
var JsonRenderer = /** @class */ (function () {
    /**
     * Renderer that emits every agent event as one JSON line on stdout
     * (JSONL), suitable for machine consumption.
     */
    function JsonRenderer() {
    }
    /**
     * Serialize the event and print it on its own line.
     * The write happens synchronously; the returned promise is already
     * resolved, matching the async renderer interface.
     * @param event - agent event object
     * @returns promise resolved once the line has been logged
     */
    JsonRenderer.prototype.on = function (event) {
        console.log(JSON.stringify(event));
        return Promise.resolve();
    };
    return JsonRenderer;
}());
exports.JsonRenderer = JsonRenderer;

View file

@ -0,0 +1,386 @@
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TuiRenderer = void 0;
var pi_tui_1 = require("@mariozechner/pi-tui");
var chalk_1 = require("chalk");
var LoadingAnimation = /** @class */ (function (_super) {
    __extends(LoadingAnimation, _super);
    /**
     * Spinner line shown in the TUI status area while the agent is working.
     * Extends pi-tui's TextComponent and repaints itself every 80 ms.
     * @param ui - owning TUI instance; used to request a re-render per frame
     */
    function LoadingAnimation(ui) {
        var _this = _super.call(this, "", { bottom: 1 }) || this;
        // Braille spinner frames.
        _this.frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
        _this.currentFrame = 0;
        _this.intervalId = null;
        _this.ui = null;
        _this.ui = ui;
        // Begin animating immediately on construction.
        _this.start();
        return _this;
    }
    /** Draw the first frame and schedule frame updates every 80 ms. */
    LoadingAnimation.prototype.start = function () {
        var _this = this;
        this.updateDisplay();
        this.intervalId = setInterval(function () {
            _this.currentFrame = (_this.currentFrame + 1) % _this.frames.length;
            _this.updateDisplay();
        }, 80);
    };
    /** Cancel the frame timer; safe to call repeatedly. */
    LoadingAnimation.prototype.stop = function () {
        if (this.intervalId) {
            clearInterval(this.intervalId);
            this.intervalId = null;
        }
    };
    /** Render the current frame text and ask the TUI to repaint. */
    LoadingAnimation.prototype.updateDisplay = function () {
        var frame = this.frames[this.currentFrame];
        this.setText("".concat(chalk_1.default.cyan(frame), " ").concat(chalk_1.default.dim("Thinking...")));
        if (this.ui) {
            this.ui.requestRender();
        }
    };
    return LoadingAnimation;
}(pi_tui_1.TextComponent));
var TuiRenderer = /** @class */ (function () {
function TuiRenderer() {
this.isInitialized = false;
this.currentLoadingAnimation = null;
this.lastSigintTime = 0;
this.lastInputTokens = 0;
this.lastOutputTokens = 0;
this.lastCacheReadTokens = 0;
this.lastCacheWriteTokens = 0;
this.lastReasoningTokens = 0;
this.toolCallCount = 0;
this.tokenStatusComponent = null;
this.ui = new pi_tui_1.TUI();
this.chatContainer = new pi_tui_1.Container();
this.statusContainer = new pi_tui_1.Container();
this.editor = new pi_tui_1.TextEditor();
this.tokenContainer = new pi_tui_1.Container();
// Setup autocomplete for file paths and slash commands
var autocompleteProvider = new pi_tui_1.CombinedAutocompleteProvider([], process.cwd());
this.editor.setAutocompleteProvider(autocompleteProvider);
}
TuiRenderer.prototype.init = function () {
return __awaiter(this, void 0, void 0, function () {
var header;
var _this = this;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
if (this.isInitialized)
return [2 /*return*/];
header = new pi_tui_1.TextComponent(chalk_1.default.gray(chalk_1.default.blueBright(">> pi interactive chat <<<")) +
"\n" +
chalk_1.default.dim("Press Escape to interrupt while processing") +
"\n" +
chalk_1.default.dim("Press CTRL+C to clear the text editor") +
"\n" +
chalk_1.default.dim("Press CTRL+C twice quickly to exit"), { bottom: 1 });
// Setup UI layout
this.ui.addChild(header);
this.ui.addChild(this.chatContainer);
this.ui.addChild(this.statusContainer);
this.ui.addChild(new pi_tui_1.WhitespaceComponent(1));
this.ui.addChild(this.editor);
this.ui.addChild(this.tokenContainer);
this.ui.setFocus(this.editor);
// Set up global key handler for Escape and Ctrl+C
this.ui.onGlobalKeyPress = function (data) {
// Intercept Escape key when processing
if (data === "\x1b" && _this.currentLoadingAnimation) {
// Call interrupt callback if set
if (_this.onInterruptCallback) {
_this.onInterruptCallback();
}
// Stop the loading animation immediately
if (_this.currentLoadingAnimation) {
_this.currentLoadingAnimation.stop();
_this.statusContainer.clear();
_this.currentLoadingAnimation = null;
}
// Don't show message here - the interrupted event will handle it
// Re-enable editor submission
_this.editor.disableSubmit = false;
_this.ui.requestRender();
// Don't forward to editor
return false;
}
// Handle Ctrl+C (raw mode sends \x03)
if (data === "\x03") {
var now = Date.now();
var timeSinceLastCtrlC = now - _this.lastSigintTime;
if (timeSinceLastCtrlC < 500) {
// Second Ctrl+C within 500ms - exit
_this.stop();
process.exit(0);
}
else {
// First Ctrl+C - clear the editor
_this.clearEditor();
_this.lastSigintTime = now;
}
// Don't forward to editor
return false;
}
// Forward all other keys
return true;
};
// Handle editor submission
this.editor.onSubmit = function (text) {
text = text.trim();
if (!text)
return;
if (_this.onInputCallback) {
_this.onInputCallback(text);
}
};
// Start the UI
return [4 /*yield*/, this.ui.start()];
case 1:
// Start the UI
_a.sent();
this.isInitialized = true;
return [2 /*return*/];
}
});
});
};
TuiRenderer.prototype.on = function (event) {
return __awaiter(this, void 0, void 0, function () {
var thinkingContainer, thinkingLines, _i, thinkingLines_1, line, lines, maxLines, truncated, toShow, resultContainer, _a, toShow_1, line;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
if (!!this.isInitialized) return [3 /*break*/, 2];
return [4 /*yield*/, this.init()];
case 1:
_b.sent();
_b.label = 2;
case 2:
switch (event.type) {
case "assistant_start":
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.hex("#FFA500")("[assistant]")));
// Disable editor submission while processing
this.editor.disableSubmit = true;
// Start loading animation in the status container
this.statusContainer.clear();
this.currentLoadingAnimation = new LoadingAnimation(this.ui);
this.statusContainer.addChild(this.currentLoadingAnimation);
break;
case "reasoning": {
thinkingContainer = new pi_tui_1.Container();
thinkingContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.dim("[thinking]")));
thinkingLines = event.text.split("\n");
for (_i = 0, thinkingLines_1 = thinkingLines; _i < thinkingLines_1.length; _i++) {
line = thinkingLines_1[_i];
thinkingContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.dim(line)));
}
thinkingContainer.addChild(new pi_tui_1.WhitespaceComponent(1));
this.chatContainer.addChild(thinkingContainer);
break;
}
case "tool_call":
this.toolCallCount++;
this.updateTokenDisplay();
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.yellow("[tool] ".concat(event.name, "(").concat(event.args, ")"))));
break;
case "tool_result": {
lines = event.result.split("\n");
maxLines = 10;
truncated = lines.length > maxLines;
toShow = truncated ? lines.slice(0, maxLines) : lines;
resultContainer = new pi_tui_1.Container();
for (_a = 0, toShow_1 = toShow; _a < toShow_1.length; _a++) {
line = toShow_1[_a];
resultContainer.addChild(new pi_tui_1.TextComponent(event.isError ? chalk_1.default.red(line) : chalk_1.default.gray(line)));
}
if (truncated) {
resultContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.dim("... (".concat(lines.length - maxLines, " more lines)"))));
}
resultContainer.addChild(new pi_tui_1.WhitespaceComponent(1));
this.chatContainer.addChild(resultContainer);
break;
}
case "assistant_message":
// Stop loading animation when assistant responds
if (this.currentLoadingAnimation) {
this.currentLoadingAnimation.stop();
this.currentLoadingAnimation = null;
this.statusContainer.clear();
}
// Re-enable editor submission
this.editor.disableSubmit = false;
// Use MarkdownComponent for rich formatting
this.chatContainer.addChild(new pi_tui_1.MarkdownComponent(event.text));
this.chatContainer.addChild(new pi_tui_1.WhitespaceComponent(1));
break;
case "error":
// Stop loading animation on error
if (this.currentLoadingAnimation) {
this.currentLoadingAnimation.stop();
this.currentLoadingAnimation = null;
this.statusContainer.clear();
}
// Re-enable editor submission
this.editor.disableSubmit = false;
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.red("[error] ".concat(event.message)), { bottom: 1 }));
break;
case "user_message":
// Render user message
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.green("[user]")));
this.chatContainer.addChild(new pi_tui_1.TextComponent(event.text, { bottom: 1 }));
break;
case "token_usage":
// Store the latest token counts (not cumulative since prompt includes full context)
this.lastInputTokens = event.inputTokens;
this.lastOutputTokens = event.outputTokens;
this.lastCacheReadTokens = event.cacheReadTokens;
this.lastCacheWriteTokens = event.cacheWriteTokens;
this.lastReasoningTokens = event.reasoningTokens;
this.updateTokenDisplay();
break;
case "interrupted":
// Stop the loading animation
if (this.currentLoadingAnimation) {
this.currentLoadingAnimation.stop();
this.currentLoadingAnimation = null;
this.statusContainer.clear();
}
// Show interrupted message
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.red("[Interrupted by user]"), { bottom: 1 }));
// Re-enable editor submission
this.editor.disableSubmit = false;
break;
}
this.ui.requestRender();
return [2 /*return*/];
}
});
});
};
TuiRenderer.prototype.updateTokenDisplay = function () {
// Clear and update token display
this.tokenContainer.clear();
// Build token display text
var tokenText = chalk_1.default.dim("\u2191".concat(this.lastInputTokens.toLocaleString(), " \u2193").concat(this.lastOutputTokens.toLocaleString()));
// Add reasoning tokens if present
if (this.lastReasoningTokens > 0) {
tokenText += chalk_1.default.dim(" \u26A1".concat(this.lastReasoningTokens.toLocaleString()));
}
// Add cache info if available
if (this.lastCacheReadTokens > 0 || this.lastCacheWriteTokens > 0) {
var cacheText = [];
if (this.lastCacheReadTokens > 0) {
cacheText.push("\u27F2".concat(this.lastCacheReadTokens.toLocaleString()));
}
if (this.lastCacheWriteTokens > 0) {
cacheText.push("\u27F3".concat(this.lastCacheWriteTokens.toLocaleString()));
}
tokenText += chalk_1.default.dim(" (".concat(cacheText.join(" "), ")"));
}
// Add tool call count
if (this.toolCallCount > 0) {
tokenText += chalk_1.default.dim(" \u2692 ".concat(this.toolCallCount));
}
this.tokenStatusComponent = new pi_tui_1.TextComponent(tokenText);
this.tokenContainer.addChild(this.tokenStatusComponent);
};
TuiRenderer.prototype.getUserInput = function () {
return __awaiter(this, void 0, void 0, function () {
var _this = this;
return __generator(this, function (_a) {
return [2 /*return*/, new Promise(function (resolve) {
_this.onInputCallback = function (text) {
_this.onInputCallback = undefined; // Clear callback
resolve(text);
};
})];
});
});
};
TuiRenderer.prototype.setInterruptCallback = function (callback) {
this.onInterruptCallback = callback;
};
TuiRenderer.prototype.clearEditor = function () {
var _this = this;
this.editor.setText("");
// Show hint in status container
this.statusContainer.clear();
var hint = new pi_tui_1.TextComponent(chalk_1.default.dim("Press Ctrl+C again to exit"));
this.statusContainer.addChild(hint);
this.ui.requestRender();
// Clear the hint after 500ms
setTimeout(function () {
_this.statusContainer.clear();
_this.ui.requestRender();
}, 500);
};
TuiRenderer.prototype.renderAssistantLabel = function () {
// Just render the assistant label without starting animations
// Used for restored session history
this.chatContainer.addChild(new pi_tui_1.TextComponent(chalk_1.default.hex("#FFA500")("[assistant]")));
this.ui.requestRender();
};
TuiRenderer.prototype.stop = function () {
if (this.currentLoadingAnimation) {
this.currentLoadingAnimation.stop();
this.currentLoadingAnimation = null;
}
if (this.isInitialized) {
this.ui.stop();
this.isInitialized = false;
}
};
return TuiRenderer;
}());
exports.TuiRenderer = TuiRenderer;

View file

@ -0,0 +1,194 @@
"use strict";
// TypeScript emit helper: drives a down-compiled `async` function body
// (expressed as a generator) and returns a Promise for its completion.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript emit helper: a state machine that emulates generator semantics
// (labels, try/finally regions, yield resumption) for down-compiled async
// bodies. Opcodes: 0/1 next-throw, 2 return, 3 break-to-label, 4 yield,
// 5 delegated yield, 6 caught error, 7 end-finally.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
    return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (g && (g = 0, op[0] && (_ = 0)), _) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SessionManager = void 0;
var crypto_1 = require("crypto");
var fs_1 = require("fs");
var os_1 = require("os");
var path_1 = require("path");
// Simple UUID v4 generator
// Generate an RFC 4122 version-4 UUID from 16 cryptographically random bytes.
// Byte 6's high nibble is forced to 0100 (version 4) and byte 8's top two
// bits to 10 (variant), then the hex string is split into 8-4-4-4-12 groups.
function uuidv4() {
    var bytes = (0, crypto_1.randomBytes)(16);
    bytes[6] = (bytes[6] & 0x0f) | 0x40; // Version 4
    bytes[8] = (bytes[8] & 0x3f) | 0x80; // Variant 10
    var hex = bytes.toString("hex");
    var groups = [hex.slice(0, 8), hex.slice(8, 12), hex.slice(12, 16), hex.slice(16, 20), hex.slice(20, 32)];
    return groups.join("-");
}
var SessionManager = /** @class */ (function () {
    /**
     * Manage a JSONL session log stored under the per-project session
     * directory (~/.pi/sessions/<escaped-cwd>/ by default).
     * @param {boolean} [continueSession=false] - When true, reuse the most
     *     recently modified session file in the directory; otherwise (or if
     *     none exists) start a fresh session file.
     */
    function SessionManager(continueSession) {
        if (continueSession === void 0) { continueSession = false; }
        this.sessionDir = this.getSessionDirectory();
        if (continueSession) {
            var mostRecent = this.findMostRecentlyModifiedSession();
            if (mostRecent) {
                this.sessionFile = mostRecent;
                // Load session ID from file
                this.loadSessionId();
            }
            else {
                // No existing session, create new
                this.initNewSession();
            }
        }
        else {
            this.initNewSession();
        }
    }
    /**
     * Compute (and create if missing) the session directory for the current
     * working directory. The cwd is flattened into a single path segment,
     * e.g. "/a/b" -> "--a-b--", under $PI_CONFIG_DIR or ~/.pi.
     * @returns {string} Absolute path of the session directory.
     */
    SessionManager.prototype.getSessionDirectory = function () {
        var cwd = process.cwd();
        var safePath = "--" + cwd.replace(/^\//, "").replace(/\//g, "-") + "--";
        var piConfigDir = (0, path_1.resolve)(process.env.PI_CONFIG_DIR || (0, path_1.join)((0, os_1.homedir)(), ".pi"));
        var sessionDir = (0, path_1.join)(piConfigDir, "sessions", safePath);
        if (!(0, fs_1.existsSync)(sessionDir)) {
            (0, fs_1.mkdirSync)(sessionDir, { recursive: true });
        }
        return sessionDir;
    };
    // Create a fresh session: new UUID and a timestamped .jsonl file name
    // (colons/dots in the ISO timestamp are replaced for filesystem safety).
    SessionManager.prototype.initNewSession = function () {
        this.sessionId = uuidv4();
        var timestamp = new Date().toISOString().replace(/[:.]/g, "-");
        this.sessionFile = (0, path_1.join)(this.sessionDir, "".concat(timestamp, "_").concat(this.sessionId, ".jsonl"));
    };
    /**
     * Find the .jsonl session file with the newest mtime in the session
     * directory, or null when the directory is unreadable or empty.
     * @returns {string|null} Absolute path of the newest session file.
     */
    SessionManager.prototype.findMostRecentlyModifiedSession = function () {
        var _this = this;
        var _a;
        try {
            var files = (0, fs_1.readdirSync)(this.sessionDir)
                .filter(function (f) { return f.endsWith(".jsonl"); })
                .map(function (f) { return ({
                name: f,
                path: (0, path_1.join)(_this.sessionDir, f),
                mtime: (0, fs_1.statSync)((0, path_1.join)(_this.sessionDir, f)).mtime,
            }); })
                .sort(function (a, b) { return b.mtime.getTime() - a.mtime.getTime(); });
            return ((_a = files[0]) === null || _a === void 0 ? void 0 : _a.path) || null;
        }
        catch (_b) {
            return null;
        }
    };
    // Restore this.sessionId from the file's "session" header entry; if the
    // file lacks one (or doesn't exist yet), fall back to a fresh UUID.
    SessionManager.prototype.loadSessionId = function () {
        if (!(0, fs_1.existsSync)(this.sessionFile))
            return;
        var lines = (0, fs_1.readFileSync)(this.sessionFile, "utf8").trim().split("\n");
        for (var _i = 0, lines_1 = lines; _i < lines_1.length; _i++) {
            var line = lines_1[_i];
            try {
                var entry = JSON.parse(line);
                if (entry.type === "session") {
                    this.sessionId = entry.id;
                    return;
                }
            }
            catch (_a) {
                // Skip malformed lines
            }
        }
        // If no session entry found, create new ID
        this.sessionId = uuidv4();
    };
    /**
     * Append the session header entry (id, timestamp, cwd, config) to the
     * session file. Expected to be called once per new session.
     * @param {object} config - Agent configuration to persist with the session.
     */
    SessionManager.prototype.startSession = function (config) {
        var entry = {
            type: "session",
            id: this.sessionId,
            timestamp: new Date().toISOString(),
            cwd: process.cwd(),
            config: config,
        };
        (0, fs_1.appendFileSync)(this.sessionFile, JSON.stringify(entry) + "\n");
    };
    /**
     * Append one agent event to the session log as a timestamped JSONL line.
     * Compiled from an `async` method; the write itself is synchronous.
     * @param {object} event - Agent event to persist.
     * @returns {Promise<void>}
     */
    SessionManager.prototype.on = function (event) {
        return __awaiter(this, void 0, void 0, function () {
            var entry;
            return __generator(this, function (_a) {
                entry = {
                    type: "event",
                    timestamp: new Date().toISOString(),
                    event: event,
                };
                (0, fs_1.appendFileSync)(this.sessionFile, JSON.stringify(entry) + "\n");
                return [2 /*return*/];
            });
        });
    };
    /**
     * Read the whole session file back: the stored config, all event entries,
     * and the most recent token_usage event (the totals are not summed because
     * each prompt already includes the full context).
     * @returns {{config: object, events: object[], totalUsage: object}|null}
     *     null when the file is missing or contains no session header.
     */
    SessionManager.prototype.getSessionData = function () {
        if (!(0, fs_1.existsSync)(this.sessionFile))
            return null;
        var config = null;
        var events = [];
        var totalUsage = {
            type: "token_usage",
            inputTokens: 0,
            outputTokens: 0,
            totalTokens: 0,
            cacheReadTokens: 0,
            cacheWriteTokens: 0,
            reasoningTokens: 0,
        };
        var lines = (0, fs_1.readFileSync)(this.sessionFile, "utf8").trim().split("\n");
        for (var _i = 0, lines_2 = lines; _i < lines_2.length; _i++) {
            var line = lines_2[_i];
            try {
                var entry = JSON.parse(line);
                if (entry.type === "session") {
                    config = entry.config;
                    this.sessionId = entry.id;
                }
                else if (entry.type === "event") {
                    var eventEntry = entry;
                    events.push(eventEntry);
                    if (eventEntry.event.type === "token_usage") {
                        // Keep only the latest usage snapshot, not a running sum.
                        totalUsage = entry.event;
                    }
                }
            }
            catch (_a) {
                // Skip malformed lines
            }
        }
        return config ? { config: config, events: events, totalUsage: totalUsage } : null;
    };
    // Accessor: the current session's UUID.
    SessionManager.prototype.getSessionId = function () {
        return this.sessionId;
    };
    // Accessor: absolute path of the current session's JSONL file.
    SessionManager.prototype.getSessionFile = function () {
        return this.sessionFile;
    };
    return SessionManager;
}());
exports.SessionManager = SessionManager;

View file

@ -0,0 +1,316 @@
"use strict";
// TypeScript emit helper: drives a down-compiled `async` function body
// (expressed as a generator) and returns a Promise for its completion.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript emit helper: a state machine that emulates generator semantics
// (labels, try/finally regions, yield resumption) for down-compiled async
// bodies. Opcodes: 0/1 next-throw, 2 return, 3 break-to-label, 4 yield,
// 5 delegated yield, 6 caught error, 7 end-finally.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
    return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (g && (g = 0, op[0] && (_ = 0)), _) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.toolsForChat = exports.toolsForResponses = void 0;
exports.executeTool = executeTool;
var node_child_process_1 = require("node:child_process");
var node_fs_1 = require("node:fs");
var node_path_1 = require("node:path");
var glob_1 = require("glob");
// For GPT-OSS models via responses API
exports.toolsForResponses = [
    // read: return the contents of a file (dispatched in executeTool).
    {
        type: "function",
        name: "read",
        description: "Read contents of a file",
        parameters: {
            type: "object",
            properties: {
                path: {
                    type: "string",
                    description: "Path to the file to read",
                },
            },
            required: ["path"],
        },
    },
    // list: directory listing; "path" is optional and defaults to the cwd.
    {
        type: "function",
        name: "list",
        description: "List contents of a directory",
        parameters: {
            type: "object",
            properties: {
                path: {
                    type: "string",
                    description: "Path to the directory (default: current directory)",
                },
            },
        },
    },
    // bash: run an arbitrary shell command.
    {
        type: "function",
        name: "bash",
        description: "Execute a command in Bash",
        parameters: {
            type: "object",
            properties: {
                command: {
                    type: "string",
                    description: "Command to execute",
                },
            },
            required: ["command"],
        },
    },
    // glob: find files by pattern; "path" is the optional search root.
    {
        type: "function",
        name: "glob",
        description: "Find files matching a glob pattern",
        parameters: {
            type: "object",
            properties: {
                pattern: {
                    type: "string",
                    description: "Glob pattern to match files (e.g., '**/*.ts', 'src/**/*.json')",
                },
                path: {
                    type: "string",
                    description: "Directory to search in (default: current directory)",
                },
            },
            required: ["pattern"],
        },
    },
    // rg: raw ripgrep invocation; "args" is passed to rg verbatim.
    {
        type: "function",
        name: "rg",
        description: "Search using ripgrep.",
        parameters: {
            type: "object",
            properties: {
                args: {
                    type: "string",
                    description: 'Arguments to pass directly to ripgrep. Examples: "-l prompt" or "-i TODO" or "--type ts className" or "functionName src/". Never add quotes around the search pattern.',
                },
            },
            required: ["args"],
        },
    },
];
// For standard chat API (OpenAI format)
exports.toolsForChat = exports.toolsForResponses.map(function (tool) { return ({
type: "function",
function: {
name: tool.name,
description: tool.description,
parameters: tool.parameters,
},
}); });
// Helper to execute commands with abort support
/**
 * Spawn a shell command and resolve with its captured output, supporting
 * cancellation via an AbortSignal (the child is SIGTERM'd on abort and the
 * promise rejects with Error("Interrupted")).
 *
 * Output handling: stdout and stderr are each capped at 1MB; a truncation
 * marker is appended when the cap is hit.
 * NOTE(review): `outputTruncated` is shared between the stdout and stderr
 * handlers, so only the first stream to overflow gets the marker — confirm
 * whether that is intentional.
 *
 * Exit handling: exit code 1 for commands containing "rg" resolves with ""
 * (ripgrep's "no matches"); other non-zero exits reject only when there is
 * stderr and no stdout, otherwise they resolve with the captured stdout.
 * @param {string} command - Shell command line to run (spawned with shell: true).
 * @param {AbortSignal} [signal] - Optional abort signal for cancellation.
 * @returns {Promise<string>} Captured stdout (or stderr as a fallback).
 */
function execWithAbort(command, signal) {
    return __awaiter(this, void 0, void 0, function () {
        return __generator(this, function (_a) {
            return [2 /*return*/, new Promise(function (resolve, reject) {
                    var _a, _b;
                    var child = (0, node_child_process_1.spawn)(command, {
                        shell: true,
                        signal: signal,
                    });
                    var stdout = "";
                    var stderr = "";
                    var MAX_OUTPUT_SIZE = 1024 * 1024; // 1MB limit
                    var outputTruncated = false;
                    (_a = child.stdout) === null || _a === void 0 ? void 0 : _a.on("data", function (data) {
                        var chunk = data.toString();
                        if (stdout.length + chunk.length > MAX_OUTPUT_SIZE) {
                            if (!outputTruncated) {
                                stdout += "\n... [Output truncated - exceeded 1MB limit] ...";
                                outputTruncated = true;
                            }
                        }
                        else {
                            stdout += chunk;
                        }
                    });
                    (_b = child.stderr) === null || _b === void 0 ? void 0 : _b.on("data", function (data) {
                        var chunk = data.toString();
                        if (stderr.length + chunk.length > MAX_OUTPUT_SIZE) {
                            if (!outputTruncated) {
                                stderr += "\n... [Output truncated - exceeded 1MB limit] ...";
                                outputTruncated = true;
                            }
                        }
                        else {
                            stderr += chunk;
                        }
                    });
                    child.on("error", function (error) {
                        reject(error);
                    });
                    child.on("close", function (code) {
                        if (signal === null || signal === void 0 ? void 0 : signal.aborted) {
                            reject(new Error("Interrupted"));
                        }
                        else if (code !== 0 && code !== null) {
                            // For some commands like ripgrep, exit code 1 is normal (no matches)
                            if (code === 1 && command.includes("rg")) {
                                resolve(""); // No matches for ripgrep
                            }
                            else if (stderr && !stdout) {
                                reject(new Error(stderr));
                            }
                            else {
                                resolve(stdout || "");
                            }
                        }
                        else {
                            resolve(stdout || stderr || "");
                        }
                    });
                    // Kill the process if signal is aborted
                    if (signal) {
                        signal.addEventListener("abort", function () {
                            child.kill("SIGTERM");
                        }, { once: true });
                    }
                })];
        });
    });
}
/**
 * Dispatch a tool call by name and return its result as a string.
 * Compiled from an `async` function: the outer switch on `_a` jumps to the
 * generator label for each tool (read -> 1, list -> 2, bash -> 3-6,
 * glob -> 7-10, rg -> 11-14, unknown -> 15).
 *
 * Errors are mostly returned as strings rather than thrown; the exception is
 * Error("Interrupted") from execWithAbort, which is re-thrown so the caller
 * can distinguish user cancellation, and bash failures, which throw.
 * @param {string} name - Tool name ("read" | "list" | "bash" | "glob" | "rg").
 * @param {string} args - JSON-encoded tool arguments.
 * @param {AbortSignal} [signal] - Optional abort signal for bash/rg.
 * @returns {Promise<string>} Tool output or an error/description string.
 */
function executeTool(name, args, signal) {
    return __awaiter(this, void 0, void 0, function () {
        var parsed, _a, path, file, stats, MAX_FILE_SIZE, fd, buffer, data, path, dir, entries, command, output, e_1, pattern, searchPath, matches, e_2, args_1, cmd, output, e_3;
        return __generator(this, function (_b) {
            switch (_b.label) {
                case 0:
                    parsed = JSON.parse(args);
                    _a = name;
                    switch (_a) {
                        case "read": return [3 /*break*/, 1];
                        case "list": return [3 /*break*/, 2];
                        case "bash": return [3 /*break*/, 3];
                        case "glob": return [3 /*break*/, 7];
                        case "rg": return [3 /*break*/, 11];
                    }
                    return [3 /*break*/, 15];
                case 1:
                    // "read": return file contents, truncated at 1MB.
                    {
                        path = parsed.path;
                        if (!path)
                            return [2 /*return*/, "Error: path parameter is required"];
                        file = (0, node_path_1.resolve)(path);
                        if (!(0, node_fs_1.existsSync)(file))
                            return [2 /*return*/, "File not found: ".concat(file)];
                        stats = (0, node_fs_1.statSync)(file);
                        MAX_FILE_SIZE = 1024 * 1024;
                        if (stats.size > MAX_FILE_SIZE) {
                            // Read only the first 1MB and append a truncation marker.
                            fd = (0, node_fs_1.openSync)(file, "r");
                            buffer = Buffer.alloc(MAX_FILE_SIZE);
                            (0, node_fs_1.readSync)(fd, buffer, 0, MAX_FILE_SIZE, 0);
                            (0, node_fs_1.closeSync)(fd);
                            return [2 /*return*/, buffer.toString("utf8") + "\n\n... [File truncated - exceeded 1MB limit] ..."];
                        }
                        data = (0, node_fs_1.readFileSync)(file, "utf8");
                        return [2 /*return*/, data];
                    }
                    _b.label = 2;
                case 2:
                    // "list": newline-joined directory entries, "/"-suffixed for dirs.
                    {
                        path = parsed.path || ".";
                        dir = (0, node_path_1.resolve)(path);
                        if (!(0, node_fs_1.existsSync)(dir))
                            return [2 /*return*/, "Directory not found: ".concat(dir)];
                        entries = (0, node_fs_1.readdirSync)(dir, { withFileTypes: true });
                        return [2 /*return*/, entries.map(function (entry) { return (entry.isDirectory() ? entry.name + "/" : entry.name); }).join("\n")];
                    }
                    _b.label = 3;
                case 3:
                    // "bash": run the command via execWithAbort (labels 3-6 are the
                    // compiled try/catch around the await).
                    command = parsed.command;
                    if (!command)
                        return [2 /*return*/, "Error: command parameter is required"];
                    _b.label = 4;
                case 4:
                    _b.trys.push([4, 6, , 7]);
                    return [4 /*yield*/, execWithAbort(command, signal)];
                case 5:
                    output = _b.sent();
                    return [2 /*return*/, output || "Command executed successfully"];
                case 6:
                    e_1 = _b.sent();
                    if (e_1.message === "Interrupted") {
                        throw e_1; // Re-throw interruption
                    }
                    throw new Error("Command failed: ".concat(e_1.message));
                case 7:
                    // "glob": pattern match via the glob package (labels 7-10).
                    pattern = parsed.pattern;
                    if (!pattern)
                        return [2 /*return*/, "Error: pattern parameter is required"];
                    searchPath = parsed.path || process.cwd();
                    _b.label = 8;
                case 8:
                    _b.trys.push([8, 10, , 11]);
                    return [4 /*yield*/, (0, glob_1.glob)(pattern, {
                            cwd: searchPath,
                            dot: true,
                            nodir: false,
                            mark: true, // Add / to directories
                        })];
                case 9:
                    matches = _b.sent();
                    if (matches.length === 0) {
                        return [2 /*return*/, "No files found matching the pattern"];
                    }
                    // Sort by modification time (most recent first) if possible
                    return [2 /*return*/, matches.sort().join("\n")];
                case 10:
                    e_2 = _b.sent();
                    return [2 /*return*/, "Glob error: ".concat(e_2.message)];
                case 11:
                    // "rg": pass args straight to ripgrep; stdin is /dev/null so rg
                    // never blocks waiting for input (labels 11-14).
                    args_1 = parsed.args;
                    if (!args_1)
                        return [2 /*return*/, "Error: args parameter is required"];
                    cmd = "rg ".concat(args_1, " < /dev/null");
                    _b.label = 12;
                case 12:
                    _b.trys.push([12, 14, , 15]);
                    return [4 /*yield*/, execWithAbort(cmd, signal)];
                case 13:
                    output = _b.sent();
                    return [2 /*return*/, output.trim() || "No matches found"];
                case 14:
                    e_3 = _b.sent();
                    if (e_3.message === "Interrupted") {
                        throw e_3; // Re-throw interruption
                    }
                    return [2 /*return*/, "ripgrep error: ".concat(e_3.message)];
                case 15: return [2 /*return*/, "Unknown tool: ".concat(name)];
            }
        });
    });
}

View file

@ -1,12 +1,12 @@
{
"name": "@mariozechner/pi",
"version": "0.5.7",
"version": "0.5.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/pi",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@ai-sdk/openai": "^2.0.5",

View file

@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi",
"version": "0.5.7",
"version": "0.5.8",
"description": "CLI tool for managing vLLM deployments on GPU pods",
"type": "module",
"bin": {
@ -34,7 +34,7 @@
"node": ">=20.0.0"
},
"dependencies": {
"@mariozechner/pi-agent": "^0.5.7",
"@mariozechner/pi-agent": "^0.5.8",
"chalk": "^5.5.0"
},
"devDependencies": {}

View file

@ -285,6 +285,8 @@ WRAPPER
let interrupted = false;
let startupComplete = false;
let startupFailed = false;
let failureReason = "";
// Handle Ctrl+C
const sigintHandler = () => {
@ -305,6 +307,28 @@ WRAPPER
startupComplete = true;
logProcess.kill(); // Stop tailing logs
}
// Check for failure indicators
if (line.includes("Model runner exiting with code") && !line.includes("code 0")) {
startupFailed = true;
failureReason = "Model runner failed to start";
logProcess.kill();
}
if (line.includes("Script exited with code") && !line.includes("code 0")) {
startupFailed = true;
failureReason = "Script failed to execute";
logProcess.kill();
}
if (line.includes("torch.OutOfMemoryError") || line.includes("CUDA out of memory")) {
startupFailed = true;
failureReason = "Out of GPU memory (OOM)";
// Don't kill immediately - let it show more error context
}
if (line.includes("RuntimeError: Engine core initialization failed")) {
startupFailed = true;
failureReason = "vLLM engine initialization failed";
logProcess.kill();
}
}
}
};
@ -315,7 +339,30 @@ WRAPPER
await new Promise<void>((resolve) => logProcess.on("exit", resolve));
process.removeListener("SIGINT", sigintHandler);
if (startupComplete) {
if (startupFailed) {
// Model failed to start - clean up and report error
console.log("\n" + chalk.red(`✗ Model failed to start: ${failureReason}`));
// Remove the failed model from config
const config = loadConfig();
delete config.pods[podName].models[name];
saveConfig(config);
console.log(chalk.yellow("\nModel has been removed from configuration."));
// Provide helpful suggestions based on failure reason
if (failureReason.includes("OOM") || failureReason.includes("memory")) {
console.log("\n" + chalk.bold("Suggestions:"));
console.log(" • Try reducing GPU memory utilization: --memory 50%");
console.log(" • Use a smaller context window: --context 4k");
console.log(" • Use a quantized version of the model (e.g., FP8)");
console.log(" • Use more GPUs with tensor parallelism");
console.log(" • Try a smaller model variant");
}
console.log("\n" + chalk.cyan('Check full logs: pi ssh "tail -100 ~/.vllm_logs/' + name + '.log"'));
process.exit(1);
} else if (startupComplete) {
// Model started successfully - output connection details
console.log("\n" + chalk.green("✓ Model started successfully!"));
console.log("\n" + chalk.bold("Connection Details:"));

View file

@ -155,9 +155,9 @@
"--tensor-parallel-size",
"16",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice"
]
},
@ -168,9 +168,9 @@
"--tensor-parallel-size",
"8",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice"
]
}
@ -187,9 +187,9 @@
"--tensor-parallel-size",
"8",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice"
]
},
@ -200,9 +200,9 @@
"--tensor-parallel-size",
"4",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice"
]
}
@ -218,12 +218,10 @@
"--tensor-parallel-size",
"2",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"--enable-auto-tool-choice",
"--quantization",
"fp8"
"glm45",
"--enable-auto-tool-choice"
],
"env": {
"VLLM_ATTENTION_BACKEND": "XFORMERS"
@ -235,12 +233,10 @@
"gpuTypes": ["H200"],
"args": [
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"--enable-auto-tool-choice",
"--quantization",
"fp8"
"glm45",
"--enable-auto-tool-choice"
],
"env": {
"VLLM_ATTENTION_BACKEND": "XFORMERS"
@ -259,9 +255,9 @@
"--tensor-parallel-size",
"2",
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice"
],
"notes": "Non-quantized BF16 version, more compatible"
@ -271,9 +267,9 @@
"gpuTypes": ["H200"],
"args": [
"--tool-call-parser",
"glm4_moe",
"glm45",
"--reasoning-parser",
"glm4_moe",
"glm45",
"--enable-auto-tool-choice",
"--gpu-memory-utilization",
"0.95"

View file

@ -1,12 +1,12 @@
{
"name": "@mariozechner/tui",
"version": "0.5.7",
"version": "0.5.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/tui",
"version": "0.5.7",
"version": "0.5.8",
"license": "MIT",
"dependencies": {
"@types/mime-types": "^2.1.4",

View file

@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi-tui",
"version": "0.5.7",
"version": "0.5.8",
"description": "Terminal User Interface library with differential rendering for efficient text-based applications",
"type": "module",
"main": "dist/index.js",

View file

@ -238,9 +238,6 @@ export class TUI extends Container {
} catch (error) {
console.error("Error starting terminal:", error);
}
// Initial render
this.renderToScreen();
}
stop(): void {
@ -268,10 +265,11 @@ export class TUI extends Container {
this.collectRenderCommands(this, termWidth, currentRenderCommands);
if (this.isFirstRender) {
this.executeInitialRender(currentRenderCommands);
this.renderInitial(currentRenderCommands);
this.isFirstRender = false;
} else {
this.executeDifferentialRender(currentRenderCommands, termHeight);
// this.executeDifferentialRender(currentRenderCommands, termHeight);
this.renderDifferential(currentRenderCommands, termHeight);
}
// Save for next render
@ -295,7 +293,7 @@ export class TUI extends Container {
}
}
private executeInitialRender(commands: RenderCommand[]): void {
private renderInitial(commands: RenderCommand[]): void {
let output = "";
const lines: string[] = [];
@ -324,6 +322,115 @@ export class TUI extends Container {
});
}
private renderDifferential(currentCommands: RenderCommand[], termHeight: number): void {
const viewportHeight = termHeight - 1; // Leave one line for cursor
// Build the new lines array
const newLines: string[] = [];
for (const command of currentCommands) {
newLines.push(...command.lines);
}
const totalNewLines = newLines.length;
const totalOldLines = this.previousLines.length;
// Find the first line that changed
let firstChangedLineOffset = -1;
let currentLineOffset = 0;
for (let i = 0; i < currentCommands.length; i++) {
const current = currentCommands[i];
const previous = i < this.previousRenderCommands.length ? this.previousRenderCommands[i] : null;
// Check if this is a new component or component was removed/reordered
if (!previous || previous.id !== current.id) {
firstChangedLineOffset = currentLineOffset;
break;
}
// Check if component content or size changed
if (current.changed) {
firstChangedLineOffset = currentLineOffset;
break;
}
currentLineOffset += current.lines.length;
}
// Also check if we have fewer components now (components removed from end)
if (firstChangedLineOffset === -1 && currentCommands.length < this.previousRenderCommands.length) {
firstChangedLineOffset = currentLineOffset;
}
// If nothing changed, do nothing
if (firstChangedLineOffset === -1) {
this.previousLines = newLines;
return;
}
// Calculate where the first change is relative to the viewport
// If our content exceeds viewport, some is in scrollback
const contentStartInViewport = Math.max(0, totalOldLines - viewportHeight);
const changePositionInViewport = firstChangedLineOffset - contentStartInViewport;
let output = "";
let linesRedrawn = 0;
if (changePositionInViewport < 0) {
// The change is above the viewport - we cannot reach it with cursor
// MUST do full re-render
output = "\x1b[3J\x1b[H"; // Clear scrollback and screen, then home cursor
// Render ALL lines
for (let i = 0; i < newLines.length; i++) {
if (i > 0) output += "\r\n";
output += newLines[i];
}
// Add final newline
if (newLines.length > 0) output += "\r\n";
linesRedrawn = newLines.length;
} else {
// The change is in the viewport - we can update from there
// Calculate how many lines up to move from current cursor position
const linesToMoveUp = totalOldLines - firstChangedLineOffset;
if (linesToMoveUp > 0) {
output += `\x1b[${linesToMoveUp}A`;
}
// Clear from here to end of screen
output += "\x1b[0J";
// Render everything from the first change onwards
const linesToRender = newLines.slice(firstChangedLineOffset);
for (let i = 0; i < linesToRender.length; i++) {
if (i > 0) output += "\r\n";
output += linesToRender[i];
}
// Add final newline
if (linesToRender.length > 0) output += "\r\n";
linesRedrawn = linesToRender.length;
}
this.terminal.write(output);
// Save what we rendered
this.previousLines = newLines;
this.totalLinesRedrawn += linesRedrawn;
logger.debug("TUI", "Differential render", {
linesRedrawn,
firstChangedLineOffset,
changePositionInViewport,
totalNewLines,
totalOldLines,
});
}
private executeDifferentialRender(currentCommands: RenderCommand[], termHeight: number): void {
let output = "";
let linesRedrawn = 0;
@ -380,47 +487,30 @@ export class TUI extends Container {
currentLineOffset += current.lines.length;
}
// Move cursor to top of our content
if (oldVisibleLines > 0) {
output += `\x1b[${oldVisibleLines}A`;
}
if (needFullRedraw) {
// Clear each old line to avoid wrapping artifacts
for (let i = 0; i < oldVisibleLines; i++) {
if (i > 0) output += `\x1b[1B`; // Move down one line
output += "\x1b[2K"; // Clear entire line
}
// Move back to start position
if (oldVisibleLines > 1) {
output += `\x1b[${oldVisibleLines - 1}A`;
}
// Ensure cursor is at beginning of line
output += "\r";
// Clear any remaining lines
output += "\x1b[0J"; // Clear from cursor to end of screen
// When we need a full redraw, we must clear the entire scrollback buffer
// and render ALL components, not just what fits in the viewport
// Determine what to render
let linesToRender: string[];
if (totalNewLines <= viewportHeight) {
// Everything fits - render all
linesToRender = newLines;
} else {
// Only render what fits in viewport (last N lines)
linesToRender = newLines.slice(-viewportHeight);
}
// Clear the entire screen and scrollback buffer
output = "\x1b[2J\x1b[3J\x1b[H";
// Output the lines
for (let i = 0; i < linesToRender.length; i++) {
// Render ALL lines, letting the terminal handle scrolling naturally
for (let i = 0; i < newLines.length; i++) {
if (i > 0) output += "\r\n";
output += linesToRender[i];
output += newLines[i];
}
// Add final newline
if (linesToRender.length > 0) output += "\r\n";
// Add final newline to position cursor below content
if (newLines.length > 0) output += "\r\n";
linesRedrawn = linesToRender.length;
linesRedrawn = newLines.length;
} else {
// We can only do differential updates for components in the viewport
// Move cursor to top of visible content
if (oldVisibleLines > 0) {
output += `\x1b[${oldVisibleLines}A`;
}
// Do line-by-line diff for visible portion only
const oldVisible =
totalOldLines > viewportHeight ? this.previousLines.slice(-viewportHeight) : this.previousLines;

View file

@ -0,0 +1,193 @@
import { test, describe } from "node:test";
import assert from "node:assert";
import { VirtualTerminal } from "./virtual-terminal.js";
import { TUI, Container, TextComponent, TextEditor } from "../src/index.js";
// Integration tests for the TUI differential renderer, driven through a
// VirtualTerminal that captures both the visible viewport and the scrollback
// buffer. The key invariant under test: content pushed out of the viewport
// must survive in scrollback (not be clobbered by re-renders).
describe("Differential Rendering - Dynamic Content", () => {
test("handles static text, dynamic container, and text editor correctly", async () => {
const terminal = new VirtualTerminal(80, 10); // Small viewport to test scrolling
const ui = new TUI(terminal);
ui.start();
// Step 1: Add a static text component
const staticText = new TextComponent("Static Header Text");
ui.addChild(staticText);
// Step 2: Add an initially empty container
const container = new Container();
ui.addChild(container);
// Step 3: Add a text editor field
const editor = new TextEditor();
ui.addChild(editor);
ui.setFocus(editor);
// Wait for next tick to complete and flush virtual terminal
// (rendering appears to be scheduled on the next tick; flush() makes the
// VirtualTerminal apply buffered writes — TODO confirm against TUI internals)
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
// Step 4: Check initial output in scrollbuffer
let scrollBuffer = terminal.getScrollBuffer();
let viewport = terminal.getViewport();
console.log("Initial render:");
console.log("Viewport lines:", viewport.length);
console.log("ScrollBuffer lines:", scrollBuffer.length);
// Count non-empty lines in scrollbuffer
let nonEmptyInBuffer = scrollBuffer.filter(line => line.trim() !== "").length;
console.log("Non-empty lines in scrollbuffer:", nonEmptyInBuffer);
// Verify initial render has static text in scrollbuffer
assert.ok(scrollBuffer.some(line => line.includes("Static Header Text")),
`Expected static text in scrollbuffer`);
// Step 5: Add 100 text components to container
// (far more than the 10-row viewport can show, forcing most into scrollback)
console.log("\nAdding 100 components to container...");
for (let i = 1; i <= 100; i++) {
container.addChild(new TextComponent(`Dynamic Item ${i}`));
}
// Request render after adding all components
ui.requestRender();
// Wait for next tick to complete and flush
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
// Step 6: Check output after adding 100 components
scrollBuffer = terminal.getScrollBuffer();
viewport = terminal.getViewport();
console.log("\nAfter adding 100 items:");
console.log("Viewport lines:", viewport.length);
console.log("ScrollBuffer lines:", scrollBuffer.length);
// Count all dynamic items in scrollbuffer; a Set of the parsed item numbers
// lets us detect both duplicates and gaps independently of line order
let dynamicItemsInBuffer = 0;
let allItemNumbers = new Set<number>();
for (const line of scrollBuffer) {
const match = line.match(/Dynamic Item (\d+)/);
if (match) {
dynamicItemsInBuffer++;
allItemNumbers.add(parseInt(match[1]));
}
}
console.log("Dynamic items found in scrollbuffer:", dynamicItemsInBuffer);
console.log("Unique item numbers:", allItemNumbers.size);
console.log("Item range:", Math.min(...allItemNumbers), "-", Math.max(...allItemNumbers));
// CRITICAL TEST: The scrollbuffer should contain ALL 100 items
// This is what the differential render should preserve!
assert.strictEqual(allItemNumbers.size, 100,
`Expected all 100 unique items in scrollbuffer, but found ${allItemNumbers.size}`);
// Verify items are 1-100 (no gaps — every item rendered exactly as added)
for (let i = 1; i <= 100; i++) {
assert.ok(allItemNumbers.has(i), `Missing Dynamic Item ${i} in scrollbuffer`);
}
// Also verify the static header is still in scrollbuffer
assert.ok(scrollBuffer.some(line => line.includes("Static Header Text")),
"Static header should still be in scrollbuffer");
// And the editor should be there too — identified by its box-drawing borders
// (presumably TextEditor renders a rounded-corner frame; verify if it changes)
assert.ok(scrollBuffer.some(line => line.includes("╭") && line.includes("╮")),
"Editor top border should be in scrollbuffer");
assert.ok(scrollBuffer.some(line => line.includes("╰") && line.includes("╯")),
"Editor bottom border should be in scrollbuffer");
ui.stop();
});
test("differential render correctly updates only changed components", async () => {
// Larger 80x24 terminal so the whole layout fits on screen and we can
// assert exact viewport rows after each partial update.
const terminal = new VirtualTerminal(80, 24);
const ui = new TUI(terminal);
ui.start();
// Create multiple containers with different content
const header = new TextComponent("=== Application Header ===");
const statusContainer = new Container();
const contentContainer = new Container();
const footer = new TextComponent("=== Footer ===");
ui.addChild(header);
ui.addChild(statusContainer);
ui.addChild(contentContainer);
ui.addChild(footer);
// Add initial content
statusContainer.addChild(new TextComponent("Status: Ready"));
contentContainer.addChild(new TextComponent("Content Line 1"));
contentContainer.addChild(new TextComponent("Content Line 2"));
// Initial render
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
let viewport = terminal.getViewport();
assert.strictEqual(viewport[0], "=== Application Header ===");
assert.strictEqual(viewport[1], "Status: Ready");
assert.strictEqual(viewport[2], "Content Line 1");
assert.strictEqual(viewport[3], "Content Line 2");
assert.strictEqual(viewport[4], "=== Footer ===");
// Track lines redrawn (cumulative counter exposed by TUI; deltas between
// snapshots measure how much each update actually repainted)
const initialLinesRedrawn = ui.getLinesRedrawn();
// Update only the status — everything above/below it should be untouched
statusContainer.clear();
statusContainer.addChild(new TextComponent("Status: Processing..."));
ui.requestRender();
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
viewport = terminal.getViewport();
assert.strictEqual(viewport[0], "=== Application Header ===");
assert.strictEqual(viewport[1], "Status: Processing...");
assert.strictEqual(viewport[2], "Content Line 1");
assert.strictEqual(viewport[3], "Content Line 2");
assert.strictEqual(viewport[4], "=== Footer ===");
const afterStatusUpdate = ui.getLinesRedrawn();
const statusUpdateLines = afterStatusUpdate - initialLinesRedrawn;
console.log(`Lines redrawn for status update: ${statusUpdateLines}`);
// Add many items to content container
for (let i = 3; i <= 20; i++) {
contentContainer.addChild(new TextComponent(`Content Line ${i}`));
}
ui.requestRender();
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
viewport = terminal.getViewport();
// With 24 rows - 1 for cursor = 23 visible
// We have: 1 header + 1 status + 20 content + 1 footer = 23 lines
// Should fit exactly
assert.strictEqual(viewport[0], "=== Application Header ===");
assert.strictEqual(viewport[1], "Status: Processing...");
assert.strictEqual(viewport[21], "Content Line 20");
assert.strictEqual(viewport[22], "=== Footer ===");
// Now update just one content line (index 9 == "Content Line 10")
const contentLine10 = contentContainer.getChild(9) as TextComponent;
contentLine10.setText("Content Line 10 - MODIFIED");
ui.requestRender();
await new Promise(resolve => process.nextTick(resolve));
await terminal.flush();
viewport = terminal.getViewport();
// Row 11 = 1 header + 1 status + 9 preceding content lines
assert.strictEqual(viewport[11], "Content Line 10 - MODIFIED");
assert.strictEqual(viewport[0], "=== Application Header ==="); // Should be unchanged
assert.strictEqual(viewport[22], "=== Footer ==="); // Should be unchanged
ui.stop();
});
});

View file

@ -1,7 +1,23 @@
- agent/tui: broken rendering of resumed session messages
- start session, "read all README.md files except in node_modules"
- stop session
- resume session, messages are cut off?
- pods: pi start outputs all models that can be run on the pod. however, it doesn't check the vllm version. e.g. gpt-oss can only run via vllm+gpt-oss. glm4.5 can only run on vllm nightly.
- agent: improve reasoning section in README.md
- agent: ultrathink to temporarily set reasoning_effort?
- agent: ripgrep tool is very broken
[tool] rg({"args":"-l --hidden --glob \"**/README.md\""})
ripgrep error: rg: ripgrep requires at least one pattern to execute a search
- agent: gpt-5/responses api seems to be broken?
- prompt: read all README.md files
- output:
[error] 400 Item 'fc_68990b4ddf60819e9138b7a496da3fcb04d5f47f123043f7' of type 'function_call' was provided without its required 'reasoning' item: 'rs_68990b4d5784819eac65086d9a6e42e704d5f47f123043f7'.
- agent: need to figure out a model's max context length
- Add automatic context length detection via models endpoint
- Cache per baseURL/model combination in $PI_CONFIG_DIR/models.json or ~/.pi/models.json