Merge branch 'main' into feat/custom-thinking-budgets

This commit is contained in:
Melih Mucuk 2026-01-08 00:39:11 +03:00
commit d311978dfd
41 changed files with 1664 additions and 538 deletions

View file

@ -6,6 +6,14 @@
- `thinkingBudgets` option in `SimpleStreamOptions` for customizing token budgets per thinking level on token-based providers ([#529](https://github.com/badlogic/pi-mono/pull/529) by [@melihmucuk](https://github.com/melihmucuk))
### Breaking Changes
- Removed OpenAI Codex model aliases (`gpt-5`, `gpt-5-mini`, `gpt-5-nano`, `codex-mini-latest`, `gpt-5-codex`, `gpt-5.1-codex`, `gpt-5.1-chat-latest`). Use canonical model IDs: `gpt-5.1`, `gpt-5.1-codex-max`, `gpt-5.1-codex-mini`, `gpt-5.2`, `gpt-5.2-codex`. ([#536](https://github.com/badlogic/pi-mono/pull/536) by [@ghoulr](https://github.com/ghoulr))
### Fixed
- Fixed OpenAI Codex context window from 400,000 to 272,000 tokens to match Codex CLI defaults and prevent 400 errors. ([#536](https://github.com/badlogic/pi-mono/pull/536) by [@ghoulr](https://github.com/ghoulr))
## [0.37.8] - 2026-01-07
## [0.37.7] - 2026-01-07

View file

@ -443,37 +443,15 @@ async function generateModels() {
}
// OpenAI Codex (ChatGPT OAuth) models
// NOTE: These are not fetched from models.dev; we keep a small, explicit list to avoid aliases.
// Context window is based on observed server limits (400s above ~272k), not marketing numbers.
const CODEX_BASE_URL = "https://chatgpt.com/backend-api";
const CODEX_CONTEXT = 400000;
const CODEX_CONTEXT = 272000;
const CODEX_MAX_TOKENS = 128000;
const codexModels: Model<"openai-codex-responses">[] = [
{
id: "gpt-5.2-codex",
name: "GPT-5.2 Codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5.2",
name: "GPT-5.2",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5.1-codex-max",
name: "GPT-5.1 Codex Max",
id: "gpt-5.1",
name: "GPT-5.1",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
@ -484,8 +462,8 @@ async function generateModels() {
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5.1-codex",
name: "GPT-5.1 Codex",
id: "gpt-5.1-codex-max",
name: "GPT-5.1 Codex Max",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
@ -508,98 +486,26 @@ async function generateModels() {
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "codex-mini-latest",
name: "Codex Mini Latest",
id: "gpt-5.2",
name: "GPT-5.2",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.5, output: 6, cacheRead: 0.375, cacheWrite: 0 },
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5-codex-mini",
name: "gpt-5-codex-mini",
id: "gpt-5.2-codex",
name: "GPT-5.2 Codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 0.25, output: 2, cacheRead: 0.025, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5-codex",
name: "gpt-5-codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.25, output: 10, cacheRead: 0.125, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5.1",
name: "GPT-5.1",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.25, output: 10, cacheRead: 0.125, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5.1-chat-latest",
name: "gpt-5.1-chat-latest",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.25, output: 10, cacheRead: 0.125, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5",
name: "gpt-5",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 1.25, output: 10, cacheRead: 0.125, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5-mini",
name: "gpt-5-mini",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 0.25, output: 2, cacheRead: 0.025, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},
{
id: "gpt-5-nano",
name: "gpt-5-nano",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: CODEX_BASE_URL,
reasoning: true,
input: ["text", "image"],
cost: { input: 0.05, output: 0.4, cacheRead: 0.005, cacheWrite: 0 },
cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 },
contextWindow: CODEX_CONTEXT,
maxTokens: CODEX_MAX_TOKENS,
},

View file

@ -415,6 +415,23 @@ export const MODELS = {
contextWindow: 131072,
maxTokens: 40960,
} satisfies Model<"openai-completions">,
"zai-glm-4.7": {
id: "zai-glm-4.7",
name: "Z.AI GLM-4.7",
api: "openai-completions",
provider: "cerebras",
baseUrl: "https://api.cerebras.ai/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 40000,
} satisfies Model<"openai-completions">,
},
"github-copilot": {
"claude-haiku-4.5": {
@ -2774,108 +2791,6 @@ export const MODELS = {
} satisfies Model<"openai-responses">,
},
"openai-codex": {
"codex-mini-latest": {
id: "codex-mini-latest",
name: "Codex Mini Latest",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 1.5,
output: 6,
cacheRead: 0.375,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5": {
id: "gpt-5",
name: "gpt-5",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 1.25,
output: 10,
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5-codex": {
id: "gpt-5-codex",
name: "gpt-5-codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 1.25,
output: 10,
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5-codex-mini": {
id: "gpt-5-codex-mini",
name: "gpt-5-codex-mini",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0.25,
output: 2,
cacheRead: 0.025,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5-mini": {
id: "gpt-5-mini",
name: "gpt-5-mini",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0.25,
output: 2,
cacheRead: 0.025,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5-nano": {
id: "gpt-5-nano",
name: "gpt-5-nano",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0.05,
output: 0.4,
cacheRead: 0.005,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.1": {
id: "gpt-5.1",
name: "GPT-5.1",
@ -2890,41 +2805,7 @@ export const MODELS = {
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.1-chat-latest": {
id: "gpt-5.1-chat-latest",
name: "gpt-5.1-chat-latest",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 1.25,
output: 10,
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.1-codex": {
id: "gpt-5.1-codex",
name: "GPT-5.1 Codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: {
input: 1.25,
output: 10,
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.1-codex-max": {
@ -2941,7 +2822,7 @@ export const MODELS = {
cacheRead: 0.125,
cacheWrite: 0,
},
contextWindow: 400000,
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.1-codex-mini": {
@ -2958,7 +2839,7 @@ export const MODELS = {
cacheRead: 0.025,
cacheWrite: 0,
},
contextWindow: 400000,
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.2": {
@ -2975,7 +2856,7 @@ export const MODELS = {
cacheRead: 0.175,
cacheWrite: 0,
},
contextWindow: 400000,
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
"gpt-5.2-codex": {
@ -2992,7 +2873,7 @@ export const MODELS = {
cacheRead: 0.175,
cacheWrite: 0,
},
contextWindow: 400000,
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-codex-responses">,
},

View file

@ -37,7 +37,6 @@ import { buildCodexPiBridge } from "./openai-codex/prompts/pi-codex-bridge.js";
import { buildCodexSystemPrompt } from "./openai-codex/prompts/system-prompt.js";
import {
type CodexRequestOptions,
normalizeModel,
type RequestBody,
transformRequestBody,
} from "./openai-codex/request-transformer.js";
@ -111,8 +110,7 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"
params.tools = convertTools(context.tools);
}
const normalizedModel = normalizeModel(params.model);
const codexInstructions = await getCodexInstructions(normalizedModel);
const codexInstructions = await getCodexInstructions(params.model);
const bridgeText = buildCodexPiBridge(context.tools);
const systemPrompt = buildCodexSystemPrompt({
codexInstructions,
@ -120,7 +118,6 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"
userSystemPrompt: context.systemPrompt,
});
params.model = normalizedModel;
params.instructions = systemPrompt.instructions;
const codexOptions: CodexRequestOptions = {

View file

@ -44,17 +44,17 @@ export type CacheMetadata = {
url: string;
};
export function getModelFamily(normalizedModel: string): ModelFamily {
if (normalizedModel.includes("gpt-5.2-codex") || normalizedModel.includes("gpt 5.2 codex")) {
export function getModelFamily(model: string): ModelFamily {
if (model.includes("gpt-5.2-codex") || model.includes("gpt 5.2 codex")) {
return "gpt-5.2-codex";
}
if (normalizedModel.includes("codex-max")) {
if (model.includes("codex-max")) {
return "codex-max";
}
if (normalizedModel.includes("codex") || normalizedModel.startsWith("codex-")) {
if (model.includes("codex") || model.startsWith("codex-")) {
return "codex";
}
if (normalizedModel.includes("gpt-5.2")) {
if (model.includes("gpt-5.2")) {
return "gpt-5.2";
}
return "gpt-5.1";
@ -96,8 +96,8 @@ async function getLatestReleaseTag(): Promise<string> {
throw new Error("Failed to determine latest release tag from GitHub");
}
export async function getCodexInstructions(normalizedModel = "gpt-5.1-codex"): Promise<string> {
const modelFamily = getModelFamily(normalizedModel);
export async function getCodexInstructions(model = "gpt-5.1-codex"): Promise<string> {
const modelFamily = getModelFamily(model);
const promptFile = PROMPT_FILES[modelFamily];
const cacheDir = getCacheDir();
const cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);

View file

@ -41,155 +41,26 @@ export interface RequestBody {
[key: string]: unknown;
}
const MODEL_MAP: Record<string, string> = {
"gpt-5.1-codex": "gpt-5.1-codex",
"gpt-5.1-codex-low": "gpt-5.1-codex",
"gpt-5.1-codex-medium": "gpt-5.1-codex",
"gpt-5.1-codex-high": "gpt-5.1-codex",
"gpt-5.1-codex-max": "gpt-5.1-codex-max",
"gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
"gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
"gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
"gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
"gpt-5.2": "gpt-5.2",
"gpt-5.2-none": "gpt-5.2",
"gpt-5.2-low": "gpt-5.2",
"gpt-5.2-medium": "gpt-5.2",
"gpt-5.2-high": "gpt-5.2",
"gpt-5.2-xhigh": "gpt-5.2",
"gpt-5.2-codex": "gpt-5.2-codex",
"gpt-5.2-codex-low": "gpt-5.2-codex",
"gpt-5.2-codex-medium": "gpt-5.2-codex",
"gpt-5.2-codex-high": "gpt-5.2-codex",
"gpt-5.2-codex-xhigh": "gpt-5.2-codex",
"gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
"gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
"gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
"gpt-5.1": "gpt-5.1",
"gpt-5.1-none": "gpt-5.1",
"gpt-5.1-low": "gpt-5.1",
"gpt-5.1-medium": "gpt-5.1",
"gpt-5.1-high": "gpt-5.1",
"gpt-5.1-chat-latest": "gpt-5.1",
"gpt-5-codex": "gpt-5.1-codex",
"codex-mini-latest": "gpt-5.1-codex-mini",
"gpt-5-codex-mini": "gpt-5.1-codex-mini",
"gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
"gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
"gpt-5": "gpt-5.1",
"gpt-5-mini": "gpt-5.1",
"gpt-5-nano": "gpt-5.1",
};
function getNormalizedModel(modelId: string): string | undefined {
if (MODEL_MAP[modelId]) return MODEL_MAP[modelId];
const lowerModelId = modelId.toLowerCase();
const match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);
return match ? MODEL_MAP[match] : undefined;
}
export function normalizeModel(model: string | undefined): string {
if (!model) return "gpt-5.1";
function clampReasoningEffort(model: string, effort: ReasoningConfig["effort"]): ReasoningConfig["effort"] {
// Codex backend expects exact model IDs. Do not normalize model names here.
const modelId = model.includes("/") ? model.split("/").pop()! : model;
const mappedModel = getNormalizedModel(modelId);
if (mappedModel) return mappedModel;
const normalized = modelId.toLowerCase();
if (normalized.includes("gpt-5.2-codex") || normalized.includes("gpt 5.2 codex")) {
return "gpt-5.2-codex";
}
if (normalized.includes("gpt-5.2") || normalized.includes("gpt 5.2")) {
return "gpt-5.2";
}
if (normalized.includes("gpt-5.1-codex-max") || normalized.includes("gpt 5.1 codex max")) {
return "gpt-5.1-codex-max";
}
if (normalized.includes("gpt-5.1-codex-mini") || normalized.includes("gpt 5.1 codex mini")) {
return "gpt-5.1-codex-mini";
}
if (
normalized.includes("codex-mini-latest") ||
normalized.includes("gpt-5-codex-mini") ||
normalized.includes("gpt 5 codex mini")
) {
return "codex-mini-latest";
}
if (normalized.includes("gpt-5.1-codex") || normalized.includes("gpt 5.1 codex")) {
return "gpt-5.1-codex";
}
if (normalized.includes("gpt-5.1") || normalized.includes("gpt 5.1")) {
return "gpt-5.1";
}
if (normalized.includes("codex")) {
return "gpt-5.1-codex";
}
if (normalized.includes("gpt-5") || normalized.includes("gpt 5")) {
return "gpt-5.1";
// gpt-5.1 does not support xhigh.
if (modelId === "gpt-5.1" && effort === "xhigh") {
return "high";
}
return "gpt-5.1";
// gpt-5.1-codex-mini only supports medium/high.
if (modelId === "gpt-5.1-codex-mini") {
return effort === "high" || effort === "xhigh" ? "high" : "medium";
}
return effort;
}
function getReasoningConfig(modelName: string | undefined, options: CodexRequestOptions = {}): ReasoningConfig {
const normalizedName = modelName?.toLowerCase() ?? "";
const isGpt52Codex = normalizedName.includes("gpt-5.2-codex") || normalizedName.includes("gpt 5.2 codex");
const isGpt52General = (normalizedName.includes("gpt-5.2") || normalizedName.includes("gpt 5.2")) && !isGpt52Codex;
const isCodexMax = normalizedName.includes("codex-max") || normalizedName.includes("codex max");
const isCodexMini =
normalizedName.includes("codex-mini") ||
normalizedName.includes("codex mini") ||
normalizedName.includes("codex_mini") ||
normalizedName.includes("codex-mini-latest");
const isCodex = normalizedName.includes("codex") && !isCodexMini;
const isLightweight = !isCodexMini && (normalizedName.includes("nano") || normalizedName.includes("mini"));
const isGpt51General =
(normalizedName.includes("gpt-5.1") || normalizedName.includes("gpt 5.1")) &&
!isCodex &&
!isCodexMax &&
!isCodexMini;
const supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;
const supportsNone = isGpt52General || isGpt51General;
const defaultEffort: ReasoningConfig["effort"] = isCodexMini
? "medium"
: supportsXhigh
? "high"
: isLightweight
? "minimal"
: "medium";
let effort = options.reasoningEffort || defaultEffort;
if (isCodexMini) {
if (effort === "minimal" || effort === "low" || effort === "none") {
effort = "medium";
}
if (effort === "xhigh") {
effort = "high";
}
if (effort !== "high" && effort !== "medium") {
effort = "medium";
}
}
if (!supportsXhigh && effort === "xhigh") {
effort = "high";
}
if (!supportsNone && effort === "none") {
effort = "low";
}
if (isCodex && effort === "minimal") {
effort = "low";
}
function getReasoningConfig(model: string, options: CodexRequestOptions): ReasoningConfig {
return {
effort,
effort: clampReasoningEffort(model, options.reasoningEffort as ReasoningConfig["effort"]),
summary: options.reasoningSummary ?? "auto",
};
}
@ -213,9 +84,6 @@ export async function transformRequestBody(
options: CodexRequestOptions = {},
prompt?: { instructions: string; developerMessages: string[] },
): Promise<RequestBody> {
const normalizedModel = normalizeModel(body.model);
body.model = normalizedModel;
body.store = false;
body.stream = true;
@ -270,7 +138,7 @@ export async function transformRequestBody(
}
if (options.reasoningEffort !== undefined) {
const reasoningConfig = getReasoningConfig(normalizedModel, options);
const reasoningConfig = getReasoningConfig(body.model, options);
body.reasoning = {
...body.reasoning,
...reasoningConfig,

View file

@ -3,11 +3,7 @@ import { tmpdir } from "node:os";
import { join } from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { getCodexInstructions } from "../src/providers/openai-codex/prompts/codex.js";
import {
normalizeModel,
type RequestBody,
transformRequestBody,
} from "../src/providers/openai-codex/request-transformer.js";
import { type RequestBody, transformRequestBody } from "../src/providers/openai-codex/request-transformer.js";
import { parseCodexError } from "../src/providers/openai-codex/response-handler.js";
const DEFAULT_PROMPT_PREFIX =
@ -59,9 +55,21 @@ describe("openai-codex request transformer", () => {
});
});
describe("openai-codex model normalization", () => {
it("maps space-separated codex-mini names to codex-mini-latest", () => {
expect(normalizeModel("gpt 5 codex mini")).toBe("codex-mini-latest");
describe("openai-codex reasoning effort clamping", () => {
it("clamps gpt-5.1 xhigh to high", async () => {
const body: RequestBody = { model: "gpt-5.1", input: [] };
const transformed = await transformRequestBody(body, { reasoningEffort: "xhigh" });
expect(transformed.reasoning?.effort).toBe("high");
});
it("clamps gpt-5.1-codex-mini to medium/high only", async () => {
const body: RequestBody = { model: "gpt-5.1-codex-mini", input: [] };
const low = await transformRequestBody({ ...body }, { reasoningEffort: "low" });
expect(low.reasoning?.effort).toBe("medium");
const xhigh = await transformRequestBody({ ...body }, { reasoningEffort: "xhigh" });
expect(xhigh.reasoning?.effort).toBe("high");
});
});

View file

@ -2,10 +2,20 @@
## [Unreleased]
### Breaking Changes
- `ctx.ui.custom()` factory signature changed from `(tui, theme, done)` to `(tui, theme, keybindings, done)` for consistency with other input-handling factories
### Added
- Extension UI dialogs (`ctx.ui.select()`, `ctx.ui.confirm()`, `ctx.ui.input()`) now support a `timeout` option that auto-dismisses the dialog with a live countdown display. Simpler alternative to `AbortSignal` for timed dialogs.
- `thinkingBudgets` setting to customize token budgets per thinking level for token-based providers ([#529](https://github.com/badlogic/pi-mono/pull/529) by [@melihmucuk](https://github.com/melihmucuk))
- Extensions can now provide custom editor components via `ctx.ui.setEditorComponent((tui, theme, keybindings) => ...)`. Extend `CustomEditor` for full app keybinding support (escape, ctrl+d, model switching, etc.). See `examples/extensions/modal-editor.ts`, `examples/extensions/rainbow-editor.ts`, and `docs/tui.md` Pattern 7.
### Fixed
- Default thinking level from settings now applies correctly when `enabledModels` is configured. Previously, models without explicit thinking level suffixes (e.g., `claude-opus-4-5` instead of `claude-opus-4-5:high`) would override `defaultThinkingLevel` with "off"
- External edits to `settings.json` while pi is running are now preserved when pi saves settings (e.g., when changing thinking level via Shift+Tab)
## [0.37.8] - 2026-01-07
@ -114,6 +124,7 @@
This release unifies hooks and custom tools into a single "extensions" system and renames "slash commands" to "prompt templates". ([#454](https://github.com/badlogic/pi-mono/issues/454))
**Before migrating, read:**
- [docs/extensions.md](docs/extensions.md) - Full API reference
- [README.md](README.md) - Extensions section with examples
- [examples/extensions/](examples/extensions/) - Working examples
@ -123,14 +134,17 @@ This release unifies hooks and custom tools into a single "extensions" system an
Hooks and custom tools are now unified as **extensions**. Both were TypeScript modules exporting a factory function that receives an API object. Now there's one concept, one discovery location, one CLI flag, one settings.json entry.
**Automatic migration:**
- `commands/` directories are automatically renamed to `prompts/` on startup (both `~/.pi/agent/commands/` and `.pi/commands/`)
**Manual migration required:**
1. Move files from `hooks/` and `tools/` directories to `extensions/` (deprecation warnings shown on startup)
2. Update imports and type names in your extension code
3. Update `settings.json` if you have explicit hook and custom tool paths configured
**Directory changes:**
```
# Before
~/.pi/agent/hooks/*.ts → ~/.pi/agent/extensions/*.ts
@ -140,6 +154,7 @@ Hooks and custom tools are now unified as **extensions**. Both were TypeScript m
```
**Extension discovery rules** (in `extensions/` directories):
1. **Direct files:** `extensions/*.ts` or `*.js` → loaded directly
2. **Subdirectory with index:** `extensions/myext/index.ts` → loaded as single extension
3. **Subdirectory with package.json:** `extensions/myext/package.json` with `"pi"` field → loads declared paths
@ -158,6 +173,7 @@ Hooks and custom tools are now unified as **extensions**. Both were TypeScript m
No recursion beyond one level. Complex packages must use the `package.json` manifest. Dependencies are resolved via jiti, and extensions can be published to and installed from npm.
**Type renames:**
- `HookAPI``ExtensionAPI`
- `HookContext``ExtensionContext`
- `HookCommandContext``ExtensionCommandContext`
@ -170,6 +186,7 @@ No recursion beyond one level. Complex packages must use the `package.json` mani
- `HookMessage``CustomMessage`
**Import changes:**
```typescript
// Before (hook)
import type { HookAPI, HookContext } from "@mariozechner/pi-coding-agent";
@ -212,6 +229,7 @@ export default function (pi: ExtensionAPI) {
- `ctx.sessionManager` - Read session entries, get branch history
**Settings changes:**
```json
// Before
{
@ -226,6 +244,7 @@ export default function (pi: ExtensionAPI) {
```
**CLI changes:**
```bash
# Before
pi --hook ./safety.ts --tool ./todo.ts
@ -241,22 +260,26 @@ pi --extension ./safety.ts -e ./todo.ts
**Automatic migration:** The `commands/` directory is automatically renamed to `prompts/` on startup (if `prompts/` doesn't exist). Works for both regular directories and symlinks.
**Directory changes:**
```
~/.pi/agent/commands/*.md → ~/.pi/agent/prompts/*.md
.pi/commands/*.md → .pi/prompts/*.md
```
**SDK type renames:**
- `FileSlashCommand``PromptTemplate`
- `LoadSlashCommandsOptions``LoadPromptTemplatesOptions`
**SDK function renames:**
- `discoverSlashCommands()``discoverPromptTemplates()`
- `loadSlashCommands()``loadPromptTemplates()`
- `expandSlashCommand()``expandPromptTemplate()`
- `getCommandsDir()``getPromptsDir()`
**SDK option renames:**
- `CreateAgentSessionOptions.slashCommands``.promptTemplates`
- `AgentSession.fileCommands``.promptTemplates`
- `PromptOptions.expandSlashCommands``.expandPromptTemplates`
@ -264,17 +287,20 @@ pi --extension ./safety.ts -e ./todo.ts
### SDK Migration
**Discovery functions:**
- `discoverAndLoadHooks()``discoverAndLoadExtensions()`
- `discoverAndLoadCustomTools()` → merged into `discoverAndLoadExtensions()`
- `loadHooks()``loadExtensions()`
- `loadCustomTools()` → merged into `loadExtensions()`
**Runner and wrapper:**
- `HookRunner``ExtensionRunner`
- `wrapToolsWithHooks()``wrapToolsWithExtensions()`
- `wrapToolWithHooks()``wrapToolWithExtensions()`
**CreateAgentSessionOptions:**
- `.hooks` → removed (use `.additionalExtensionPaths` for paths)
- `.additionalHookPaths``.additionalExtensionPaths`
- `.preloadedHooks``.preloadedExtensions`
@ -283,6 +309,7 @@ pi --extension ./safety.ts -e ./todo.ts
- `.slashCommands``.promptTemplates`
**AgentSession:**
- `.hookRunner``.extensionRunner`
- `.fileCommands``.promptTemplates`
- `.sendHookMessage()``.sendCustomMessage()`
@ -290,6 +317,7 @@ pi --extension ./safety.ts -e ./todo.ts
### Session Migration
**Automatic.** Session version bumped from 2 to 3. Existing sessions are migrated on first load:
- Message role `"hookMessage"``"custom"`
### Breaking Changes
@ -403,7 +431,7 @@ pi --extension ./safety.ts -e ./todo.ts
- `steer()` and `followUp()` now expand file-based slash commands and error on hook commands (hook commands cannot be queued)
- `prompt()` accepts new `streamingBehavior` option (`"steer"` or `"followUp"`) to specify queueing behavior during streaming
- RPC `prompt` command now accepts optional `streamingBehavior` field
([#420](https://github.com/badlogic/pi-mono/issues/420))
([#420](https://github.com/badlogic/pi-mono/issues/420))
### Fixed
@ -489,10 +517,12 @@ See [docs/session.md](docs/session.md) for the file format and `SessionManager`
The hooks API has been restructured with more granular events and better session access.
**Type renames:**
- `HookEventContext``HookContext`
- `HookCommandContext` is now a new interface extending `HookContext` with session control methods
**Event changes:**
- The monolithic `session` event is now split into granular events: `session_start`, `session_before_switch`, `session_switch`, `session_before_branch`, `session_branch`, `session_before_compact`, `session_compact`, `session_shutdown`
- `session_before_switch` and `session_switch` events now include `reason: "new" | "resume"` to distinguish between `/new` and `/resume`
- New `session_before_tree` and `session_tree` events for `/tree` navigation (hook can provide custom branch summary)
@ -501,6 +531,7 @@ The hooks API has been restructured with more granular events and better session
- Session entries are no longer passed in events. Use `ctx.sessionManager.getEntries()` or `ctx.sessionManager.getBranch()` instead
**API changes:**
- `pi.send(text, attachments?)``pi.sendMessage(message, triggerTurn?)` (creates `CustomMessageEntry`)
- New `pi.appendEntry(customType, data?)` for hook state persistence (not in LLM context)
- New `pi.registerCommand(name, options)` for custom slash commands (handler receives `HookCommandContext`)
@ -515,6 +546,7 @@ The hooks API has been restructured with more granular events and better session
- New `ctx.modelRegistry` and `ctx.model` for API key resolution
**HookCommandContext (slash commands only):**
- `ctx.waitForIdle()` - wait for agent to finish streaming
- `ctx.newSession(options?)` - create new sessions with optional setup callback
- `ctx.branch(entryId)` - branch from a specific entry
@ -523,6 +555,7 @@ The hooks API has been restructured with more granular events and better session
These methods are only on `HookCommandContext` (not `HookContext`) because they can deadlock if called from event handlers that run inside the agent loop.
**Removed:**
- `hookTimeout` setting (hooks no longer have timeouts; use Ctrl+C to abort)
- `resolveApiKey` parameter (use `ctx.modelRegistry.getApiKey(model)`)
@ -533,12 +566,14 @@ See [docs/hooks.md](docs/hooks.md) and [examples/hooks/](examples/hooks/) for th
The custom tools API has been restructured to mirror the hooks pattern with a context object.
**Type renames:**
- `CustomAgentTool``CustomTool`
- `ToolAPI``CustomToolAPI`
- `ToolContext``CustomToolContext`
- `ToolSessionEvent``CustomToolSessionEvent`
**Execute signature changed:**
```typescript
// Before (v0.30.2)
execute(toolCallId, params, signal, onUpdate)
@ -548,11 +583,13 @@ execute(toolCallId, params, onUpdate, ctx, signal?)
```
The new `ctx: CustomToolContext` provides `sessionManager`, `modelRegistry`, `model`, and agent state methods:
- `ctx.isIdle()` - check if agent is streaming
- `ctx.hasQueuedMessages()` - check if user has queued messages (skip interactive prompts)
- `ctx.abort()` - abort current operation (fire-and-forget)
**Session event changes:**
- `CustomToolSessionEvent` now only has `reason` and `previousSessionFile`
- Session entries are no longer in the event. Use `ctx.sessionManager.getBranch()` or `ctx.sessionManager.getEntries()` to reconstruct state
- Reasons: `"start" | "switch" | "branch" | "tree" | "shutdown"` (no separate `"new"` reason; `/new` triggers `"switch"`)
@ -563,6 +600,7 @@ See [docs/custom-tools.md](docs/custom-tools.md) and [examples/custom-tools/](ex
### SDK Migration
**Type changes:**
- `CustomAgentTool``CustomTool`
- `AppMessage``AgentMessage`
- `sessionFile` returns `string | undefined` (was `string | null`)
@ -570,6 +608,7 @@ See [docs/custom-tools.md](docs/custom-tools.md) and [examples/custom-tools/](ex
- `Attachment` type removed. Use `ImageContent` from `@mariozechner/pi-ai` instead. Add images directly to message content arrays.
**AgentSession API:**
- `branch(entryIndex: number)``branch(entryId: string)`
- `getUserMessagesForBranching()` returns `{ entryId, text }` instead of `{ entryIndex, text }`
- `reset()``newSession(options?)` where options has optional `parentSession` for lineage tracking
@ -577,9 +616,11 @@ See [docs/custom-tools.md](docs/custom-tools.md) and [examples/custom-tools/](ex
- New `navigateTree(targetId, options?)` for in-place tree navigation
**Hook integration:**
- New `sendHookMessage(message, triggerTurn?)` for hook message injection
**SessionManager API:**
- Method renames: `saveXXX()``appendXXX()` (e.g., `appendMessage`, `appendCompaction`)
- `branchInPlace()``branch()`
- `reset()``newSession(options?)` with optional `parentSession` for lineage tracking
@ -597,10 +638,13 @@ See [docs/custom-tools.md](docs/custom-tools.md) and [examples/custom-tools/](ex
`ModelRegistry` is a new class that manages model discovery and API key resolution. It combines built-in models with custom models from `models.json` and resolves API keys via `AuthStorage`.
```typescript
import { discoverAuthStorage, discoverModels } from "@mariozechner/pi-coding-agent";
import {
discoverAuthStorage,
discoverModels,
} from "@mariozechner/pi-coding-agent";
const authStorage = discoverAuthStorage(); // ~/.pi/agent/auth.json
const modelRegistry = discoverModels(authStorage); // + ~/.pi/agent/models.json
const authStorage = discoverAuthStorage(); // ~/.pi/agent/auth.json
const modelRegistry = discoverModels(authStorage); // + ~/.pi/agent/models.json
// Get all models (built-in + custom)
const allModels = modelRegistry.getAll();
@ -618,6 +662,7 @@ const apiKey = await modelRegistry.getApiKey(model);
This replaces the old `resolveApiKey` callback pattern. Hooks and custom tools access it via `ctx.modelRegistry`.
**Renamed exports:**
- `messageTransformer``convertToLlm`
- `SessionContext` alias `LoadedSession` removed
@ -626,17 +671,21 @@ See [docs/sdk.md](docs/sdk.md) and [examples/sdk/](examples/sdk/) for the curren
### RPC Migration
**Session commands:**
- `reset` command → `new_session` command with optional `parentSession` field
**Branching commands:**
- `branch` command: `entryIndex``entryId`
- `get_branch_messages` response: `entryIndex``entryId`
**Type changes:**
- Messages are now `AgentMessage` (was `AppMessage`)
- `prompt` command: `attachments` field replaced with `images` field using `ImageContent` format
**Compaction events:**
- `auto_compaction_start` now includes `reason` field (`"threshold"` or `"overflow"`)
- `auto_compaction_end` now includes `willRetry` field
- `compact` response includes full `CompactionResult` (`summary`, `firstKeptEntryId`, `tokensBefore`, `details`)
@ -646,6 +695,7 @@ See [docs/rpc.md](docs/rpc.md) for the current protocol.
### Structured Compaction
Compaction and branch summarization now use a structured output format:
- Clear sections: Goal, Progress, Key Information, File Operations
- File tracking: `readFiles` and `modifiedFiles` arrays in `details`, accumulated across compactions
- Conversations are serialized to text before summarization to prevent the model from "continuing" them
@ -655,6 +705,7 @@ The `before_compact` and `before_tree` hook events allow custom compaction imple
### Interactive Mode
**`/tree` command:**
- Navigate the full session tree in-place
- Search by typing, page with ←/→
- Filter modes (Ctrl+O): default → no-tools → user-only → labeled-only → all
@ -662,12 +713,14 @@ The `before_compact` and `before_tree` hook events allow custom compaction imple
- Selecting a branch switches context and optionally injects a summary of the abandoned branch
**Entry labels:**
- Bookmark any entry via `/tree` → select → `l`
- Labels appear in tree view and persist as `LabelEntry`
**Theme changes (breaking for custom themes):**
Custom themes must add these new color tokens or they will fail to load:
- `selectedBg`: background for selected/highlighted items in tree selector and other components
- `customMessageBg`: background for hook-injected messages (`CustomMessageEntry`)
- `customMessageText`: text color for hook messages
@ -676,6 +729,7 @@ Custom themes must add these new color tokens or they will fail to load:
Total color count increased from 46 to 50. See [docs/theme.md](docs/theme.md) for the full color list and copy values from the built-in dark/light themes.
**Settings:**
- `enabledModels`: allowlist models in `settings.json` (same format as `--models` CLI)
### Added
@ -773,6 +827,7 @@ Total color count increased from 46 to 50. See [docs/theme.md](docs/theme.md) fo
- **Credential storage refactored**: API keys and OAuth tokens are now stored in `~/.pi/agent/auth.json` instead of `oauth.json` and `settings.json`. Existing credentials are automatically migrated on first run. ([#296](https://github.com/badlogic/pi-mono/issues/296))
- **SDK API changes** ([#296](https://github.com/badlogic/pi-mono/issues/296)):
- Added `AuthStorage` class for credential management (API keys and OAuth tokens)
- Added `ModelRegistry` class for model discovery and API key resolution
- Added `discoverAuthStorage()` and `discoverModels()` discovery functions
@ -811,6 +866,7 @@ Total color count increased from 46 to 50. See [docs/theme.md](docs/theme.md) fo
### Added
- **Compaction hook improvements**: The `before_compact` session event now includes:
- `previousSummary`: Summary from the last compaction (if any), so hooks can preserve accumulated context
- `messagesToKeep`: Messages that will be kept after the summary (recent turns), in addition to `messagesToSummarize`
- `resolveApiKey`: Function to resolve API keys for any model (checks settings, OAuth, env vars)
@ -1073,6 +1129,7 @@ Total color count increased from 46 to 50. See [docs/theme.md](docs/theme.md) fo
- Improved system prompt documentation section with clearer pointers to specific doc files for custom models, themes, skills, hooks, custom tools, and RPC.
- Cleaned up documentation:
- `theme.md`: Added missing color tokens (`thinkingXhigh`, `bashMode`)
- `skills.md`: Rewrote with better framing and examples
- `hooks.md`: Fixed timeout/error handling docs, added import aliases section

View file

@ -1170,6 +1170,10 @@ ctx.ui.setTitle("pi - my-project");
// Editor text
ctx.ui.setEditorText("Prefill text");
const current = ctx.ui.getEditorText();
// Custom editor (vim mode, emacs mode, etc.)
ctx.ui.setEditorComponent((tui, theme, keybindings) => new VimEditor(tui, theme, keybindings));
ctx.ui.setEditorComponent(undefined); // Restore default editor
```
**Examples:**
@ -1177,6 +1181,7 @@ const current = ctx.ui.getEditorText();
- `ctx.ui.setWidget()`: [plan-mode.ts](../examples/extensions/plan-mode.ts)
- `ctx.ui.setFooter()`: [custom-footer.ts](../examples/extensions/custom-footer.ts)
- `ctx.ui.setHeader()`: [custom-header.ts](../examples/extensions/custom-header.ts)
- `ctx.ui.setEditorComponent()`: [modal-editor.ts](../examples/extensions/modal-editor.ts)
### Custom Components
@ -1185,7 +1190,7 @@ For complex UI, use `ctx.ui.custom()`. This temporarily replaces the editor with
```typescript
import { Text, Component } from "@mariozechner/pi-tui";
const result = await ctx.ui.custom<boolean>((tui, theme, done) => {
const result = await ctx.ui.custom<boolean>((tui, theme, keybindings, done) => {
const text = new Text("Press Enter to confirm, Escape to cancel", 1, 1);
text.onKey = (key) => {
@ -1205,12 +1210,56 @@ if (result) {
The callback receives:
- `tui` - TUI instance (for screen dimensions, focus management)
- `theme` - Current theme for styling
- `keybindings` - App keybinding manager (for checking shortcuts)
- `done(value)` - Call to close component and return value
See [tui.md](tui.md) for the full component API.
**Examples:** [handoff.ts](../examples/extensions/handoff.ts), [plan-mode.ts](../examples/extensions/plan-mode.ts), [preset.ts](../examples/extensions/preset.ts), [qna.ts](../examples/extensions/qna.ts), [snake.ts](../examples/extensions/snake.ts), [todo.ts](../examples/extensions/todo.ts), [tools.ts](../examples/extensions/tools.ts)
### Custom Editor
Replace the main input editor with a custom implementation (vim mode, emacs mode, etc.):
```typescript
import { CustomEditor, type ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { matchesKey } from "@mariozechner/pi-tui";
class VimEditor extends CustomEditor {
private mode: "normal" | "insert" = "insert";
handleInput(data: string): void {
if (matchesKey(data, "escape") && this.mode === "insert") {
this.mode = "normal";
return;
}
if (this.mode === "normal" && data === "i") {
this.mode = "insert";
return;
}
super.handleInput(data); // App keybindings + text editing
}
}
export default function (pi: ExtensionAPI) {
pi.on("session_start", (_event, ctx) => {
ctx.ui.setEditorComponent((_tui, theme, keybindings) =>
new VimEditor(theme, keybindings)
);
});
}
```
**Key points:**
- Extend `CustomEditor` (not base `Editor`) to get app keybindings (escape to abort, ctrl+d, model switching)
- Call `super.handleInput(data)` for keys you don't handle
- Factory receives `theme` and `keybindings` from the app
- Pass `undefined` to restore default: `ctx.ui.setEditorComponent(undefined)`
See [tui.md](tui.md) Pattern 7 for a complete example with mode indicator.
**Examples:** [modal-editor.ts](../examples/extensions/modal-editor.ts)
### Message Rendering
Register a custom renderer for messages with your `customType`:

View file

@ -361,7 +361,7 @@ pi.registerCommand("pick", {
{ value: "opt3", label: "Option 3" }, // description is optional
];
const result = await ctx.ui.custom<string | null>((tui, theme, done) => {
const result = await ctx.ui.custom<string | null>((tui, theme, _kb, done) => {
const container = new Container();
// Top border
@ -413,7 +413,7 @@ import { BorderedLoader } from "@mariozechner/pi-coding-agent";
pi.registerCommand("fetch", {
handler: async (_args, ctx) => {
const result = await ctx.ui.custom<string | null>((tui, theme, done) => {
const result = await ctx.ui.custom<string | null>((tui, theme, _kb, done) => {
const loader = new BorderedLoader(tui, theme, "Fetching data...");
loader.onAbort = () => done(null);
@ -451,7 +451,7 @@ pi.registerCommand("settings", {
{ id: "color", label: "Color output", currentValue: "on", values: ["on", "off"] },
];
await ctx.ui.custom((_tui, theme, done) => {
await ctx.ui.custom((_tui, theme, _kb, done) => {
const container = new Container();
container.addChild(new Text(theme.fg("accent", theme.bold("Settings")), 1, 1));
@ -541,9 +541,85 @@ ctx.ui.setFooter(undefined);
**Examples:** [custom-footer.ts](../examples/extensions/custom-footer.ts)
### Pattern 7: Custom Editor (vim mode, etc.)
Replace the main input editor with a custom implementation. Useful for modal editing (vim), different keybindings (emacs), or specialized input handling.
```typescript
import { CustomEditor, type ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { matchesKey, truncateToWidth } from "@mariozechner/pi-tui";
type Mode = "normal" | "insert";
class VimEditor extends CustomEditor {
private mode: Mode = "insert";
handleInput(data: string): void {
// Escape: switch to normal mode, or pass through for app handling
if (matchesKey(data, "escape")) {
if (this.mode === "insert") {
this.mode = "normal";
return;
}
// In normal mode, escape aborts agent (handled by CustomEditor)
super.handleInput(data);
return;
}
// Insert mode: pass everything to CustomEditor
if (this.mode === "insert") {
super.handleInput(data);
return;
}
// Normal mode: vim-style navigation
switch (data) {
case "i": this.mode = "insert"; return;
case "h": super.handleInput("\x1b[D"); return; // Left
case "j": super.handleInput("\x1b[B"); return; // Down
case "k": super.handleInput("\x1b[A"); return; // Up
case "l": super.handleInput("\x1b[C"); return; // Right
}
// Pass unhandled keys to super (ctrl+c, etc.), but filter printable chars
if (data.length === 1 && data.charCodeAt(0) >= 32) return;
super.handleInput(data);
}
render(width: number): string[] {
const lines = super.render(width);
// Add mode indicator to bottom border (use truncateToWidth for ANSI-safe truncation)
if (lines.length > 0) {
const label = this.mode === "normal" ? " NORMAL " : " INSERT ";
const lastLine = lines[lines.length - 1]!;
// Pass "" as ellipsis to avoid adding "..." when truncating
lines[lines.length - 1] = truncateToWidth(lastLine, width - label.length, "") + label;
}
return lines;
}
}
export default function (pi: ExtensionAPI) {
pi.on("session_start", (_event, ctx) => {
// Factory receives theme and keybindings from the app
ctx.ui.setEditorComponent((tui, theme, keybindings) =>
new VimEditor(theme, keybindings)
);
});
}
```
**Key points:**
- **Extend `CustomEditor`** (not base `Editor`) to get app keybindings (escape to abort, ctrl+d to exit, model switching, etc.)
- **Call `super.handleInput(data)`** for keys you don't handle
- **Factory pattern**: `setEditorComponent` receives a factory function that gets `tui`, `theme`, and `keybindings`
- **Pass `undefined`** to restore the default editor: `ctx.ui.setEditorComponent(undefined)`
**Examples:** [modal-editor.ts](../examples/extensions/modal-editor.ts)
## Key Rules
1. **Always use theme from callback** - Don't import theme directly. Use `theme` from the `ctx.ui.custom((tui, theme, done) => ...)` callback.
1. **Always use theme from callback** - Don't import theme directly. Use `theme` from the `ctx.ui.custom((tui, theme, keybindings, done) => ...)` callback.
2. **Always type DynamicBorder color param** - Write `(s: string) => theme.fg("accent", s)`, not `(s) => theme.fg("accent", s)`.
@ -560,5 +636,6 @@ ctx.ui.setFooter(undefined);
- **Settings toggles**: [examples/extensions/tools.ts](../examples/extensions/tools.ts) - SettingsList for tool enable/disable
- **Status indicators**: [examples/extensions/plan-mode.ts](../examples/extensions/plan-mode.ts) - setStatus and setWidget
- **Custom footer**: [examples/extensions/custom-footer.ts](../examples/extensions/custom-footer.ts) - setFooter with stats
- **Custom editor**: [examples/extensions/modal-editor.ts](../examples/extensions/modal-editor.ts) - Vim-like modal editing
- **Snake game**: [examples/extensions/snake.ts](../examples/extensions/snake.ts) - Full game with keyboard input, game loop
- **Custom tool rendering**: [examples/extensions/todo.ts](../examples/extensions/todo.ts) - renderCall and renderResult

View file

@ -45,6 +45,7 @@ cp permission-gate.ts ~/.pi/agent/extensions/
| `snake.ts` | Snake game with custom UI, keyboard handling, and session persistence |
| `send-user-message.ts` | Demonstrates `pi.sendUserMessage()` for sending user messages from extensions |
| `timed-confirm.ts` | Demonstrates AbortSignal for auto-dismissing `ctx.ui.confirm()` and `ctx.ui.select()` dialogs |
| `modal-editor.ts` | Custom vim-like modal editor via `ctx.ui.setEditorComponent()` |
### Git Integration

View file

@ -75,7 +75,7 @@ export default function (pi: ExtensionAPI) {
const currentSessionFile = ctx.sessionManager.getSessionFile();
// Generate the handoff prompt with loader UI
const result = await ctx.ui.custom<string | null>((tui, theme, done) => {
const result = await ctx.ui.custom<string | null>((tui, theme, _kb, done) => {
const loader = new BorderedLoader(tui, theme, `Generating handoff prompt...`);
loader.onAbort = () => done(null);

View file

@ -0,0 +1,85 @@
/**
* Modal Editor - vim-like modal editing example
*
* Usage: pi --extension ./examples/extensions/modal-editor.ts
*
 * - Escape: insert → normal mode (in normal mode, aborts agent)
 * - i: normal → insert mode
* - hjkl: navigation in normal mode
* - ctrl+c, ctrl+d, etc. work in both modes
*/
import { CustomEditor, type ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { matchesKey, truncateToWidth, visibleWidth } from "@mariozechner/pi-tui";
// Normal mode key mappings: key -> escape sequence (or null for mode switch).
// The sequences are the same bytes a terminal would send for the corresponding
// arrow/editing keys, so they can be fed straight to the base editor's handleInput().
const NORMAL_KEYS: Record<string, string | null> = {
	h: "\x1b[D", // left arrow
	j: "\x1b[B", // down arrow
	k: "\x1b[A", // up arrow
	l: "\x1b[C", // right arrow
	"0": "\x01", // line start (ctrl+a byte)
	$: "\x05", // line end (ctrl+e byte)
	x: "\x1b[3~", // delete char (Delete key sequence)
	i: null, // enter insert mode (handled specially in ModalEditor)
	a: null, // append: enter insert mode, then move right (handled specially)
};
/**
 * Vim-like modal editor: starts in insert mode; Escape switches to normal
 * mode, where hjkl/0/$/x map to cursor-movement/editing escape sequences.
 * Unmapped printable characters are swallowed in normal mode; control
 * sequences always fall through to CustomEditor (app keybindings).
 */
class ModalEditor extends CustomEditor {
	private mode: "normal" | "insert" = "insert";

	handleInput(data: string): void {
		// Escape: insert → normal; in normal mode, defer to the app (abort agent, etc.)
		if (matchesKey(data, "escape")) {
			if (this.mode === "normal") {
				super.handleInput(data);
			} else {
				this.mode = "normal";
			}
			return;
		}

		// Insert mode: everything goes straight to the underlying editor.
		if (this.mode === "insert") {
			super.handleInput(data);
			return;
		}

		// Normal mode: translate mapped vim keys into editor input.
		if (data in NORMAL_KEYS) {
			switch (data) {
				case "i":
					this.mode = "insert";
					break;
				case "a":
					// Append: enter insert mode, then step the cursor right.
					this.mode = "insert";
					super.handleInput("\x1b[C");
					break;
				default: {
					const sequence = NORMAL_KEYS[data];
					if (sequence) super.handleInput(sequence);
				}
			}
			return;
		}

		// Swallow other printable characters; forward control bytes (ctrl+c, etc.).
		const isPrintable = data.length === 1 && data.charCodeAt(0) >= 32;
		if (!isPrintable) super.handleInput(data);
	}

	render(width: number): string[] {
		const lines = super.render(width);
		if (lines.length === 0) return lines;
		// Splice the mode indicator into the bottom border. The "" ellipsis
		// argument keeps truncateToWidth from inserting "..." while trimming.
		const label = this.mode === "normal" ? " NORMAL " : " INSERT ";
		const lastIdx = lines.length - 1;
		if (visibleWidth(lines[lastIdx]!) >= label.length) {
			lines[lastIdx] = truncateToWidth(lines[lastIdx]!, width - label.length, "") + label;
		}
		return lines;
	}
}
export default function (pi: ExtensionAPI) {
	// Install the modal editor once the session starts; the factory receives
	// the app's editor theme and keybinding manager (TUI handle is unused).
	pi.on("session_start", (_event, ctx) => {
		ctx.ui.setEditorComponent((_tui, editorTheme, keybindings) => new ModalEditor(editorTheme, keybindings));
	});
}

View file

@ -206,7 +206,7 @@ export default function presetExtension(pi: ExtensionAPI) {
description: "Clear active preset, restore defaults",
});
const result = await ctx.ui.custom<string | null>((tui, theme, done) => {
const result = await ctx.ui.custom<string | null>((tui, theme, _kb, done) => {
const container = new Container();
container.addChild(new DynamicBorder((str) => theme.fg("accent", str)));

View file

@ -71,7 +71,7 @@ export default function (pi: ExtensionAPI) {
}
// Run extraction with loader UI
const result = await ctx.ui.custom<string | null>((tui, theme, done) => {
const result = await ctx.ui.custom<string | null>((tui, theme, _kb, done) => {
const loader = new BorderedLoader(tui, theme, `Extracting questions using ${ctx.model!.id}...`);
loader.onAbort = () => done(null);

View file

@ -0,0 +1,95 @@
/**
* Rainbow Editor - highlights "ultrathink" with animated shine effect
*
* Usage: pi --extension ./examples/extensions/rainbow-editor.ts
*/
import { CustomEditor, type ExtensionAPI, type KeybindingsManager } from "@mariozechner/pi-coding-agent";
import type { EditorTheme, TUI } from "@mariozechner/pi-tui";
// Base colors (coral → yellow → green → teal → blue → purple → pink).
// Each entry is an [r, g, b] triple; colorize() repeats the palette across characters.
const COLORS: [number, number, number][] = [
	[233, 137, 115], // coral
	[228, 186, 103], // yellow
	[141, 192, 122], // green
	[102, 194, 179], // teal
	[121, 157, 207], // blue
	[157, 134, 195], // purple
	[206, 130, 172], // pink
];

// ANSI escape that resets all text attributes back to the terminal default.
const RESET = "\x1b[0m";
/**
 * Build a 24-bit ANSI foreground color escape, lightening the given color
 * toward white. `factor` of 0 leaves the color unchanged; 1 yields pure white.
 */
function brighten(rgb: [number, number, number], factor: number): string {
	// Linearly interpolate each channel toward 255 by `factor`.
	const lift = (channel: number): number => Math.round(channel + (255 - channel) * factor);
	const [r, g, b] = rgb;
	return `\x1b[38;2;${lift(r)};${lift(g)};${lift(b)}m`;
}
/**
 * Paint each character of `text` with the repeating rainbow palette and
 * append a reset. When `shinePos >= 0`, a 3-character "shine" is applied:
 * full brightness at `shinePos`, half brightness on its neighbors.
 */
function colorize(text: string, shinePos: number): string {
	const painted = [...text].map((ch, idx) => {
		const base = COLORS[idx % COLORS.length]!;
		let boost = 0;
		if (shinePos >= 0) {
			const distance = Math.abs(idx - shinePos);
			if (distance === 0) boost = 0.7;
			else if (distance === 1) boost = 0.35;
		}
		return `${brighten(base, boost)}${ch}`;
	});
	return painted.join("") + RESET;
}
/**
 * Editor that animates a shine effect over any occurrence of "ultrathink".
 * A 60ms interval advances a frame counter and asks the TUI to re-render;
 * the timer runs only while the trigger word is present in the text.
 */
class RainbowEditor extends CustomEditor {
	private animationTimer?: ReturnType<typeof setInterval>;
	private tui: TUI;
	private frame = 0;

	constructor(tui: TUI, theme: EditorTheme, keybindings: KeybindingsManager) {
		super(theme, keybindings);
		this.tui = tui;
	}

	/** True while the editor text contains "ultrathink" (case-insensitive). */
	private hasUltrathink(): boolean {
		return /ultrathink/i.test(this.getText());
	}

	private startAnimation(): void {
		if (this.animationTimer !== undefined) return; // already running
		// NOTE(review): the timer is only cleared when the trigger word goes
		// away — confirm the editor lifecycle also stops it on teardown.
		this.animationTimer = setInterval(() => {
			this.frame++;
			this.tui.requestRender();
		}, 60);
	}

	private stopAnimation(): void {
		if (this.animationTimer === undefined) return;
		clearInterval(this.animationTimer);
		this.animationTimer = undefined;
	}

	handleInput(data: string): void {
		super.handleInput(data);
		// Keep the animation in sync with the presence of the magic word.
		if (this.hasUltrathink()) {
			this.startAnimation();
		} else {
			this.stopAnimation();
		}
	}

	render(width: number): string[] {
		// 20-frame cycle: shine sweeps positions 0-9, then pauses for 10 frames.
		const phase = this.frame % 20;
		const shinePos = phase < 10 ? phase : -1; // -1 = no shine (pause)
		const rendered = super.render(width);
		return rendered.map((line) => line.replace(/ultrathink/gi, (match) => colorize(match, shinePos)));
	}
}
export default function (pi: ExtensionAPI) {
	// Install the rainbow editor on session start; it needs the TUI handle
	// so its animation timer can request re-renders.
	pi.on("session_start", (_event, ctx) => {
		ctx.ui.setEditorComponent((tui, editorTheme, keybindings) => new RainbowEditor(tui, editorTheme, keybindings));
	});
}

View file

@ -327,7 +327,7 @@ export default function (pi: ExtensionAPI) {
}
}
await ctx.ui.custom((tui, _theme, done) => {
await ctx.ui.custom((tui, _theme, _kb, done) => {
return new SnakeComponent(
tui,
() => done(undefined),

View file

@ -291,7 +291,7 @@ export default function (pi: ExtensionAPI) {
return;
}
await ctx.ui.custom<void>((_tui, theme, done) => {
await ctx.ui.custom<void>((_tui, theme, _kb, done) => {
return new TodoListComponent(todos, theme, () => done());
});
},

View file

@ -69,7 +69,7 @@ export default function toolsExtension(pi: ExtensionAPI) {
// Refresh tool list
allTools = pi.getAllTools();
await ctx.ui.custom((tui, theme, done) => {
await ctx.ui.custom((tui, theme, _kb, done) => {
// Build settings items for each tool
const items: SettingItem[] = allTools.map((tool) => ({
id: tool,

View file

@ -1262,8 +1262,24 @@ export class AgentSession {
const contextWindow = this.model?.contextWindow ?? 0;
// Skip overflow check if the message came from a different model.
// This handles the case where user switched from a smaller-context model (e.g. opus)
// to a larger-context model (e.g. codex) - the overflow error from the old model
// shouldn't trigger compaction for the new model.
const sameModel =
this.model && assistantMessage.provider === this.model.provider && assistantMessage.model === this.model.id;
// Skip overflow check if the error is from before a compaction in the current path.
// This handles the case where an error was kept after compaction (in the "kept" region).
// The error shouldn't trigger another compaction since we already compacted.
// Example: opus fails → switch to codex → compact → switch back to opus → opus error
// is still in context but shouldn't trigger compaction again.
const compactionEntry = this.sessionManager.getBranch().find((e) => e.type === "compaction");
const errorIsFromBeforeCompaction =
compactionEntry && assistantMessage.timestamp < new Date(compactionEntry.timestamp).getTime();
// Case 1: Overflow - LLM returned context overflow error
if (isContextOverflow(assistantMessage, contextWindow)) {
if (sameModel && !errorIsFromBeforeCompaction && isContextOverflow(assistantMessage, contextWindow)) {
// Remove the error message from agent state (it IS saved to session for history,
// but we don't want it in context for the retry)
const messages = this.agent.state.messages;

View file

@ -11,6 +11,8 @@ export type {
// Re-exports
AgentToolResult,
AgentToolUpdateCallback,
// App keybindings (for custom editors)
AppAction,
AppendEntryHandler,
BashToolResultEvent,
BeforeAgentStartEvent,
@ -42,6 +44,7 @@ export type {
GetAllToolsHandler,
GetThinkingLevelHandler,
GrepToolResultEvent,
KeybindingsManager,
LoadExtensionsResult,
// Loaded Extension
LoadedExtension,

View file

@ -99,6 +99,7 @@ function createNoOpUIContext(): ExtensionUIContext {
setEditorText: () => {},
getEditorText: () => "",
editor: async () => undefined,
setEditorComponent: () => {},
get theme() {
return theme;
},

View file

@ -74,6 +74,7 @@ const noOpUIContext: ExtensionUIContext = {
setEditorText: () => {},
getEditorText: () => "",
editor: async () => undefined,
setEditorComponent: () => {},
get theme() {
return theme;
},

View file

@ -15,12 +15,13 @@ import type {
ThinkingLevel,
} from "@mariozechner/pi-agent-core";
import type { ImageContent, Model, TextContent, ToolResultMessage } from "@mariozechner/pi-ai";
import type { Component, KeyId, TUI } from "@mariozechner/pi-tui";
import type { Component, EditorComponent, EditorTheme, KeyId, TUI } from "@mariozechner/pi-tui";
import type { Static, TSchema } from "@sinclair/typebox";
import type { Theme } from "../../modes/interactive/theme/theme.js";
import type { CompactionPreparation, CompactionResult } from "../compaction/index.js";
import type { EventBus } from "../event-bus.js";
import type { ExecOptions, ExecResult } from "../exec.js";
import type { AppAction, KeybindingsManager } from "../keybindings.js";
import type { CustomMessage } from "../messages.js";
import type { ModelRegistry } from "../model-registry.js";
import type {
@ -41,6 +42,7 @@ import type {
export type { ExecOptions, ExecResult } from "../exec.js";
export type { AgentToolResult, AgentToolUpdateCallback };
export type { AppAction, KeybindingsManager } from "../keybindings.js";
// ============================================================================
// UI Context
@ -92,6 +94,7 @@ export interface ExtensionUIContext {
factory: (
tui: TUI,
theme: Theme,
keybindings: KeybindingsManager,
done: (result: T) => void,
) => (Component & { dispose?(): void }) | Promise<Component & { dispose?(): void }>,
): Promise<T>;
@ -105,6 +108,43 @@ export interface ExtensionUIContext {
/** Show a multi-line editor for text editing. */
editor(title: string, prefill?: string): Promise<string | undefined>;
/**
* Set a custom editor component via factory function.
* Pass undefined to restore the default editor.
*
* The factory receives:
* - `theme`: EditorTheme for styling borders and autocomplete
* - `keybindings`: KeybindingsManager for app-level keybindings
*
* For full app keybinding support (escape, ctrl+d, model switching, etc.),
* extend `CustomEditor` from `@mariozechner/pi-coding-agent` and call
* `super.handleInput(data)` for keys you don't handle.
*
* @example
* ```ts
* import { CustomEditor } from "@mariozechner/pi-coding-agent";
*
* class VimEditor extends CustomEditor {
* private mode: "normal" | "insert" = "insert";
*
* handleInput(data: string): void {
* if (this.mode === "normal") {
* // Handle vim normal mode keys...
* if (data === "i") { this.mode = "insert"; return; }
* }
* super.handleInput(data); // App keybindings + text editing
* }
* }
*
* ctx.ui.setEditorComponent((tui, theme, keybindings) =>
* new VimEditor(tui, theme, keybindings)
* );
* ```
*/
setEditorComponent(
factory: ((tui: TUI, theme: EditorTheme, keybindings: KeybindingsManager) => EditorComponent) | undefined,
): void;
/** Get the current theme for styling. */
readonly theme: Theme;
}

View file

@ -29,7 +29,8 @@ export const defaultModelPerProvider: Record<KnownProvider, string> = {
export interface ScopedModel {
model: Model<Api>;
thinkingLevel: ThinkingLevel;
/** Thinking level if explicitly specified in pattern (e.g., "model:high"), undefined otherwise */
thinkingLevel?: ThinkingLevel;
}
/**
@ -98,7 +99,8 @@ function tryMatchModel(modelPattern: string, availableModels: Model<Api>[]): Mod
export interface ParsedModelResult {
model: Model<Api> | undefined;
thinkingLevel: ThinkingLevel;
/** Thinking level if explicitly specified in pattern, undefined otherwise */
thinkingLevel?: ThinkingLevel;
warning: string | undefined;
}
@ -119,14 +121,14 @@ export function parseModelPattern(pattern: string, availableModels: Model<Api>[]
// Try exact match first
const exactMatch = tryMatchModel(pattern, availableModels);
if (exactMatch) {
return { model: exactMatch, thinkingLevel: "off", warning: undefined };
return { model: exactMatch, thinkingLevel: undefined, warning: undefined };
}
// No match - try splitting on last colon if present
const lastColonIndex = pattern.lastIndexOf(":");
if (lastColonIndex === -1) {
// No colons, pattern simply doesn't match any model
return { model: undefined, thinkingLevel: "off", warning: undefined };
return { model: undefined, thinkingLevel: undefined, warning: undefined };
}
const prefix = pattern.substring(0, lastColonIndex);
@ -137,22 +139,21 @@ export function parseModelPattern(pattern: string, availableModels: Model<Api>[]
const result = parseModelPattern(prefix, availableModels);
if (result.model) {
// Only use this thinking level if no warning from inner recursion
// (if there was an invalid suffix deeper, we already have "off")
return {
model: result.model,
thinkingLevel: result.warning ? "off" : suffix,
thinkingLevel: result.warning ? undefined : suffix,
warning: result.warning,
};
}
return result;
} else {
// Invalid suffix - recurse on prefix with "off" and warn
// Invalid suffix - recurse on prefix and warn
const result = parseModelPattern(prefix, availableModels);
if (result.model) {
return {
model: result.model,
thinkingLevel: "off",
warning: `Invalid thinking level "${suffix}" in pattern "${pattern}". Using "off" instead.`,
thinkingLevel: undefined,
warning: `Invalid thinking level "${suffix}" in pattern "${pattern}". Using default instead.`,
};
}
return result;
@ -180,7 +181,7 @@ export async function resolveModelScope(patterns: string[], modelRegistry: Model
// Extract optional thinking level suffix (e.g., "provider/*:high")
const colonIdx = pattern.lastIndexOf(":");
let globPattern = pattern;
let thinkingLevel: ThinkingLevel = "off";
let thinkingLevel: ThinkingLevel | undefined;
if (colonIdx !== -1) {
const suffix = pattern.substring(colonIdx + 1);
@ -282,7 +283,7 @@ export async function findInitialModel(options: {
if (scopedModels.length > 0 && !isContinuing) {
return {
model: scopedModels[0].model,
thinkingLevel: scopedModels[0].thinkingLevel,
thinkingLevel: scopedModels[0].thinkingLevel ?? defaultThinkingLevel ?? "off",
fallbackMessage: undefined,
};
}

View file

@ -531,6 +531,7 @@ export async function createAgentSession(options: CreateAgentSessionOptions = {}
setEditorText: () => {},
getEditorText: () => "",
editor: async () => undefined,
setEditorComponent: () => {},
get theme() {
return {} as any;
},

View file

@ -40,6 +40,7 @@ export type {
AgentStartEvent,
AgentToolResult,
AgentToolUpdateCallback,
AppAction,
BeforeAgentStartEvent,
ContextEvent,
ExecOptions,
@ -55,6 +56,7 @@ export type {
ExtensionShortcut,
ExtensionUIContext,
ExtensionUIDialogOptions,
KeybindingsManager,
LoadExtensionsResult,
LoadedExtension,
MessageRenderer,

View file

@ -235,6 +235,7 @@ function buildSessionOptions(
scopedModels: ScopedModel[],
sessionManager: SessionManager | undefined,
modelRegistry: ModelRegistry,
settingsManager: SettingsManager,
preloadedExtensions?: LoadedExtension[],
): CreateAgentSessionOptions {
const options: CreateAgentSessionOptions = {};
@ -261,15 +262,21 @@ function buildSessionOptions(
}
// Thinking level
// Only use scoped model's thinking level if it was explicitly specified (e.g., "model:high")
// Otherwise, let the SDK use defaultThinkingLevel from settings
if (parsed.thinking) {
options.thinkingLevel = parsed.thinking;
} else if (scopedModels.length > 0 && !parsed.continue && !parsed.resume) {
} else if (scopedModels.length > 0 && scopedModels[0].thinkingLevel && !parsed.continue && !parsed.resume) {
options.thinkingLevel = scopedModels[0].thinkingLevel;
}
// Scoped models for Ctrl+P cycling
// Scoped models for Ctrl+P cycling - fill in default thinking level for models without explicit level
if (scopedModels.length > 0) {
options.scopedModels = scopedModels;
const defaultThinkingLevel = settingsManager.getDefaultThinkingLevel() ?? "off";
options.scopedModels = scopedModels.map((sm) => ({
model: sm.model,
thinkingLevel: sm.thinkingLevel ?? defaultThinkingLevel,
}));
}
// API key from CLI - set in authStorage
@ -423,7 +430,14 @@ export async function main(args: string[]) {
sessionManager = SessionManager.open(selectedPath);
}
const sessionOptions = buildSessionOptions(parsed, scopedModels, sessionManager, modelRegistry, loadedExtensions);
const sessionOptions = buildSessionOptions(
parsed,
scopedModels,
sessionManager,
modelRegistry,
settingsManager,
loadedExtensions,
);
sessionOptions.authStorage = authStorage;
sessionOptions.modelRegistry = modelRegistry;
sessionOptions.eventBus = eventBus;
@ -471,7 +485,7 @@ export async function main(args: string[]) {
if (scopedModels.length > 0) {
const modelList = scopedModels
.map((sm) => {
const thinkingStr = sm.thinkingLevel !== "off" ? `:${sm.thinkingLevel}` : "";
const thinkingStr = sm.thinkingLevel ? `:${sm.thinkingLevel}` : "";
return `${sm.model.id}${thinkingStr}`;
})
.join(", ");

View file

@ -6,7 +6,7 @@ import type { AppAction, KeybindingsManager } from "../../../core/keybindings.js
*/
export class CustomEditor extends Editor {
private keybindings: KeybindingsManager;
private actionHandlers: Map<AppAction, () => void> = new Map();
public actionHandlers: Map<AppAction, () => void> = new Map();
// Special handlers that can be dynamically replaced
public onEscape?: () => void;

View file

@ -9,7 +9,7 @@ import * as os from "node:os";
import * as path from "node:path";
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import { type AssistantMessage, getOAuthProviders, type Message, type OAuthProvider } from "@mariozechner/pi-ai";
import type { KeyId, SlashCommand } from "@mariozechner/pi-tui";
import type { EditorComponent, EditorTheme, KeyId, SlashCommand } from "@mariozechner/pi-tui";
import {
CombinedAutocompleteProvider,
type Component,
@ -96,7 +96,9 @@ export class InteractiveMode {
private chatContainer: Container;
private pendingMessagesContainer: Container;
private statusContainer: Container;
private editor: CustomEditor;
private defaultEditor: CustomEditor;
private editor: EditorComponent;
private autocompleteProvider: CombinedAutocompleteProvider | undefined;
private editorContainer: Container;
private footer: FooterComponent;
private keybindings: KeybindingsManager;
@ -195,9 +197,10 @@ export class InteractiveMode {
this.statusContainer = new Container();
this.widgetContainer = new Container();
this.keybindings = KeybindingsManager.create();
this.editor = new CustomEditor(getEditorTheme(), this.keybindings);
this.defaultEditor = new CustomEditor(getEditorTheme(), this.keybindings);
this.editor = this.defaultEditor;
this.editorContainer = new Container();
this.editorContainer.addChild(this.editor);
this.editorContainer.addChild(this.editor as Component);
this.footer = new FooterComponent(session);
this.footer.setAutoCompactEnabled(session.autoCompactionEnabled);
@ -238,12 +241,12 @@ export class InteractiveMode {
);
// Setup autocomplete
const autocompleteProvider = new CombinedAutocompleteProvider(
this.autocompleteProvider = new CombinedAutocompleteProvider(
[...slashCommands, ...templateCommands, ...extensionCommands],
process.cwd(),
fdPath,
);
this.editor.setAutocompleteProvider(autocompleteProvider);
this.defaultEditor.setAutocompleteProvider(this.autocompleteProvider);
}
async init(): Promise<void> {
@ -595,8 +598,8 @@ export class InteractiveMode {
hasPendingMessages: () => this.session.pendingMessageCount > 0,
});
// Set up the extension shortcut handler on the editor
this.editor.onExtensionShortcut = (data: string) => {
// Set up the extension shortcut handler on the default editor
this.defaultEditor.onExtensionShortcut = (data: string) => {
for (const [shortcutStr, shortcut] of shortcuts) {
// Cast to KeyId - extension shortcuts use the same format
if (matchesKey(data, shortcutStr as KeyId)) {
@ -753,6 +756,7 @@ export class InteractiveMode {
setEditorText: (text) => this.editor.setText(text),
getEditorText: () => this.editor.getText(),
editor: (title, prefill) => this.showExtensionEditor(title, prefill),
setEditorComponent: (factory) => this.setCustomEditorComponent(factory),
get theme() {
return theme;
},
@ -918,6 +922,65 @@ export class InteractiveMode {
this.ui.requestRender();
}
/**
 * Swap in a custom editor component supplied by an extension, or restore
 * the built-in editor.
 *
 * The current editor's text is carried over to the replacement, the default
 * editor's submit/change callbacks and appearance are wired onto the new
 * component, and focus moves to it.
 *
 * @param factory Builds the replacement editor from the TUI, theme, and
 *   keybindings. Pass undefined to restore the default editor.
 */
private setCustomEditorComponent(
	factory: ((tui: TUI, theme: EditorTheme, keybindings: KeybindingsManager) => EditorComponent) | undefined,
): void {
	// Save text from current editor before switching
	const currentText = this.editor.getText();

	this.editorContainer.clear();

	if (factory) {
		// Create the custom editor with tui, theme, and keybindings
		const newEditor = factory(this.ui, getEditorTheme(), this.keybindings);

		// Wire up callbacks from the default editor
		newEditor.onSubmit = this.defaultEditor.onSubmit;
		newEditor.onChange = this.defaultEditor.onChange;

		// Copy text from previous editor
		newEditor.setText(currentText);

		// Copy appearance settings if supported (borderColor is optional on EditorComponent)
		if (newEditor.borderColor !== undefined) {
			newEditor.borderColor = this.defaultEditor.borderColor;
		}

		// Set autocomplete if supported
		if (newEditor.setAutocompleteProvider && this.autocompleteProvider) {
			newEditor.setAutocompleteProvider(this.autocompleteProvider);
		}

		// If extending CustomEditor, copy app-level handlers
		// Use duck typing since instanceof fails across jiti module boundaries
		const customEditor = newEditor as unknown as Record<string, unknown>;
		if ("actionHandlers" in customEditor && customEditor.actionHandlers instanceof Map) {
			customEditor.onEscape = this.defaultEditor.onEscape;
			customEditor.onCtrlD = this.defaultEditor.onCtrlD;
			customEditor.onPasteImage = this.defaultEditor.onPasteImage;
			customEditor.onExtensionShortcut = this.defaultEditor.onExtensionShortcut;
			// Copy action handlers (clear, suspend, model switching, etc.)
			for (const [action, handler] of this.defaultEditor.actionHandlers) {
				(customEditor.actionHandlers as Map<string, () => void>).set(action, handler);
			}
		}

		this.editor = newEditor;
	} else {
		// Restore default editor with text from custom editor
		this.defaultEditor.setText(currentText);
		this.editor = this.defaultEditor;
	}

	this.editorContainer.addChild(this.editor as Component);
	this.ui.setFocus(this.editor as Component);
	this.ui.requestRender();
}
/**
* Show a notification for extensions.
*/
@ -938,6 +1001,7 @@ export class InteractiveMode {
factory: (
tui: TUI,
theme: Theme,
keybindings: KeybindingsManager,
done: (result: T) => void,
) => (Component & { dispose?(): void }) | Promise<Component & { dispose?(): void }>,
): Promise<T> {
@ -956,7 +1020,7 @@ export class InteractiveMode {
resolve(result);
};
Promise.resolve(factory(this.ui, theme, close)).then((c) => {
Promise.resolve(factory(this.ui, theme, this.keybindings, close)).then((c) => {
component = c;
this.editorContainer.clear();
this.editorContainer.addChild(component);
@ -992,7 +1056,9 @@ export class InteractiveMode {
// =========================================================================
private setupKeyHandlers(): void {
this.editor.onEscape = () => {
// Set up handlers on defaultEditor - they use this.editor for text access
// so they work correctly regardless of which editor is active
this.defaultEditor.onEscape = () => {
if (this.loadingAnimation) {
// Abort and restore queued messages to editor
const { steering, followUp } = this.session.clearQueue();
@ -1026,22 +1092,22 @@ export class InteractiveMode {
};
// Register app action handlers
this.editor.onAction("clear", () => this.handleCtrlC());
this.editor.onCtrlD = () => this.handleCtrlD();
this.editor.onAction("suspend", () => this.handleCtrlZ());
this.editor.onAction("cycleThinkingLevel", () => this.cycleThinkingLevel());
this.editor.onAction("cycleModelForward", () => this.cycleModel("forward"));
this.editor.onAction("cycleModelBackward", () => this.cycleModel("backward"));
this.defaultEditor.onAction("clear", () => this.handleCtrlC());
this.defaultEditor.onCtrlD = () => this.handleCtrlD();
this.defaultEditor.onAction("suspend", () => this.handleCtrlZ());
this.defaultEditor.onAction("cycleThinkingLevel", () => this.cycleThinkingLevel());
this.defaultEditor.onAction("cycleModelForward", () => this.cycleModel("forward"));
this.defaultEditor.onAction("cycleModelBackward", () => this.cycleModel("backward"));
// Global debug handler on TUI (works regardless of focus)
this.ui.onDebug = () => this.handleDebugCommand();
this.editor.onAction("selectModel", () => this.showModelSelector());
this.editor.onAction("expandTools", () => this.toggleToolOutputExpansion());
this.editor.onAction("toggleThinking", () => this.toggleThinkingBlockVisibility());
this.editor.onAction("externalEditor", () => this.openExternalEditor());
this.editor.onAction("followUp", () => this.handleFollowUp());
this.defaultEditor.onAction("selectModel", () => this.showModelSelector());
this.defaultEditor.onAction("expandTools", () => this.toggleToolOutputExpansion());
this.defaultEditor.onAction("toggleThinking", () => this.toggleThinkingBlockVisibility());
this.defaultEditor.onAction("externalEditor", () => this.openExternalEditor());
this.defaultEditor.onAction("followUp", () => this.handleFollowUp());
this.editor.onChange = (text: string) => {
this.defaultEditor.onChange = (text: string) => {
const wasBashMode = this.isBashMode;
this.isBashMode = text.trimStart().startsWith("!");
if (wasBashMode !== this.isBashMode) {
@ -1050,7 +1116,7 @@ export class InteractiveMode {
};
// Handle clipboard image paste (triggered on Ctrl+V)
this.editor.onPasteImage = () => {
this.defaultEditor.onPasteImage = () => {
this.handleClipboardImagePaste();
};
}
@ -1070,7 +1136,7 @@ export class InteractiveMode {
fs.writeFileSync(filePath, Buffer.from(image.bytes));
// Insert file path directly
this.editor.insertTextAtCursor(filePath);
this.editor.insertTextAtCursor?.(filePath);
this.ui.requestRender();
} catch {
// Silently ignore clipboard errors (may not have permission, etc.)
@ -1078,7 +1144,7 @@ export class InteractiveMode {
}
private setupEditorSubmitHandler(): void {
this.editor.onSubmit = async (text: string) => {
this.defaultEditor.onSubmit = async (text: string) => {
text = text.trim();
if (!text) return;
@ -1185,7 +1251,7 @@ export class InteractiveMode {
this.editor.setText(text);
return;
}
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
await this.handleBashCommand(command, isExcluded);
this.isBashMode = false;
this.updateEditorBorderColor();
@ -1196,7 +1262,7 @@ export class InteractiveMode {
// Queue input during compaction (extension commands execute immediately)
if (this.session.isCompacting) {
if (this.isExtensionCommand(text)) {
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
this.editor.setText("");
await this.session.prompt(text);
} else {
@ -1208,7 +1274,7 @@ export class InteractiveMode {
// If streaming, use prompt() with steer behavior
// This handles extension commands (execute immediately), prompt template expansion, and queueing
if (this.session.isStreaming) {
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
this.editor.setText("");
await this.session.prompt(text, { streamingBehavior: "steer" });
this.updatePendingMessagesDisplay();
@ -1223,7 +1289,7 @@ export class InteractiveMode {
if (this.onInputCallback) {
this.onInputCallback(text);
}
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
};
}
@ -1393,8 +1459,8 @@ export class InteractiveMode {
case "auto_compaction_start": {
// Keep editor active; submissions are queued during compaction.
// Set up escape to abort auto-compaction
this.autoCompactionEscapeHandler = this.editor.onEscape;
this.editor.onEscape = () => {
this.autoCompactionEscapeHandler = this.defaultEditor.onEscape;
this.defaultEditor.onEscape = () => {
this.session.abortCompaction();
};
// Show compacting indicator with reason
@ -1414,7 +1480,7 @@ export class InteractiveMode {
case "auto_compaction_end": {
// Restore escape handler
if (this.autoCompactionEscapeHandler) {
this.editor.onEscape = this.autoCompactionEscapeHandler;
this.defaultEditor.onEscape = this.autoCompactionEscapeHandler;
this.autoCompactionEscapeHandler = undefined;
}
// Stop loader
@ -1446,8 +1512,8 @@ export class InteractiveMode {
case "auto_retry_start": {
// Set up escape to abort retry
this.retryEscapeHandler = this.editor.onEscape;
this.editor.onEscape = () => {
this.retryEscapeHandler = this.defaultEditor.onEscape;
this.defaultEditor.onEscape = () => {
this.session.abortRetry();
};
// Show retry indicator
@ -1467,7 +1533,7 @@ export class InteractiveMode {
case "auto_retry_end": {
// Restore escape handler
if (this.retryEscapeHandler) {
this.editor.onEscape = this.retryEscapeHandler;
this.defaultEditor.onEscape = this.retryEscapeHandler;
this.retryEscapeHandler = undefined;
}
// Stop loader
@ -1565,7 +1631,7 @@ export class InteractiveMode {
const userComponent = new UserMessageComponent(textContent);
this.chatContainer.addChild(userComponent);
if (options?.populateHistory) {
this.editor.addToHistory(textContent);
this.editor.addToHistory?.(textContent);
}
}
break;
@ -1734,7 +1800,7 @@ export class InteractiveMode {
// Queue input during compaction (extension commands execute immediately)
if (this.session.isCompacting) {
if (this.isExtensionCommand(text)) {
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
this.editor.setText("");
await this.session.prompt(text);
} else {
@ -1746,7 +1812,7 @@ export class InteractiveMode {
// Alt+Enter queues a follow-up message (waits until agent finishes)
// This handles extension commands (execute immediately), prompt template expansion, and queueing
if (this.session.isStreaming) {
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
this.editor.setText("");
await this.session.prompt(text, { streamingBehavior: "followUp" });
this.updatePendingMessagesDisplay();
@ -1833,7 +1899,7 @@ export class InteractiveMode {
return;
}
const currentText = this.editor.getExpandedText();
const currentText = this.editor.getExpandedText?.() ?? this.editor.getText();
const tmpFile = path.join(os.tmpdir(), `pi-editor-${Date.now()}.pi.md`);
try {
@ -1934,7 +2000,7 @@ export class InteractiveMode {
private queueCompactionMessage(text: string, mode: "steer" | "followUp"): void {
this.compactionQueuedMessages.push({ text, mode });
this.editor.addToHistory(text);
this.editor.addToHistory?.(text);
this.editor.setText("");
this.updatePendingMessagesDisplay();
this.showStatus("Queued message for after compaction");
@ -2253,10 +2319,10 @@ export class InteractiveMode {
// Set up escape handler and loader if summarizing
let summaryLoader: Loader | undefined;
const originalOnEscape = this.editor.onEscape;
const originalOnEscape = this.defaultEditor.onEscape;
if (wantsSummary) {
this.editor.onEscape = () => {
this.defaultEditor.onEscape = () => {
this.session.abortBranchSummary();
};
this.chatContainer.addChild(new Spacer(1));
@ -2298,7 +2364,7 @@ export class InteractiveMode {
summaryLoader.stop();
this.statusContainer.clear();
}
this.editor.onEscape = originalOnEscape;
this.defaultEditor.onEscape = originalOnEscape;
}
},
() => {
@ -2921,8 +2987,8 @@ export class InteractiveMode {
this.statusContainer.clear();
// Set up escape handler during compaction
const originalOnEscape = this.editor.onEscape;
this.editor.onEscape = () => {
const originalOnEscape = this.defaultEditor.onEscape;
this.defaultEditor.onEscape = () => {
this.session.abortCompaction();
};
@ -2959,7 +3025,7 @@ export class InteractiveMode {
} finally {
compactingLoader.stop();
this.statusContainer.clear();
this.editor.onEscape = originalOnEscape;
this.defaultEditor.onEscape = originalOnEscape;
}
void this.flushCompactionQueue({ willRetry: false });
}

View file

@ -219,6 +219,10 @@ export async function runRpcMode(session: AgentSession): Promise<never> {
});
},
setEditorComponent(): void {
// Custom editor components not supported in RPC mode
},
get theme() {
return theme;
},

View file

@ -137,6 +137,7 @@ describe.skipIf(!API_KEY)("Compaction extensions", () => {
setEditorText: () => {},
getEditorText: () => "",
editor: async () => undefined,
setEditorComponent: () => {},
get theme() {
return theme;
},

View file

@ -62,24 +62,24 @@ const allModels = [...mockModels, ...mockOpenRouterModels];
describe("parseModelPattern", () => {
describe("simple patterns without colons", () => {
test("exact match returns model with off thinking level", () => {
test("exact match returns model with undefined thinking level", () => {
const result = parseModelPattern("claude-sonnet-4-5", allModels);
expect(result.model?.id).toBe("claude-sonnet-4-5");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
test("partial match returns best model", () => {
test("partial match returns best model with undefined thinking level", () => {
const result = parseModelPattern("sonnet", allModels);
expect(result.model?.id).toBe("claude-sonnet-4-5");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
test("no match returns null model", () => {
test("no match returns undefined model and thinking level", () => {
const result = parseModelPattern("nonexistent", allModels);
expect(result.model).toBeUndefined();
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
});
@ -110,27 +110,27 @@ describe("parseModelPattern", () => {
});
describe("patterns with invalid thinking levels", () => {
test("sonnet:random returns sonnet with off and warning", () => {
test("sonnet:random returns sonnet with undefined thinking level and warning", () => {
const result = parseModelPattern("sonnet:random", allModels);
expect(result.model?.id).toBe("claude-sonnet-4-5");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toContain("Invalid thinking level");
expect(result.warning).toContain("random");
});
test("gpt-4o:invalid returns gpt-4o with off and warning", () => {
test("gpt-4o:invalid returns gpt-4o with undefined thinking level and warning", () => {
const result = parseModelPattern("gpt-4o:invalid", allModels);
expect(result.model?.id).toBe("gpt-4o");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toContain("Invalid thinking level");
});
});
describe("OpenRouter models with colons in IDs", () => {
test("qwen3-coder:exacto matches the model with off", () => {
test("qwen3-coder:exacto matches the model with undefined thinking level", () => {
const result = parseModelPattern("qwen/qwen3-coder:exacto", allModels);
expect(result.model?.id).toBe("qwen/qwen3-coder:exacto");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
@ -138,7 +138,7 @@ describe("parseModelPattern", () => {
const result = parseModelPattern("openrouter/qwen/qwen3-coder:exacto", allModels);
expect(result.model?.id).toBe("qwen/qwen3-coder:exacto");
expect(result.model?.provider).toBe("openrouter");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
@ -157,27 +157,27 @@ describe("parseModelPattern", () => {
expect(result.warning).toBeUndefined();
});
test("gpt-4o:extended matches the extended model", () => {
test("gpt-4o:extended matches the extended model with undefined thinking level", () => {
const result = parseModelPattern("openai/gpt-4o:extended", allModels);
expect(result.model?.id).toBe("openai/gpt-4o:extended");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toBeUndefined();
});
});
describe("invalid thinking levels with OpenRouter models", () => {
test("qwen3-coder:exacto:random returns model with off and warning", () => {
test("qwen3-coder:exacto:random returns model with undefined thinking level and warning", () => {
const result = parseModelPattern("qwen/qwen3-coder:exacto:random", allModels);
expect(result.model?.id).toBe("qwen/qwen3-coder:exacto");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toContain("Invalid thinking level");
expect(result.warning).toContain("random");
});
test("qwen3-coder:exacto:high:random returns model with off and warning", () => {
test("qwen3-coder:exacto:high:random returns model with undefined thinking level and warning", () => {
const result = parseModelPattern("qwen/qwen3-coder:exacto:high:random", allModels);
expect(result.model?.id).toBe("qwen/qwen3-coder:exacto");
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
expect(result.warning).toContain("Invalid thinking level");
expect(result.warning).toContain("random");
});
@ -188,7 +188,7 @@ describe("parseModelPattern", () => {
// Empty string is included in all model IDs, so partial matching finds a match
const result = parseModelPattern("", allModels);
expect(result.model).not.toBeNull();
expect(result.thinkingLevel).toBe("off");
expect(result.thinkingLevel).toBeUndefined();
});
test("pattern ending with colon treats empty suffix as invalid", () => {

View file

@ -2,6 +2,15 @@
## [Unreleased]
### Added
- `EditorComponent` interface for custom editor implementations
- `StdinBuffer` class to split batched stdin into individual sequences (adapted from [OpenTUI](https://github.com/anomalyco/opentui), MIT license)
### Fixed
- Key presses no longer dropped when batched with other events over SSH ([#538](https://github.com/badlogic/pi-mono/pull/538))
## [0.37.8] - 2026-01-07
### Added

View file

@ -0,0 +1,65 @@
import type { AutocompleteProvider } from "./autocomplete.js";
import type { Component } from "./tui.js";
/**
 * Interface for custom editor components.
 *
 * This allows extensions to provide their own editor implementation
 * (e.g., vim mode, emacs mode, custom keybindings) while maintaining
 * compatibility with the core application.
 *
 * Only getText/setText are mandatory; the host checks for the optional
 * members before using them.
 */
export interface EditorComponent extends Component {
	// =========================================================================
	// Core text access (required)
	// =========================================================================

	/** Get the current text content */
	getText(): string;

	/** Set the text content */
	setText(text: string): void;

	// =========================================================================
	// Callbacks (assigned by the host; invoke them when the events occur)
	// =========================================================================

	/** Called when user submits (e.g., Enter key) */
	onSubmit?: (text: string) => void;

	/** Called when text changes */
	onChange?: (text: string) => void;

	// =========================================================================
	// History support (optional)
	// =========================================================================

	/** Add text to history for up/down navigation */
	addToHistory?(text: string): void;

	// =========================================================================
	// Advanced text manipulation (optional)
	// =========================================================================

	/** Insert text at current cursor position */
	insertTextAtCursor?(text: string): void;

	/**
	 * Get text with any markers expanded (e.g., paste markers).
	 * Falls back to getText() if not implemented.
	 */
	getExpandedText?(): string;

	// =========================================================================
	// Autocomplete support (optional)
	// =========================================================================

	/** Set the autocomplete provider */
	setAutocompleteProvider?(provider: AutocompleteProvider): void;

	// =========================================================================
	// Appearance (optional)
	// =========================================================================

	/** Border color function */
	borderColor?: (str: string) => string;
}

View file

@ -20,6 +20,8 @@ export { type SettingItem, SettingsList, type SettingsListTheme } from "./compon
export { Spacer } from "./components/spacer.js";
export { Text } from "./components/text.js";
export { TruncatedText } from "./components/truncated-text.js";
// Editor component interface (for custom editors)
export type { EditorComponent } from "./editor-component.js";
// Keybindings
export {
DEFAULT_EDITOR_KEYBINDINGS,
@ -41,6 +43,8 @@ export {
parseKey,
setKittyProtocolActive,
} from "./keys.js";
// Input buffering for batch splitting
export { StdinBuffer, type StdinBufferEventMap, type StdinBufferOptions } from "./stdin-buffer.js";
// Terminal interface and implementations
export { ProcessTerminal, type Terminal } from "./terminal.js";
// Terminal image support

View file

@ -0,0 +1,386 @@
/**
* StdinBuffer buffers input and emits complete sequences.
*
* This is necessary because stdin data events can arrive in partial chunks,
* especially for escape sequences like mouse events. Without buffering,
* partial sequences can be misinterpreted as regular keypresses.
*
* For example, the mouse SGR sequence `\x1b[<35;20;5m` might arrive as:
* - Event 1: `\x1b`
* - Event 2: `[<35`
* - Event 3: `;20;5m`
*
* The buffer accumulates these until a complete sequence is detected.
* Call the `process()` method to feed input data.
*
* Based on code from OpenTUI (https://github.com/anomalyco/opentui)
* MIT License - Copyright (c) 2025 opentui
*/
import { EventEmitter } from "events";
const ESC = "\x1b";
const BRACKETED_PASTE_START = "\x1b[200~";
const BRACKETED_PASTE_END = "\x1b[201~";

/**
 * Classify a chunk: a full escape sequence, a prefix of one, or plain text.
 */
function isCompleteSequence(data: string): "complete" | "incomplete" | "not-escape" {
	if (data[0] !== ESC) {
		return "not-escape";
	}
	if (data.length === 1) {
		// A lone ESC could still become a longer sequence.
		return "incomplete";
	}

	switch (data[1]) {
		case "[":
			// Legacy X10 mouse report: ESC [ M plus exactly 3 payload bytes.
			if (data[2] === "M") {
				return data.length >= 6 ? "complete" : "incomplete";
			}
			return isCompleteCsiSequence(data);
		case "]":
			// OSC: ESC ] ... terminated by ST or BEL.
			return isCompleteOscSequence(data);
		case "P":
			// DCS: ESC P ... ST (includes XTVersion responses).
			return isCompleteDcsSequence(data);
		case "_":
			// APC: ESC _ ... ST (includes Kitty graphics responses).
			return isCompleteApcSequence(data);
		case "O":
			// SS3: ESC O plus exactly one final character.
			return data.length >= 3 ? "complete" : "incomplete";
		default:
			// ESC + one char is a meta-key chord; anything longer and unknown
			// is treated as complete so it cannot wedge the buffer.
			return "complete";
	}
}

/**
 * Determine whether a CSI sequence (ESC [ ...) has been fully received.
 * CSI sequences terminate with a final byte in the range 0x40-0x7E (@-~).
 */
function isCompleteCsiSequence(data: string): "complete" | "incomplete" {
	if (!data.startsWith(`${ESC}[`)) {
		// Not CSI at all; nothing further to wait for.
		return "complete";
	}
	const payload = data.slice(2);
	if (payload.length === 0) {
		return "incomplete";
	}

	const finalCode = payload.charCodeAt(payload.length - 1);
	if (finalCode < 0x40 || finalCode > 0x7e) {
		return "incomplete";
	}

	// SGR mouse reports (ESC [ < B;X;Y followed by M or m) must match the
	// full shape; a stray final byte inside one does not end the sequence.
	// The regex is the complete structural check here.
	if (payload[0] === "<") {
		return /^<\d+;\d+;\d+[Mm]$/.test(payload) ? "complete" : "incomplete";
	}
	return "complete";
}

/**
 * Shared "ESC <intro> ... terminator" completeness check used by the
 * string-terminated sequence families (OSC/DCS/APC).
 */
function isStringTerminated(data: string, intro: string, terminators: readonly string[]): "complete" | "incomplete" {
	if (!data.startsWith(ESC + intro)) {
		return "complete";
	}
	return terminators.some((t) => data.endsWith(t)) ? "complete" : "incomplete";
}

/**
 * Check if an OSC sequence is complete.
 * OSC sequences: ESC ] ... ST (where ST is ESC \) or BEL.
 */
function isCompleteOscSequence(data: string): "complete" | "incomplete" {
	return isStringTerminated(data, "]", [`${ESC}\\`, "\x07"]);
}

/**
 * Check if a DCS (Device Control String) sequence is complete.
 * DCS sequences: ESC P ... ST. Used for XTVersion responses.
 */
function isCompleteDcsSequence(data: string): "complete" | "incomplete" {
	return isStringTerminated(data, "P", [`${ESC}\\`]);
}

/**
 * Check if an APC (Application Program Command) sequence is complete.
 * APC sequences: ESC _ ... ST. Used for Kitty graphics responses.
 */
function isCompleteApcSequence(data: string): "complete" | "incomplete" {
	return isStringTerminated(data, "_", [`${ESC}\\`]);
}

/**
 * Split the accumulated buffer into complete sequences, returning any
 * trailing partial escape sequence as the remainder.
 */
function extractCompleteSequences(buffer: string): { sequences: string[]; remainder: string } {
	const sequences: string[] = [];
	let cursor = 0;

	while (cursor < buffer.length) {
		const rest = buffer.slice(cursor);

		if (rest[0] !== ESC) {
			// Plain character: emit it on its own.
			sequences.push(rest[0]!);
			cursor += 1;
			continue;
		}

		// Grow a candidate prefix until the classifier stops saying "incomplete".
		let matched: string | undefined;
		let length = 1;
		while (length <= rest.length) {
			const prefix = rest.slice(0, length);
			if (isCompleteSequence(prefix) === "incomplete") {
				length += 1;
			} else {
				// "complete" (or, defensively, "not-escape") ends the scan.
				matched = prefix;
				break;
			}
		}

		if (matched === undefined) {
			// Ran out of input mid-sequence: keep the tail for the next chunk.
			return { sequences, remainder: rest };
		}
		sequences.push(matched);
		cursor += matched.length;
	}

	return { sequences, remainder: "" };
}
export type StdinBufferOptions = {
	/**
	 * Maximum time to wait for sequence completion (default: 10ms)
	 * After this time, the buffer is flushed even if incomplete
	 */
	timeout?: number;
};

/** Events emitted by StdinBuffer. */
export type StdinBufferEventMap = {
	// Payload: one complete escape sequence or a single plain character.
	data: [string];
	// Payload: pasted text, without the bracketed-paste markers.
	paste: [string];
};
/**
* Buffers stdin input and emits complete sequences via the 'data' event.
* Handles partial escape sequences that arrive across multiple chunks.
*/
export class StdinBuffer extends EventEmitter<StdinBufferEventMap> {
private buffer: string = "";
private timeout: ReturnType<typeof setTimeout> | null = null;
private readonly timeoutMs: number;
private pasteMode: boolean = false;
private pasteBuffer: string = "";
constructor(options: StdinBufferOptions = {}) {
super();
this.timeoutMs = options.timeout ?? 10;
}
public process(data: string | Buffer): void {
// Clear any pending timeout
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = null;
}
// Handle high-byte conversion (for compatibility with parseKeypress)
// If buffer has single byte > 127, convert to ESC + (byte - 128)
let str: string;
if (Buffer.isBuffer(data)) {
if (data.length === 1 && data[0]! > 127) {
const byte = data[0]! - 128;
str = `\x1b${String.fromCharCode(byte)}`;
} else {
str = data.toString();
}
} else {
str = data;
}
if (str.length === 0 && this.buffer.length === 0) {
this.emit("data", "");
return;
}
this.buffer += str;
if (this.pasteMode) {
this.pasteBuffer += this.buffer;
this.buffer = "";
const endIndex = this.pasteBuffer.indexOf(BRACKETED_PASTE_END);
if (endIndex !== -1) {
const pastedContent = this.pasteBuffer.slice(0, endIndex);
const remaining = this.pasteBuffer.slice(endIndex + BRACKETED_PASTE_END.length);
this.pasteMode = false;
this.pasteBuffer = "";
this.emit("paste", pastedContent);
if (remaining.length > 0) {
this.process(remaining);
}
}
return;
}
const startIndex = this.buffer.indexOf(BRACKETED_PASTE_START);
if (startIndex !== -1) {
if (startIndex > 0) {
const beforePaste = this.buffer.slice(0, startIndex);
const result = extractCompleteSequences(beforePaste);
for (const sequence of result.sequences) {
this.emit("data", sequence);
}
}
this.buffer = this.buffer.slice(startIndex + BRACKETED_PASTE_START.length);
this.pasteMode = true;
this.pasteBuffer = this.buffer;
this.buffer = "";
const endIndex = this.pasteBuffer.indexOf(BRACKETED_PASTE_END);
if (endIndex !== -1) {
const pastedContent = this.pasteBuffer.slice(0, endIndex);
const remaining = this.pasteBuffer.slice(endIndex + BRACKETED_PASTE_END.length);
this.pasteMode = false;
this.pasteBuffer = "";
this.emit("paste", pastedContent);
if (remaining.length > 0) {
this.process(remaining);
}
}
return;
}
const result = extractCompleteSequences(this.buffer);
this.buffer = result.remainder;
for (const sequence of result.sequences) {
this.emit("data", sequence);
}
if (this.buffer.length > 0) {
this.timeout = setTimeout(() => {
const flushed = this.flush();
for (const sequence of flushed) {
this.emit("data", sequence);
}
}, this.timeoutMs);
}
}
/**
 * Emit whatever is currently buffered as one unprocessed chunk and reset
 * the buffer. Cancels any pending timeout-driven flush. Returns an empty
 * array when there is nothing buffered.
 */
flush(): string[] {
	const pendingTimer = this.timeout;
	if (pendingTimer) {
		clearTimeout(pendingTimer);
		this.timeout = null;
	}
	const leftover = this.buffer;
	if (leftover.length === 0) {
		return [];
	}
	this.buffer = "";
	return [leftover];
}
clear(): void {
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = null;
}
this.buffer = "";
this.pasteMode = false;
this.pasteBuffer = "";
}
/** Current unprocessed buffer contents (exposed for tests/inspection). */
getBuffer(): string {
	const { buffer } = this;
	return buffer;
}
/**
 * Tear down the buffer. Equivalent to clear(): drops buffered input and
 * cancels the pending flush timer so nothing fires after destruction.
 */
destroy(): void {
	this.clear();
}
}

View file

@ -1,4 +1,5 @@
import { setKittyProtocolActive } from "./keys.js";
import { StdinBuffer } from "./stdin-buffer.js";
/**
* Minimal terminal interface for TUI
@ -44,6 +45,8 @@ export class ProcessTerminal implements Terminal {
private inputHandler?: (data: string) => void;
private resizeHandler?: () => void;
private _kittyProtocolActive = false;
private stdinBuffer?: StdinBuffer;
private stdinDataHandler?: (data: string) => void;
get kittyProtocolActive(): boolean {
return this._kittyProtocolActive;
@ -73,6 +76,35 @@ export class ProcessTerminal implements Terminal {
this.queryAndEnableKittyProtocol();
}
/**
* Set up StdinBuffer to split batched input into individual sequences.
* This ensures components receive single events, making matchesKey/isKeyRelease work correctly.
* Note: Does NOT register the stdin handler - that's done after the Kitty protocol query.
*/
private setupStdinBuffer(): void {
this.stdinBuffer = new StdinBuffer({ timeout: 10 });
// Forward individual sequences to the input handler
this.stdinBuffer.on("data", (sequence) => {
if (this.inputHandler) {
this.inputHandler(sequence);
}
});
// Re-wrap paste content with bracketed paste markers for existing editor handling
this.stdinBuffer.on("paste", (content) => {
if (this.inputHandler) {
this.inputHandler(`\x1b[200~${content}\x1b[201~`);
}
});
// Handler that pipes stdin data through the buffer
// Registration happens after Kitty protocol query completes
this.stdinDataHandler = (data: string) => {
this.stdinBuffer!.process(data);
};
}
/**
* Query terminal for Kitty keyboard protocol support and enable if available.
*
@ -91,9 +123,9 @@ export class ProcessTerminal implements Terminal {
const queryHandler = (data: string) => {
if (resolved) {
// Query phase done, forward to user handler
if (this.inputHandler) {
this.inputHandler(data);
// Query phase done, forward to StdinBuffer
if (this.stdinBuffer) {
this.stdinBuffer.process(data);
}
return;
}
@ -112,21 +144,24 @@ export class ProcessTerminal implements Terminal {
// Flag 2 = report event types (press/repeat/release)
process.stdout.write("\x1b[>3u");
// Remove the response from buffer, forward any remaining input
// Remove the response from buffer, forward any remaining input through StdinBuffer
const remaining = buffer.replace(kittyResponsePattern, "");
if (remaining && this.inputHandler) {
this.inputHandler(remaining);
if (remaining && this.stdinBuffer) {
this.stdinBuffer.process(remaining);
}
// Replace with user handler
// Replace query handler with StdinBuffer handler
process.stdin.removeListener("data", queryHandler);
if (this.inputHandler) {
process.stdin.on("data", this.inputHandler);
if (this.stdinDataHandler) {
process.stdin.on("data", this.stdinDataHandler);
}
}
};
// Temporarily intercept input for the query
// Set up StdinBuffer before query (it will receive input after query completes)
this.setupStdinBuffer();
// Temporarily intercept input for the query (before StdinBuffer)
process.stdin.on("data", queryHandler);
// Send query
@ -139,15 +174,15 @@ export class ProcessTerminal implements Terminal {
this._kittyProtocolActive = false;
setKittyProtocolActive(false);
// Forward any buffered input that wasn't a Kitty response
if (buffer && this.inputHandler) {
this.inputHandler(buffer);
// Forward any buffered input that wasn't a Kitty response through StdinBuffer
if (buffer && this.stdinBuffer) {
this.stdinBuffer.process(buffer);
}
// Replace with user handler
// Replace query handler with StdinBuffer handler
process.stdin.removeListener("data", queryHandler);
if (this.inputHandler) {
process.stdin.on("data", this.inputHandler);
if (this.stdinDataHandler) {
process.stdin.on("data", this.stdinDataHandler);
}
}
}, QUERY_TIMEOUT_MS);
@ -164,11 +199,18 @@ export class ProcessTerminal implements Terminal {
setKittyProtocolActive(false);
}
// Remove event handlers
if (this.inputHandler) {
process.stdin.removeListener("data", this.inputHandler);
this.inputHandler = undefined;
// Clean up StdinBuffer
if (this.stdinBuffer) {
this.stdinBuffer.destroy();
this.stdinBuffer = undefined;
}
// Remove event handlers
if (this.stdinDataHandler) {
process.stdin.removeListener("data", this.stdinDataHandler);
this.stdinDataHandler = undefined;
}
this.inputHandler = undefined;
if (this.resizeHandler) {
process.stdout.removeListener("resize", this.resizeHandler);
this.resizeHandler = undefined;

View file

@ -332,7 +332,19 @@ export class TUI extends Container {
].join("\n");
fs.mkdirSync(path.dirname(crashLogPath), { recursive: true });
fs.writeFileSync(crashLogPath, crashData);
throw new Error(`Rendered line ${i} exceeds terminal width. Debug log written to ${crashLogPath}`);
// Clean up terminal state before throwing
this.stop();
const errorMsg = [
`Rendered line ${i} exceeds terminal width (${visibleWidth(line)} > ${width}).`,
"",
"This is likely caused by a custom TUI component not truncating its output.",
"Use visibleWidth() to measure and truncateToWidth() to truncate lines.",
"",
`Debug log written to: ${crashLogPath}`,
].join("\n");
throw new Error(errorMsg);
}
buffer += line;
}

View file

@ -0,0 +1,422 @@
/**
* Tests for StdinBuffer
*
* Based on code from OpenTUI (https://github.com/anomalyco/opentui)
* MIT License - Copyright (c) 2025 opentui
*/
import assert from "node:assert";
import { beforeEach, describe, it } from "node:test";
import { StdinBuffer } from "../src/stdin-buffer.js";
describe("StdinBuffer", () => {
let buffer: StdinBuffer;
let emittedSequences: string[];
// Fresh buffer per test; the short 10ms timeout keeps the flush-timeout tests fast.
beforeEach(() => {
buffer = new StdinBuffer({ timeout: 10 });
// Collect emitted sequences
emittedSequences = [];
buffer.on("data", (sequence) => {
emittedSequences.push(sequence);
});
});
// Helper to process data through the buffer
function processInput(data: string | Buffer): void {
buffer.process(data);
}
// Helper to wait for async operations
async function wait(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
// Plain printable input: each character is emitted as its own "data" event.
describe("Regular Characters", () => {
it("should pass through regular characters immediately", () => {
processInput("a");
assert.deepStrictEqual(emittedSequences, ["a"]);
});
it("should pass through multiple regular characters", () => {
processInput("abc");
assert.deepStrictEqual(emittedSequences, ["a", "b", "c"]);
});
it("should handle unicode characters", () => {
processInput("hello 世界");
assert.deepStrictEqual(emittedSequences, ["h", "e", "l", "l", "o", " ", "世", "界"]);
});
});
// Fully-formed escape sequences arrive as a single "data" event each.
describe("Complete Escape Sequences", () => {
it("should pass through complete mouse SGR sequences", () => {
const mouseSeq = "\x1b[<35;20;5m";
processInput(mouseSeq);
assert.deepStrictEqual(emittedSequences, [mouseSeq]);
});
it("should pass through complete arrow key sequences", () => {
const upArrow = "\x1b[A";
processInput(upArrow);
assert.deepStrictEqual(emittedSequences, [upArrow]);
});
it("should pass through complete function key sequences", () => {
const f1 = "\x1b[11~";
processInput(f1);
assert.deepStrictEqual(emittedSequences, [f1]);
});
it("should pass through meta key sequences", () => {
const metaA = "\x1ba";
processInput(metaA);
assert.deepStrictEqual(emittedSequences, [metaA]);
});
it("should pass through SS3 sequences", () => {
const ss3 = "\x1bOA";
processInput(ss3);
assert.deepStrictEqual(emittedSequences, [ss3]);
});
});
// Incomplete sequences are held in the buffer until completed or timed out.
describe("Partial Escape Sequences", () => {
it("should buffer incomplete mouse SGR sequence", async () => {
processInput("\x1b");
assert.deepStrictEqual(emittedSequences, []);
assert.strictEqual(buffer.getBuffer(), "\x1b");
processInput("[<35");
assert.deepStrictEqual(emittedSequences, []);
assert.strictEqual(buffer.getBuffer(), "\x1b[<35");
processInput(";20;5m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<35;20;5m"]);
assert.strictEqual(buffer.getBuffer(), "");
});
it("should buffer incomplete CSI sequence", () => {
processInput("\x1b[");
assert.deepStrictEqual(emittedSequences, []);
processInput("1;");
assert.deepStrictEqual(emittedSequences, []);
processInput("5H");
assert.deepStrictEqual(emittedSequences, ["\x1b[1;5H"]);
});
it("should buffer split across many chunks", () => {
// One byte per process() call — worst-case fragmentation (e.g. slow SSH).
processInput("\x1b");
processInput("[");
processInput("<");
processInput("3");
processInput("5");
processInput(";");
processInput("2");
processInput("0");
processInput(";");
processInput("5");
processInput("m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<35;20;5m"]);
});
it("should flush incomplete sequence after timeout", async () => {
processInput("\x1b[<35");
assert.deepStrictEqual(emittedSequences, []);
// Wait for timeout
await wait(15);
assert.deepStrictEqual(emittedSequences, ["\x1b[<35"]);
});
});
// Plain characters and escape sequences interleaved in one chunk.
describe("Mixed Content", () => {
it("should handle characters followed by escape sequence", () => {
processInput("abc\x1b[A");
assert.deepStrictEqual(emittedSequences, ["a", "b", "c", "\x1b[A"]);
});
it("should handle escape sequence followed by characters", () => {
processInput("\x1b[Aabc");
assert.deepStrictEqual(emittedSequences, ["\x1b[A", "a", "b", "c"]);
});
it("should handle multiple complete sequences", () => {
processInput("\x1b[A\x1b[B\x1b[C");
assert.deepStrictEqual(emittedSequences, ["\x1b[A", "\x1b[B", "\x1b[C"]);
});
it("should handle partial sequence with preceding characters", () => {
processInput("abc\x1b[<35");
assert.deepStrictEqual(emittedSequences, ["a", "b", "c"]);
assert.strictEqual(buffer.getBuffer(), "\x1b[<35");
processInput(";20;5m");
assert.deepStrictEqual(emittedSequences, ["a", "b", "c", "\x1b[<35;20;5m"]);
});
});
// Kitty keyboard protocol: CSI ... u key events, optionally with ":3" release markers.
describe("Kitty Keyboard Protocol", () => {
it("should handle Kitty CSI u press events", () => {
// Press 'a' in Kitty protocol
processInput("\x1b[97u");
assert.deepStrictEqual(emittedSequences, ["\x1b[97u"]);
});
it("should handle Kitty CSI u release events", () => {
// Release 'a' in Kitty protocol
processInput("\x1b[97;1:3u");
assert.deepStrictEqual(emittedSequences, ["\x1b[97;1:3u"]);
});
it("should handle batched Kitty press and release", () => {
// Press 'a', release 'a' batched together (common over SSH)
processInput("\x1b[97u\x1b[97;1:3u");
assert.deepStrictEqual(emittedSequences, ["\x1b[97u", "\x1b[97;1:3u"]);
});
it("should handle multiple batched Kitty events", () => {
// Press 'a', release 'a', press 'b', release 'b'
processInput("\x1b[97u\x1b[97;1:3u\x1b[98u\x1b[98;1:3u");
assert.deepStrictEqual(emittedSequences, ["\x1b[97u", "\x1b[97;1:3u", "\x1b[98u", "\x1b[98;1:3u"]);
});
it("should handle Kitty arrow keys with event type", () => {
// Up arrow press with event type
processInput("\x1b[1;1:1A");
assert.deepStrictEqual(emittedSequences, ["\x1b[1;1:1A"]);
});
it("should handle Kitty functional keys with event type", () => {
// Delete key release
processInput("\x1b[3;1:3~");
assert.deepStrictEqual(emittedSequences, ["\x1b[3;1:3~"]);
});
it("should handle plain characters mixed with Kitty sequences", () => {
// Plain 'a' followed by Kitty release
processInput("a\x1b[97;1:3u");
assert.deepStrictEqual(emittedSequences, ["a", "\x1b[97;1:3u"]);
});
it("should handle Kitty sequence followed by plain characters", () => {
processInput("\x1b[97ua");
assert.deepStrictEqual(emittedSequences, ["\x1b[97u", "a"]);
});
it("should handle rapid typing simulation with Kitty protocol", () => {
// Simulates typing "hi" quickly with releases interleaved
processInput("\x1b[104u\x1b[104;1:3u\x1b[105u\x1b[105;1:3u");
assert.deepStrictEqual(emittedSequences, ["\x1b[104u", "\x1b[104;1:3u", "\x1b[105u", "\x1b[105;1:3u"]);
});
});
// SGR mouse reporting (CSI < ... M/m) and legacy ESC[M + 3 bytes.
describe("Mouse Events", () => {
it("should handle mouse press event", () => {
processInput("\x1b[<0;10;5M");
assert.deepStrictEqual(emittedSequences, ["\x1b[<0;10;5M"]);
});
it("should handle mouse release event", () => {
processInput("\x1b[<0;10;5m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<0;10;5m"]);
});
it("should handle mouse move event", () => {
processInput("\x1b[<35;20;5m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<35;20;5m"]);
});
it("should handle split mouse events", () => {
processInput("\x1b[<3");
processInput("5;1");
processInput("5;");
processInput("10m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<35;15;10m"]);
});
it("should handle multiple mouse events", () => {
processInput("\x1b[<35;1;1m\x1b[<35;2;2m\x1b[<35;3;3m");
assert.deepStrictEqual(emittedSequences, ["\x1b[<35;1;1m", "\x1b[<35;2;2m", "\x1b[<35;3;3m"]);
});
it("should handle old-style mouse sequence (ESC[M + 3 bytes)", () => {
// Legacy X10 encoding consumes exactly 3 bytes after ESC[M; the trailing 'c' is plain input.
processInput("\x1b[M abc");
assert.deepStrictEqual(emittedSequences, ["\x1b[M ab", "c"]);
});
it("should buffer incomplete old-style mouse sequence", () => {
processInput("\x1b[M");
assert.strictEqual(buffer.getBuffer(), "\x1b[M");
processInput(" a");
assert.strictEqual(buffer.getBuffer(), "\x1b[M a");
processInput("b");
assert.deepStrictEqual(emittedSequences, ["\x1b[M ab"]);
});
});
describe("Edge Cases", () => {
it("should handle empty input", () => {
processInput("");
// Empty string emits an empty data event
assert.deepStrictEqual(emittedSequences, [""]);
});
it("should handle lone escape character with timeout", async () => {
// A bare ESC could be the start of a sequence, so it is held until timeout.
processInput("\x1b");
assert.deepStrictEqual(emittedSequences, []);
// After timeout, should emit
await wait(15);
assert.deepStrictEqual(emittedSequences, ["\x1b"]);
});
it("should handle lone escape character with explicit flush", () => {
processInput("\x1b");
assert.deepStrictEqual(emittedSequences, []);
const flushed = buffer.flush();
assert.deepStrictEqual(flushed, ["\x1b"]);
});
it("should handle buffer input", () => {
// process() accepts Buffer as well as string.
processInput(Buffer.from("\x1b[A"));
assert.deepStrictEqual(emittedSequences, ["\x1b[A"]);
});
it("should handle very long sequences", () => {
const longSeq = `\x1b[${"1;".repeat(50)}H`;
processInput(longSeq);
assert.deepStrictEqual(emittedSequences, [longSeq]);
});
});
describe("Flush", () => {
it("should flush incomplete sequences", () => {
processInput("\x1b[<35");
const flushed = buffer.flush();
assert.deepStrictEqual(flushed, ["\x1b[<35"]);
assert.strictEqual(buffer.getBuffer(), "");
});
it("should return empty array if nothing to flush", () => {
const flushed = buffer.flush();
assert.deepStrictEqual(flushed, []);
});
it("should emit flushed data via timeout", async () => {
processInput("\x1b[<35");
assert.deepStrictEqual(emittedSequences, []);
// Wait for timeout to flush
await wait(15);
assert.deepStrictEqual(emittedSequences, ["\x1b[<35"]);
});
});
describe("Clear", () => {
it("should clear buffered content without emitting", () => {
processInput("\x1b[<35");
assert.strictEqual(buffer.getBuffer(), "\x1b[<35");
buffer.clear();
assert.strictEqual(buffer.getBuffer(), "");
assert.deepStrictEqual(emittedSequences, []);
});
});
// Bracketed paste: content between ESC[200~ and ESC[201~ is emitted as a
// single "paste" event, with no "data" events for the pasted text.
describe("Bracketed Paste", () => {
let emittedPaste: string[] = [];
// Local beforeEach rebuilds the buffer with a "paste" listener attached
// (runs in addition to the outer beforeEach, replacing its buffer).
beforeEach(() => {
buffer = new StdinBuffer({ timeout: 10 });
// Collect emitted sequences
emittedSequences = [];
buffer.on("data", (sequence) => {
emittedSequences.push(sequence);
});
// Collect paste events
emittedPaste = [];
buffer.on("paste", (data) => {
emittedPaste.push(data);
});
});
it("should emit paste event for complete bracketed paste", () => {
const pasteStart = "\x1b[200~";
const pasteEnd = "\x1b[201~";
const content = "hello world";
processInput(pasteStart + content + pasteEnd);
assert.deepStrictEqual(emittedPaste, ["hello world"]);
assert.deepStrictEqual(emittedSequences, []); // No data events during paste
});
it("should handle paste arriving in chunks", () => {
processInput("\x1b[200~");
assert.deepStrictEqual(emittedPaste, []);
processInput("hello ");
assert.deepStrictEqual(emittedPaste, []);
processInput("world\x1b[201~");
assert.deepStrictEqual(emittedPaste, ["hello world"]);
assert.deepStrictEqual(emittedSequences, []);
});
it("should handle paste with input before and after", () => {
processInput("a");
processInput("\x1b[200~pasted\x1b[201~");
processInput("b");
assert.deepStrictEqual(emittedSequences, ["a", "b"]);
assert.deepStrictEqual(emittedPaste, ["pasted"]);
});
it("should handle paste with newlines", () => {
processInput("\x1b[200~line1\nline2\nline3\x1b[201~");
assert.deepStrictEqual(emittedPaste, ["line1\nline2\nline3"]);
assert.deepStrictEqual(emittedSequences, []);
});
it("should handle paste with unicode", () => {
processInput("\x1b[200~Hello 世界 🎉\x1b[201~");
assert.deepStrictEqual(emittedPaste, ["Hello 世界 🎉"]);
assert.deepStrictEqual(emittedSequences, []);
});
});
describe("Destroy", () => {
it("should clear buffer on destroy", () => {
processInput("\x1b[<35");
assert.strictEqual(buffer.getBuffer(), "\x1b[<35");
buffer.destroy();
assert.strictEqual(buffer.getBuffer(), "");
});
it("should clear pending timeouts on destroy", async () => {
processInput("\x1b[<35");
buffer.destroy();
// Wait longer than timeout
await wait(15);
// Should not have emitted anything
assert.deepStrictEqual(emittedSequences, []);
});
});
});