Merge branch 'main' into feat/tui-overlay-options

This commit is contained in:
Mario Zechner 2026-01-13 22:06:02 +01:00
commit 7d45e434de
90 changed files with 10277 additions and 1700 deletions

View file

@ -242,7 +242,15 @@ ${chalk.bold("Environment Variables:")}
CEREBRAS_API_KEY - Cerebras API key
XAI_API_KEY - xAI Grok API key
OPENROUTER_API_KEY - OpenRouter API key
AI_GATEWAY_API_KEY - Vercel AI Gateway API key
ZAI_API_KEY - ZAI API key
MISTRAL_API_KEY - Mistral API key
MINIMAX_API_KEY - MiniMax API key
AWS_PROFILE - AWS profile for Amazon Bedrock
AWS_ACCESS_KEY_ID - AWS access key for Amazon Bedrock
AWS_SECRET_ACCESS_KEY - AWS secret key for Amazon Bedrock
AWS_BEARER_TOKEN_BEDROCK - Bedrock API key (bearer token)
AWS_REGION - AWS region for Amazon Bedrock (e.g., us-east-1)
${ENV_AGENT_DIR.padEnd(23)} - Session storage directory (default: ~/${CONFIG_DIR_NAME}/agent)
${chalk.bold("Available Tools (default: read, bash, edit, write):")}

View file

@ -25,7 +25,7 @@ function formatTokenCount(count: number): string {
* List available models, optionally filtered by search pattern
*/
export async function listModels(modelRegistry: ModelRegistry, searchPattern?: string): Promise<void> {
const models = await modelRegistry.getAvailable();
const models = modelRegistry.getAvailable();
if (models.length === 0) {
console.log("No models available. Set API keys in environment variables.");

View file

@ -1520,8 +1520,8 @@ export class AgentSession {
if (isContextOverflow(message, contextWindow)) return false;
const err = message.errorMessage;
// Match: overloaded_error, rate limit, 429, 500, 502, 503, 504, service unavailable, connection error
return /overloaded|rate.?limit|too many requests|429|500|502|503|504|service.?unavailable|server error|internal error|connection.?error/i.test(
// Match: overloaded_error, rate limit, 429, 500, 502, 503, 504, service unavailable, connection error, other side closed
return /overloaded|rate.?limit|too many requests|429|500|502|503|504|service.?unavailable|server error|internal error|connection.?error|other side closed/i.test(
err,
);
}

View file

@ -1,5 +1,7 @@
/**
* Extension loader - loads TypeScript extension modules using jiti.
*
* Uses @mariozechner/jiti fork with virtualModules support for compiled Bun binaries.
*/
import * as fs from "node:fs";
@ -7,9 +9,19 @@ import { createRequire } from "node:module";
import * as os from "node:os";
import * as path from "node:path";
import { fileURLToPath } from "node:url";
import { createJiti } from "@mariozechner/jiti";
import * as _bundledPiAgentCore from "@mariozechner/pi-agent-core";
import * as _bundledPiAi from "@mariozechner/pi-ai";
import type { KeyId } from "@mariozechner/pi-tui";
import { createJiti } from "jiti";
import * as _bundledPiTui from "@mariozechner/pi-tui";
// Static imports of packages that extensions may use.
// These MUST be static so Bun bundles them into the compiled binary.
// The virtualModules option then makes them available to extensions.
import * as _bundledTypebox from "@sinclair/typebox";
import { getAgentDir, isBunBinary } from "../../config.js";
// NOTE: This import works because loader.ts exports are NOT re-exported from index.ts,
// avoiding a circular dependency. Extensions can import from @mariozechner/pi-coding-agent.
import * as _bundledPiCodingAgent from "../../index.js";
import { createEventBus, type EventBus } from "../event-bus.js";
import type { ExecOptions } from "../exec.js";
import { execCommand } from "../exec.js";
@ -24,8 +36,21 @@ import type {
ToolDefinition,
} from "./types.js";
/** Modules available to extensions via virtualModules (for compiled Bun binary) */
const VIRTUAL_MODULES: Record<string, unknown> = {
"@sinclair/typebox": _bundledTypebox,
"@mariozechner/pi-agent-core": _bundledPiAgentCore,
"@mariozechner/pi-tui": _bundledPiTui,
"@mariozechner/pi-ai": _bundledPiAi,
"@mariozechner/pi-coding-agent": _bundledPiCodingAgent,
};
const require = createRequire(import.meta.url);
/**
* Get aliases for jiti (used in Node.js/development mode).
* In Bun binary mode, virtualModules is used instead.
*/
let _aliases: Record<string, string> | null = null;
function getAliases(): Record<string, string> {
if (_aliases) return _aliases;
@ -38,11 +63,12 @@ function getAliases(): Record<string, string> {
_aliases = {
"@mariozechner/pi-coding-agent": packageIndex,
"@mariozechner/pi-coding-agent/extensions": path.resolve(__dirname, "index.js"),
"@mariozechner/pi-agent-core": require.resolve("@mariozechner/pi-agent-core"),
"@mariozechner/pi-tui": require.resolve("@mariozechner/pi-tui"),
"@mariozechner/pi-ai": require.resolve("@mariozechner/pi-ai"),
"@sinclair/typebox": typeboxRoot,
};
return _aliases;
}
@ -213,18 +239,15 @@ function createExtensionAPI(
return api;
}
/**
 * Load an extension module using the runtime's native dynamic import
 * (used when running as a compiled Bun binary).
 *
 * @param modulePath Resolved path of the extension module to import.
 * @returns The extension factory (the module's default export, or the module
 *          namespace itself when there is no default), or undefined when the
 *          resolved value is not a function.
 */
async function loadBun(modulePath: string) {
	// Named `modulePath` (not `path`) to avoid shadowing the `node:path` import.
	const module = await import(modulePath);
	const factory = (module.default ?? module) as ExtensionFactory;
	return typeof factory !== "function" ? undefined : factory;
}
async function loadJiti(path: string) {
async function loadExtensionModule(extensionPath: string) {
const jiti = createJiti(import.meta.url, {
alias: getAliases(),
// In Bun binary: use virtualModules for bundled packages (no filesystem resolution)
// Also disable tryNative so jiti handles ALL imports (not just the entry point)
// In Node.js/dev: use aliases to resolve to node_modules paths
...(isBunBinary ? { virtualModules: VIRTUAL_MODULES, tryNative: false } : { alias: getAliases() }),
});
const module = await jiti.import(path, { default: true });
const module = await jiti.import(extensionPath, { default: true });
const factory = module as ExtensionFactory;
return typeof factory !== "function" ? undefined : factory;
}
@ -254,7 +277,7 @@ async function loadExtension(
const resolvedPath = resolvePath(extensionPath, cwd);
try {
const factory = isBunBinary ? await loadBun(resolvedPath) : await loadJiti(resolvedPath);
const factory = await loadExtensionModule(resolvedPath);
if (!factory) {
return { extension: null, error: `Extension does not export a valid factory function: ${extensionPath}` };
}

View file

@ -37,6 +37,7 @@ const ModelDefinitionSchema = Type.Object({
Type.Literal("openai-codex-responses"),
Type.Literal("anthropic-messages"),
Type.Literal("google-generative-ai"),
Type.Literal("bedrock-converse-stream"),
]),
),
reasoning: Type.Boolean(),
@ -63,6 +64,7 @@ const ProviderConfigSchema = Type.Object({
Type.Literal("openai-codex-responses"),
Type.Literal("anthropic-messages"),
Type.Literal("google-generative-ai"),
Type.Literal("bedrock-converse-stream"),
]),
),
headers: Type.Optional(Type.Record(Type.String(), Type.String())),
@ -373,6 +375,13 @@ export class ModelRegistry {
return this.authStorage.getApiKey(model.provider);
}
/**
 * Get the API key for a provider by its id alone.
 *
 * Companion to getApiKey(model), which derives the provider from a Model
 * instance; use this when only the provider name is known.
 *
 * @param provider Provider id, e.g. "anthropic" or "openrouter".
 * @returns The stored API key, or undefined when none is configured.
 */
async getApiKeyForProvider(provider: string): Promise<string | undefined> {
	return this.authStorage.getApiKey(provider);
}
/**
* Check if a model is using OAuth credentials (subscription).
*/

View file

@ -11,6 +11,7 @@ import type { ModelRegistry } from "./model-registry.js";
/** Default model IDs for each known provider */
export const defaultModelPerProvider: Record<KnownProvider, string> = {
"amazon-bedrock": "global.anthropic.claude-sonnet-4-5-20250929-v1:0",
anthropic: "claude-sonnet-4-5",
openai: "gpt-5.1-codex",
"openai-codex": "gpt-5.2-codex",
@ -20,11 +21,13 @@ export const defaultModelPerProvider: Record<KnownProvider, string> = {
"google-vertex": "gemini-3-pro-preview",
"github-copilot": "gpt-4o",
openrouter: "openai/gpt-5.1-codex",
"vercel-ai-gateway": "anthropic/claude-opus-4.5",
xai: "grok-4-fast-non-reasoning",
groq: "openai/gpt-oss-120b",
cerebras: "zai-glm-4.6",
zai: "glm-4.6",
mistral: "devstral-medium-latest",
minimax: "MiniMax-M2.1",
opencode: "claude-opus-4-5",
};

View file

@ -628,14 +628,16 @@ export async function createAgentSession(options: CreateAgentSessionOptions = {}
steeringMode: settingsManager.getSteeringMode(),
followUpMode: settingsManager.getFollowUpMode(),
thinkingBudgets: settingsManager.getThinkingBudgets(),
getApiKey: async () => {
const currentModel = agent.state.model;
if (!currentModel) {
getApiKey: async (provider) => {
// Use the provider argument from the in-flight request;
// agent.state.model may already be switched mid-turn.
const resolvedProvider = provider || agent.state.model?.provider;
if (!resolvedProvider) {
throw new Error("No model selected");
}
const key = await modelRegistry.getApiKey(currentModel);
const key = await modelRegistry.getApiKeyForProvider(resolvedProvider);
if (!key) {
throw new Error(`No API key found for provider "${currentModel.provider}"`);
throw new Error(`No API key found for provider "${resolvedProvider}"`);
}
return key;
},

View file

@ -88,8 +88,6 @@ export type {
UserBashEventResult,
} from "./core/extensions/index.js";
export {
createExtensionRuntime,
discoverAndLoadExtensions,
ExtensionRunner,
isBashToolResult,
isEditToolResult,
@ -98,7 +96,6 @@ export {
isLsToolResult,
isReadToolResult,
isWriteToolResult,
loadExtensions,
wrapRegisteredTool,
wrapRegisteredTools,
wrapToolsWithExtensions,

View file

@ -12,6 +12,55 @@ import {
import { theme } from "../theme/theme.js";
import { DynamicBorder } from "./dynamic-border.js";
// EnabledIds: null = all enabled (no filter), string[] = explicit ordered list
type EnabledIds = string[] | null;

/** Whether `id` is currently enabled; a null filter enables everything. */
function isEnabled(enabledIds: EnabledIds, id: string): boolean {
	if (enabledIds === null) return true;
	return enabledIds.indexOf(id) !== -1;
}
/**
 * Flip the enabled state of `id`.
 * From the "all enabled" state (null) the first toggle narrows the filter
 * down to just this id; otherwise the id is removed if present (first
 * occurrence only) or appended at the end if not.
 */
function toggle(enabledIds: EnabledIds, id: string): EnabledIds {
	if (enabledIds === null) {
		// No explicit filter yet: start one containing only this id.
		return [id];
	}
	const position = enabledIds.findIndex((existing) => existing === id);
	if (position === -1) return [...enabledIds, id];
	const copy = [...enabledIds];
	copy.splice(position, 1);
	return copy;
}
/**
 * Enable `targetIds` (or every id when omitted), preserving existing order
 * and appending newly-enabled ids at the end.
 * Collapses back to the null "no filter" sentinel once every id is enabled.
 */
function enableAll(enabledIds: EnabledIds, allIds: string[], targetIds?: string[]): EnabledIds {
	// null already means "everything on" — nothing to add.
	if (enabledIds === null) return null;
	const merged = [...enabledIds];
	const present = new Set(merged);
	for (const id of targetIds ?? allIds) {
		if (!present.has(id)) {
			present.add(id);
			merged.push(id);
		}
	}
	return merged.length === allIds.length ? null : merged;
}
/**
 * Disable `targetIds` (or everything when omitted).
 * From the null "all enabled" state this materializes an explicit list of
 * the surviving ids; otherwise the targets are filtered out of the list.
 */
function clearAll(enabledIds: EnabledIds, allIds: string[], targetIds?: string[]): EnabledIds {
	if (enabledIds === null) {
		// Everything was on: keep only the ids outside the target set.
		if (!targetIds) return [];
		return allIds.filter((id) => !targetIds.includes(id));
	}
	const toRemove = new Set(targetIds ?? enabledIds);
	return enabledIds.filter((id) => !toRemove.has(id));
}
/**
 * Move `id` by `delta` positions within the ordered enabled list.
 *
 * When no explicit filter exists (null), the full `allIds` order is the
 * starting list, so a successful move returns an explicit ordering.
 *
 * No-op cases (id not present, or the move would leave the list bounds)
 * return `enabledIds` unchanged. In particular the null "all enabled"
 * sentinel is preserved on a no-op instead of being materialized into a
 * full explicit list, which would silently change the "no filter" state
 * (footer display, persistence) without any actual reorder.
 */
function move(enabledIds: EnabledIds, allIds: string[], id: string, delta: number): EnabledIds {
	const list = enabledIds ?? [...allIds];
	const index = list.indexOf(id);
	if (index < 0) return enabledIds;
	const newIndex = index + delta;
	if (newIndex < 0 || newIndex >= list.length) return enabledIds;
	const result = [...list];
	[result[index], result[newIndex]] = [result[newIndex], result[index]];
	return result;
}
/**
 * Display order: enabled ids first (in their stored order), followed by the
 * remaining ids in `allIds` order. With no filter, `allIds` is returned as-is.
 */
function getSortedIds(enabledIds: EnabledIds, allIds: string[]): string[] {
	if (enabledIds === null) return allIds;
	const ordered = [...enabledIds];
	const enabled = new Set(enabledIds);
	for (const id of allIds) {
		if (!enabled.has(id)) ordered.push(id);
	}
	return ordered;
}
interface ModelItem {
fullId: string;
model: Model<any>;
@ -44,7 +93,9 @@ export interface ModelsCallbacks {
* Changes are session-only until explicitly persisted with Ctrl+S.
*/
export class ScopedModelsSelectorComponent extends Container {
private items: ModelItem[] = [];
private modelsById: Map<string, Model<any>> = new Map();
private allIds: string[] = [];
private enabledIds: EnabledIds = null;
private filteredItems: ModelItem[] = [];
private selectedIndex = 0;
private searchInput: Input;
@ -58,28 +109,14 @@ export class ScopedModelsSelectorComponent extends Container {
super();
this.callbacks = callbacks;
// Group models by provider for organized display
const modelsByProvider = new Map<string, Model<any>[]>();
for (const model of config.allModels) {
const list = modelsByProvider.get(model.provider) ?? [];
list.push(model);
modelsByProvider.set(model.provider, list);
const fullId = `${model.provider}/${model.id}`;
this.modelsById.set(fullId, model);
this.allIds.push(fullId);
}
// Build items - group by provider
for (const [provider, models] of modelsByProvider) {
for (const model of models) {
const fullId = `${provider}/${model.id}`;
// If no filter defined, all models are enabled by default
const isEnabled = !config.hasEnabledModelsFilter || config.enabledModelIds.has(fullId);
this.items.push({
fullId,
model,
enabled: isEnabled,
});
}
}
this.filteredItems = this.getSortedItems();
this.enabledIds = config.hasEnabledModelsFilter ? [...config.enabledModelIds] : null;
this.filteredItems = this.buildItems();
// Header
this.addChild(new DynamicBorder());
@ -103,41 +140,34 @@ export class ScopedModelsSelectorComponent extends Container {
this.addChild(this.footerText);
this.addChild(new DynamicBorder());
this.updateList();
}
/** Get items sorted with enabled items first */
private getSortedItems(): ModelItem[] {
const enabled = this.items.filter((i) => i.enabled);
const disabled = this.items.filter((i) => !i.enabled);
return [...enabled, ...disabled];
private buildItems(): ModelItem[] {
return getSortedIds(this.enabledIds, this.allIds).map((id) => ({
fullId: id,
model: this.modelsById.get(id)!,
enabled: isEnabled(this.enabledIds, id),
}));
}
private getFooterText(): string {
const enabledCount = this.items.filter((i) => i.enabled).length;
const allEnabled = enabledCount === this.items.length;
const countText = allEnabled ? "all enabled" : `${enabledCount}/${this.items.length} enabled`;
const parts = ["Enter toggle", "^A all", "^X clear", "^P provider", "^S save", countText];
if (this.isDirty) {
return theme.fg("dim", ` ${parts.join(" · ")} `) + theme.fg("warning", "(unsaved)");
}
return theme.fg("dim", ` ${parts.join(" · ")}`);
const enabledCount = this.enabledIds?.length ?? this.allIds.length;
const allEnabled = this.enabledIds === null;
const countText = allEnabled ? "all enabled" : `${enabledCount}/${this.allIds.length} enabled`;
const parts = ["Enter toggle", "^A all", "^X clear", "^P provider", "Alt+↑↓ reorder", "^S save", countText];
return this.isDirty
? theme.fg("dim", ` ${parts.join(" · ")} `) + theme.fg("warning", "(unsaved)")
: theme.fg("dim", ` ${parts.join(" · ")}`);
}
private updateFooter(): void {
this.footerText.setText(this.getFooterText());
}
private filterItems(query: string): void {
const sorted = this.getSortedItems();
if (!query) {
this.filteredItems = sorted;
} else {
this.filteredItems = fuzzyFilter(sorted, query, (item) => `${item.model.id} ${item.model.provider}`);
}
private refresh(): void {
const query = this.searchInput.getValue();
const items = this.buildItems();
this.filteredItems = query ? fuzzyFilter(items, query, (i) => `${i.model.id} ${i.model.provider}`) : items;
this.selectedIndex = Math.min(this.selectedIndex, Math.max(0, this.filteredItems.length - 1));
this.updateList();
this.footerText.setText(this.getFooterText());
}
private updateList(): void {
@ -153,53 +183,26 @@ export class ScopedModelsSelectorComponent extends Container {
Math.min(this.selectedIndex - Math.floor(this.maxVisible / 2), this.filteredItems.length - this.maxVisible),
);
const endIndex = Math.min(startIndex + this.maxVisible, this.filteredItems.length);
// Only show status if there's a filter (not all models enabled)
const allEnabled = this.items.every((i) => i.enabled);
const allEnabled = this.enabledIds === null;
for (let i = startIndex; i < endIndex; i++) {
const item = this.filteredItems[i];
if (!item) continue;
const item = this.filteredItems[i]!;
const isSelected = i === this.selectedIndex;
const prefix = isSelected ? theme.fg("accent", "→ ") : " ";
const modelText = isSelected ? theme.fg("accent", item.model.id) : item.model.id;
const providerBadge = theme.fg("muted", ` [${item.model.provider}]`);
// Only show checkmarks when there's actually a filter
const status = allEnabled ? "" : item.enabled ? theme.fg("success", " ✓") : theme.fg("dim", " ✗");
this.listContainer.addChild(new Text(`${prefix}${modelText}${providerBadge}${status}`, 0, 0));
}
// Add scroll indicator if needed
if (startIndex > 0 || endIndex < this.filteredItems.length) {
const scrollInfo = theme.fg("muted", ` (${this.selectedIndex + 1}/${this.filteredItems.length})`);
this.listContainer.addChild(new Text(scrollInfo, 0, 0));
this.listContainer.addChild(
new Text(theme.fg("muted", ` (${this.selectedIndex + 1}/${this.filteredItems.length})`), 0, 0),
);
}
}
private toggleItem(item: ModelItem): void {
// If all models are currently enabled (no scope yet), first toggle starts fresh:
// clear all and enable only the selected model
const allEnabled = this.items.every((i) => i.enabled);
if (allEnabled) {
for (const i of this.items) {
i.enabled = false;
}
item.enabled = true;
this.isDirty = true;
this.callbacks.onClearAll();
this.callbacks.onModelToggle(item.fullId, true);
} else {
item.enabled = !item.enabled;
this.isDirty = true;
this.callbacks.onModelToggle(item.fullId, item.enabled);
}
// Re-sort and re-filter to move item to correct section
this.filterItems(this.searchInput.getValue());
this.updateFooter();
}
handleInput(data: string): void {
const kb = getEditorKeybindings();
@ -217,70 +220,81 @@ export class ScopedModelsSelectorComponent extends Container {
return;
}
// Alt+Up/Down - Reorder enabled models
if (matchesKey(data, Key.alt("up")) || matchesKey(data, Key.alt("down"))) {
const item = this.filteredItems[this.selectedIndex];
if (item && isEnabled(this.enabledIds, item.fullId)) {
const delta = matchesKey(data, Key.alt("up")) ? -1 : 1;
const enabledList = this.enabledIds ?? this.allIds;
const currentIndex = enabledList.indexOf(item.fullId);
const newIndex = currentIndex + delta;
// Only move if within bounds
if (newIndex >= 0 && newIndex < enabledList.length) {
this.enabledIds = move(this.enabledIds, this.allIds, item.fullId, delta);
this.isDirty = true;
this.selectedIndex += delta;
this.refresh();
}
}
return;
}
// Toggle on Enter
if (matchesKey(data, Key.enter)) {
const item = this.filteredItems[this.selectedIndex];
if (item) {
this.toggleItem(item);
const wasAllEnabled = this.enabledIds === null;
this.enabledIds = toggle(this.enabledIds, item.fullId);
this.isDirty = true;
if (wasAllEnabled) this.callbacks.onClearAll();
this.callbacks.onModelToggle(item.fullId, isEnabled(this.enabledIds, item.fullId));
this.refresh();
}
return;
}
// Ctrl+A - Enable all (filtered if search active, otherwise all)
if (matchesKey(data, Key.ctrl("a"))) {
const targets = this.searchInput.getValue() ? this.filteredItems : this.items;
for (const item of targets) {
item.enabled = true;
}
const targetIds = this.searchInput.getValue() ? this.filteredItems.map((i) => i.fullId) : undefined;
this.enabledIds = enableAll(this.enabledIds, this.allIds, targetIds);
this.isDirty = true;
this.callbacks.onEnableAll(targets.map((i) => i.fullId));
this.filterItems(this.searchInput.getValue());
this.updateFooter();
this.callbacks.onEnableAll(targetIds ?? this.allIds);
this.refresh();
return;
}
// Ctrl+X - Clear all (filtered if search active, otherwise all)
if (matchesKey(data, Key.ctrl("x"))) {
const targets = this.searchInput.getValue() ? this.filteredItems : this.items;
for (const item of targets) {
item.enabled = false;
}
const targetIds = this.searchInput.getValue() ? this.filteredItems.map((i) => i.fullId) : undefined;
this.enabledIds = clearAll(this.enabledIds, this.allIds, targetIds);
this.isDirty = true;
this.callbacks.onClearAll();
this.filterItems(this.searchInput.getValue());
this.updateFooter();
this.refresh();
return;
}
// Ctrl+P - Toggle provider of current item
if (matchesKey(data, Key.ctrl("p"))) {
const currentItem = this.filteredItems[this.selectedIndex];
if (currentItem) {
const provider = currentItem.model.provider;
const providerItems = this.items.filter((i) => i.model.provider === provider);
const allEnabled = providerItems.every((i) => i.enabled);
const newState = !allEnabled;
for (const item of providerItems) {
item.enabled = newState;
}
const item = this.filteredItems[this.selectedIndex];
if (item) {
const provider = item.model.provider;
const providerIds = this.allIds.filter((id) => this.modelsById.get(id)!.provider === provider);
const allEnabled = providerIds.every((id) => isEnabled(this.enabledIds, id));
this.enabledIds = allEnabled
? clearAll(this.enabledIds, this.allIds, providerIds)
: enableAll(this.enabledIds, this.allIds, providerIds);
this.isDirty = true;
this.callbacks.onToggleProvider(
provider,
providerItems.map((i) => i.fullId),
newState,
);
this.filterItems(this.searchInput.getValue());
this.updateFooter();
this.callbacks.onToggleProvider(provider, providerIds, !allEnabled);
this.refresh();
}
return;
}
// Ctrl+S - Save/persist to settings
if (matchesKey(data, Key.ctrl("s"))) {
const enabledIds = this.items.filter((i) => i.enabled).map((i) => i.fullId);
this.callbacks.onPersist(enabledIds);
this.callbacks.onPersist(this.enabledIds ?? [...this.allIds]);
this.isDirty = false;
this.updateFooter();
this.footerText.setText(this.getFooterText());
return;
}
@ -288,7 +302,7 @@ export class ScopedModelsSelectorComponent extends Container {
if (matchesKey(data, Key.ctrl("c"))) {
if (this.searchInput.getValue()) {
this.searchInput.setValue("");
this.filterItems("");
this.refresh();
} else {
this.callbacks.onCancel();
}
@ -303,7 +317,7 @@ export class ScopedModelsSelectorComponent extends Container {
// Pass everything else to search input
this.searchInput.handleInput(data);
this.filterItems(this.searchInput.getValue());
this.refresh();
}
getSearchInput(): Input {

View file

@ -603,10 +603,9 @@ export class InteractiveMode {
const entries = parseChangelog(changelogPath);
if (!lastVersion) {
if (entries.length > 0) {
this.settingsManager.setLastChangelogVersion(VERSION);
return entries.map((e) => e.content).join("\n\n");
}
// Fresh install - just record the version, don't show changelog
this.settingsManager.setLastChangelogVersion(VERSION);
return undefined;
} else {
const newEntries = getNewEntries(entries, lastVersion);
if (newEntries.length > 0) {
@ -3271,7 +3270,7 @@ export class InteractiveMode {
}
// Create the preview URL
const previewUrl = `https://shittycodingagent.ai/session?${gistId}`;
const previewUrl = `https://buildwithpi.ai/session?${gistId}`;
this.showStatus(`Share URL: ${previewUrl}\nGist: ${gistUrl}`);
} catch (error: unknown) {
if (!loader.signal.aborted) {

View file

@ -2,13 +2,13 @@
"$schema": "https://raw.githubusercontent.com/badlogic/pi-mono/main/packages/coding-agent/theme-schema.json",
"name": "light",
"vars": {
"teal": "#5f8787",
"blue": "#5f87af",
"green": "#87af87",
"red": "#af5f5f",
"yellow": "#d7af5f",
"teal": "#5a8080",
"blue": "#547da7",
"green": "#588458",
"red": "#aa5555",
"yellow": "#9a7326",
"mediumGray": "#6c6c6c",
"dimGray": "#8a8a8a",
"dimGray": "#767676",
"lightGray": "#b0b0b0",
"selectedBg": "#d0d0e0",
"userMsgBg": "#e8e8e8",
@ -68,9 +68,9 @@
"syntaxPunctuation": "#000000",
"thinkingOff": "lightGray",
"thinkingMinimal": "#9e9e9e",
"thinkingLow": "#5f87af",
"thinkingMedium": "#5f8787",
"thinkingMinimal": "#767676",
"thinkingLow": "blue",
"thinkingMedium": "teal",
"thinkingHigh": "#875f87",
"thinkingXhigh": "#8b008b",

View file

@ -29,6 +29,12 @@ export interface PrintModeOptions {
*/
export async function runPrintMode(session: AgentSession, options: PrintModeOptions): Promise<void> {
const { mode, messages = [], initialMessage, initialImages } = options;
if (mode === "json") {
const header = session.sessionManager.getHeader();
if (header) {
console.log(JSON.stringify(header));
}
}
// Set up extensions for print mode (no UI, no command context)
const extensionRunner = session.extensionRunner;
if (extensionRunner) {

View file

@ -1,3 +1,5 @@
import { getVips } from "./vips.js";
/**
* Convert image to PNG format for terminal display.
* Kitty graphics protocol requires PNG format (f=100).
@ -11,16 +13,23 @@ export async function convertToPng(
return { data: base64Data, mimeType };
}
const vips = await getVips();
if (!vips) {
// wasm-vips not available
return null;
}
try {
const sharp = (await import("sharp")).default;
const buffer = Buffer.from(base64Data, "base64");
const pngBuffer = await sharp(buffer).png().toBuffer();
const img = vips.Image.newFromBuffer(buffer);
const pngBuffer = img.writeToBuffer(".png");
img.delete();
return {
data: pngBuffer.toString("base64"),
data: Buffer.from(pngBuffer).toString("base64"),
mimeType: "image/png",
};
} catch {
// Sharp not available or conversion failed
// Conversion failed
return null;
}
}

View file

@ -1,4 +1,5 @@
import type { ImageContent } from "@mariozechner/pi-ai";
import { getVips } from "./vips.js";
export interface ImageResizeOptions {
maxWidth?: number; // Default: 2000
@ -29,9 +30,9 @@ const DEFAULT_OPTIONS: Required<ImageResizeOptions> = {
/** Helper to pick the smaller of two buffers */
function pickSmaller(
a: { buffer: Buffer; mimeType: string },
b: { buffer: Buffer; mimeType: string },
): { buffer: Buffer; mimeType: string } {
a: { buffer: Uint8Array; mimeType: string },
b: { buffer: Uint8Array; mimeType: string },
): { buffer: Uint8Array; mimeType: string } {
return a.buffer.length <= b.buffer.length ? a : b;
}
@ -39,7 +40,7 @@ function pickSmaller(
* Resize an image to fit within the specified max dimensions and file size.
* Returns the original image if it already fits within the limits.
*
* Uses sharp for image processing. If sharp is not available (e.g., in some
* Uses wasm-vips for image processing. If wasm-vips is not available (e.g., in some
* environments), returns the original image unchanged.
*
* Strategy for staying under maxBytes:
@ -52,12 +53,29 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
const opts = { ...DEFAULT_OPTIONS, ...options };
const buffer = Buffer.from(img.data, "base64");
let sharp: typeof import("sharp") | undefined;
const vipsOrNull = await getVips();
if (!vipsOrNull) {
// wasm-vips not available - return original image
// We can't get dimensions without vips, so return 0s
return {
data: img.data,
mimeType: img.mimeType,
originalWidth: 0,
originalHeight: 0,
width: 0,
height: 0,
wasResized: false,
};
}
// Capture non-null reference for use in nested functions
const vips = vipsOrNull;
// Load image to get metadata
let sourceImg: InstanceType<typeof vips.Image>;
try {
sharp = (await import("sharp")).default;
sourceImg = vips.Image.newFromBuffer(buffer);
} catch {
// Sharp not available - return original image
// We can't get dimensions without sharp, so return 0s
// Failed to load image
return {
data: img.data,
mimeType: img.mimeType,
@ -69,16 +87,14 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
};
}
const sharpImg = sharp(buffer);
const metadata = await sharpImg.metadata();
const originalWidth = metadata.width ?? 0;
const originalHeight = metadata.height ?? 0;
const format = metadata.format ?? img.mimeType?.split("/")[1] ?? "png";
const originalWidth = sourceImg.width;
const originalHeight = sourceImg.height;
// Check if already within all limits (dimensions AND size)
const originalSize = buffer.length;
if (originalWidth <= opts.maxWidth && originalHeight <= opts.maxHeight && originalSize <= opts.maxBytes) {
sourceImg.delete();
const format = img.mimeType?.split("/")[1] ?? "png";
return {
data: img.data,
mimeType: img.mimeType ?? `image/${format}`,
@ -104,37 +120,45 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
}
// Helper to resize and encode in both formats, returning the smaller one
async function tryBothFormats(
function tryBothFormats(
width: number,
height: number,
jpegQuality: number,
): Promise<{ buffer: Buffer; mimeType: string }> {
const resized = await sharp!(buffer)
.resize(width, height, { fit: "inside", withoutEnlargement: true })
.toBuffer();
): { buffer: Uint8Array; mimeType: string } {
// Load image fresh and resize using scale factor
// (Using newFromBuffer + resize instead of thumbnailBuffer to avoid lazy re-read issues)
const img = vips.Image.newFromBuffer(buffer);
const scale = Math.min(width / img.width, height / img.height);
const resized = scale < 1 ? img.resize(scale) : img;
const [pngBuffer, jpegBuffer] = await Promise.all([
sharp!(resized).png({ compressionLevel: 9 }).toBuffer(),
sharp!(resized).jpeg({ quality: jpegQuality }).toBuffer(),
]);
const pngBuffer = resized.writeToBuffer(".png");
const jpegBuffer = resized.writeToBuffer(".jpg", { Q: jpegQuality });
if (resized !== img) {
resized.delete();
}
img.delete();
return pickSmaller({ buffer: pngBuffer, mimeType: "image/png" }, { buffer: jpegBuffer, mimeType: "image/jpeg" });
}
// Clean up the source image
sourceImg.delete();
// Try to produce an image under maxBytes
const qualitySteps = [85, 70, 55, 40];
const scaleSteps = [1.0, 0.75, 0.5, 0.35, 0.25];
let best: { buffer: Buffer; mimeType: string };
let best: { buffer: Uint8Array; mimeType: string };
let finalWidth = targetWidth;
let finalHeight = targetHeight;
// First attempt: resize to target dimensions, try both formats
best = await tryBothFormats(targetWidth, targetHeight, opts.jpegQuality);
best = tryBothFormats(targetWidth, targetHeight, opts.jpegQuality);
if (best.buffer.length <= opts.maxBytes) {
return {
data: best.buffer.toString("base64"),
data: Buffer.from(best.buffer).toString("base64"),
mimeType: best.mimeType,
originalWidth,
originalHeight,
@ -146,11 +170,11 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
// Still too large - try JPEG with decreasing quality (and compare to PNG each time)
for (const quality of qualitySteps) {
best = await tryBothFormats(targetWidth, targetHeight, quality);
best = tryBothFormats(targetWidth, targetHeight, quality);
if (best.buffer.length <= opts.maxBytes) {
return {
data: best.buffer.toString("base64"),
data: Buffer.from(best.buffer).toString("base64"),
mimeType: best.mimeType,
originalWidth,
originalHeight,
@ -172,11 +196,11 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
}
for (const quality of qualitySteps) {
best = await tryBothFormats(finalWidth, finalHeight, quality);
best = tryBothFormats(finalWidth, finalHeight, quality);
if (best.buffer.length <= opts.maxBytes) {
return {
data: best.buffer.toString("base64"),
data: Buffer.from(best.buffer).toString("base64"),
mimeType: best.mimeType,
originalWidth,
originalHeight,
@ -191,7 +215,7 @@ export async function resizeImage(img: ImageContent, options?: ImageResizeOption
// Last resort: return smallest version we produced even if over limit
// (the API will reject it, but at least we tried everything)
return {
data: best.buffer.toString("base64"),
data: Buffer.from(best.buffer).toString("base64"),
mimeType: best.mimeType,
originalWidth,
originalHeight,

View file

@ -0,0 +1,40 @@
/**
* Singleton wrapper for wasm-vips initialization.
* wasm-vips requires async initialization, so we cache the instance.
*/
import type Vips from "wasm-vips";
// Cached instance plus the in-flight initialization promise, so concurrent
// callers share a single init attempt.
let vipsInstance: Awaited<ReturnType<typeof Vips>> | null = null;
let vipsInitPromise: Promise<Awaited<ReturnType<typeof Vips>> | null> | null = null;

/**
 * Get the initialized wasm-vips instance.
 * Returns null if wasm-vips is not available or fails to initialize; a failed
 * attempt is forgotten so a later call can retry.
 */
export async function getVips(): Promise<Awaited<ReturnType<typeof Vips>> | null> {
	if (vipsInstance) return vipsInstance;
	if (vipsInitPromise) return vipsInitPromise;

	vipsInitPromise = import("wasm-vips")
		.then(async (mod) => {
			vipsInstance = await mod.default();
			return vipsInstance;
		})
		.catch(() => {
			// wasm-vips missing or failed to initialize.
			return null;
		});

	const result = await vipsInitPromise;
	if (result === null) {
		// Drop the failed promise so the next caller retries initialization.
		vipsInitPromise = null;
	}
	return result;
}