v0.5.6: Fix CLI execution when installed globally

This commit is contained in:
Mario Zechner 2025-08-09 20:16:59 +02:00
parent db86195dd9
commit 9fee306075
10 changed files with 308 additions and 309 deletions

10
package-lock.json generated
View file

@ -778,10 +778,10 @@
},
"packages/agent": {
"name": "@mariozechner/pi-agent",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@mariozechner/pi-tui": "^0.5.4",
"@mariozechner/pi-tui": "^0.5.5",
"@types/glob": "^8.1.0",
"chalk": "^5.5.0",
"glob": "^11.0.3",
@ -1222,10 +1222,10 @@
},
"packages/pods": {
"name": "@mariozechner/pi",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@mariozechner/pi-agent": "^0.5.4",
"@mariozechner/pi-agent": "^0.5.5",
"chalk": "^5.5.0"
},
"bin": {
@ -1238,7 +1238,7 @@
},
"packages/tui": {
"name": "@mariozechner/pi-tui",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@types/mime-types": "^2.1.4",

View file

@ -1,12 +1,12 @@
{
"name": "@mariozechner/pi-agent",
"version": "0.5.5",
"version": "0.5.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/pi-agent",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@mariozechner/tui": "^0.1.1",

View file

@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi-agent",
"version": "0.5.5",
"version": "0.5.6",
"description": "General-purpose agent with tool calling and session persistence",
"type": "module",
"bin": {
@ -18,7 +18,7 @@
"prepublishOnly": "npm run clean && npm run build"
},
"dependencies": {
"@mariozechner/pi-tui": "^0.5.5",
"@mariozechner/pi-tui": "^0.5.6",
"@types/glob": "^8.1.0",
"chalk": "^5.5.0",
"glob": "^11.0.3",

View file

@ -1,295 +1,9 @@
#!/usr/bin/env node
import chalk from "chalk";
import { createInterface } from "readline";
import type { AgentConfig } from "./agent.js";
import { Agent } from "./agent.js";
import { parseArgs, printHelp as printHelpArgs } from "./args.js";
import { ConsoleRenderer } from "./renderers/console-renderer.js";
import { JsonRenderer } from "./renderers/json-renderer.js";
import { TuiRenderer } from "./renderers/tui-renderer.js";
import { SessionManager } from "./session-manager.js";
// Define argument structure
const argDefs = {
"base-url": {
type: "string" as const,
default: "https://api.openai.com/v1",
description: "API base URL",
},
"api-key": {
type: "string" as const,
default: process.env.OPENAI_API_KEY || "",
description: "API key",
showDefault: "$OPENAI_API_KEY",
},
model: {
type: "string" as const,
default: "gpt-5-mini",
description: "Model name",
},
api: {
type: "string" as const,
default: "completions",
description: "API type",
choices: [
{ value: "completions", description: "OpenAI Chat Completions API (most models)" },
{ value: "responses", description: "OpenAI Responses API (GPT-OSS models)" },
],
},
"system-prompt": {
type: "string" as const,
default: "You are a helpful assistant.",
description: "System prompt",
},
continue: {
type: "flag" as const,
alias: "c",
description: "Continue previous session",
},
json: {
type: "flag" as const,
description: "Output as JSONL",
},
help: {
type: "flag" as const,
alias: "h",
description: "Show this help message",
},
};
import { main } from "./main.js";
interface JsonCommand {
type: "message" | "interrupt";
content?: string;
}
function printHelp(): void {
const usage = `Usage: pi-agent [options] [messages...]
Examples:
# Single message (default OpenAI, GPT-5 Mini, OPENAI_API_KEY env var)
pi-agent "What is 2+2?"
# Multiple messages processed sequentially
pi-agent "What is 2+2?" "What about 3+3?"
# Interactive chat mode (no messages = interactive)
pi-agent
# Continue most recently modified session in current directory
pi-agent --continue "Follow up question"
# GPT-OSS via Groq
pi-agent --base-url https://api.groq.com/openai/v1 --api-key $GROQ_API_KEY --model openai/gpt-oss-120b
# GLM 4.5 via OpenRouter
pi-agent --base-url https://openrouter.ai/api/v1 --api-key $OPENROUTER_API_KEY --model z-ai/glm-4.5
# Claude via Anthropic (no prompt caching support - see https://docs.anthropic.com/en/api/openai-sdk)
pi-agent --base-url https://api.anthropic.com/v1 --api-key $ANTHROPIC_API_KEY --model claude-opus-4-1-20250805`;
printHelpArgs(argDefs, usage);
}
async function runJsonInteractiveMode(config: AgentConfig, sessionManager: SessionManager): Promise<void> {
const rl = createInterface({
input: process.stdin,
output: process.stdout,
terminal: false, // Don't interpret control characters
});
const renderer = new JsonRenderer();
const agent = new Agent(config, renderer, sessionManager);
let isProcessing = false;
let pendingMessage: string | null = null;
const processMessage = async (content: string): Promise<void> => {
isProcessing = true;
try {
await agent.ask(content);
} catch (e: any) {
await renderer.on({ type: "error", message: e.message });
} finally {
isProcessing = false;
// Process any pending message
if (pendingMessage) {
const msg = pendingMessage;
pendingMessage = null;
await processMessage(msg);
}
}
};
// Listen for lines from stdin
rl.on("line", (line) => {
try {
const command = JSON.parse(line) as JsonCommand;
switch (command.type) {
case "interrupt":
agent.interrupt();
isProcessing = false;
break;
case "message":
if (!command.content) {
renderer.on({ type: "error", message: "Message content is required" });
return;
}
if (isProcessing) {
// Queue the message for when the agent is done
pendingMessage = command.content;
} else {
processMessage(command.content);
}
break;
default:
renderer.on({ type: "error", message: `Unknown command type: ${(command as any).type}` });
}
} catch (e) {
renderer.on({ type: "error", message: `Invalid JSON: ${e}` });
}
});
// Wait for stdin to close
await new Promise<void>((resolve) => {
rl.on("close", () => {
resolve();
});
});
}
async function runTuiInteractiveMode(agentConfig: AgentConfig, sessionManager: SessionManager): Promise<void> {
const sessionData = sessionManager.getSessionData();
if (sessionData) {
console.log(chalk.dim(`Resuming session with ${sessionData.events.length} events`));
}
const renderer = new TuiRenderer();
// Initialize TUI BEFORE creating the agent to prevent double init
await renderer.init();
const agent = new Agent(agentConfig, renderer, sessionManager);
renderer.setInterruptCallback(() => {
agent.interrupt();
});
if (sessionData) {
agent.setEvents(sessionData ? sessionData.events.map((e) => e.event) : []);
for (const sessionEvent of sessionData.events) {
const event = sessionEvent.event;
if (event.type === "assistant_start") {
renderer.renderAssistantLabel();
} else {
await renderer.on(event);
}
}
}
while (true) {
const userInput = await renderer.getUserInput();
try {
await agent.ask(userInput);
} catch (e: any) {
await renderer.on({ type: "error", message: e.message });
}
}
}
async function runSingleShotMode(
agentConfig: AgentConfig,
sessionManager: SessionManager,
messages: string[],
jsonOutput: boolean,
): Promise<void> {
const sessionData = sessionManager.getSessionData();
const renderer = jsonOutput ? new JsonRenderer() : new ConsoleRenderer();
const agent = new Agent(agentConfig, renderer, sessionManager);
if (sessionData) {
if (!jsonOutput) {
console.log(chalk.dim(`Resuming session with ${sessionData.events.length} events`));
}
agent.setEvents(sessionData ? sessionData.events.map((e) => e.event) : []);
}
for (const msg of messages) {
try {
await agent.ask(msg);
} catch (e: any) {
await renderer.on({ type: "error", message: e.message });
}
}
}
// Main function to use Agent as standalone CLI
export async function main(args: string[]): Promise<void> {
// Parse arguments
const parsed = parseArgs(argDefs, args);
// Show help if requested
if (parsed.help) {
printHelp();
return;
}
// Extract configuration from parsed args
const baseURL = parsed["base-url"];
const apiKey = parsed["api-key"];
const model = parsed.model;
const continueSession = parsed.continue;
const api = parsed.api as "completions" | "responses";
const systemPrompt = parsed["system-prompt"];
const jsonOutput = parsed.json;
const messages = parsed._; // Positional arguments
if (!apiKey) {
throw new Error("API key required (use --api-key or set OPENAI_API_KEY)");
}
// Determine mode: interactive if no messages provided
const isInteractive = messages.length === 0;
// Create session manager
const sessionManager = new SessionManager(continueSession);
// Create or restore agent
let agentConfig: AgentConfig = {
apiKey,
baseURL,
model,
api,
systemPrompt,
};
if (continueSession) {
const sessionData = sessionManager.getSessionData();
if (sessionData) {
agentConfig = {
...sessionData.config,
apiKey, // Allow overriding API key
};
}
}
// Run in appropriate mode
if (isInteractive) {
if (jsonOutput) {
await runJsonInteractiveMode(agentConfig, sessionManager);
} else {
await runTuiInteractiveMode(agentConfig, sessionManager);
}
} else {
await runSingleShotMode(agentConfig, sessionManager, messages, jsonOutput);
}
}
// Run as CLI - this file is a dedicated bin entry point and must always execute.
// Deliberately NOT guarded with `import.meta.url === `file://${process.argv[1]}``:
// when installed globally, process.argv[1] is the symlinked bin path and does not
// match the resolved module URL, so the guard silently prevents execution.
main(process.argv.slice(2)).catch((err) => {
	console.error(err);
	process.exit(1);
});

View file

@ -6,7 +6,7 @@ export type { ArgDef, ArgDefs, ParsedArgs } from "./args.js";
// CLI utilities
export { parseArgs, printHelp } from "./args.js";
// CLI main function
export { main } from "./cli.js";
export { main } from "./main.js";
// Renderers
export { ConsoleRenderer } from "./renderers/console-renderer.js";
export { JsonRenderer } from "./renderers/json-renderer.js";

285
packages/agent/src/main.ts Normal file
View file

@ -0,0 +1,285 @@
import chalk from "chalk";
import { createInterface } from "readline";
import type { AgentConfig } from "./agent.js";
import { Agent } from "./agent.js";
import { parseArgs, printHelp as printHelpArgs } from "./args.js";
import { ConsoleRenderer } from "./renderers/console-renderer.js";
import { JsonRenderer } from "./renderers/json-renderer.js";
import { TuiRenderer } from "./renderers/tui-renderer.js";
import { SessionManager } from "./session-manager.js";
// Define argument structure
// CLI argument definitions consumed by parseArgs()/printHelpArgs() from args.ts.
// NOTE(review): the "api-key" default is captured from the environment at module
// load time — later changes to process.env.OPENAI_API_KEY are not picked up.
const argDefs = {
	// API endpoint; override to use OpenAI-compatible providers (Groq, OpenRouter, ...).
	"base-url": {
		type: "string" as const,
		default: "https://api.openai.com/v1",
		description: "API base URL",
	},
	// Falls back to $OPENAI_API_KEY; help text shows the env var, not the value.
	"api-key": {
		type: "string" as const,
		default: process.env.OPENAI_API_KEY || "",
		description: "API key",
		showDefault: "$OPENAI_API_KEY",
	},
	model: {
		type: "string" as const,
		default: "gpt-5-mini",
		description: "Model name",
	},
	// Selects the wire protocol; validated against `choices` by the parser.
	api: {
		type: "string" as const,
		default: "completions",
		description: "API type",
		choices: [
			{ value: "completions", description: "OpenAI Chat Completions API (most models)" },
			{ value: "responses", description: "OpenAI Responses API (GPT-OSS models)" },
		],
	},
	"system-prompt": {
		type: "string" as const,
		default: "You are a helpful assistant.",
		description: "System prompt",
	},
	// Resume the most recent session (see SessionManager); also restores its config.
	continue: {
		type: "flag" as const,
		alias: "c",
		description: "Continue previous session",
	},
	// Emit JSONL events instead of human-readable output; in interactive mode this
	// switches to the stdin JSON command protocol (see runJsonInteractiveMode).
	json: {
		type: "flag" as const,
		description: "Output as JSONL",
	},
	help: {
		type: "flag" as const,
		alias: "h",
		description: "Show this help message",
	},
};
// One command received on stdin in --json interactive mode (one JSON object per line).
interface JsonCommand {
	// "message" runs (or queues) a user prompt; "interrupt" aborts the current run.
	type: "message" | "interrupt";
	// Required when type is "message"; ignored for "interrupt".
	content?: string;
}
/**
 * Prints CLI usage plus worked examples, then delegates to the shared
 * help formatter (printHelpArgs) to render the option table from argDefs.
 */
function printHelp(): void {
	// Template literal content is user-visible output — kept verbatim.
	const usage = `Usage: pi-agent [options] [messages...]
Examples:
# Single message (default OpenAI, GPT-5 Mini, OPENAI_API_KEY env var)
pi-agent "What is 2+2?"
# Multiple messages processed sequentially
pi-agent "What is 2+2?" "What about 3+3?"
# Interactive chat mode (no messages = interactive)
pi-agent
# Continue most recently modified session in current directory
pi-agent --continue "Follow up question"
# GPT-OSS via Groq
pi-agent --base-url https://api.groq.com/openai/v1 --api-key $GROQ_API_KEY --model openai/gpt-oss-120b
# GLM 4.5 via OpenRouter
pi-agent --base-url https://openrouter.ai/api/v1 --api-key $OPENROUTER_API_KEY --model z-ai/glm-4.5
# Claude via Anthropic (no prompt caching support - see https://docs.anthropic.com/en/api/openai-sdk)
pi-agent --base-url https://api.anthropic.com/v1 --api-key $ANTHROPIC_API_KEY --model claude-opus-4-1-20250805`;
	printHelpArgs(argDefs, usage);
}
/**
 * Interactive mode driven by a JSONL protocol: each stdin line is a JsonCommand,
 * and all output is emitted as JSONL via JsonRenderer. Resolves when stdin closes.
 *
 * Agent runs are serialized: only one ask() at a time, with at most ONE pending
 * message (a newer queued message overwrites an older one).
 */
async function runJsonInteractiveMode(config: AgentConfig, sessionManager: SessionManager): Promise<void> {
	const rl = createInterface({
		input: process.stdin,
		output: process.stdout,
		terminal: false, // Don't interpret control characters
	});
	const renderer = new JsonRenderer();
	const agent = new Agent(config, renderer, sessionManager);
	// Serialization state for the single-flight queue described above.
	let isProcessing = false;
	let pendingMessage: string | null = null;
	// Runs one message; errors are reported through the renderer, not thrown,
	// so callers may fire-and-forget. The finally block drains the queue.
	const processMessage = async (content: string): Promise<void> => {
		isProcessing = true;
		try {
			await agent.ask(content);
		} catch (e: any) {
			await renderer.on({ type: "error", message: e.message });
		} finally {
			isProcessing = false;
			// Process any pending message
			if (pendingMessage) {
				const msg = pendingMessage;
				pendingMessage = null;
				await processMessage(msg);
			}
		}
	};
	// Listen for lines from stdin
	rl.on("line", (line) => {
		try {
			const command = JSON.parse(line) as JsonCommand;
			switch (command.type) {
				case "interrupt":
					agent.interrupt();
					// NOTE(review): clearing the flag here lets a new message start
					// before the interrupted ask() has unwound (its finally also
					// resets the flag) — confirm overlapping runs are acceptable.
					isProcessing = false;
					break;
				case "message":
					if (!command.content) {
						renderer.on({ type: "error", message: "Message content is required" });
						return;
					}
					if (isProcessing) {
						// Queue the message for when the agent is done
						pendingMessage = command.content;
					} else {
						// Intentionally not awaited; processMessage never rejects.
						processMessage(command.content);
					}
					break;
				default:
					renderer.on({ type: "error", message: `Unknown command type: ${(command as any).type}` });
			}
		} catch (e) {
			renderer.on({ type: "error", message: `Invalid JSON: ${e}` });
		}
	});
	// Wait for stdin to close
	await new Promise<void>((resolve) => {
		rl.on("close", () => {
			resolve();
		});
	});
}
/**
 * Interactive chat mode using the terminal UI renderer.
 *
 * Restores a previous session when one exists (replaying its events into the
 * UI), wires Ctrl-C-style interrupts to the agent, then loops forever reading
 * user input and running the agent. Errors from a run are rendered inline and
 * do not end the loop; the loop itself only exits with the process.
 */
async function runTuiInteractiveMode(agentConfig: AgentConfig, sessionManager: SessionManager): Promise<void> {
	const sessionData = sessionManager.getSessionData();
	if (sessionData) {
		console.log(chalk.dim(`Resuming session with ${sessionData.events.length} events`));
	}
	const renderer = new TuiRenderer();
	// Initialize TUI BEFORE creating the agent to prevent double init
	await renderer.init();
	const agent = new Agent(agentConfig, renderer, sessionManager);
	renderer.setInterruptCallback(() => {
		agent.interrupt();
	});
	if (sessionData) {
		// Fix: dropped the redundant `sessionData ? … : []` ternary — this code
		// is already inside the sessionData guard, so the `: []` branch was dead.
		agent.setEvents(sessionData.events.map((e) => e.event));
		// Replay history into the UI; assistant_start only re-renders the label
		// (its streamed content follows as separate events).
		for (const sessionEvent of sessionData.events) {
			const event = sessionEvent.event;
			if (event.type === "assistant_start") {
				renderer.renderAssistantLabel();
			} else {
				await renderer.on(event);
			}
		}
	}
	while (true) {
		const userInput = await renderer.getUserInput();
		try {
			await agent.ask(userInput);
		} catch (e: any) {
			await renderer.on({ type: "error", message: e.message });
		}
	}
}
/**
 * Non-interactive mode: runs each positional message through the agent in
 * order, rendering human-readable console output or JSONL depending on --json.
 *
 * A failing message is reported through the renderer but does not stop the
 * remaining messages.
 */
async function runSingleShotMode(
	agentConfig: AgentConfig,
	sessionManager: SessionManager,
	messages: string[],
	jsonOutput: boolean,
): Promise<void> {
	const sessionData = sessionManager.getSessionData();
	const renderer = jsonOutput ? new JsonRenderer() : new ConsoleRenderer();
	const agent = new Agent(agentConfig, renderer, sessionManager);
	if (sessionData) {
		// Resume notice is human-facing only; suppress it in JSONL output.
		if (!jsonOutput) {
			console.log(chalk.dim(`Resuming session with ${sessionData.events.length} events`));
		}
		// Fix: dropped the redundant `sessionData ? … : []` ternary — this code
		// is already inside the sessionData guard, so the `: []` branch was dead.
		agent.setEvents(sessionData.events.map((e) => e.event));
	}
	for (const msg of messages) {
		try {
			await agent.ask(msg);
		} catch (e: any) {
			await renderer.on({ type: "error", message: e.message });
		}
	}
}
/**
 * CLI entry point: parses argv, resolves the agent configuration (optionally
 * restoring it from the most recent session), and dispatches to interactive
 * (TUI or JSONL) or single-shot mode.
 *
 * @param args argv without the node/script prefix (process.argv.slice(2))
 * @throws Error when no API key is available via --api-key or OPENAI_API_KEY
 */
export async function main(args: string[]): Promise<void> {
	const opts = parseArgs(argDefs, args);

	// --help short-circuits everything else.
	if (opts.help) {
		printHelp();
		return;
	}

	const key = opts["api-key"];
	if (!key) {
		throw new Error("API key required (use --api-key or set OPENAI_API_KEY)");
	}

	const prompts = opts._; // Positional arguments become messages
	const resume = opts.continue;
	const sessionManager = new SessionManager(resume);

	// Base configuration straight from the command line.
	let agentConfig: AgentConfig = {
		apiKey: key,
		baseURL: opts["base-url"],
		model: opts.model,
		api: opts.api as "completions" | "responses",
		systemPrompt: opts["system-prompt"],
	};

	// When continuing, prefer the stored session config; only the API key
	// may be overridden from the current invocation.
	if (resume) {
		const previous = sessionManager.getSessionData();
		if (previous) {
			agentConfig = { ...previous.config, apiKey: key };
		}
	}

	// No positional messages means interactive; --json selects the JSONL protocol.
	if (prompts.length === 0) {
		await (opts.json
			? runJsonInteractiveMode(agentConfig, sessionManager)
			: runTuiInteractiveMode(agentConfig, sessionManager));
	} else {
		await runSingleShotMode(agentConfig, sessionManager, prompts, opts.json);
	}
}

View file

@ -1,12 +1,12 @@
{
"name": "@mariozechner/pi",
"version": "0.5.5",
"version": "0.5.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/pi",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@ai-sdk/openai": "^2.0.5",

View file

@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi",
"version": "0.5.5",
"version": "0.5.6",
"description": "CLI tool for managing vLLM deployments on GPU pods",
"type": "module",
"bin": {
@ -34,7 +34,7 @@
"node": ">=20.0.0"
},
"dependencies": {
"@mariozechner/pi-agent": "^0.5.5",
"@mariozechner/pi-agent": "^0.5.6",
"chalk": "^5.5.0"
},
"devDependencies": {}

View file

@ -1,12 +1,12 @@
{
"name": "@mariozechner/tui",
"version": "0.5.5",
"version": "0.5.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@mariozechner/tui",
"version": "0.5.5",
"version": "0.5.6",
"license": "MIT",
"dependencies": {
"@types/mime-types": "^2.1.4",

View file

@ -1,6 +1,6 @@
{
"name": "@mariozechner/pi-tui",
"version": "0.5.5",
"version": "0.5.6",
"description": "Terminal User Interface library with differential rendering for efficient text-based applications",
"type": "module",
"main": "dist/index.js",