Add pi-daemon package, update models and dependencies

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Advait Paliwal 2026-03-05 17:11:10 -08:00
parent 6506288db4
commit c46fb9bb16
5 changed files with 10874 additions and 27 deletions

10661
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -2304,6 +2304,23 @@ export const MODELS = {
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"azure-openai-responses">,
"gpt-5.4-pro": {
id: "gpt-5.4-pro",
name: "GPT-5.4 Pro",
api: "azure-openai-responses",
provider: "azure-openai-responses",
baseUrl: "",
reasoning: true,
input: ["text", "image"],
cost: {
input: 30,
output: 180,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 1050000,
maxTokens: 128000,
} satisfies Model<"azure-openai-responses">,
"o1": {
id: "o1",
name: "o1",
@ -2880,7 +2897,7 @@ export const MODELS = {
} satisfies Model<"openai-responses">,
"gpt-5.3-codex": {
id: "gpt-5.3-codex",
name: "GPT-5.3 Codex",
name: "GPT-5.3-Codex",
api: "openai-responses",
provider: "github-copilot",
baseUrl: "https://api.individual.githubcopilot.com",
@ -2893,7 +2910,7 @@ export const MODELS = {
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 272000,
contextWindow: 400000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"grok-code-fast-1": {
@ -5454,6 +5471,23 @@ export const MODELS = {
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"gpt-5.4-pro": {
id: "gpt-5.4-pro",
name: "GPT-5.4 Pro",
api: "openai-responses",
provider: "openai",
baseUrl: "https://api.openai.com/v1",
reasoning: true,
input: ["text", "image"],
cost: {
input: 30,
output: 180,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 1050000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"o1": {
id: "o1",
name: "o1",
@ -6172,6 +6206,23 @@ export const MODELS = {
contextWindow: 272000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"gpt-5.4-pro": {
id: "gpt-5.4-pro",
name: "GPT-5.4 Pro",
api: "openai-responses",
provider: "opencode",
baseUrl: "https://opencode.ai/zen/v1",
reasoning: true,
input: ["text", "image"],
cost: {
input: 30,
output: 180,
cacheRead: 30,
cacheWrite: 0,
},
contextWindow: 1050000,
maxTokens: 128000,
} satisfies Model<"openai-responses">,
"kimi-k2.5": {
id: "kimi-k2.5",
name: "Kimi K2.5",

View file

@ -0,0 +1,16 @@
{
"name": "@co/pi-daemon",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"dev": "tsx --watch src/main.ts",
"start": "tsx src/main.ts"
},
"dependencies": {
"ws": "^8.18.0"
},
"devDependencies": {
"@types/ws": "^8.5.13"
}
}

View file

@ -0,0 +1,161 @@
import {
type AgentSession,
AuthStorage,
createAgentSession,
ModelRegistry,
SessionManager,
} from "@mariozechner/pi-coding-agent";
import { createServer } from "http";
import { WebSocket, WebSocketServer } from "ws";
// Port for both the HTTP API and the WebSocket endpoint (override via PORT env var).
const PORT = Number(process.env.PORT ?? 4567);
// Agent working directory: first CLI argument, falling back to the daemon's own cwd.
const CWD = process.argv[2] || process.cwd();
// Singleton agent session; stays null until initSession() resolves.
let session: AgentSession | null = null;
// Every currently connected WebSocket client — the fan-out set used by broadcast().
const clients = new Set<WebSocket>();
/**
 * Serialize `data` once and fan it out to every connected client.
 * Sockets that are not in the OPEN state are skipped silently.
 */
function broadcast(data: unknown) {
  const payload = JSON.stringify(data);
  clients.forEach((client) => {
    if (client.readyState === WebSocket.OPEN) {
      client.send(payload);
    }
  });
}
/**
 * Create the singleton agent session for CWD (in-memory persistence) and
 * wire its event stream into the WebSocket broadcast fan-out.
 */
async function initSession() {
  const authStorage = AuthStorage.create();
  const modelRegistry = new ModelRegistry(authStorage);
  const { session: newSession } = await createAgentSession({
    cwd: CWD,
    authStorage,
    modelRegistry,
    sessionManager: SessionManager.inMemory(),
  });
  session = newSession;
  // Relay every agent event to all connected clients as it happens.
  newSession.subscribe((event) => broadcast({ type: "agent_event", event }));
  console.log(`Session created. Model: ${newSession.model?.name ?? "auto"}`);
  console.log(`Working directory: ${CWD}`);
}
// Plain HTTP side of the daemon: permissive CORS for browser UIs, a CORS
// preflight responder, and a single /health endpoint reporting daemon state.
const httpServer = createServer((req, res) => {
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Methods", "GET, POST, OPTIONS");
  res.setHeader("Access-Control-Allow-Headers", "Content-Type");

  if (req.method === "OPTIONS") {
    // CORS preflight — no body needed.
    res.writeHead(204);
    res.end();
  } else if (req.url === "/health") {
    const body = JSON.stringify({
      status: "ok",
      cwd: CWD,
      model: session?.model?.name ?? null,
      isStreaming: session?.isStreaming ?? false,
      clients: clients.size,
    });
    res.writeHead(200, { "Content-Type": "application/json" });
    res.end(body);
  } else {
    res.writeHead(404);
    res.end("Not found");
  }
});
// WebSocket endpoint: clients send JSON commands
// ({ type: "prompt" | "steer" | "abort" | "get_messages", ... }) and receive
// broadcast agent events plus direct replies on the same socket.
const wss = new WebSocketServer({ server: httpServer });
wss.on("connection", (ws) => {
  clients.add(ws);
  console.log(`Client connected (${clients.size} total)`);
  // Handshake: report current daemon state so the client can render immediately.
  ws.send(
    JSON.stringify({
      type: "connected",
      cwd: CWD,
      model: session?.model?.name ?? null,
      isStreaming: session?.isStreaming ?? false,
    }),
  );
  ws.on("message", async (raw) => {
    if (!session) {
      ws.send(JSON.stringify({ type: "error", error: "Session not initialized" }));
      return;
    }
    // Parse AND validate: JSON.parse happily returns null/numbers/strings,
    // and a valid object may still lack a string "type" discriminator. Without
    // this guard such input surfaced as a confusing TypeError further down.
    let msg: { type: string; [key: string]: unknown };
    try {
      const parsed: unknown = JSON.parse(raw.toString());
      if (
        typeof parsed !== "object" ||
        parsed === null ||
        typeof (parsed as { type?: unknown }).type !== "string"
      ) {
        ws.send(JSON.stringify({ type: "error", error: "Invalid JSON" }));
        return;
      }
      msg = parsed as { type: string; [key: string]: unknown };
    } catch {
      ws.send(JSON.stringify({ type: "error", error: "Invalid JSON" }));
      return;
    }
    try {
      switch (msg.type) {
        case "prompt": {
          const text = msg.text as string;
          if (!text) break;
          // A prompt arriving mid-stream becomes a follow-up to the active turn.
          if (session.isStreaming) {
            await session.followUp(text);
          } else {
            await session.prompt(text);
          }
          break;
        }
        case "steer": {
          const text = msg.text as string;
          // Steering only makes sense while a response is actively streaming.
          if (text && session.isStreaming) {
            await session.steer(text);
          }
          break;
        }
        case "abort": {
          await session.abort();
          break;
        }
        case "get_messages": {
          ws.send(
            JSON.stringify({
              type: "messages",
              messages: session.messages,
            }),
          );
          break;
        }
        default:
          ws.send(JSON.stringify({ type: "error", error: `Unknown command: ${msg.type}` }));
      }
    } catch (err: unknown) {
      // Session failures (aborts, provider errors) are reported to the caller
      // instead of crashing the daemon.
      const error = err instanceof Error ? err.message : String(err);
      ws.send(JSON.stringify({ type: "error", error }));
    }
  });
  ws.on("close", () => {
    clients.delete(ws);
    console.log(`Client disconnected (${clients.size} total)`);
  });
});
// Create the agent session before accepting traffic; a failure here rejects the
// top-level await and aborts startup.
await initSession();
// HTTP routes and WebSocket upgrades share this one port.
httpServer.listen(PORT, () => {
  console.log(`Pi daemon running on http://localhost:${PORT}`);
  console.log(`WebSocket: ws://localhost:${PORT}`);
});

View file

@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src"
},
"include": ["src"]
}