Remove proxy package

The proxy functionality is now handled by web-ui's createStreamFn
with external proxy servers.
This commit is contained in:
Mario Zechner 2025-12-28 11:59:02 +01:00
parent 6bbe3147d8
commit 0f98decf65
12 changed files with 62 additions and 256 deletions

View file

@@ -30,7 +30,7 @@ When reading issues:
When creating issues:
- Add `pkg:*` labels to indicate which package(s) the issue affects
- Available labels: `pkg:agent`, `pkg:ai`, `pkg:coding-agent`, `pkg:mom`, `pkg:pods`, `pkg:proxy`, `pkg:tui`, `pkg:web-ui`
- Available labels: `pkg:agent`, `pkg:ai`, `pkg:coding-agent`, `pkg:mom`, `pkg:pods`, `pkg:tui`, `pkg:web-ui`
- If an issue spans multiple packages, add all relevant labels
When closing issues via commit:
@@ -39,7 +39,7 @@ When closing issues via commit:
## Tools
- GitHub CLI for issues/PRs
- Add package labels to issues/PRs: pkg:agent, pkg:ai, pkg:coding-agent, pkg:mom, pkg:pods, pkg:proxy, pkg:tui, pkg:web-ui
- Add package labels to issues/PRs: pkg:agent, pkg:ai, pkg:coding-agent, pkg:mom, pkg:pods, pkg:tui, pkg:web-ui
- TUI interaction: use tmux
## Style

View file

@@ -12,7 +12,6 @@ Tools for building AI agents and managing LLM deployments.
| **[@mariozechner/pi-mom](packages/mom)** | Slack bot that delegates messages to the pi coding agent |
| **[@mariozechner/pi-tui](packages/tui)** | Terminal UI library with differential rendering |
| **[@mariozechner/pi-web-ui](packages/web-ui)** | Web components for AI chat interfaces |
| **[@mariozechner/pi-proxy](packages/proxy)** | CORS proxy for browser-based LLM API calls |
| **[@mariozechner/pi-pods](packages/pods)** | CLI for managing vLLM deployments on GPU pods |
## Development

View file

@@ -8,8 +8,8 @@
],
"scripts": {
"clean": "npm run clean --workspaces",
"build": "npm run build -w @mariozechner/pi-tui && npm run build -w @mariozechner/pi-ai && npm run build -w @mariozechner/pi-agent-core && npm run build -w @mariozechner/pi-coding-agent && npm run build -w @mariozechner/pi-mom && npm run build -w @mariozechner/pi-web-ui && npm run build -w @mariozechner/pi-proxy && npm run build -w @mariozechner/pi",
"dev": "concurrently --names \"ai,agent,coding-agent,mom,web-ui,tui,proxy\" --prefix-colors \"cyan,yellow,red,white,green,magenta,blue\" \"npm run dev -w @mariozechner/pi-ai\" \"npm run dev -w @mariozechner/pi-agent-core\" \"npm run dev -w @mariozechner/pi-coding-agent\" \"npm run dev -w @mariozechner/pi-mom\" \"npm run dev -w @mariozechner/pi-web-ui\" \"npm run dev -w @mariozechner/pi-tui\" \"npm run dev -w @mariozechner/pi-proxy\"",
"build": "npm run build -w @mariozechner/pi-tui && npm run build -w @mariozechner/pi-ai && npm run build -w @mariozechner/pi-agent-core && npm run build -w @mariozechner/pi-coding-agent && npm run build -w @mariozechner/pi-mom && npm run build -w @mariozechner/pi-web-ui && npm run build -w @mariozechner/pi",
"dev": "concurrently --names \"ai,agent,coding-agent,mom,web-ui,tui\" --prefix-colors \"cyan,yellow,red,white,green,magenta\" \"npm run dev -w @mariozechner/pi-ai\" \"npm run dev -w @mariozechner/pi-agent-core\" \"npm run dev -w @mariozechner/pi-coding-agent\" \"npm run dev -w @mariozechner/pi-mom\" \"npm run dev -w @mariozechner/pi-web-ui\" \"npm run dev -w @mariozechner/pi-tui\"",
"dev:tsc": "concurrently --names \"ai,web-ui\" --prefix-colors \"cyan,green\" \"npm run dev:tsc -w @mariozechner/pi-ai\" \"npm run dev:tsc -w @mariozechner/pi-web-ui\"",
"check": "biome check --write . && tsgo --noEmit && npm run check -w @mariozechner/pi-web-ui",
"test": "npm run test --workspaces --if-present",

View file

@@ -6104,9 +6104,9 @@ export const MODELS = {
contextWindow: 32768,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"anthropic/claude-3.5-haiku": {
id: "anthropic/claude-3.5-haiku",
name: "Anthropic: Claude 3.5 Haiku",
"anthropic/claude-3.5-haiku-20241022": {
id: "anthropic/claude-3.5-haiku-20241022",
name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
@@ -6121,9 +6121,9 @@ export const MODELS = {
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model<"openai-completions">,
"anthropic/claude-3.5-haiku-20241022": {
id: "anthropic/claude-3.5-haiku-20241022",
name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
"anthropic/claude-3.5-haiku": {
id: "anthropic/claude-3.5-haiku",
name: "Anthropic: Claude 3.5 Haiku",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
@@ -6359,6 +6359,23 @@ export const MODELS = {
contextWindow: 128000,
maxTokens: 16384,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-8b-instruct": {
id: "meta-llama/llama-3.1-8b-instruct",
name: "Meta: Llama 3.1 8B Instruct",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.02,
output: 0.03,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 16384,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-405b-instruct": {
id: "meta-llama/llama-3.1-405b-instruct",
name: "Meta: Llama 3.1 405B Instruct",
@@ -6393,23 +6410,6 @@ export const MODELS = {
contextWindow: 131072,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.1-8b-instruct": {
id: "meta-llama/llama-3.1-8b-instruct",
name: "Meta: Llama 3.1 8B Instruct",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.02,
output: 0.03,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 16384,
} satisfies Model<"openai-completions">,
"mistralai/mistral-nemo": {
id: "mistralai/mistral-nemo",
name: "Mistral: Mistral Nemo",
@@ -6546,6 +6546,23 @@ export const MODELS = {
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-4o-2024-05-13": {
id: "openai/gpt-4o-2024-05-13",
name: "OpenAI: GPT-4o (2024-05-13)",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text", "image"],
cost: {
input: 5,
output: 15,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-4o": {
id: "openai/gpt-4o",
name: "OpenAI: GPT-4o",
@@ -6580,23 +6597,6 @@ export const MODELS = {
contextWindow: 128000,
maxTokens: 64000,
} satisfies Model<"openai-completions">,
"openai/gpt-4o-2024-05-13": {
id: "openai/gpt-4o-2024-05-13",
name: "OpenAI: GPT-4o (2024-05-13)",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text", "image"],
cost: {
input: 5,
output: 15,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3-70b-instruct": {
id: "meta-llama/llama-3-70b-instruct",
name: "Meta: Llama 3 70B Instruct",
@@ -6835,23 +6835,6 @@ export const MODELS = {
contextWindow: 8191,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-3.5-turbo": {
id: "openai/gpt-3.5-turbo",
name: "OpenAI: GPT-3.5 Turbo",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.5,
output: 1.5,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 16385,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-4": {
id: "openai/gpt-4",
name: "OpenAI: GPT-4",
@@ -6869,6 +6852,23 @@ export const MODELS = {
contextWindow: 8191,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openai/gpt-3.5-turbo": {
id: "openai/gpt-3.5-turbo",
name: "OpenAI: GPT-3.5 Turbo",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0.5,
output: 1.5,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 16385,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"openrouter/auto": {
id: "openrouter/auto",
name: "OpenRouter: Auto Router",

View file

@@ -1,67 +0,0 @@
# @mariozechner/pi-proxy
CORS and authentication proxy for pi-ai. Enables browser clients to access OAuth-protected endpoints.
## Usage
### CORS Proxy
Zero-config CORS proxy for development:
```bash
# Run directly with tsx
npx tsx packages/proxy/src/cors-proxy.ts 3001
# Or use npm script
npm run dev -w @mariozechner/pi-proxy
# Or install globally and use CLI
npm install -g @mariozechner/pi-proxy
pi-proxy 3001
```
The proxy will forward requests to any URL:
```javascript
// Instead of:
fetch('https://api.anthropic.com/v1/messages', { ... })
// Use:
fetch('http://localhost:3001?url=https://api.anthropic.com/v1/messages', { ... })
```
### OAuth Integration
For Anthropic OAuth tokens, configure your client to use the proxy:
```typescript
import Anthropic from '@anthropic-ai/sdk';
const client = new Anthropic({
apiKey: 'oauth_token_here',
baseURL: 'http://localhost:3001?url=https://api.anthropic.com'
});
```
## Future Proxy Types
- **BunnyCDN Edge Function**: Deploy as edge function
- **Managed Proxy**: Self-hosted with provider key management and credential auth
- **Cloudflare Worker**: Deploy as CF worker
## Architecture
The proxy:
1. Accepts requests with `?url=<target>` query parameter
2. Forwards all headers (except `host`, `origin`)
3. Forwards request body for non-GET/HEAD requests
4. Returns response with CORS headers enabled
5. Strips CORS headers from upstream response
## Development
```bash
npm install
npm run build
npm run check
```

View file

@@ -1,26 +0,0 @@
{
"name": "@mariozechner/pi-proxy",
"version": "0.30.2",
"type": "module",
"description": "CORS and authentication proxy for pi-ai",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"bin": {
"pi-proxy": "dist/cli.js"
},
"scripts": {
"clean": "rm -rf dist",
"build": "tsc",
"typecheck": "tsgo --noEmit",
"dev": "tsx src/cors-proxy.ts 3001"
},
"dependencies": {
"@hono/node-server": "^1.14.0",
"hono": "^4.6.16"
},
"devDependencies": {
"@types/node": "^22.10.5",
"tsx": "^4.19.2",
"typescript": "^5.7.3"
}
}

View file

@@ -1,16 +0,0 @@
#!/usr/bin/env node
import { spawn } from "node:child_process";
import path from "node:path";
import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const port = process.argv[2] || "3001";
// Run the CORS proxy
const child = spawn("node", [path.join(__dirname, "cors-proxy.js"), port], {
stdio: "inherit",
});
child.on("exit", (code) => {
process.exit(code || 0);
});

View file

@@ -1,73 +0,0 @@
#!/usr/bin/env node
import { serve } from "@hono/node-server";
import { Hono } from "hono";
import { cors } from "hono/cors";
export function createCorsProxy() {
const app = new Hono();
// Enable CORS for all origins
app.use("*", cors());
// Proxy all requests
app.all("*", async (c) => {
const url = new URL(c.req.url);
const targetUrl = url.searchParams.get("url");
if (!targetUrl) {
return c.json({ error: "Missing 'url' query parameter" }, 400);
}
try {
// Forward the request
const headers = new Headers();
c.req.raw.headers.forEach((value, key) => {
// Skip host and origin headers
if (key.toLowerCase() !== "host" && key.toLowerCase() !== "origin") {
headers.set(key, value);
}
});
const response = await fetch(targetUrl, {
method: c.req.method,
headers,
body: c.req.method !== "GET" && c.req.method !== "HEAD" ? await c.req.raw.clone().arrayBuffer() : undefined,
});
// Forward response headers
const responseHeaders = new Headers();
response.headers.forEach((value, key) => {
// Skip CORS headers (we handle them)
if (!key.toLowerCase().startsWith("access-control-")) {
responseHeaders.set(key, value);
}
});
// Return proxied response
return new Response(response.body, {
status: response.status,
statusText: response.statusText,
headers: responseHeaders,
});
} catch (error) {
console.error("Proxy error:", error);
return c.json({ error: error instanceof Error ? error.message : "Proxy request failed" }, 502);
}
});
return app;
}
// CLI entry point
if (import.meta.url === `file://${process.argv[1]}`) {
const app = createCorsProxy();
const port = Number.parseInt(process.argv[2] || "3001", 10);
console.log(`🔌 CORS proxy running on http://localhost:${port}`);
console.log(`Usage: http://localhost:${port}?url=<target-url>`);
serve({
fetch: app.fetch,
port,
});
}

View file

@@ -1 +0,0 @@
export { createCorsProxy } from "./cors-proxy.js";

View file

@@ -1,8 +0,0 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src"
},
"include": ["src/**/*"]
}

View file

@@ -11,7 +11,7 @@
},
"scripts": {
"clean": "rm -rf dist",
"build": "tsc -p tsconfig.build.json && tailwindcss -i ./src/app.css -o ./dist/app.css --minify",
"build": "tsgo -p tsconfig.build.json && tailwindcss -i ./src/app.css -o ./dist/app.css --minify",
"dev": "concurrently --names \"build,example\" --prefix-colors \"cyan,green\" \"tsc -p tsconfig.build.json --watch --preserveWatchOutput\" \"tailwindcss -i ./src/app.css -o ./dist/app.css --watch\" \"npm run dev --prefix example\"",
"dev:tsc": "concurrently --names \"build\" --prefix-colors \"cyan\" \"tsc -p tsconfig.build.json --watch --preserveWatchOutput\" \"tailwindcss -i ./src/app.css -o ./dist/app.css --watch\"",
"check": "biome check --write . && tsc --noEmit && cd example && biome check --write . && tsc --noEmit"

View file

@@ -17,8 +17,6 @@
"@mariozechner/pi-mom/*": ["./packages/mom/src/*"],
"@mariozechner/pi": ["./packages/pods/src/index.ts"],
"@mariozechner/pi/*": ["./packages/pods/src/*"],
"@mariozechner/pi-proxy": ["./packages/proxy/src/index.ts"],
"@mariozechner/pi-proxy/*": ["./packages/proxy/src/*"],
"@mariozechner/pi-tui": ["./packages/tui/src/index.ts"],
"@mariozechner/pi-tui/*": ["./packages/tui/src/*"],
"@mariozechner/pi-web-ui": ["./packages/web-ui/src/index.ts"],