fix(ai,coding-agent): make pi-ai browser-safe and move OAuth runtime exports

- add browser smoke bundling check to root check + pre-commit

- lazy-load Bedrock provider registration to avoid browser graph traversal

- remove top-level OAuth runtime exports from @mariozechner/pi-ai

- add @mariozechner/pi-ai/oauth subpath export and update coding-agent imports

- move proxy dispatcher init to coding-agent CLI (Node-only)

- document Bedrock/OAuth browser limitations

closes #1814
This commit is contained in:
Mario Zechner 2026-03-04 20:20:54 +01:00
parent 2af0c98b5f
commit e0754fdbb3
26 changed files with 216 additions and 59 deletions

View file

@ -2,6 +2,10 @@
## [Unreleased]
### Breaking Changes
- Moved Node OAuth runtime exports off the top-level package entry. Import OAuth login/refresh functions from `@mariozechner/pi-ai/oauth` instead of `@mariozechner/pi-ai` ([#1814](https://github.com/badlogic/pi-mono/issues/1814)).
### Added
- Added `gemini-3.1-flash-lite-preview` fallback model entry for the `google` provider so it remains selectable until upstream model catalogs include it ([#1785](https://github.com/badlogic/pi-mono/issues/1785), thanks [@n-WN](https://github.com/n-WN)).
@ -9,6 +13,7 @@
### Fixed
- Fixed Gemini 3.1 thinking-level detection in `google` and `google-vertex` providers so `gemini-3.1-*` models use Gemini 3 level-based thinking config instead of budget fallback ([#1785](https://github.com/badlogic/pi-mono/issues/1785), thanks [@n-WN](https://github.com/n-WN)).
- Fixed browser bundling failures by lazy-loading the Bedrock provider and removing Node-only side effects from the default browser import graph ([#1814](https://github.com/badlogic/pi-mono/issues/1814)).
## [0.55.4] - 2026-03-02

View file

@ -33,6 +33,7 @@ Unified LLM API with automatic model discovery, provider configuration, token an
- [Cross-Provider Handoffs](#cross-provider-handoffs)
- [Context Serialization](#context-serialization)
- [Browser Usage](#browser-usage)
- [Browser Compatibility Notes](#browser-compatibility-notes)
- [Environment Variables](#environment-variables-nodejs-only)
- [Checking Environment Variables](#checking-environment-variables)
- [OAuth Providers](#oauth-providers)
@ -888,6 +889,13 @@ const response = await complete(model, {
> **Security Warning**: Exposing API keys in frontend code is dangerous. Anyone can extract and abuse your keys. Only use this approach for internal tools or demos. For production applications, use a backend proxy that keeps your API keys secure.
### Browser Compatibility Notes
- Amazon Bedrock (`bedrock-converse-stream`) is not supported in browser environments.
- OAuth login flows are not supported in browser environments. Use the `@mariozechner/pi-ai/oauth` entry point in Node.js.
- In browser builds, Bedrock models may still appear in model lists, but calling them fails at runtime.
- Use a server-side proxy or backend service if you need Bedrock or OAuth-based auth from a web app.
### Environment Variables (Node.js only)
In Node.js environments, you can set environment variables to avoid passing API keys:
@ -1018,7 +1026,7 @@ Credentials are saved to `auth.json` in the current directory.
### Programmatic OAuth
The library provides login and token refresh functions. Credential storage is the caller's responsibility.
The library provides login and token refresh functions via the `@mariozechner/pi-ai/oauth` entry point. Credential storage is the caller's responsibility.
```typescript
import {
@ -1036,13 +1044,13 @@ import {
// Types
type OAuthProvider, // 'anthropic' | 'openai-codex' | 'github-copilot' | 'google-gemini-cli' | 'google-antigravity'
type OAuthCredentials,
} from '@mariozechner/pi-ai';
} from '@mariozechner/pi-ai/oauth';
```
### Login Flow Example
```typescript
import { loginGitHubCopilot } from '@mariozechner/pi-ai';
import { loginGitHubCopilot } from '@mariozechner/pi-ai/oauth';
import { writeFileSync } from 'fs';
const credentials = await loginGitHubCopilot({
@ -1066,7 +1074,8 @@ writeFileSync('auth.json', JSON.stringify(auth, null, 2));
Use `getOAuthApiKey()` to get an API key, automatically refreshing if expired:
```typescript
import { getModel, complete, getOAuthApiKey } from '@mariozechner/pi-ai';
import { getModel, complete } from '@mariozechner/pi-ai';
import { getOAuthApiKey } from '@mariozechner/pi-ai/oauth';
import { readFileSync, writeFileSync } from 'fs';
// Load your stored credentials

1
packages/ai/oauth.d.ts vendored Normal file
View file

@ -0,0 +1 @@
export * from "./src/oauth.js";

1
packages/ai/oauth.js Normal file
View file

@ -0,0 +1 @@
export * from "./dist/oauth.js";

View file

@ -5,11 +5,23 @@
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
},
"./oauth": {
"types": "./oauth.d.ts",
"import": "./oauth.js"
}
},
"bin": {
"pi-ai": "./dist/cli.js"
},
"files": [
"dist",
"oauth.js",
"oauth.d.ts",
"README.md"
],
"scripts": {

View file

@ -3,16 +3,20 @@ let _existsSync: typeof import("node:fs").existsSync | null = null;
let _homedir: typeof import("node:os").homedir | null = null;
let _join: typeof import("node:path").join | null = null;
type DynamicImport = (specifier: string) => Promise<unknown>;
const dynamicImport = new Function("specifier", "return import(specifier);") as DynamicImport;
// Eagerly load in Node.js/Bun environment only
if (typeof process !== "undefined" && (process.versions?.node || process.versions?.bun)) {
import("node:fs").then((m) => {
_existsSync = m.existsSync;
dynamicImport("node:fs").then((m) => {
_existsSync = (m as typeof import("node:fs")).existsSync;
});
import("node:os").then((m) => {
_homedir = m.homedir;
dynamicImport("node:os").then((m) => {
_homedir = (m as typeof import("node:os")).homedir;
});
import("node:path").then((m) => {
_join = m.join;
dynamicImport("node:path").then((m) => {
_join = (m as typeof import("node:path")).join;
});
}

View file

@ -16,7 +16,16 @@ export * from "./stream.js";
export * from "./types.js";
export * from "./utils/event-stream.js";
export * from "./utils/json-parse.js";
export * from "./utils/oauth/index.js";
export type {
OAuthAuthInfo,
OAuthCredentials,
OAuthLoginCallbacks,
OAuthPrompt,
OAuthProvider,
OAuthProviderId,
OAuthProviderInfo,
OAuthProviderInterface,
} from "./utils/oauth/types.js";
export * from "./utils/overflow.js";
export * from "./utils/typebox-helpers.js";
export * from "./utils/validation.js";

1
packages/ai/src/oauth.ts Normal file
View file

@ -0,0 +1 @@
export * from "./utils/oauth/index.js";

View file

@ -1,12 +1,19 @@
// NEVER convert to top-level import - breaks browser/Vite builds (web-ui)
let _os: typeof import("node:os") | null = null;
import type * as NodeOs from "node:os";
import type { Tool as OpenAITool, ResponseInput, ResponseStreamEvent } from "openai/resources/responses/responses.js";
// NEVER convert to top-level runtime imports - breaks browser/Vite builds (web-ui)
let _os: typeof NodeOs | null = null;
type DynamicImport = (specifier: string) => Promise<unknown>;
const dynamicImport = new Function("specifier", "return import(specifier);") as DynamicImport;
if (typeof process !== "undefined" && (process.versions?.node || process.versions?.bun)) {
import("node:os").then((m) => {
_os = m;
dynamicImport("node:os").then((m) => {
_os = m as typeof NodeOs;
});
}
import type { Tool as OpenAITool, ResponseInput, ResponseStreamEvent } from "openai/resources/responses/responses.js";
import { getEnvApiKey } from "../env-api-keys.js";
import { supportsXhigh } from "../models.js";
import type {

View file

@ -1,5 +1,13 @@
import { clearApiProviders, registerApiProvider } from "../api-registry.js";
import { streamBedrock, streamSimpleBedrock } from "./amazon-bedrock.js";
import type {
AssistantMessage,
AssistantMessageEvent,
Context,
Model,
SimpleStreamOptions,
StreamOptions,
} from "../types.js";
import { AssistantMessageEventStream } from "../utils/event-stream.js";
import { streamAnthropic, streamSimpleAnthropic } from "./anthropic.js";
import { streamAzureOpenAIResponses, streamSimpleAzureOpenAIResponses } from "./azure-openai-responses.js";
import { streamGoogle, streamSimpleGoogle } from "./google.js";
@ -9,6 +17,100 @@ import { streamOpenAICodexResponses, streamSimpleOpenAICodexResponses } from "./
import { streamOpenAICompletions, streamSimpleOpenAICompletions } from "./openai-completions.js";
import { streamOpenAIResponses, streamSimpleOpenAIResponses } from "./openai-responses.js";
/**
 * Shape of the lazily-imported `./amazon-bedrock.js` module.
 *
 * Declared here so the Bedrock implementation can be loaded on demand via a
 * dynamic import (keeping it out of browser bundles) while the call sites in
 * this file stay fully typed.
 */
interface BedrockProviderModule {
	// Streaming entry point with full stream options.
	streamBedrock: (
		model: Model<"bedrock-converse-stream">,
		context: Context,
		options?: StreamOptions,
	) => AssistantMessageEventStream;
	// Simplified streaming entry point (reduced option surface).
	streamSimpleBedrock: (
		model: Model<"bedrock-converse-stream">,
		context: Context,
		options?: SimpleStreamOptions,
	) => AssistantMessageEventStream;
}
// Indirect dynamic import: wrapping `import()` in `new Function` hides the
// specifier from static analysis, so bundlers (Vite/web-ui browser builds)
// never traverse the Node-only Bedrock module graph.
type DynamicImport = (specifier: string) => Promise<unknown>;
const dynamicImport = new Function("specifier", "return import(specifier);") as DynamicImport;

/**
 * Load the Bedrock provider implementation on demand.
 *
 * ES modules are cached by the runtime, so calling this repeatedly resolves
 * to the same module instance after the first import.
 */
async function loadBedrockProviderModule(): Promise<BedrockProviderModule> {
	return (await dynamicImport("./amazon-bedrock.js")) as BedrockProviderModule;
}
/**
 * Pump every event from the lazily-loaded inner provider stream into the
 * outer stream that was handed to the caller synchronously, then end it.
 *
 * Fix: the previous fire-and-forget loop had no error handling — if iterating
 * `source` threw, the rejection was unhandled and `target.end()` was never
 * reached, leaving consumers of the outer stream waiting forever. The
 * try/finally below guarantees the outer stream terminates either way.
 */
function forwardStream(target: AssistantMessageEventStream, source: AssistantMessageEventStream): void {
	void (async () => {
		try {
			for await (const event of source as AsyncIterable<AssistantMessageEvent>) {
				target.push(event);
			}
		} catch {
			// Provider streams appear to report failures as pushed error events
			// rather than by throwing (see the catch handlers in the lazy
			// wrappers) — NOTE(review): confirm; if iteration does throw, we
			// still fall through so consumers are released.
		} finally {
			// Always terminate the outer stream, even on abrupt iterator failure.
			target.end();
		}
	})();
}
/**
 * Build a synthetic error `AssistantMessage` for the case where the Bedrock
 * provider module itself failed to load (e.g. in a browser bundle where the
 * Node-only implementation is unavailable).
 *
 * All usage/cost counters are zero because no request was ever made.
 */
function createLazyLoadErrorMessage(model: Model<"bedrock-converse-stream">, error: unknown): AssistantMessage {
	const zeroCost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 };
	const errorMessage = error instanceof Error ? error.message : String(error);
	return {
		role: "assistant",
		content: [],
		api: "bedrock-converse-stream",
		provider: model.provider,
		model: model.id,
		usage: {
			input: 0,
			output: 0,
			cacheRead: 0,
			cacheWrite: 0,
			totalTokens: 0,
			cost: zeroCost,
		},
		stopReason: "error",
		errorMessage,
		timestamp: Date.now(),
	};
}
/**
 * Lazy wrapper around the Bedrock `stream` entry point.
 *
 * Returns an event stream synchronously and fills it in once the Node-only
 * Bedrock module has been imported on demand. If the import fails (browser
 * environments), the stream ends with a synthetic error message.
 */
function streamBedrockLazy(
	model: Model<"bedrock-converse-stream">,
	context: Context,
	options?: StreamOptions,
): AssistantMessageEventStream {
	const stream = new AssistantMessageEventStream();
	void (async () => {
		try {
			const bedrock = await loadBedrockProviderModule();
			forwardStream(stream, bedrock.streamBedrock(model, context, options));
		} catch (error) {
			const failure = createLazyLoadErrorMessage(model, error);
			stream.push({ type: "error", reason: "error", error: failure });
			stream.end(failure);
		}
	})();
	return stream;
}
/**
 * Lazy wrapper around the Bedrock `streamSimple` entry point.
 *
 * Mirrors `streamBedrockLazy`: the stream is returned synchronously and is
 * driven once the dynamic import resolves; a load failure produces a
 * synthetic error message instead of an unhandled rejection.
 */
function streamSimpleBedrockLazy(
	model: Model<"bedrock-converse-stream">,
	context: Context,
	options?: SimpleStreamOptions,
): AssistantMessageEventStream {
	const stream = new AssistantMessageEventStream();
	void (async () => {
		try {
			const bedrock = await loadBedrockProviderModule();
			forwardStream(stream, bedrock.streamSimpleBedrock(model, context, options));
		} catch (error) {
			const failure = createLazyLoadErrorMessage(model, error);
			stream.push({ type: "error", reason: "error", error: failure });
			stream.end(failure);
		}
	})();
	return stream;
}
export function registerBuiltInApiProviders(): void {
registerApiProvider({
api: "anthropic-messages",
@ -60,8 +162,8 @@ export function registerBuiltInApiProviders(): void {
registerApiProvider({
api: "bedrock-converse-stream",
stream: streamBedrock,
streamSimple: streamSimpleBedrock,
stream: streamBedrockLazy,
streamSimple: streamSimpleBedrockLazy,
});
}

View file

@ -1,5 +1,4 @@
import "./providers/register-builtins.js";
import "./utils/http-proxy.js";
import { getApiProvider } from "./api-registry.js";
import type {

View file

@ -1,13 +0,0 @@
/**
* Set up HTTP proxy according to env variables for `fetch` based SDKs in Node.js.
* Bun has builtin support for this.
*
* This module should be imported early by any code that needs proxy support for fetch().
* ES modules are cached, so importing multiple times is safe - setup only runs once.
*/
if (typeof process !== "undefined" && process.versions?.node) {
import("undici").then((m) => {
const { EnvHttpProxyAgent, setGlobalDispatcher } = m;
setGlobalDispatcher(new EnvHttpProxyAgent());
});
}

View file

@ -9,9 +9,6 @@
* - Antigravity (Gemini 3, Claude, GPT-OSS via Google Cloud)
*/
// Set up HTTP proxy for fetch() calls (respects HTTP_PROXY, HTTPS_PROXY env vars)
import "../http-proxy.js";
// Anthropic
export { anthropicOAuthProvider, loginAnthropic, refreshAnthropicToken } from "./anthropic.js";
// GitHub Copilot