Mirror of https://github.com/getcompanion-ai/co-mono.git (synced 2026-04-15 19:05:11 +00:00)
docs(ai): Add browser usage section to README
- Document browser support and API key requirements
- Add security warning about exposing keys in frontend
- Keep documentation concise and practical
This commit is contained in:
parent 14d93ca8c6
commit 32bede3352
2 changed files with 104 additions and 85 deletions
@@ -427,6 +427,25 @@ if (model) {
 }
 ```
 
+## Browser Usage
+
+The library supports browser environments. You must pass the API key explicitly since environment variables are not available in browsers:
+
+```typescript
+import { createLLM } from '@mariozechner/pi-ai';
+
+// API key must be passed explicitly in browser
+const llm = createLLM('anthropic', 'claude-3-5-haiku-20241022', {
+	apiKey: 'your-api-key'
+});
+
+const response = await llm.generate({
+	messages: [{ role: 'user', content: 'Hello!' }]
+});
+```
+
+> **Security Warning**: Exposing API keys in frontend code is dangerous. Anyone can extract and abuse your keys. Only use this approach for internal tools or demos. For production applications, use a backend proxy that keeps your API keys secure.
+
 ## Environment Variables
 
 Set these environment variables to use `createLLM` without passing API keys:
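Note: the security warning added above recommends a backend proxy without showing one. Below is a minimal sketch of that approach, assuming an Express server; the `/api/chat` route, the request shape, and the `ANTHROPIC_API_KEY` variable name are illustrative assumptions, not part of the library:

```typescript
import express from 'express';
import { createLLM } from '@mariozechner/pi-ai';

const app = express();
app.use(express.json());

// The API key stays on the server (read from an env var, as the
// "Environment Variables" section suggests); the browser never sees it.
const llm = createLLM('anthropic', 'claude-3-5-haiku-20241022', {
	apiKey: process.env.ANTHROPIC_API_KEY! // assumed variable name
});

// Hypothetical proxy endpoint: forwards chat messages to the LLM and
// returns the response to the frontend.
app.post('/api/chat', async (req, res) => {
	// In production, validate and rate-limit req.body.messages here.
	const response = await llm.generate({ messages: req.body.messages });
	res.json(response);
});

app.listen(3000);
```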
@@ -1598,22 +1598,6 @@ export const PROVIDERS = {
 				contextWindow: 131072,
 				maxTokens: 16384,
 			} satisfies Model,
-			"meta-llama/llama-3.1-405b-instruct": {
-				id: "meta-llama/llama-3.1-405b-instruct",
-				name: "Meta: Llama 3.1 405B Instruct",
-				provider: "openrouter",
-				baseUrl: "https://openrouter.ai/api/v1",
-				reasoning: false,
-				input: ["text"],
-				cost: {
-					input: 0.7999999999999999,
-					output: 0.7999999999999999,
-					cacheRead: 0,
-					cacheWrite: 0,
-				},
-				contextWindow: 32768,
-				maxTokens: 16384,
-			} satisfies Model,
 			"meta-llama/llama-3.1-70b-instruct": {
 				id: "meta-llama/llama-3.1-70b-instruct",
 				name: "Meta: Llama 3.1 70B Instruct",
@@ -1630,6 +1614,22 @@ export const PROVIDERS = {
 				contextWindow: 131072,
 				maxTokens: 16384,
 			} satisfies Model,
+			"meta-llama/llama-3.1-405b-instruct": {
+				id: "meta-llama/llama-3.1-405b-instruct",
+				name: "Meta: Llama 3.1 405B Instruct",
+				provider: "openrouter",
+				baseUrl: "https://openrouter.ai/api/v1",
+				reasoning: false,
+				input: ["text"],
+				cost: {
+					input: 0.7999999999999999,
+					output: 0.7999999999999999,
+					cacheRead: 0,
+					cacheWrite: 0,
+				},
+				contextWindow: 32768,
+				maxTokens: 16384,
+			} satisfies Model,
 			"mistralai/mistral-nemo": {
 				id: "mistralai/mistral-nemo",
 				name: "Mistral: Mistral Nemo",
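The entries being moved in this file all share one shape. For orientation, here is what the `Model` type they `satisfies` appears to look like, reconstructed from the fields visible in these hunks rather than from the library's actual definition; optionality and units are assumptions:

```typescript
// Inferred from the diff above, not the library's source.
interface Model {
	id: string;
	name: string;
	provider: string;
	baseUrl?: string; // present on openrouter entries, absent on openai/anthropic ones
	reasoning: boolean;
	input: ("text" | "image")[];
	cost: {
		input: number; // assumed to be USD per million tokens
		output: number;
		cacheRead: number;
		cacheWrite: number;
	};
	contextWindow: number; // tokens
	maxTokens: number; // maximum output tokens
}
```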
@@ -1646,6 +1646,22 @@ export const PROVIDERS = {
 				contextWindow: 32000,
 				maxTokens: 4096,
 			} satisfies Model,
+			"mistralai/mistral-7b-instruct-v0.3": {
+				id: "mistralai/mistral-7b-instruct-v0.3",
+				name: "Mistral: Mistral 7B Instruct v0.3",
+				provider: "openrouter",
+				baseUrl: "https://openrouter.ai/api/v1",
+				reasoning: false,
+				input: ["text"],
+				cost: {
+					input: 0.028,
+					output: 0.054,
+					cacheRead: 0,
+					cacheWrite: 0,
+				},
+				contextWindow: 32768,
+				maxTokens: 16384,
+			} satisfies Model,
 			"mistralai/mistral-7b-instruct:free": {
 				id: "mistralai/mistral-7b-instruct:free",
 				name: "Mistral: Mistral 7B Instruct (free)",
@@ -1678,22 +1694,6 @@ export const PROVIDERS = {
 				contextWindow: 32768,
 				maxTokens: 16384,
 			} satisfies Model,
-			"mistralai/mistral-7b-instruct-v0.3": {
-				id: "mistralai/mistral-7b-instruct-v0.3",
-				name: "Mistral: Mistral 7B Instruct v0.3",
-				provider: "openrouter",
-				baseUrl: "https://openrouter.ai/api/v1",
-				reasoning: false,
-				input: ["text"],
-				cost: {
-					input: 0.028,
-					output: 0.054,
-					cacheRead: 0,
-					cacheWrite: 0,
-				},
-				contextWindow: 32768,
-				maxTokens: 16384,
-			} satisfies Model,
 			"microsoft/phi-3-mini-128k-instruct": {
 				id: "microsoft/phi-3-mini-128k-instruct",
 				name: "Microsoft: Phi-3 Mini 128K Instruct",
@@ -1726,22 +1726,6 @@ export const PROVIDERS = {
 				contextWindow: 128000,
 				maxTokens: 4096,
 			} satisfies Model,
-			"meta-llama/llama-3-8b-instruct": {
-				id: "meta-llama/llama-3-8b-instruct",
-				name: "Meta: Llama 3 8B Instruct",
-				provider: "openrouter",
-				baseUrl: "https://openrouter.ai/api/v1",
-				reasoning: false,
-				input: ["text"],
-				cost: {
-					input: 0.03,
-					output: 0.06,
-					cacheRead: 0,
-					cacheWrite: 0,
-				},
-				contextWindow: 8192,
-				maxTokens: 16384,
-			} satisfies Model,
 			"meta-llama/llama-3-70b-instruct": {
 				id: "meta-llama/llama-3-70b-instruct",
 				name: "Meta: Llama 3 70B Instruct",
@@ -1758,6 +1742,22 @@ export const PROVIDERS = {
 				contextWindow: 8192,
 				maxTokens: 16384,
 			} satisfies Model,
+			"meta-llama/llama-3-8b-instruct": {
+				id: "meta-llama/llama-3-8b-instruct",
+				name: "Meta: Llama 3 8B Instruct",
+				provider: "openrouter",
+				baseUrl: "https://openrouter.ai/api/v1",
+				reasoning: false,
+				input: ["text"],
+				cost: {
+					input: 0.03,
+					output: 0.06,
+					cacheRead: 0,
+					cacheWrite: 0,
+				},
+				contextWindow: 8192,
+				maxTokens: 16384,
+			} satisfies Model,
 			"mistralai/mixtral-8x22b-instruct": {
 				id: "mistralai/mixtral-8x22b-instruct",
 				name: "Mistral: Mixtral 8x22B Instruct",
@@ -1854,22 +1854,6 @@ export const PROVIDERS = {
 				contextWindow: 128000,
 				maxTokens: 4096,
 			} satisfies Model,
-			"mistralai/mistral-small": {
-				id: "mistralai/mistral-small",
-				name: "Mistral Small",
-				provider: "openrouter",
-				baseUrl: "https://openrouter.ai/api/v1",
-				reasoning: false,
-				input: ["text"],
-				cost: {
-					input: 0.19999999999999998,
-					output: 0.6,
-					cacheRead: 0,
-					cacheWrite: 0,
-				},
-				contextWindow: 32768,
-				maxTokens: 4096,
-			} satisfies Model,
 			"mistralai/mistral-tiny": {
 				id: "mistralai/mistral-tiny",
 				name: "Mistral Tiny",
@@ -1886,6 +1870,22 @@ export const PROVIDERS = {
 				contextWindow: 32768,
 				maxTokens: 4096,
 			} satisfies Model,
+			"mistralai/mistral-small": {
+				id: "mistralai/mistral-small",
+				name: "Mistral Small",
+				provider: "openrouter",
+				baseUrl: "https://openrouter.ai/api/v1",
+				reasoning: false,
+				input: ["text"],
+				cost: {
+					input: 0.19999999999999998,
+					output: 0.6,
+					cacheRead: 0,
+					cacheWrite: 0,
+				},
+				contextWindow: 32768,
+				maxTokens: 4096,
+			} satisfies Model,
 			"mistralai/mixtral-8x7b-instruct": {
 				id: "mistralai/mixtral-8x7b-instruct",
 				name: "Mistral: Mixtral 8x7B Instruct",
@@ -2473,21 +2473,6 @@ export const PROVIDERS = {
 				contextWindow: 16385,
 				maxTokens: 4096,
 			} satisfies Model,
-			"gpt-3.5-turbo": {
-				id: "gpt-3.5-turbo",
-				name: "OpenAI: GPT-3.5 Turbo",
-				provider: "openai",
-				reasoning: false,
-				input: ["text"],
-				cost: {
-					input: 0.5,
-					output: 1.5,
-					cacheRead: 0,
-					cacheWrite: 0,
-				},
-				contextWindow: 16385,
-				maxTokens: 4096,
-			} satisfies Model,
 			"gpt-4": {
 				id: "gpt-4",
 				name: "OpenAI: GPT-4",
@@ -2518,6 +2503,21 @@ export const PROVIDERS = {
 				contextWindow: 8191,
 				maxTokens: 4096,
 			} satisfies Model,
+			"gpt-3.5-turbo": {
+				id: "gpt-3.5-turbo",
+				name: "OpenAI: GPT-3.5 Turbo",
+				provider: "openai",
+				reasoning: false,
+				input: ["text"],
+				cost: {
+					input: 0.5,
+					output: 1.5,
+					cacheRead: 0,
+					cacheWrite: 0,
+				},
+				contextWindow: 16385,
+				maxTokens: 4096,
+			} satisfies Model,
 		},
 	},
 	anthropic: {
@@ -2597,9 +2597,9 @@ export const PROVIDERS = {
 				contextWindow: 200000,
 				maxTokens: 64000,
 			} satisfies Model,
-			"claude-3-5-haiku-latest": {
-				id: "claude-3-5-haiku-latest",
-				name: "Anthropic: Claude 3.5 Haiku",
+			"claude-3-5-haiku-20241022": {
+				id: "claude-3-5-haiku-20241022",
+				name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
 				provider: "anthropic",
 				reasoning: false,
 				input: ["text", "image"],
@@ -2612,9 +2612,9 @@ export const PROVIDERS = {
 				contextWindow: 200000,
 				maxTokens: 8192,
 			} satisfies Model,
-			"claude-3-5-haiku-20241022": {
-				id: "claude-3-5-haiku-20241022",
-				name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+			"claude-3-5-haiku-latest": {
+				id: "claude-3-5-haiku-latest",
+				name: "Anthropic: Claude 3.5 Haiku",
 				provider: "anthropic",
 				reasoning: false,
 				input: ["text", "image"],
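The final two hunks only swap the order of the dated and `-latest` Claude 3.5 Haiku entries; both remain available. For illustration, a sketch of how the two ids differ in practice (the alias semantics are Anthropic's convention, not something this diff changes):

```typescript
import { createLLM } from '@mariozechner/pi-ai';

// The dated id pins a specific model snapshot for reproducible behavior...
const pinned = createLLM('anthropic', 'claude-3-5-haiku-20241022', {
	apiKey: 'your-api-key'
});

// ...while the -latest alias tracks whatever revision Anthropic currently
// serves under that name.
const latest = createLLM('anthropic', 'claude-3-5-haiku-latest', {
	apiKey: 'your-api-key'
});
```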