Mirror of https://github.com/getcompanion-ai/co-mono.git (synced 2026-04-16 13:04:08 +00:00)
docs(ai): Improve README with model discovery and capabilities documentation

- Added a note that the library only includes tool-calling-capable models
- Added a Model Discovery section showing how to enumerate models
- Added examples for finding models with specific capabilities
- Added cache read/write costs to the model capabilities display
- Clarified that models are auto-fetched from APIs at build time
parent d46a98ec10
commit dae40167a3

10 changed files with 167 additions and 108 deletions
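The README changes themselves are not part of the excerpt below, which shows only the regenerated provider catalog, but the `Model` shape the new section documents is visible in the diff. Here is a minimal sketch of the discovery patterns the message describes, assuming that shape plus a nested `models` map per provider (implied by the closing braces before `anthropic:` in the diff); the `listModels` helper is illustrative, not the library's confirmed API:

```typescript
// Sketch only: the Model fields mirror the diff below; the nested
// `models` map and the listModels helper are assumptions, not the
// library's confirmed API.
interface Model {
  id: string;
  name: string;
  provider: string;
  baseUrl?: string; // present on the openrouter entries, absent on openai
  reasoning: boolean;
  input: ("text" | "image")[];
  cost: {
    input: number;
    output: number;
    cacheRead: number;  // cacheRead/cacheWrite are the costs now
    cacheWrite: number; // surfaced in the README capabilities display
  };
  contextWindow: number;
  maxTokens: number;
}

declare const PROVIDERS: Record<string, { models: Record<string, Model> }>;

// Enumerate every model across all providers.
function listModels(): Model[] {
  return Object.values(PROVIDERS).flatMap((p) => Object.values(p.models));
}

// Find models with specific capabilities; a tool-calling filter is
// unnecessary because the catalog only includes tool-calling-capable models.
const visionModels = listModels().filter((m) => m.input.includes("image"));
const reasoningModels = listModels().filter((m) => m.reasoning);
```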
```diff
@@ -1598,22 +1598,6 @@ export const PROVIDERS = {
       contextWindow: 131072,
       maxTokens: 16384,
     } satisfies Model,
-    "meta-llama/llama-3.1-405b-instruct": {
-      id: "meta-llama/llama-3.1-405b-instruct",
-      name: "Meta: Llama 3.1 405B Instruct",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.7999999999999999,
-        output: 0.7999999999999999,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 32768,
-      maxTokens: 16384,
-    } satisfies Model,
     "meta-llama/llama-3.1-70b-instruct": {
       id: "meta-llama/llama-3.1-70b-instruct",
       name: "Meta: Llama 3.1 70B Instruct",
@@ -1630,6 +1614,22 @@ export const PROVIDERS = {
       contextWindow: 131072,
       maxTokens: 16384,
     } satisfies Model,
+    "meta-llama/llama-3.1-405b-instruct": {
+      id: "meta-llama/llama-3.1-405b-instruct",
+      name: "Meta: Llama 3.1 405B Instruct",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.7999999999999999,
+        output: 0.7999999999999999,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 32768,
+      maxTokens: 16384,
+    } satisfies Model,
     "mistralai/mistral-nemo": {
       id: "mistralai/mistral-nemo",
       name: "Mistral: Mistral Nemo",
@@ -1646,6 +1646,22 @@ export const PROVIDERS = {
       contextWindow: 32000,
       maxTokens: 4096,
     } satisfies Model,
+    "mistralai/mistral-7b-instruct-v0.3": {
+      id: "mistralai/mistral-7b-instruct-v0.3",
+      name: "Mistral: Mistral 7B Instruct v0.3",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.028,
+        output: 0.054,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 32768,
+      maxTokens: 16384,
+    } satisfies Model,
     "mistralai/mistral-7b-instruct:free": {
       id: "mistralai/mistral-7b-instruct:free",
       name: "Mistral: Mistral 7B Instruct (free)",
@@ -1678,22 +1694,6 @@ export const PROVIDERS = {
       contextWindow: 32768,
       maxTokens: 16384,
     } satisfies Model,
-    "mistralai/mistral-7b-instruct-v0.3": {
-      id: "mistralai/mistral-7b-instruct-v0.3",
-      name: "Mistral: Mistral 7B Instruct v0.3",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.028,
-        output: 0.054,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 32768,
-      maxTokens: 16384,
-    } satisfies Model,
     "microsoft/phi-3-mini-128k-instruct": {
       id: "microsoft/phi-3-mini-128k-instruct",
       name: "Microsoft: Phi-3 Mini 128K Instruct",
@@ -1726,22 +1726,6 @@ export const PROVIDERS = {
       contextWindow: 128000,
       maxTokens: 4096,
     } satisfies Model,
-    "meta-llama/llama-3-8b-instruct": {
-      id: "meta-llama/llama-3-8b-instruct",
-      name: "Meta: Llama 3 8B Instruct",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.03,
-        output: 0.06,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 8192,
-      maxTokens: 16384,
-    } satisfies Model,
     "meta-llama/llama-3-70b-instruct": {
       id: "meta-llama/llama-3-70b-instruct",
       name: "Meta: Llama 3 70B Instruct",
@@ -1758,6 +1742,22 @@ export const PROVIDERS = {
       contextWindow: 8192,
       maxTokens: 16384,
     } satisfies Model,
+    "meta-llama/llama-3-8b-instruct": {
+      id: "meta-llama/llama-3-8b-instruct",
+      name: "Meta: Llama 3 8B Instruct",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.03,
+        output: 0.06,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 8192,
+      maxTokens: 16384,
+    } satisfies Model,
     "mistralai/mixtral-8x22b-instruct": {
       id: "mistralai/mixtral-8x22b-instruct",
       name: "Mistral: Mixtral 8x22B Instruct",
@@ -1854,22 +1854,6 @@ export const PROVIDERS = {
       contextWindow: 128000,
       maxTokens: 4096,
     } satisfies Model,
-    "mistralai/mistral-small": {
-      id: "mistralai/mistral-small",
-      name: "Mistral Small",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.19999999999999998,
-        output: 0.6,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 32768,
-      maxTokens: 4096,
-    } satisfies Model,
     "mistralai/mistral-tiny": {
       id: "mistralai/mistral-tiny",
       name: "Mistral Tiny",
@@ -1886,6 +1870,22 @@ export const PROVIDERS = {
       contextWindow: 32768,
       maxTokens: 4096,
     } satisfies Model,
+    "mistralai/mistral-small": {
+      id: "mistralai/mistral-small",
+      name: "Mistral Small",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.19999999999999998,
+        output: 0.6,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 32768,
+      maxTokens: 4096,
+    } satisfies Model,
     "mistralai/mixtral-8x7b-instruct": {
       id: "mistralai/mixtral-8x7b-instruct",
       name: "Mistral: Mixtral 8x7B Instruct",
@@ -2473,21 +2473,6 @@ export const PROVIDERS = {
       contextWindow: 16385,
       maxTokens: 4096,
     } satisfies Model,
-    "gpt-3.5-turbo": {
-      id: "gpt-3.5-turbo",
-      name: "OpenAI: GPT-3.5 Turbo",
-      provider: "openai",
-      reasoning: false,
-      input: ["text"],
-      cost: {
-        input: 0.5,
-        output: 1.5,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 16385,
-      maxTokens: 4096,
-    } satisfies Model,
     "gpt-4": {
       id: "gpt-4",
       name: "OpenAI: GPT-4",
@@ -2518,6 +2503,21 @@ export const PROVIDERS = {
       contextWindow: 8191,
       maxTokens: 4096,
     } satisfies Model,
+    "gpt-3.5-turbo": {
+      id: "gpt-3.5-turbo",
+      name: "OpenAI: GPT-3.5 Turbo",
+      provider: "openai",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.5,
+        output: 1.5,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 16385,
+      maxTokens: 4096,
+    } satisfies Model,
   },
  },
  anthropic: {
@@ -2597,9 +2597,9 @@ export const PROVIDERS = {
       contextWindow: 200000,
       maxTokens: 64000,
     } satisfies Model,
-    "claude-3-5-haiku-latest": {
-      id: "claude-3-5-haiku-latest",
-      name: "Anthropic: Claude 3.5 Haiku",
+    "claude-3-5-haiku-20241022": {
+      id: "claude-3-5-haiku-20241022",
+      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
       provider: "anthropic",
       reasoning: false,
       input: ["text", "image"],
@@ -2612,9 +2612,9 @@ export const PROVIDERS = {
       contextWindow: 200000,
       maxTokens: 8192,
     } satisfies Model,
-    "claude-3-5-haiku-20241022": {
-      id: "claude-3-5-haiku-20241022",
-      name: "Anthropic: Claude 3.5 Haiku (2024-10-22)",
+    "claude-3-5-haiku-latest": {
+      id: "claude-3-5-haiku-latest",
+      name: "Anthropic: Claude 3.5 Haiku",
       provider: "anthropic",
       reasoning: false,
       input: ["text", "image"],
```