Fix javascript-repl renderer to use console-block component

This commit is contained in:
Mario Zechner 2025-10-04 21:44:23 +02:00
parent 8212623af0
commit 9d6267a915
5 changed files with 55 additions and 34 deletions

View file

@ -1949,8 +1949,8 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.39999999999999997,
output: 2,
input: 0.39,
output: 1.9,
cacheRead: 0,
cacheWrite: 0,
},
@ -2034,8 +2034,8 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.25,
output: 1,
input: 0.3,
output: 1.2,
cacheRead: 0,
cacheWrite: 0,
},
@ -2187,8 +2187,8 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.07,
output: 0.28,
input: 0.08,
output: 0.33,
cacheRead: 0,
cacheWrite: 0,
},
@ -2204,13 +2204,13 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.38,
output: 1.5999999999999999,
input: 0.35,
output: 1.55,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 4096,
maxTokens: 131072,
} satisfies Model<"openai-completions">,
"z-ai/glm-4.5-air:free": {
id: "z-ai/glm-4.5-air:free",
@ -2544,8 +2544,8 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.04,
output: 0.14,
input: 0.05,
output: 0.22,
cacheRead: 0,
cacheWrite: 0,
},
@ -2663,8 +2663,8 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.04,
output: 0.14,
input: 0.05,
output: 0.22,
cacheRead: 0,
cacheWrite: 0,
},
@ -2680,8 +2680,8 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.03,
output: 0.13,
input: 0.05,
output: 0.2,
cacheRead: 0,
cacheWrite: 0,
},
@ -2850,13 +2850,13 @@ export const MODELS = {
reasoning: false,
input: ["text", "image"],
cost: {
input: 0.04,
output: 0.15,
input: 0.05,
output: 0.1,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 131072,
maxTokens: 131072,
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
"microsoft/phi-4-multimodal-instruct": {
id: "microsoft/phi-4-multimodal-instruct",
@ -2969,13 +2969,13 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.04,
output: 0.15,
input: 0.05,
output: 0.08,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 32768,
maxTokens: 32768,
maxTokens: 16384,
} satisfies Model<"openai-completions">,
"deepseek/deepseek-r1-distill-llama-70b": {
id: "deepseek/deepseek-r1-distill-llama-70b",
@ -3037,13 +3037,13 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.24999987999999998,
output: 0.999999888,
input: 0.3,
output: 0.85,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 163840,
maxTokens: 4096,
maxTokens: 163840,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3.3-70b-instruct:free": {
id: "meta-llama/llama-3.3-70b-instruct:free",
@ -3632,8 +3632,8 @@ export const MODELS = {
reasoning: false,
input: ["text"],
cost: {
input: 0.39999999999999997,
output: 0.39999999999999997,
input: 0.54,
output: 0.54,
cacheRead: 0,
cacheWrite: 0,
},