feat(tui): overlay positioning API with CSS-like values

Add OverlayOptions for configurable positioning (anchor, margins, offsets,
percentages). Add OverlayHandle for programmatic visibility control with
hide/setHidden/isHidden. Add visible callback for responsive overlays.

Extension API: ctx.ui.custom() now accepts overlayOptions and onHandle callback.

Examples: overlay-qa-tests.ts (10 test commands), doom-overlay (DOOM at 35 FPS).
This commit is contained in:
Nico Bailon 2026-01-12 22:12:56 -08:00
parent d29f268f46
commit a4ccff382c
22 changed files with 1344 additions and 103 deletions

View file

@@ -3643,7 +3643,7 @@ export const MODELS = {
cacheWrite: 18.75,
},
contextWindow: 200000,
maxTokens: 4096,
maxTokens: 32000,
} satisfies Model<"openai-completions">,
"anthropic/claude-opus-4.5": {
id: "anthropic/claude-opus-4.5",
@@ -3660,7 +3660,7 @@ export const MODELS = {
cacheWrite: 6.25,
},
contextWindow: 200000,
maxTokens: 32000,
maxTokens: 64000,
} satisfies Model<"openai-completions">,
"anthropic/claude-sonnet-4": {
id: "anthropic/claude-sonnet-4",
@@ -3977,13 +3977,13 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.39999999999999997,
output: 1.75,
input: 0.44999999999999996,
output: 2.1500000000000004,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 163840,
maxTokens: 65536,
contextWindow: 131072,
maxTokens: 32768,
} satisfies Model<"openai-completions">,
"deepseek/deepseek-r1-distill-llama-70b": {
id: "deepseek/deepseek-r1-distill-llama-70b",
@@ -4359,23 +4359,6 @@ export const MODELS = {
contextWindow: 256000,
maxTokens: 128000,
} satisfies Model<"openai-completions">,
"kwaipilot/kat-coder-pro:free": {
id: "kwaipilot/kat-coder-pro:free",
name: "Kwaipilot: KAT-Coder-Pro V1 (free)",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: false,
input: ["text"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 256000,
maxTokens: 128000,
} satisfies Model<"openai-completions">,
"meta-llama/llama-3-70b-instruct": {
id: "meta-llama/llama-3-70b-instruct",
name: "Meta: Llama 3 70B Instruct",
@@ -4589,13 +4572,13 @@ export const MODELS = {
reasoning: true,
input: ["text"],
cost: {
input: 0.28,
output: 1.2,
cacheRead: 0.14,
input: 0.27,
output: 1.12,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 196608,
maxTokens: 4096,
maxTokens: 65536,
} satisfies Model<"openai-completions">,
"mistralai/codestral-2508": {
id: "mistralai/codestral-2508",

Binary file not shown.

After

Width:  |  Height:  |  Size: 321 B