first commit

This commit is contained in:
Harivansh Rathi 2024-12-14 00:37:18 -05:00
commit 1cdbffff09
200 changed files with 30007 additions and 0 deletions

29
app/utils/buffer.ts Normal file
View file

@@ -0,0 +1,29 @@
/**
 * Batches calls to the returned collector and flushes them to `cb` at most
 * once per `timeInMs`. Batches are handled strictly in order: the next flush
 * waits for the promise returned by the previous `cb` invocation to settle.
 *
 * @param timeInMs Window during which incoming events are accumulated.
 * @param cb Receives all argument tuples collected for one batch.
 * @returns A collector function; each call adds one event to the pending batch.
 */
export function bufferWatchEvents<T extends unknown[]>(timeInMs: number, cb: (events: T[]) => unknown) {
  let pendingTimer: number | undefined;
  let batch: T[] = [];

  // Tracks processing of the previous batch so batches never run concurrently.
  let previousBatch: Promise<unknown> = Promise.resolve();

  const flushLater = () => {
    pendingTimer = self.setTimeout(async () => {
      // Wait for the prior batch so events are always processed in order.
      await previousBatch;

      if (batch.length > 0) {
        previousBatch = Promise.resolve(cb(batch));
      }

      pendingTimer = undefined;
      batch = [];
    }, timeInMs);
  };

  return (...args: T) => {
    batch.push(args);

    if (!pendingTimer) {
      flushLater();
    }
  };
}

61
app/utils/classNames.ts Normal file
View file

@@ -0,0 +1,61 @@
/**
* Copyright (c) 2018 Jed Watson.
* Licensed under the MIT License (MIT), see:
*
* @link http://jedwatson.github.io/classnames
*/
type ClassNamesArg = undefined | string | Record<string, boolean> | ClassNamesArg[];
/**
 * Conditionally joins class names into a single space-separated string.
 *
 * Accepts strings, nested arrays, and objects whose keys are class names and
 * whose truthy values opt the class in; nullish and falsy entries are skipped.
 *
 * @param args A series of classes, arrays of classes, or objects mapping class
 *             names to booleans that decide inclusion.
 */
export function classNames(...args: ClassNamesArg[]): string {
  let joined = '';

  for (const arg of args) {
    joined = appendClass(joined, parseValue(arg));
  }

  return joined;
}

/** Resolves one argument to its class-name contribution (may be empty). */
function parseValue(arg: ClassNamesArg) {
  if (typeof arg === 'string' || typeof arg === 'number') {
    return arg;
  }

  if (typeof arg !== 'object') {
    return '';
  }

  if (Array.isArray(arg)) {
    // Arrays are flattened by recursing into classNames itself.
    return classNames(...arg);
  }

  // Plain object: include every key whose value is truthy.
  let resolved = '';

  for (const key in arg) {
    if (arg[key]) {
      resolved = appendClass(resolved, key);
    }
  }

  return resolved;
}

/** Appends `newClass` to `value`, inserting a separating space when needed. */
function appendClass(value: string, newClass: string | undefined) {
  if (!newClass) {
    return value;
  }

  return value ? `${value} ${newClass}` : value + newClass;
}

525
app/utils/constants.ts Normal file
View file

@@ -0,0 +1,525 @@
import Cookies from 'js-cookie';
import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
import type { ProviderInfo, IProviderSetting } from '~/types/model';
import { createScopedLogger } from './logger';
// Name of the project folder inside the sandboxed container.
export const WORK_DIR_NAME = 'project';

// Absolute path all generated project files live under.
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;

// Tag name wrapping file-modification payloads sent to the LLM (see diff.ts).
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';

// Extracts a leading "[Model: ...]" prefix from a message.
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;

// Extracts a "[Provider: ...]" prefix from a message.
export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

// Model used when the user has not chosen one.
export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';

// Cookie key under which the last prompt is cached.
export const PROMPT_COOKIE_KEY = 'cachedPrompt';

const logger = createScopedLogger('Constants');
/*
 * Registry of every LLM provider the app can offer. `staticModels` are
 * hard-coded entries; providers that also expose `getDynamicModels` fetch a
 * live list at runtime (aggregated by getModelList below).
 *
 * Fix: the HuggingFace section previously listed four models twice
 * (Qwen2.5-Coder-32B-Instruct, Yi-1.5-34B-Chat, CodeLlama-34b-Instruct-hf and
 * Hermes-3-Llama-3.1-8B); the duplicate entries are removed so each model
 * appears exactly once in the picker.
 */
const PROVIDER_LIST: ProviderInfo[] = [
  {
    name: 'Anthropic',
    staticModels: [
      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 },
      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
      { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
      { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://console.anthropic.com/settings/keys',
  },
  {
    name: 'Ollama',
    staticModels: [],
    getDynamicModels: getOllamaModels,
    getApiKeyLink: 'https://ollama.com/download',
    labelForGetApiKey: 'Download Ollama',
    icon: 'i-ph:cloud-arrow-down',
  },
  {
    name: 'OpenAILike',
    staticModels: [],
    getDynamicModels: getOpenAILikeModels,
  },
  {
    name: 'Cohere',
    staticModels: [
      { name: 'command-r-plus-08-2024', label: 'Command R plus Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-r-08-2024', label: 'Command R Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-r-plus', label: 'Command R plus', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-r', label: 'Command R', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command', label: 'Command', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-nightly', label: 'Command Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-light', label: 'Command Light', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'command-light-nightly', label: 'Command Light Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
      { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
    ],
    getApiKeyLink: 'https://dashboard.cohere.com/api-keys',
  },
  {
    name: 'OpenRouter',
    staticModels: [
      // NOTE(review): this entry's provider is 'OpenAI' in the original data — kept as-is; confirm intent.
      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
      { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 },
    ],
    getDynamicModels: getOpenRouterModels,
    getApiKeyLink: 'https://openrouter.ai/settings/keys',
  },
  {
    name: 'Google',
    staticModels: [
      { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-2.0-flash-exp', label: 'Gemini 2.0 Flash', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-1.5-flash-002', label: 'Gemini 1.5 Flash-002', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
      { name: 'gemini-exp-1206', label: 'Gemini exp-1206', provider: 'Google', maxTokenAllowed: 8192 },
    ],
    getApiKeyLink: 'https://aistudio.google.com/app/apikey',
  },
  {
    name: 'Groq',
    staticModels: [
      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
      { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
      { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
      { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://console.groq.com/keys',
  },
  {
    name: 'HuggingFace',
    staticModels: [
      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: 'Qwen/Qwen2.5-72B-Instruct', label: 'Qwen2.5-72B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: 'meta-llama/Llama-3.1-70B-Instruct', label: 'Llama-3.1-70B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
      { name: 'meta-llama/Llama-3.1-405B', label: 'Llama-3.1-405B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://huggingface.co/settings/tokens',
  },
  {
    name: 'OpenAI',
    staticModels: [
      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
      { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
      { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://platform.openai.com/api-keys',
  },
  {
    name: 'xAI',
    staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }],
    getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key',
  },
  {
    name: 'Deepseek',
    staticModels: [
      { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://platform.deepseek.com/apiKeys',
  },
  {
    name: 'Mistral',
    staticModels: [
      { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
    ],
    getApiKeyLink: 'https://console.mistral.ai/api-keys/',
  },
  {
    name: 'LMStudio',
    staticModels: [],
    getDynamicModels: getLMStudioModels,
    getApiKeyLink: 'https://lmstudio.ai/',
    labelForGetApiKey: 'Get LMStudio',
    icon: 'i-ph:cloud-arrow-down',
  },
  {
    name: 'Together',
    getDynamicModels: getTogetherModels,
    staticModels: [
      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen/Qwen2.5-Coder-32B-Instruct', provider: 'Together', maxTokenAllowed: 8000 },
      { name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo', label: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo', provider: 'Together', maxTokenAllowed: 8000 },
      { name: 'mistralai/Mixtral-8x7B-Instruct-v0.1', label: 'Mixtral 8x7B Instruct', provider: 'Together', maxTokenAllowed: 8192 },
    ],
    getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
  },
];
// First entry in the registry (Anthropic) is the default provider.
export const DEFAULT_PROVIDER = PROVIDER_LIST[0];

// Every hard-coded model across all providers.
const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat();

// Mutable aggregate of static + dynamically fetched models; refreshed by getModelList().
export let MODEL_LIST: ModelInfo[] = [...staticModels];
/**
 * Refreshes MODEL_LIST by fetching every provider's dynamic models in parallel
 * and appending the static catalogue.
 *
 * @param apiKeys Per-provider API keys forwarded to the dynamic fetchers.
 * @param providerSettings Optional per-provider settings (e.g. base URLs).
 * @returns The refreshed MODEL_LIST.
 */
export async function getModelList(
  apiKeys: Record<string, string>,
  providerSettings?: Record<string, IProviderSetting>,
) {
  const dynamicProviders = PROVIDER_LIST.filter(
    (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
  );

  const dynamicModels = await Promise.all(
    dynamicProviders.map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
  );

  MODEL_LIST = [...dynamicModels.flat(), ...staticModels];

  return MODEL_LIST;
}
/**
 * Fetches the chat-capable model list from a Together-compatible endpoint.
 * Returns [] when no base URL or API key is configured, or on any fetch error.
 */
async function getTogetherModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
  try {
    const baseUrl = settings?.baseUrl || import.meta.env.TOGETHER_API_BASE_URL || '';
    const provider = 'Together';

    if (!baseUrl) {
      return [];
    }

    // NOTE(review): falls back to OPENAI_LIKE_API_KEY — looks copied from the
    // OpenAILike fetcher; confirm whether a dedicated Together key was intended.
    let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';

    if (apiKeys && apiKeys[provider]) {
      apiKey = apiKeys[provider];
    }

    if (!apiKey) {
      return [];
    }

    const response = await fetch(`${baseUrl}/models`, {
      headers: {
        Authorization: `Bearer ${apiKey}`,
      },
    });
    const res = (await response.json()) as any;

    // Only chat models are usable here; strict equality instead of the original `==`.
    const data: any[] = (res || []).filter((model: any) => model.type === 'chat');

    return data.map((m: any) => ({
      name: m.id,
      label: `${m.display_name} - in:$${m.pricing.input.toFixed(
        2,
      )} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
      provider,
      maxTokenAllowed: 8000,
    }));
  } catch (e) {
    // Fix: the original message said "OpenAILike" here (copy-paste error).
    console.error('Error getting Together models:', e);
    return [];
  }
}
/**
 * Resolves the Ollama base URL: settings override, then env, then localhost.
 * On the backend inside Docker, `localhost` is rewritten to
 * `host.docker.internal` so the container can reach the host daemon.
 */
const getOllamaBaseUrl = (settings?: IProviderSetting) => {
  const configuredUrl = settings?.baseUrl || import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

  // In the browser we always use the configured URL directly.
  if (typeof window !== 'undefined') {
    return configuredUrl;
  }

  if (process.env.RUNNING_IN_DOCKER === 'true') {
    return configuredUrl.replace('localhost', 'host.docker.internal');
  }

  return configuredUrl;
};
/**
 * Queries the local Ollama daemon for installed models via /api/tags.
 * Returns [] (after a warning) when the daemon is unreachable.
 */
async function getOllamaModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
  try {
    const response = await fetch(`${getOllamaBaseUrl(settings)}/api/tags`);
    const payload = (await response.json()) as OllamaApiResponse;

    return payload.models.map((model: OllamaModel) => ({
      name: model.name,
      label: `${model.name} (${model.details.parameter_size})`,
      provider: 'Ollama',
      maxTokenAllowed: 8000,
    }));
  } catch (e: any) {
    logger.warn('Failed to get Ollama models: ', e.message || '');
    return [];
  }
}
/**
 * Lists models from an OpenAI-compatible endpoint configured via settings or
 * the OPENAI_LIKE_API_BASE_URL env var. Returns [] when unconfigured or on error.
 */
async function getOpenAILikeModels(
  apiKeys?: Record<string, string>,
  settings?: IProviderSetting,
): Promise<ModelInfo[]> {
  try {
    const baseUrl = settings?.baseUrl || import.meta.env.OPENAI_LIKE_API_BASE_URL || '';

    if (!baseUrl) {
      return [];
    }

    const apiKey = (apiKeys && apiKeys.OpenAILike) || '';

    const response = await fetch(`${baseUrl}/models`, {
      headers: {
        Authorization: `Bearer ${apiKey}`,
      },
    });
    const payload = (await response.json()) as any;

    return payload.data.map((model: any) => ({
      name: model.id,
      label: model.id,
      provider: 'OpenAILike',
    }));
  } catch (e) {
    console.error('Error getting OpenAILike models:', e);
    return [];
  }
}
// Shape of the OpenRouter /api/v1/models response (only the fields we read).
type OpenRouterModelsResponse = {
  data: {
    name: string;
    id: string;
    context_length: number;
    pricing: {
      // Prices are per token; multiplied by 1M for display in getOpenRouterModels.
      prompt: number;
      completion: number;
    };
  }[];
};
/**
 * Fetches the public OpenRouter model catalogue and formats labels with
 * per-million-token pricing and context size, sorted by model name.
 */
async function getOpenRouterModels(): Promise<ModelInfo[]> {
  const response = await fetch('https://openrouter.ai/api/v1/models', {
    headers: {
      'Content-Type': 'application/json',
    },
  });
  const payload = (await response.json()) as OpenRouterModelsResponse;

  return payload.data
    .sort((a, b) => a.name.localeCompare(b.name))
    .map((m) => ({
      name: m.id,
      label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
        2,
      )} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
      provider: 'OpenRouter',
      maxTokenAllowed: 8000,
    }));
}
/**
 * Lists models from a local LM Studio server (browser only — the backend
 * cannot reach the user's localhost). Returns [] after a warning on failure.
 */
async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
  if (typeof window === 'undefined') {
    return [];
  }

  try {
    const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
    const response = await fetch(`${baseUrl}/v1/models`);
    const payload = (await response.json()) as any;

    return payload.data.map((model: any) => ({
      name: model.id,
      label: model.id,
      provider: 'LMStudio',
    }));
  } catch (e: any) {
    logger.warn('Failed to get LMStudio models: ', e.message || '');
    return [];
  }
}
async function initializeModelList(providerSettings?: Record<string, IProviderSetting>): Promise<ModelInfo[]> {
let apiKeys: Record<string, string> = {};
try {
const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys);
if (typeof parsedKeys === 'object' && parsedKeys !== null) {
apiKeys = parsedKeys;
}
}
} catch (error: any) {
logger.warn(`Failed to fetch apikeys from cookies: ${error?.message}`);
}
MODEL_LIST = [
...(
await Promise.all(
PROVIDER_LIST.filter(
(p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
)
).flat(),
...staticModels,
];
return MODEL_LIST;
}
export {
getOllamaModels,
getOpenAILikeModels,
getLMStudioModels,
initializeModelList,
getOpenRouterModels,
PROVIDER_LIST,
};

17
app/utils/debounce.ts Normal file
View file

@@ -0,0 +1,17 @@
/**
 * Returns a debounced wrapper around `fn` that fires only after `delay` ms of
 * inactivity; with `delay === 0` the original function is returned untouched.
 * The wrapper preserves the caller's `this` binding.
 */
export function debounce<Args extends any[]>(fn: (...args: Args) => void, delay = 100) {
  // Zero delay means "no debouncing": hand the function back untouched.
  if (delay === 0) {
    return fn;
  }

  let pending: number | undefined;

  return function <U>(this: U, ...args: Args) {
    clearTimeout(pending);

    // Arrow function inherits `this` from the wrapper, matching the original binding.
    pending = window.setTimeout(() => {
      fn.apply(this, args);
    }, delay);
  };
}

11
app/utils/diff.spec.ts Normal file
View file

@@ -0,0 +1,11 @@
import { describe, expect, it } from 'vitest';
import { extractRelativePath } from './diff';
import { WORK_DIR } from './constants';
// Verifies extractRelativePath strips the WORK_DIR prefix from absolute paths.
describe('Diff', () => {
  it('should strip out Work_dir', () => {
    const filePath = `${WORK_DIR}/index.js`;
    const result = extractRelativePath(filePath);
    expect(result).toBe('index.js');
  });
});

117
app/utils/diff.ts Normal file
View file

@@ -0,0 +1,117 @@
import { createTwoFilesPatch } from 'diff';
import type { FileMap } from '~/lib/stores/files';
import { MODIFICATIONS_TAG_NAME, WORK_DIR } from './constants';
// Matches (and allows stripping of) a leading <bolt_file_modifications>...</...> block.
export const modificationsRegex = new RegExp(
  `^<${MODIFICATIONS_TAG_NAME}>[\\s\\S]*?<\\/${MODIFICATIONS_TAG_NAME}>\\s+`,
  'g',
);

// A single modified file: either a unified diff or the full file content.
interface ModifiedFile {
  type: 'diff' | 'file';
  content: string;
}

// Map from file path to its modification payload.
type FileModifications = Record<string, ModifiedFile>;
/**
 * Computes the modification payload for each tracked file: a unified diff when
 * it is smaller than the file, otherwise the full new content. Returns
 * undefined when nothing actually changed.
 *
 * @param files Current file map (only entries of type 'file' are considered).
 * @param modifiedFiles Map from file path to its original (pre-edit) content.
 */
export function computeFileModifications(files: FileMap, modifiedFiles: Map<string, string>) {
  const result: FileModifications = {};
  let anyChanges = false;

  for (const [filePath, originalContent] of modifiedFiles) {
    const file = files[filePath];

    if (file?.type !== 'file') {
      continue;
    }

    const unifiedDiff = diffFiles(filePath, originalContent, file.content);

    if (!unifiedDiff) {
      // Contents are identical — nothing to record.
      continue;
    }

    anyChanges = true;

    // Send whichever representation is smaller to the LLM.
    result[filePath] =
      unifiedDiff.length > file.content.length
        ? { type: 'file', content: file.content }
        : { type: 'diff', content: unifiedDiff };
  }

  return anyChanges ? result : undefined;
}
/**
 * Computes a unified-format diff with the header stripped: since both sides
 * are versions of the same file, the `--- / +++` header carries no information
 * and omitting it saves tokens sent to the LLM.
 *
 * @see https://www.gnu.org/software/diffutils/manual/html_node/Unified-Format.html
 * @returns The header-less diff, or undefined when the files are identical.
 */
export function diffFiles(fileName: string, oldFileContent: string, newFileContent: string) {
  const patch = createTwoFilesPatch(fileName, fileName, oldFileContent, newFileContent);

  const header = `--- ${fileName}\n+++ ${fileName}\n`;
  const headerIndex = patch.indexOf(header);
  const body = headerIndex >= 0 ? patch.slice(headerIndex + header.length) : patch;

  return body === '' ? undefined : body;
}
// Matches a leading "<WORK_DIR>/" prefix on an absolute path.
const regex = new RegExp(`^${WORK_DIR}\/`);

/**
 * Strips the work-directory prefix from `filePath`, yielding a project-relative path.
 */
export function extractRelativePath(filePath: string) {
  return filePath.replace(regex, '');
}
/**
 * Serializes modifications into the HTML-ish wire format sent to the LLM.
 *
 * Example:
 *
 * ```html
 * <bolt_file_modifications>
 * <diff path="/home/project/index.js">
 * - console.log('Hello, World!');
 * + console.log('Hello, Bolt!');
 * </diff>
 * </bolt_file_modifications>
 * ```
 *
 * @returns The serialized block, or undefined when there are no modifications.
 */
export function fileModificationsToHTML(modifications: FileModifications) {
  const entries = Object.entries(modifications);

  if (entries.length === 0) {
    return undefined;
  }

  const parts: string[] = [`<${MODIFICATIONS_TAG_NAME}>`];

  for (const [filePath, { type, content }] of entries) {
    // JSON.stringify quotes (and escapes) the path attribute value.
    parts.push(`<${type} path=${JSON.stringify(filePath)}>`, content, `</${type}>`);
  }

  parts.push(`</${MODIFICATIONS_TAG_NAME}>`);

  return parts.join('\n');
}

3
app/utils/easings.ts Normal file
View file

@@ -0,0 +1,3 @@
import { cubicBezier } from 'framer-motion';
// Shared cubic-bezier(0.4, 0, 0.2, 1) ease-in-out curve used across the app's animations.
export const cubicEasingFn = cubicBezier(0.4, 0, 0.2, 1);

105
app/utils/fileUtils.ts Normal file
View file

@@ -0,0 +1,105 @@
import ignore from 'ignore';
// Common patterns to ignore, similar to .gitignore
// Glob patterns excluded from folder imports, mirroring a typical .gitignore.
export const IGNORE_PATTERNS = [
  'node_modules/**',
  '.git/**',
  'dist/**',
  'build/**',
  '.next/**',
  'coverage/**',
  '.cache/**',
  '.vscode/**',
  '.idea/**',
  '**/*.log',
  '**/.DS_Store',
  '**/npm-debug.log*',
  '**/yarn-debug.log*',
  '**/yarn-error.log*',
];

// Upper bound on how many files a single import may contain.
export const MAX_FILES = 1000;

// Shared matcher instance built from the ignore patterns above.
export const ig = ignore().add(IGNORE_PATTERNS);

// Random 13-character base-36 id (not cryptographically secure).
export const generateId = () => Math.random().toString(36).substring(2, 15);
/**
 * Heuristically detects binary files by inspecting the first 1 KiB: a NUL
 * byte or any control character other than tab/LF/CR marks the file binary.
 */
export const isBinaryFile = async (file: File): Promise<boolean> => {
  const sample = new Uint8Array(await file.slice(0, 1024).arrayBuffer());

  return sample.some((byte) => byte === 0 || (byte < 32 && byte !== 9 && byte !== 10 && byte !== 13));
};
// True when `path` is not excluded by IGNORE_PATTERNS.
export const shouldIncludeFile = (path: string): boolean => {
  return !ig.ignores(path);
};
/**
 * Finds and parses package.json among the imported files (browser FileReader API).
 * Returns null when the file is absent or cannot be read/parsed.
 */
const readPackageJson = async (files: File[]): Promise<{ scripts?: Record<string, string> } | null> => {
  const packageJsonFile = files.find((f) => f.webkitRelativePath.endsWith('package.json'));

  if (!packageJsonFile) {
    return null;
  }

  try {
    const reader = new FileReader();

    const content = await new Promise<string>((resolve, reject) => {
      reader.onload = () => resolve(reader.result as string);
      reader.onerror = reject;
      reader.readAsText(packageJsonFile);
    });

    return JSON.parse(content);
  } catch (error) {
    console.error('Error reading package.json:', error);
    return null;
  }
};
/**
 * Infers the project type and a setup command from imported files:
 * Node.js when a package.json exists (preferring its dev/start/preview script),
 * a static site when an index.html exists, otherwise empty results.
 */
export const detectProjectType = async (
  files: File[],
): Promise<{ type: string; setupCommand: string; followupMessage: string }> => {
  const containsFile = (name: string) => files.some((f) => f.webkitRelativePath.endsWith(name));

  if (containsFile('package.json')) {
    const packageJson = await readPackageJson(files);
    const scripts = packageJson?.scripts || {};

    // Pick the first runnable script in priority order.
    for (const cmd of ['dev', 'start', 'preview']) {
      if (scripts[cmd]) {
        return {
          type: 'Node.js',
          setupCommand: `npm install && npm run ${cmd}`,
          followupMessage: `Found "${cmd}" script in package.json. Running "npm run ${cmd}" after installation.`,
        };
      }
    }

    return {
      type: 'Node.js',
      setupCommand: 'npm install',
      followupMessage:
        'Would you like me to inspect package.json to determine the available scripts for running this project?',
    };
  }

  if (containsFile('index.html')) {
    return {
      type: 'Static',
      setupCommand: 'npx --yes serve',
      followupMessage: '',
    };
  }

  return { type: '', setupCommand: '', followupMessage: '' };
};

68
app/utils/folderImport.ts Normal file
View file

@@ -0,0 +1,68 @@
import type { Message } from 'ai';
import { generateId } from './fileUtils';
import { detectProjectCommands, createCommandsMessage } from './projectCommands';
/**
 * Builds the initial chat transcript for a folder import: a synthetic user
 * request, an assistant message embedding every readable file as a boltAction
 * artifact, and (when detected) a setup-commands message.
 *
 * @param files Text files selected by the user (read via FileReader).
 * @param binaryFiles Paths that were skipped as binary; listed in the message.
 * @param folderName Display name of the imported folder.
 * @returns Messages in the order [user, files, commands?].
 */
export const createChatFromFolder = async (
  files: File[],
  binaryFiles: string[],
  folderName: string,
): Promise<Message[]> => {
  // Read all files in parallel; paths drop the leading (root) folder segment.
  const fileArtifacts = await Promise.all(
    files.map(async (file) => {
      return new Promise<{ content: string; path: string }>((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => {
          const content = reader.result as string;
          const relativePath = file.webkitRelativePath.split('/').slice(1).join('/');
          resolve({
            content,
            path: relativePath,
          });
        };
        reader.onerror = reject;
        reader.readAsText(file);
      });
    }),
  );

  const commands = await detectProjectCommands(fileArtifacts);
  const commandsMessage = createCommandsMessage(commands);

  // Optional note listing binary files that were not imported.
  const binaryFilesMessage =
    binaryFiles.length > 0
      ? `\n\nSkipped ${binaryFiles.length} binary files:\n${binaryFiles.map((f) => `- ${f}`).join('\n')}`
      : '';

  // NOTE: the template layout below is part of the wire format — do not reindent.
  const filesMessage: Message = {
    role: 'assistant',
    content: `I've imported the contents of the "${folderName}" folder.${binaryFilesMessage}
<boltArtifact id="imported-files" title="Imported Files">
${fileArtifacts
.map(
(file) => `<boltAction type="file" filePath="${file.path}">
${file.content}
</boltAction>`,
)
.join('\n\n')}
</boltArtifact>`,
    id: generateId(),
    createdAt: new Date(),
  };

  const userMessage: Message = {
    role: 'user',
    id: generateId(),
    content: `Import the "${folderName}" folder`,
    createdAt: new Date(),
  };

  const messages = [userMessage, filesMessage];

  if (commandsMessage) {
    messages.push(commandsMessage);
  }

  return messages;
};

112
app/utils/logger.ts Normal file
View file

@@ -0,0 +1,112 @@
export type DebugLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error';

type LoggerFunction = (...messages: any[]) => void;

interface Logger {
  trace: LoggerFunction;
  debug: LoggerFunction;
  info: LoggerFunction;
  warn: LoggerFunction;
  error: LoggerFunction;
  setLevel: (level: DebugLevel) => void;
}

/*
 * Fix: the original expression `(VITE_LOG_LEVEL ?? DEV) ? 'debug' : 'info'`
 * used VITE_LOG_LEVEL only as a truthy flag, so e.g. VITE_LOG_LEVEL=info still
 * produced 'debug'. The env var is the level itself; DEV only selects the
 * fallback when no level is configured.
 */
let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? (import.meta.env.DEV ? 'debug' : 'info');

// HTMLRewriter is only present in the Workers runtime — assume its console
// does not support %c color styling there.
const isWorker = 'HTMLRewriter' in globalThis;
const supportsColor = !isWorker;
// Default, unscoped logger instance.
export const logger: Logger = {
  trace: (...messages: any[]) => log('trace', undefined, messages),
  debug: (...messages: any[]) => log('debug', undefined, messages),
  info: (...messages: any[]) => log('info', undefined, messages),
  warn: (...messages: any[]) => log('warn', undefined, messages),
  error: (...messages: any[]) => log('error', undefined, messages),
  setLevel,
};
export function createScopedLogger(scope: string): Logger {
return {
trace: (...messages: any[]) => log('trace', scope, messages),
debug: (...messages: any[]) => log('debug', scope, messages),
info: (...messages: any[]) => log('info', scope, messages),
warn: (...messages: any[]) => log('warn', scope, messages),
error: (...messages: any[]) => log('error', scope, messages),
setLevel,
};
}
// Updates the shared log level. Verbose levels are ignored in production builds.
function setLevel(level: DebugLevel) {
  if ((level === 'trace' || level === 'debug') && import.meta.env.PROD) {
    return;
  }
  currentLevel = level;
}
// Core sink: filters by currentLevel, joins messages, and emits a styled
// console line (plain text when %c styling is unsupported, e.g. in Workers).
function log(level: DebugLevel, scope: string | undefined, messages: any[]) {
  const levelOrder: DebugLevel[] = ['trace', 'debug', 'info', 'warn', 'error'];

  // Drop messages below the configured threshold.
  if (levelOrder.indexOf(level) < levelOrder.indexOf(currentLevel)) {
    return;
  }

  // Join with spaces, except directly after a trailing newline.
  const allMessages = messages.reduce((acc, current) => {
    if (acc.endsWith('\n')) {
      return acc + current;
    }
    if (!acc) {
      return current;
    }
    return `${acc} ${current}`;
  }, '');

  if (!supportsColor) {
    console.log(`[${level.toUpperCase()}]`, allMessages);
    return;
  }

  const labelBackgroundColor = getColorForLevel(level);

  // Yellow warn badge needs dark text for contrast.
  const labelTextColor = level === 'warn' ? 'black' : 'white';
  const labelStyles = getLabelStyles(labelBackgroundColor, labelTextColor);
  const scopeStyles = getLabelStyles('#77828D', 'white');

  // One style entry per %c directive: level badge, reset gap, optional scope badge.
  const styles = [labelStyles];
  if (typeof scope === 'string') {
    styles.push('', scopeStyles);
  }

  console.log(`%c${level.toUpperCase()}${scope ? `%c %c${scope}` : ''}`, ...styles, allMessages);
}
function getLabelStyles(color: string, textColor: string) {
return `background-color: ${color}; color: white; border: 4px solid ${color}; color: ${textColor};`;
}
/** Maps a log level to its badge background color ('black' for unknown levels). */
function getColorForLevel(level: DebugLevel): string {
  const palette: Record<DebugLevel, string> = {
    trace: '#77828D',
    debug: '#77828D',
    info: '#1389FD',
    warn: '#FFDB6C',
    error: '#EE4744',
  };

  return palette[level] ?? 'black';
}
// Scoped logger for render-related diagnostics.
export const renderLogger = createScopedLogger('Render');

116
app/utils/markdown.ts Normal file
View file

@@ -0,0 +1,116 @@
import rehypeRaw from 'rehype-raw';
import remarkGfm from 'remark-gfm';
import type { PluggableList, Plugin } from 'unified';
import rehypeSanitize, { defaultSchema, type Options as RehypeSanitizeOptions } from 'rehype-sanitize';
import { SKIP, visit } from 'unist-util-visit';
import type { UnistNode, UnistParent } from 'node_modules/unist-util-visit/lib';
// HTML tags the markdown renderer may emit; everything else is stripped by
// rehype-sanitize (see rehypeSanitizeOptions below in the original file).
export const allowedHTMLElements = [
  'a',
  'b',
  'blockquote',
  'br',
  'code',
  'dd',
  'del',
  'details',
  'div',
  'dl',
  'dt',
  'em',
  'h1',
  'h2',
  'h3',
  'h4',
  'h5',
  'h6',
  'hr',
  'i',
  'ins',
  'kbd',
  'li',
  'ol',
  'p',
  'pre',
  'q',
  'rp',
  'rt',
  'ruby',
  's',
  'samp',
  'source',
  'span',
  'strike',
  'strong',
  'sub',
  'summary',
  'sup',
  'table',
  'tbody',
  'td',
  'tfoot',
  'th',
  'thead',
  'tr',
  'ul',
  'var',
];
// Sanitizer schema: default schema restricted to the allow-list above, with
// data-* attributes and the __boltArtifact__ class permitted on <div>.
const rehypeSanitizeOptions: RehypeSanitizeOptions = {
  ...defaultSchema,
  tagNames: allowedHTMLElements,
  attributes: {
    ...defaultSchema.attributes,
    div: [...(defaultSchema.attributes?.div ?? []), 'data*', ['className', '__boltArtifact__']],
  },
  // Keep (rather than drop) content of disallowed elements.
  strip: [],
};
/**
 * Remark plugin list: GFM always; the limited-markdown restriction is
 * prepended when `limitedMarkdown` is set.
 */
export function remarkPlugins(limitedMarkdown: boolean) {
  const plugins: PluggableList = limitedMarkdown ? [limitedMarkdownPlugin, remarkGfm] : [remarkGfm];
  return plugins;
}
/**
 * Rehype plugin list: when `html` is set, raw HTML is parsed and then
 * sanitized against the allow-list schema; otherwise no rehype plugins run.
 */
export function rehypePlugins(html: boolean) {
  const plugins: PluggableList = html ? [rehypeRaw, [rehypeSanitize, rehypeSanitizeOptions]] : [];
  return plugins;
}
// Unified plugin that restricts rendering to basic inline markdown: any node
// outside the allow-list below is replaced by its raw source text.
const limitedMarkdownPlugin: Plugin = () => {
  return (tree, file) => {
    const contents = file.toString();

    visit(tree, (node: UnistNode, index, parent: UnistParent) => {
      // Keep allowed node types (and nodes without position info) untouched.
      if (
        index == null ||
        ['paragraph', 'text', 'inlineCode', 'code', 'strong', 'emphasis'].includes(node.type) ||
        !node.position
      ) {
        return true;
      }

      // Recover the node's original source text from its position offsets.
      let value = contents.slice(node.position.start.offset, node.position.end.offset);

      if (node.type === 'heading') {
        value = `\n${value}`;
      }

      // Replace the disallowed node with a plain text node and skip its children.
      parent.children[index] = {
        type: 'text',
        value,
      } as any;

      return [SKIP, index] as const;
    });
  };
};

4
app/utils/mobile.ts Normal file
View file

@@ -0,0 +1,4 @@
/**
 * True when the viewport is narrower than the `sm:` breakpoint (640px),
 * which the app treats as the mobile threshold.
 */
export function isMobile() {
  const MOBILE_BREAKPOINT = 640;
  return globalThis.innerWidth < MOBILE_BREAKPOINT;
}

app/utils/projectCommands.ts Normal file
View file

@@ -0,0 +1,80 @@
import type { Message } from 'ai';
import { generateId } from './fileUtils';
// Result of project detection: a human-readable type, a shell setup command,
// and an optional follow-up message for the chat. Empty strings mean "unknown".
export interface ProjectCommands {
  type: string;
  setupCommand: string;
  followupMessage: string;
}

// An imported file's text content plus its project-relative path.
interface FileContent {
  content: string;
  path: string;
}
/**
 * Infers setup commands from imported file contents: Node.js when a
 * package.json is present (preferring its dev/start/preview script), a static
 * site when an index.html is present, otherwise empty results.
 */
export async function detectProjectCommands(files: FileContent[]): Promise<ProjectCommands> {
  const emptyResult: ProjectCommands = { type: '', setupCommand: '', followupMessage: '' };
  const packageJsonFile = files.find((f) => f.path.endsWith('package.json'));

  if (packageJsonFile) {
    try {
      const packageJson = JSON.parse(packageJsonFile.content);
      const scripts = packageJson?.scripts || {};

      // Prefer `dev`, then `start`, then `preview`.
      const availableCommand = ['dev', 'start', 'preview'].find((cmd) => scripts[cmd]);

      if (availableCommand) {
        return {
          type: 'Node.js',
          setupCommand: `npm install && npm run ${availableCommand}`,
          followupMessage: `Found "${availableCommand}" script in package.json. Running "npm run ${availableCommand}" after installation.`,
        };
      }

      return {
        type: 'Node.js',
        setupCommand: 'npm install',
        followupMessage:
          'Would you like me to inspect package.json to determine the available scripts for running this project?',
      };
    } catch (error) {
      console.error('Error parsing package.json:', error);
      return emptyResult;
    }
  }

  if (files.some((f) => f.path.endsWith('index.html'))) {
    return {
      type: 'Static',
      setupCommand: 'npx --yes serve',
      followupMessage: '',
    };
  }

  return emptyResult;
}
/**
 * Builds a synthetic assistant message wrapping the detected setup command in
 * a boltArtifact/boltAction shell block, or null when there is nothing to run.
 */
export function createCommandsMessage(commands: ProjectCommands): Message | null {
  if (!commands.setupCommand) {
    return null;
  }

  const followup = commands.followupMessage ? `\n\n${commands.followupMessage}` : '';
  const content = `
<boltArtifact id="project-setup" title="Project Setup">
<boltAction type="shell">
${commands.setupCommand}
</boltAction>
</boltArtifact>${followup}`;

  return {
    id: generateId(),
    role: 'assistant',
    content,
    createdAt: new Date(),
  };
}

19
app/utils/promises.ts Normal file
View file

@ -0,0 +1,19 @@
/**
 * Ponyfill for `Promise.withResolvers()`: returns a promise together with its
 * resolve/reject callbacks. Uses the native implementation when the runtime
 * provides one.
 */
export function withResolvers<T>(): PromiseWithResolvers<T> {
  if (typeof Promise.withResolvers === 'function') {
    return Promise.withResolvers();
  }

  // Capture the executor callbacks so they can be handed back to the caller.
  let resolveFn!: (value: T | PromiseLike<T>) => void;
  let rejectFn!: (reason?: any) => void;

  const promise = new Promise<T>((res, rej) => {
    resolveFn = res;
    rejectFn = rej;
  });

  return { promise, resolve: resolveFn, reject: rejectFn };
}

6
app/utils/react.ts Normal file
View file

@ -0,0 +1,6 @@
import { memo } from 'react';
/**
 * `React.memo` retyped so that memoizing a component preserves its original
 * (possibly generic) type `T` instead of the erased type stock `memo` returns.
 * The runtime value is exactly `memo`; only the static type is narrowed.
 */
export const genericMemo: <T extends keyof JSX.IntrinsicElements | React.JSXElementConstructor<any>>(
  component: T,
  propsAreEqual?: (prevProps: React.ComponentProps<T>, nextProps: React.ComponentProps<T>) => boolean,
) => T & { displayName?: string } = memo;

220
app/utils/shell.ts Normal file
View file

@ -0,0 +1,220 @@
import type { WebContainer, WebContainerProcess } from '@webcontainer/api';
import type { ITerminal } from '~/types/terminal';
import { withResolvers } from './promises';
import { atom } from 'nanostores';
/**
 * Spawns an interactive JSH shell in the WebContainer and wires it to the
 * given terminal. Resolves with the spawned process only after the shell has
 * announced readiness via an OSC 654 "interactive" escape sequence.
 *
 * @param webcontainer container in which to spawn the shell
 * @param terminal terminal the shell's I/O is attached to
 */
export async function newShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
  const args: string[] = [];

  // we spawn a JSH process with a fallback cols and rows in case the process is not attached yet to a visible terminal
  const process = await webcontainer.spawn('/bin/jsh', ['--osc', ...args], {
    terminal: {
      cols: terminal.cols ?? 80,
      rows: terminal.rows ?? 15,
    },
  });

  const input = process.input.getWriter();
  const output = process.output;

  const jshReady = withResolvers<void>();
  let isInteractive = false;

  output.pipeTo(
    new WritableStream({
      write(data) {
        if (!isInteractive) {
          // watch the output for the OSC 654 marker emitted by jsh
          const [, osc] = data.match(/\x1b\]654;([^\x07]+)\x07/) || [];

          if (osc === 'interactive') {
            // wait until we see the interactive OSC
            isInteractive = true;
            jshReady.resolve();
          }
        }

        // mirror all shell output to the visible terminal
        terminal.write(data);
      },
    }),
  );

  terminal.onData((data) => {
    // console.log('terminal onData', { data, isInteractive });

    // only forward keystrokes once the shell accepts input
    if (isInteractive) {
      input.write(data);
    }
  });

  await jshReady.promise;

  return process;
}
// Result of a completed shell command (accumulated output + exit code), or
// undefined when the shell/terminal was not initialized.
export type ExecutionResult = { output: string; exitCode: number } | undefined;
/**
 * Long-lived interactive JSH shell inside the WebContainer.
 *
 * One instance owns a single shell process. `executeCommand()` serializes
 * commands through it, and OSC 654 escape sequences from the shell are parsed
 * to detect readiness ("interactive") and command completion ("exit").
 */
export class BoltShell {
  // Resolver for #readyPromise, invoked at the end of init().
  #initialized: (() => void) | undefined;
  #readyPromise: Promise<void>;
  #webcontainer: WebContainer | undefined;
  #terminal: ITerminal | undefined;
  #process: WebContainerProcess | undefined;

  // Reactive snapshot of the in-flight command (nanostores atom);
  // `executionPrms` holds the pending result of getCurrentExecutionResult().
  executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();

  // Reader over the internal (non-terminal) half of the shell's teed output.
  #outputStream: ReadableStreamDefaultReader<string> | undefined;
  #shellInputStream: WritableStreamDefaultWriter<string> | undefined;

  constructor() {
    // ready() resolves once init() completes; stash the resolver until then.
    this.#readyPromise = new Promise((resolve) => {
      this.#initialized = resolve;
    });
  }

  /** Resolves after init() has spawned the shell and seen it become interactive. */
  ready() {
    return this.#readyPromise;
  }

  /**
   * Spawns the shell, keeps a reader over its internal output, and waits for
   * the "interactive" OSC before resolving ready().
   */
  async init(webcontainer: WebContainer, terminal: ITerminal) {
    this.#webcontainer = webcontainer;
    this.#terminal = terminal;

    const { process, output } = await this.newBoltShellProcess(webcontainer, terminal);
    this.#process = process;
    this.#outputStream = output.getReader();

    await this.waitTillOscCode('interactive');
    this.#initialized?.();
  }

  get terminal() {
    return this.#terminal;
  }

  get process() {
    return this.#process;
  }

  /**
   * Runs `command` in the shell and resolves with its output and exit code.
   * Returns undefined when the shell has not been initialized yet.
   *
   * Any command still running is interrupted (Ctrl+C, '\x03') and its pending
   * execution awaited first, so executions never interleave.
   */
  async executeCommand(sessionId: string, command: string): Promise<ExecutionResult> {
    if (!this.process || !this.terminal) {
      return undefined;
    }

    const state = this.executionState.get();

    /*
     * interrupt the current execution
     * this.#shellInputStream?.write('\x03');
     */
    this.terminal.input('\x03');

    if (state && state.executionPrms) {
      await state.executionPrms;
    }

    //start a new execution
    this.terminal.input(command.trim() + '\n');

    //wait for the execution to finish
    const executionPromise = this.getCurrentExecutionResult();
    this.executionState.set({ sessionId, active: true, executionPrms: executionPromise });

    const resp = await executionPromise;
    this.executionState.set({ sessionId, active: false });

    return resp;
  }

  /**
   * Spawns /bin/jsh with OSC reporting enabled and tees its output: one half
   * is mirrored to the visible terminal, the other is returned for
   * programmatic parsing by waitTillOscCode().
   */
  async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
    const args: string[] = [];

    // we spawn a JSH process with a fallback cols and rows in case the process is not attached yet to a visible terminal
    const process = await webcontainer.spawn('/bin/jsh', ['--osc', ...args], {
      terminal: {
        cols: terminal.cols ?? 80,
        rows: terminal.rows ?? 15,
      },
    });

    const input = process.input.getWriter();
    this.#shellInputStream = input;

    const [internalOutput, terminalOutput] = process.output.tee();

    const jshReady = withResolvers<void>();
    let isInteractive = false;

    terminalOutput.pipeTo(
      new WritableStream({
        write(data) {
          if (!isInteractive) {
            // watch the output for the OSC 654 marker emitted by jsh
            const [, osc] = data.match(/\x1b\]654;([^\x07]+)\x07/) || [];

            if (osc === 'interactive') {
              // wait until we see the interactive OSC
              isInteractive = true;
              jshReady.resolve();
            }
          }

          // mirror all shell output to the visible terminal
          terminal.write(data);
        },
      }),
    );

    terminal.onData((data) => {
      // console.log('terminal onData', { data, isInteractive });

      // only forward keystrokes once the shell accepts input
      if (isInteractive) {
        input.write(data);
      }
    });

    await jshReady.promise;

    return { process, output: internalOutput };
  }

  /** Waits for the next "exit" OSC and returns the output captured until then. */
  async getCurrentExecutionResult(): Promise<ExecutionResult> {
    const { output, exitCode } = await this.waitTillOscCode('exit');
    return { output, exitCode };
  }

  /**
   * Reads the internal output stream until an OSC 654 code equal to
   * `waitCode` is seen, accumulating all text read along the way. An "exit"
   * OSC may carry a `=<n>:<m>` payload; the second number is taken as the
   * exit code.
   */
  async waitTillOscCode(waitCode: string) {
    let fullOutput = '';
    let exitCode: number = 0;

    if (!this.#outputStream) {
      return { output: fullOutput, exitCode };
    }

    const tappedStream = this.#outputStream;

    while (true) {
      const { value, done } = await tappedStream.read();

      if (done) {
        break;
      }

      const text = value || '';
      fullOutput += text;

      // Check if command completion signal with exit code
      const [, osc, , , code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];

      if (osc === 'exit') {
        // NOTE(review): when the OSC carries no "=n:m" payload, `code` is
        // undefined and parseInt yields NaN — confirm jsh always includes it.
        exitCode = parseInt(code, 10);
      }

      if (osc === waitCode) {
        break;
      }
    }

    return { output: fullOutput, exitCode };
  }
}
/** Factory for a fresh, not-yet-initialized BoltShell instance. */
export function newBoltShellProcess() {
  const shell = new BoltShell();
  return shell;
}

23
app/utils/stripIndent.ts Normal file
View file

@ -0,0 +1,23 @@
/**
 * Removes leading/trailing whitespace from every line of `value`, then strips
 * a leading run of whitespace and a single trailing newline from the result.
 * Usable as a plain function or as a tagged template literal.
 */
export function stripIndents(value: string): string;
export function stripIndents(strings: TemplateStringsArray, ...values: any[]): string;
export function stripIndents(arg0: string | TemplateStringsArray, ...values: any[]) {
  if (typeof arg0 === 'string') {
    return _stripIndents(arg0);
  }

  // Tagged-template form: interleave the literal chunks with their values.
  let interpolated = '';
  arg0.forEach((chunk, i) => {
    interpolated += chunk + (values[i] ?? '');
  });

  return _stripIndents(interpolated);
}

// Trims each line, then drops leading whitespace and one trailing newline.
function _stripIndents(value: string) {
  const trimmedLines = value.split('\n').map((line) => line.trim());
  return trimmedLines.join('\n').trimStart().replace(/[\r\n]$/, '');
}

11
app/utils/terminal.ts Normal file
View file

@ -0,0 +1,11 @@
// ANSI "reset all attributes" escape sequence.
const reset = '\x1b[0m';

/** Raw terminal escape sequences used when writing to the shell terminal. */
export const escapeCodes = {
  reset,
  // NOTE(review): '\x1b[g' is CSI g (tab clear), not a screen clear; if this
  // is meant to clear the terminal, '\x1b[2J' would be expected — confirm.
  clear: '\x1b[g',
  // bold red foreground
  red: '\x1b[1;31m',
};

/** Helpers that wrap text in a color escape and a trailing reset. */
export const coloredText = {
  red: (text: string) => `${escapeCodes.red}${text}${reset}`,
};

28
app/utils/types.ts Normal file
View file

@ -0,0 +1,28 @@
/** Detail block attached to a single Ollama model entry. */
interface OllamaModelDetails {
  parent_model: string;
  format: string;
  family: string;
  families: string[];
  parameter_size: string;
  quantization_level: string;
}

/** One model entry as reported by the Ollama API's model listing. */
export interface OllamaModel {
  name: string;
  model: string;
  // timestamp string as returned by the API
  modified_at: string;
  // size in bytes — presumably; TODO confirm against the Ollama API
  size: number;
  digest: string;
  details: OllamaModelDetails;
}

/** Top-level shape of the Ollama API response listing installed models. */
export interface OllamaApiResponse {
  models: OllamaModel[];
}

/** Provider-agnostic description of a selectable LLM. */
export interface ModelInfo {
  name: string;
  label: string;
  provider: string;
  maxTokenAllowed: number;
}

3
app/utils/unreachable.ts Normal file
View file

@ -0,0 +1,3 @@
/**
 * Marks a code path the author believes can never execute.
 *
 * @param message description of the impossible state, for diagnostics
 * @throws Error always, with the message prefixed by "Unreachable: "
 */
export function unreachable(message: string): never {
  const error = new Error(`Unreachable: ${message}`);
  throw error;
}