mirror of
https://github.com/harivansh-afk/clanker-agent.git
synced 2026-04-18 18:03:44 +00:00
move pi-mono into companion-cloud as apps/companion-os
- Copy all pi-mono source into apps/companion-os/ - Update Dockerfile to COPY pre-built binary instead of downloading from GitHub Releases - Update deploy-staging.yml to build pi from source (bun compile) before Docker build - Add apps/companion-os/** to path triggers - No more cross-repo dispatch needed Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
commit
0250f72976
579 changed files with 206942 additions and 0 deletions
358
packages/coding-agent/src/core/tools/bash.ts
Normal file
358
packages/coding-agent/src/core/tools/bash.ts
Normal file
|
|
@ -0,0 +1,358 @@
|
|||
import { randomBytes } from "node:crypto";
|
||||
import { createWriteStream, existsSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { spawn } from "child_process";
|
||||
import {
|
||||
getShellConfig,
|
||||
getShellEnv,
|
||||
killProcessTree,
|
||||
} from "../../utils/shell.js";
|
||||
import {
|
||||
DEFAULT_MAX_BYTES,
|
||||
DEFAULT_MAX_LINES,
|
||||
formatSize,
|
||||
type TruncationResult,
|
||||
truncateTail,
|
||||
} from "./truncate.js";
|
||||
|
||||
/**
|
||||
* Generate a unique temp file path for bash output
|
||||
*/
|
||||
function getTempFilePath(): string {
|
||||
const id = randomBytes(8).toString("hex");
|
||||
return join(tmpdir(), `pi-bash-${id}.log`);
|
||||
}
|
||||
|
||||
// JSON schema for the bash tool's input: a required command string plus an
// optional timeout in seconds (no timeout is applied when omitted).
const bashSchema = Type.Object({
  command: Type.String({ description: "Bash command to execute" }),
  timeout: Type.Optional(
    Type.Number({
      description: "Timeout in seconds (optional, no default timeout)",
    }),
  ),
});
|
||||
|
||||
// Input type derived from the typebox schema above.
export type BashToolInput = Static<typeof bashSchema>;

/** Extra result metadata attached when output was truncated. */
export interface BashToolDetails {
  // Present only when output exceeded the line/byte limits.
  truncation?: TruncationResult;
  // Path of the temp file holding the complete, untruncated output.
  fullOutputPath?: string;
}

/**
 * Pluggable operations for the bash tool.
 * Override these to delegate command execution to remote systems (e.g., SSH).
 */
export interface BashOperations {
  /**
   * Execute a command and stream output.
   * @param command - The command to execute
   * @param cwd - Working directory
   * @param options - Execution options (stream callback, abort signal, timeout, env)
   * @returns Promise resolving to exit code (null if killed)
   */
  exec: (
    command: string,
    cwd: string,
    options: {
      onData: (data: Buffer) => void;
      signal?: AbortSignal;
      timeout?: number;
      env?: NodeJS.ProcessEnv;
    },
  ) => Promise<{ exitCode: number | null }>;
}
|
||||
|
||||
/**
 * Default bash operations using the local shell.
 * Spawns the configured shell detached (own process group) so the entire
 * process tree can be killed on timeout or abort. Rejection message sentinels
 * ("aborted", "timeout:<secs>") are interpreted by the tool's catch handler.
 */
const defaultBashOperations: BashOperations = {
  exec: (command, cwd, { onData, signal, timeout, env }) => {
    return new Promise((resolve, reject) => {
      const { shell, args } = getShellConfig();

      // Fail fast with a clear message when the working directory is gone.
      if (!existsSync(cwd)) {
        reject(
          new Error(
            `Working directory does not exist: ${cwd}\nCannot execute bash commands.`,
          ),
        );
        return;
      }

      const child = spawn(shell, [...args, command], {
        cwd,
        // detached puts the child in its own process group so
        // killProcessTree can reach grandchildren too.
        detached: true,
        env: env ?? getShellEnv(),
        stdio: ["ignore", "pipe", "pipe"],
      });

      let timedOut = false;

      // Set timeout if provided; on expiry kill the tree and flag it so the
      // close handler rejects with the "timeout:<secs>" sentinel.
      let timeoutHandle: NodeJS.Timeout | undefined;
      if (timeout !== undefined && timeout > 0) {
        timeoutHandle = setTimeout(() => {
          timedOut = true;
          if (child.pid) {
            killProcessTree(child.pid);
          }
        }, timeout * 1000);
      }

      // Stream stdout and stderr interleaved through the same callback.
      if (child.stdout) {
        child.stdout.on("data", onData);
      }
      if (child.stderr) {
        child.stderr.on("data", onData);
      }

      // Handle shell spawn errors (e.g., shell binary missing).
      child.on("error", (err) => {
        if (timeoutHandle) clearTimeout(timeoutHandle);
        if (signal) signal.removeEventListener("abort", onAbort);
        reject(err);
      });

      // Handle abort signal - kill entire process tree.
      const onAbort = () => {
        if (child.pid) {
          killProcessTree(child.pid);
        }
      };

      if (signal) {
        if (signal.aborted) {
          onAbort();
        } else {
          signal.addEventListener("abort", onAbort, { once: true });
        }
      }

      // Handle process exit. Precedence: abort > timeout > normal exit code.
      child.on("close", (code) => {
        if (timeoutHandle) clearTimeout(timeoutHandle);
        if (signal) signal.removeEventListener("abort", onAbort);

        if (signal?.aborted) {
          reject(new Error("aborted"));
          return;
        }

        if (timedOut) {
          reject(new Error(`timeout:${timeout}`));
          return;
        }

        resolve({ exitCode: code });
      });
    });
  },
};
|
||||
|
||||
/** Everything a bash spawn needs: the command text, working dir, and env. */
export interface BashSpawnContext {
  command: string;
  cwd: string;
  env: NodeJS.ProcessEnv;
}

/** Hook that may rewrite the spawn context (command/cwd/env) before execution. */
export type BashSpawnHook = (context: BashSpawnContext) => BashSpawnContext;
|
||||
|
||||
function resolveSpawnContext(
|
||||
command: string,
|
||||
cwd: string,
|
||||
spawnHook?: BashSpawnHook,
|
||||
): BashSpawnContext {
|
||||
const baseContext: BashSpawnContext = {
|
||||
command,
|
||||
cwd,
|
||||
env: { ...getShellEnv() },
|
||||
};
|
||||
|
||||
return spawnHook ? spawnHook(baseContext) : baseContext;
|
||||
}
|
||||
|
||||
/** Configuration for createBashTool. */
export interface BashToolOptions {
  /** Custom operations for command execution. Default: local shell */
  operations?: BashOperations;
  /** Command prefix prepended to every command (e.g., "shopt -s expand_aliases" for alias support) */
  commandPrefix?: string;
  /** Hook to adjust command, cwd, or env before execution */
  spawnHook?: BashSpawnHook;
}
|
||||
|
||||
/**
 * Create a bash tool bound to `cwd`.
 *
 * Output handling: chunks are kept in a rolling in-memory buffer capped at
 * 2x DEFAULT_MAX_BYTES; once total output exceeds DEFAULT_MAX_BYTES the whole
 * stream is additionally spooled to a temp file so truncated results can point
 * at the full log. Non-zero exit codes, timeouts, and aborts all reject with
 * the captured output embedded in the error message.
 *
 * @param cwd - Working directory for every command this tool runs
 * @param options - Optional custom exec operations, command prefix, spawn hook
 */
export function createBashTool(
  cwd: string,
  options?: BashToolOptions,
): AgentTool<typeof bashSchema> {
  const ops = options?.operations ?? defaultBashOperations;
  const commandPrefix = options?.commandPrefix;
  const spawnHook = options?.spawnHook;

  return {
    name: "bash",
    label: "bash",
    description: `Execute a bash command in the current working directory. Returns stdout and stderr. Output is truncated to last ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). If truncated, full output is saved to a temp file. Optionally provide a timeout in seconds.`,
    parameters: bashSchema,
    execute: async (
      _toolCallId: string,
      { command, timeout }: { command: string; timeout?: number },
      signal?: AbortSignal,
      onUpdate?,
    ) => {
      // Apply command prefix if configured (e.g., "shopt -s expand_aliases" for alias support)
      const resolvedCommand = commandPrefix
        ? `${commandPrefix}\n${command}`
        : command;
      const spawnContext = resolveSpawnContext(resolvedCommand, cwd, spawnHook);

      return new Promise((resolve, reject) => {
        // We'll stream to a temp file if output gets large
        let tempFilePath: string | undefined;
        let tempFileStream: ReturnType<typeof createWriteStream> | undefined;
        let totalBytes = 0;

        // Keep a rolling buffer of the last chunk for tail truncation
        const chunks: Buffer[] = [];
        let chunksBytes = 0;
        // Keep more than we need so we have enough for truncation
        const maxChunksBytes = DEFAULT_MAX_BYTES * 2;

        // Invoked for every stdout/stderr chunk: spools to the temp file once
        // over threshold, maintains the rolling tail buffer, and streams a
        // truncated preview to onUpdate.
        const handleData = (data: Buffer) => {
          totalBytes += data.length;

          // Start writing to temp file once we exceed the threshold.
          // All previously buffered chunks are flushed first, so the temp
          // file always contains the complete output from byte 0.
          if (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {
            tempFilePath = getTempFilePath();
            tempFileStream = createWriteStream(tempFilePath);
            // Write all buffered chunks to the file
            for (const chunk of chunks) {
              tempFileStream.write(chunk);
            }
          }

          // Write to temp file if we have one
          if (tempFileStream) {
            tempFileStream.write(data);
          }

          // Keep rolling buffer of recent data
          chunks.push(data);
          chunksBytes += data.length;

          // Trim old chunks if buffer is too large (always keep >= 1 chunk)
          while (chunksBytes > maxChunksBytes && chunks.length > 1) {
            const removed = chunks.shift()!;
            chunksBytes -= removed.length;
          }

          // Stream partial output to callback (truncated rolling buffer)
          if (onUpdate) {
            const fullBuffer = Buffer.concat(chunks);
            const fullText = fullBuffer.toString("utf-8");
            const truncation = truncateTail(fullText);
            onUpdate({
              content: [{ type: "text", text: truncation.content || "" }],
              details: {
                truncation: truncation.truncated ? truncation : undefined,
                fullOutputPath: tempFilePath,
              },
            });
          }
        };

        ops
          .exec(spawnContext.command, spawnContext.cwd, {
            onData: handleData,
            signal,
            timeout,
            env: spawnContext.env,
          })
          .then(({ exitCode }) => {
            // Close temp file stream
            if (tempFileStream) {
              tempFileStream.end();
            }

            // Combine all buffered chunks
            const fullBuffer = Buffer.concat(chunks);
            const fullOutput = fullBuffer.toString("utf-8");

            // Apply tail truncation
            const truncation = truncateTail(fullOutput);
            let outputText = truncation.content || "(no output)";

            // Build details with truncation info
            let details: BashToolDetails | undefined;

            if (truncation.truncated) {
              details = {
                truncation,
                fullOutputPath: tempFilePath,
              };

              // Build actionable notice telling the model which lines are
              // shown and where the full output lives.
              const startLine =
                truncation.totalLines - truncation.outputLines + 1;
              const endLine = truncation.totalLines;

              if (truncation.lastLinePartial) {
                // Edge case: the final line alone exceeds the byte limit
                const lastLineSize = formatSize(
                  Buffer.byteLength(
                    fullOutput.split("\n").pop() || "",
                    "utf-8",
                  ),
                );
                outputText += `\n\n[Showing last ${formatSize(truncation.outputBytes)} of line ${endLine} (line is ${lastLineSize}). Full output: ${tempFilePath}]`;
              } else if (truncation.truncatedBy === "lines") {
                outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines}. Full output: ${tempFilePath}]`;
              } else {
                outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Full output: ${tempFilePath}]`;
              }
            }

            // Non-zero exit is surfaced as a rejection whose message carries
            // the (possibly truncated) output; null exitCode means killed and
            // is treated as success here (abort/timeout reject via catch).
            if (exitCode !== 0 && exitCode !== null) {
              outputText += `\n\nCommand exited with code ${exitCode}`;
              reject(new Error(outputText));
            } else {
              resolve({
                content: [{ type: "text", text: outputText }],
                details,
              });
            }
          })
          .catch((err: Error) => {
            // Close temp file stream
            if (tempFileStream) {
              tempFileStream.end();
            }

            // Combine all buffered chunks for error output
            const fullBuffer = Buffer.concat(chunks);
            let output = fullBuffer.toString("utf-8");

            // Translate the exec sentinels ("aborted" / "timeout:<secs>")
            // into user-facing messages that include the captured output.
            if (err.message === "aborted") {
              if (output) output += "\n\n";
              output += "Command aborted";
              reject(new Error(output));
            } else if (err.message.startsWith("timeout:")) {
              const timeoutSecs = err.message.split(":")[1];
              if (output) output += "\n\n";
              output += `Command timed out after ${timeoutSecs} seconds`;
              reject(new Error(output));
            } else {
              reject(err);
            }
          });
      });
    },
  };
}
|
||||
|
||||
/** Default bash tool bound to process.cwd() - for backwards compatibility */
export const bashTool = createBashTool(process.cwd());
|
||||
317
packages/coding-agent/src/core/tools/edit-diff.ts
Normal file
317
packages/coding-agent/src/core/tools/edit-diff.ts
Normal file
|
|
@ -0,0 +1,317 @@
|
|||
/**
|
||||
* Shared diff computation utilities for the edit tool.
|
||||
* Used by both edit.ts (for execution) and tool-execution.ts (for preview rendering).
|
||||
*/
|
||||
|
||||
import * as Diff from "diff";
|
||||
import { constants } from "fs";
|
||||
import { access, readFile } from "fs/promises";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
|
||||
export function detectLineEnding(content: string): "\r\n" | "\n" {
|
||||
const crlfIdx = content.indexOf("\r\n");
|
||||
const lfIdx = content.indexOf("\n");
|
||||
if (lfIdx === -1) return "\n";
|
||||
if (crlfIdx === -1) return "\n";
|
||||
return crlfIdx < lfIdx ? "\r\n" : "\n";
|
||||
}
|
||||
|
||||
export function normalizeToLF(text: string): string {
|
||||
return text.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
|
||||
}
|
||||
|
||||
export function restoreLineEndings(
|
||||
text: string,
|
||||
ending: "\r\n" | "\n",
|
||||
): string {
|
||||
return ending === "\r\n" ? text.replace(/\n/g, "\r\n") : text;
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize text for fuzzy matching. Applies progressive transformations:
|
||||
* - Strip trailing whitespace from each line
|
||||
* - Normalize smart quotes to ASCII equivalents
|
||||
* - Normalize Unicode dashes/hyphens to ASCII hyphen
|
||||
* - Normalize special Unicode spaces to regular space
|
||||
*/
|
||||
export function normalizeForFuzzyMatch(text: string): string {
|
||||
return (
|
||||
text
|
||||
// Strip trailing whitespace per line
|
||||
.split("\n")
|
||||
.map((line) => line.trimEnd())
|
||||
.join("\n")
|
||||
// Smart single quotes → '
|
||||
.replace(/[\u2018\u2019\u201A\u201B]/g, "'")
|
||||
// Smart double quotes → "
|
||||
.replace(/[\u201C\u201D\u201E\u201F]/g, '"')
|
||||
// Various dashes/hyphens → -
|
||||
// U+2010 hyphen, U+2011 non-breaking hyphen, U+2012 figure dash,
|
||||
// U+2013 en-dash, U+2014 em-dash, U+2015 horizontal bar, U+2212 minus
|
||||
.replace(/[\u2010\u2011\u2012\u2013\u2014\u2015\u2212]/g, "-")
|
||||
// Special spaces → regular space
|
||||
// U+00A0 NBSP, U+2002-U+200A various spaces, U+202F narrow NBSP,
|
||||
// U+205F medium math space, U+3000 ideographic space
|
||||
.replace(/[\u00A0\u2002-\u200A\u202F\u205F\u3000]/g, " ")
|
||||
);
|
||||
}
|
||||
|
||||
/** Outcome of fuzzyFindText: where (and in which content) a match landed. */
export interface FuzzyMatchResult {
  /** Whether a match was found */
  found: boolean;
  /** The index where the match starts (in the content that should be used for replacement); -1 when not found */
  index: number;
  /** Length of the matched text */
  matchLength: number;
  /** Whether fuzzy matching was used (false = exact match) */
  usedFuzzyMatch: boolean;
  /**
   * The content to use for replacement operations.
   * When exact match: original content. When fuzzy match: normalized content
   * (so index/matchLength are valid offsets into it).
   */
  contentForReplacement: string;
}
|
||||
|
||||
/**
|
||||
* Find oldText in content, trying exact match first, then fuzzy match.
|
||||
* When fuzzy matching is used, the returned contentForReplacement is the
|
||||
* fuzzy-normalized version of the content (trailing whitespace stripped,
|
||||
* Unicode quotes/dashes normalized to ASCII).
|
||||
*/
|
||||
export function fuzzyFindText(
|
||||
content: string,
|
||||
oldText: string,
|
||||
): FuzzyMatchResult {
|
||||
// Try exact match first
|
||||
const exactIndex = content.indexOf(oldText);
|
||||
if (exactIndex !== -1) {
|
||||
return {
|
||||
found: true,
|
||||
index: exactIndex,
|
||||
matchLength: oldText.length,
|
||||
usedFuzzyMatch: false,
|
||||
contentForReplacement: content,
|
||||
};
|
||||
}
|
||||
|
||||
// Try fuzzy match - work entirely in normalized space
|
||||
const fuzzyContent = normalizeForFuzzyMatch(content);
|
||||
const fuzzyOldText = normalizeForFuzzyMatch(oldText);
|
||||
const fuzzyIndex = fuzzyContent.indexOf(fuzzyOldText);
|
||||
|
||||
if (fuzzyIndex === -1) {
|
||||
return {
|
||||
found: false,
|
||||
index: -1,
|
||||
matchLength: 0,
|
||||
usedFuzzyMatch: false,
|
||||
contentForReplacement: content,
|
||||
};
|
||||
}
|
||||
|
||||
// When fuzzy matching, we work in the normalized space for replacement.
|
||||
// This means the output will have normalized whitespace/quotes/dashes,
|
||||
// which is acceptable since we're fixing minor formatting differences anyway.
|
||||
return {
|
||||
found: true,
|
||||
index: fuzzyIndex,
|
||||
matchLength: fuzzyOldText.length,
|
||||
usedFuzzyMatch: true,
|
||||
contentForReplacement: fuzzyContent,
|
||||
};
|
||||
}
|
||||
|
||||
/** Strip UTF-8 BOM if present, return both the BOM (if any) and the text without it */
|
||||
export function stripBom(content: string): { bom: string; text: string } {
|
||||
return content.startsWith("\uFEFF")
|
||||
? { bom: "\uFEFF", text: content.slice(1) }
|
||||
: { bom: "", text: content };
|
||||
}
|
||||
|
||||
/**
 * Generate a unified-diff-like string with line numbers and limited context.
 * Lines are prefixed `+` (added, numbered in the new file), `-` (removed,
 * numbered in the old file), or space (context, numbered in the old file);
 * elided context is marked with "...".
 * Returns both the diff string and the first changed line number (in the new
 * file), useful for jumping an editor to the change.
 */
export function generateDiffString(
  oldContent: string,
  newContent: string,
  contextLines = 4,
): { diff: string; firstChangedLine: number | undefined } {
  const parts = Diff.diffLines(oldContent, newContent);
  const output: string[] = [];

  // Width of the line-number gutter, sized to the longer of the two files.
  const oldLines = oldContent.split("\n");
  const newLines = newContent.split("\n");
  const maxLineNum = Math.max(oldLines.length, newLines.length);
  const lineNumWidth = String(maxLineNum).length;

  // Running line counters in old/new coordinates; advanced even for elided
  // context so printed numbers stay accurate.
  let oldLineNum = 1;
  let newLineNum = 1;
  let lastWasChange = false;
  let firstChangedLine: number | undefined;

  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    const raw = part.value.split("\n");
    // diffLines parts end with "\n", yielding a trailing empty element.
    if (raw[raw.length - 1] === "") {
      raw.pop();
    }

    if (part.added || part.removed) {
      // Capture the first changed line (in the new file)
      if (firstChangedLine === undefined) {
        firstChangedLine = newLineNum;
      }

      // Show the change
      for (const line of raw) {
        if (part.added) {
          const lineNum = String(newLineNum).padStart(lineNumWidth, " ");
          output.push(`+${lineNum} ${line}`);
          newLineNum++;
        } else {
          // removed
          const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
          output.push(`-${lineNum} ${line}`);
          oldLineNum++;
        }
      }
      lastWasChange = true;
    } else {
      // Context lines - only show a few before/after changes
      const nextPartIsChange =
        i < parts.length - 1 && (parts[i + 1].added || parts[i + 1].removed);

      if (lastWasChange || nextPartIsChange) {
        // Show context adjacent to a change, trimmed on the far side(s).
        let linesToShow = raw;
        let skipStart = 0;
        let skipEnd = 0;

        if (!lastWasChange) {
          // Show only last N lines as leading context
          skipStart = Math.max(0, raw.length - contextLines);
          linesToShow = raw.slice(skipStart);
        }

        if (!nextPartIsChange && linesToShow.length > contextLines) {
          // Show only first N lines as trailing context
          skipEnd = linesToShow.length - contextLines;
          linesToShow = linesToShow.slice(0, contextLines);
        }

        // Add ellipsis if we skipped lines at start
        if (skipStart > 0) {
          output.push(` ${"".padStart(lineNumWidth, " ")} ...`);
          // Update line numbers for the skipped leading context
          oldLineNum += skipStart;
          newLineNum += skipStart;
        }

        for (const line of linesToShow) {
          // Context is numbered in old-file coordinates; both counters advance.
          const lineNum = String(oldLineNum).padStart(lineNumWidth, " ");
          output.push(` ${lineNum} ${line}`);
          oldLineNum++;
          newLineNum++;
        }

        // Add ellipsis if we skipped lines at end
        if (skipEnd > 0) {
          output.push(` ${"".padStart(lineNumWidth, " ")} ...`);
          // Update line numbers for the skipped trailing context
          oldLineNum += skipEnd;
          newLineNum += skipEnd;
        }
      } else {
        // Skip these context lines entirely (far from any change)
        oldLineNum += raw.length;
        newLineNum += raw.length;
      }

      lastWasChange = false;
    }
  }

  return { diff: output.join("\n"), firstChangedLine };
}
|
||||
|
||||
/** Successful diff computation: the rendered diff plus first changed line. */
export interface EditDiffResult {
  diff: string;
  firstChangedLine: number | undefined;
}

/** Failure result carrying a human-readable reason the edit would not apply. */
export interface EditDiffError {
  error: string;
}
|
||||
|
||||
/**
 * Compute the diff for an edit operation without applying it.
 * Used for preview rendering in the TUI before the tool executes.
 * Mirrors the edit tool's matching pipeline (BOM strip, LF normalization,
 * exact-then-fuzzy match, uniqueness check) and returns either the rendered
 * diff or an { error } describing why the edit would fail.
 */
export async function computeEditDiff(
  path: string,
  oldText: string,
  newText: string,
  cwd: string,
): Promise<EditDiffResult | EditDiffError> {
  const absolutePath = resolveToCwd(path, cwd);

  try {
    // Check if file exists and is readable
    try {
      await access(absolutePath, constants.R_OK);
    } catch {
      return { error: `File not found: ${path}` };
    }

    // Read the file
    const rawContent = await readFile(absolutePath, "utf-8");

    // Strip BOM before matching (LLM won't include invisible BOM in oldText)
    const { text: content } = stripBom(rawContent);

    const normalizedContent = normalizeToLF(content);
    const normalizedOldText = normalizeToLF(oldText);
    const normalizedNewText = normalizeToLF(newText);

    // Find the old text using fuzzy matching (tries exact match first, then fuzzy)
    const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);

    if (!matchResult.found) {
      return {
        error: `Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
      };
    }

    // Count occurrences using fuzzy-normalized content for consistency
    const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
    const fuzzyOldText = normalizeForFuzzyMatch(normalizedOldText);
    const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;

    if (occurrences > 1) {
      return {
        error: `Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
      };
    }

    // Compute the new content using the matched position
    // When fuzzy matching was used, contentForReplacement is the normalized version
    const baseContent = matchResult.contentForReplacement;
    const newContent =
      baseContent.substring(0, matchResult.index) +
      normalizedNewText +
      baseContent.substring(matchResult.index + matchResult.matchLength);

    // Check if it would actually change anything
    if (baseContent === newContent) {
      return {
        error: `No changes would be made to ${path}. The replacement produces identical content.`,
      };
    }

    // Generate the diff
    return generateDiffString(baseContent, newContent);
  } catch (err) {
    // Any unexpected I/O or logic failure becomes an error result, not a throw.
    return { error: err instanceof Error ? err.message : String(err) };
  }
}
|
||||
253
packages/coding-agent/src/core/tools/edit.ts
Normal file
253
packages/coding-agent/src/core/tools/edit.ts
Normal file
|
|
@ -0,0 +1,253 @@
|
|||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { constants } from "fs";
|
||||
import {
|
||||
access as fsAccess,
|
||||
readFile as fsReadFile,
|
||||
writeFile as fsWriteFile,
|
||||
} from "fs/promises";
|
||||
import {
|
||||
detectLineEnding,
|
||||
fuzzyFindText,
|
||||
generateDiffString,
|
||||
normalizeForFuzzyMatch,
|
||||
normalizeToLF,
|
||||
restoreLineEndings,
|
||||
stripBom,
|
||||
} from "./edit-diff.js";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
|
||||
// JSON schema for the edit tool's input: target file path plus the exact
// old/new text pair for the replacement.
const editSchema = Type.Object({
  path: Type.String({
    description: "Path to the file to edit (relative or absolute)",
  }),
  oldText: Type.String({
    description: "Exact text to find and replace (must match exactly)",
  }),
  newText: Type.String({
    description: "New text to replace the old text with",
  }),
});
|
||||
|
||||
// Input type derived from the typebox schema above.
export type EditToolInput = Static<typeof editSchema>;

/** Result metadata describing the applied edit. */
export interface EditToolDetails {
  /** Unified diff of the changes made */
  diff: string;
  /** Line number of the first change in the new file (for editor navigation) */
  firstChangedLine?: number;
}

/**
 * Pluggable operations for the edit tool.
 * Override these to delegate file editing to remote systems (e.g., SSH).
 */
export interface EditOperations {
  /** Read file contents as a Buffer */
  readFile: (absolutePath: string) => Promise<Buffer>;
  /** Write content to a file */
  writeFile: (absolutePath: string, content: string) => Promise<void>;
  /** Check if file is readable and writable (throw if not) */
  access: (absolutePath: string) => Promise<void>;
}
|
||||
|
||||
// Local-filesystem implementation of EditOperations: plain fs/promises calls,
// with access() checking both read and write permission up front.
const defaultEditOperations: EditOperations = {
  readFile: (path) => fsReadFile(path),
  writeFile: (path, content) => fsWriteFile(path, content, "utf-8"),
  access: (path) => fsAccess(path, constants.R_OK | constants.W_OK),
};
|
||||
|
||||
/** Configuration for createEditTool. */
export interface EditToolOptions {
  /** Custom operations for file editing. Default: local filesystem */
  operations?: EditOperations;
}
|
||||
|
||||
/**
 * Create an edit tool bound to `cwd`.
 *
 * Pipeline: access check -> read -> BOM strip -> line-ending normalization ->
 * exact-then-fuzzy match of oldText -> uniqueness check -> splice replacement
 * -> restore BOM and original line endings -> write -> unified diff in the
 * result details. An abort flag is checked between async steps; the "abort"
 * listener rejects immediately, and any later resolve/reject on the already
 * settled promise is a no-op.
 *
 * @param cwd - Base directory against which relative paths are resolved
 * @param options - Optional custom file operations (e.g., remote FS)
 */
export function createEditTool(
  cwd: string,
  options?: EditToolOptions,
): AgentTool<typeof editSchema> {
  const ops = options?.operations ?? defaultEditOperations;

  return {
    name: "edit",
    label: "edit",
    description:
      "Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
    parameters: editSchema,
    execute: async (
      _toolCallId: string,
      {
        path,
        oldText,
        newText,
      }: { path: string; oldText: string; newText: string },
      signal?: AbortSignal,
    ) => {
      const absolutePath = resolveToCwd(path, cwd);

      return new Promise<{
        content: Array<{ type: "text"; text: string }>;
        details: EditToolDetails | undefined;
      }>((resolve, reject) => {
        // Check if already aborted
        if (signal?.aborted) {
          reject(new Error("Operation aborted"));
          return;
        }

        let aborted = false;

        // Set up abort handler; it both flags the async body to bail out and
        // rejects the promise right away.
        const onAbort = () => {
          aborted = true;
          reject(new Error("Operation aborted"));
        };

        if (signal) {
          signal.addEventListener("abort", onAbort, { once: true });
        }

        // Perform the edit operation in an async IIFE so the outer Promise
        // can also be rejected directly by the abort listener.
        (async () => {
          try {
            // Check if file exists
            try {
              await ops.access(absolutePath);
            } catch {
              if (signal) {
                signal.removeEventListener("abort", onAbort);
              }
              reject(new Error(`File not found: ${path}`));
              return;
            }

            // Check if aborted before reading (onAbort already rejected)
            if (aborted) {
              return;
            }

            // Read the file
            const buffer = await ops.readFile(absolutePath);
            const rawContent = buffer.toString("utf-8");

            // Check if aborted after reading
            if (aborted) {
              return;
            }

            // Strip BOM before matching (LLM won't include invisible BOM in oldText)
            const { bom, text: content } = stripBom(rawContent);

            // Remember the original ending so the written file keeps its style.
            const originalEnding = detectLineEnding(content);
            const normalizedContent = normalizeToLF(content);
            const normalizedOldText = normalizeToLF(oldText);
            const normalizedNewText = normalizeToLF(newText);

            // Find the old text using fuzzy matching (tries exact match first, then fuzzy)
            const matchResult = fuzzyFindText(
              normalizedContent,
              normalizedOldText,
            );

            if (!matchResult.found) {
              if (signal) {
                signal.removeEventListener("abort", onAbort);
              }
              reject(
                new Error(
                  `Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
                ),
              );
              return;
            }

            // Count occurrences using fuzzy-normalized content for consistency
            const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
            const fuzzyOldText = normalizeForFuzzyMatch(normalizedOldText);
            const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;

            if (occurrences > 1) {
              if (signal) {
                signal.removeEventListener("abort", onAbort);
              }
              reject(
                new Error(
                  `Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
                ),
              );
              return;
            }

            // Check if aborted before writing
            if (aborted) {
              return;
            }

            // Perform replacement using the matched text position
            // When fuzzy matching was used, contentForReplacement is the normalized version
            const baseContent = matchResult.contentForReplacement;
            const newContent =
              baseContent.substring(0, matchResult.index) +
              normalizedNewText +
              baseContent.substring(
                matchResult.index + matchResult.matchLength,
              );

            // Verify the replacement actually changed something
            if (baseContent === newContent) {
              if (signal) {
                signal.removeEventListener("abort", onAbort);
              }
              reject(
                new Error(
                  `No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`,
                ),
              );
              return;
            }

            // Re-attach the BOM and the file's original line endings.
            const finalContent =
              bom + restoreLineEndings(newContent, originalEnding);
            await ops.writeFile(absolutePath, finalContent);

            // Check if aborted after writing
            if (aborted) {
              return;
            }

            // Clean up abort handler
            if (signal) {
              signal.removeEventListener("abort", onAbort);
            }

            const diffResult = generateDiffString(baseContent, newContent);
            resolve({
              content: [
                {
                  type: "text",
                  text: `Successfully replaced text in ${path}.`,
                },
              ],
              details: {
                diff: diffResult.diff,
                firstChangedLine: diffResult.firstChangedLine,
              },
            });
          } catch (error: any) {
            // Clean up abort handler
            if (signal) {
              signal.removeEventListener("abort", onAbort);
            }

            // If aborted, onAbort already rejected; don't reject twice.
            if (!aborted) {
              reject(error);
            }
          }
        })();
      });
    },
  };
}
|
||||
|
||||
/** Default edit tool using process.cwd() - for backwards compatibility */
export const editTool = createEditTool(process.cwd());
|
||||
308
packages/coding-agent/src/core/tools/find.ts
Normal file
308
packages/coding-agent/src/core/tools/find.ts
Normal file
|
|
@ -0,0 +1,308 @@
|
|||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { spawnSync } from "child_process";
|
||||
import { existsSync } from "fs";
|
||||
import { globSync } from "glob";
|
||||
import path from "path";
|
||||
import { ensureTool } from "../../utils/tools-manager.js";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
import {
|
||||
DEFAULT_MAX_BYTES,
|
||||
formatSize,
|
||||
type TruncationResult,
|
||||
truncateHead,
|
||||
} from "./truncate.js";
|
||||
|
||||
// Input schema for the find tool (glob pattern, optional search dir and result cap).
const findSchema = Type.Object({
	pattern: Type.String({
		description:
			"Glob pattern to match files, e.g. '*.ts', '**/*.json', or 'src/**/*.spec.ts'",
	}),
	path: Type.Optional(
		Type.String({
			description: "Directory to search in (default: current directory)",
		}),
	),
	limit: Type.Optional(
		Type.Number({ description: "Maximum number of results (default: 1000)" }),
	),
});

export type FindToolInput = Static<typeof findSchema>;

// Default cap on returned results; callers can override via the `limit` input.
const DEFAULT_LIMIT = 1000;

// Extra metadata attached to a result when output was cut short.
export interface FindToolDetails {
	// Present when byte-size truncation was applied to the output.
	truncation?: TruncationResult;
	// Present when the result-count cap was hit; holds the effective limit.
	resultLimitReached?: number;
}

/**
 * Pluggable operations for the find tool.
 * Override these to delegate file search to remote systems (e.g., SSH).
 */
export interface FindOperations {
	/** Check if path exists */
	exists: (absolutePath: string) => Promise<boolean> | boolean;
	/** Find files matching glob pattern. Returns relative paths. */
	glob: (
		pattern: string,
		cwd: string,
		options: { ignore: string[]; limit: number },
	) => Promise<string[]> | string[];
}

// Local-filesystem defaults. Note: the default `glob` is intentionally inert —
// when no custom glob is supplied, execute() shells out to `fd` instead.
const defaultFindOperations: FindOperations = {
	exists: existsSync,
	glob: (_pattern, _searchCwd, _options) => {
		// This is a placeholder - actual fd execution happens in execute
		return [];
	},
};

export interface FindToolOptions {
	/** Custom operations for find. Default: local filesystem + fd */
	operations?: FindOperations;
}
|
||||
|
||||
/**
 * Create a `find` tool bound to `cwd`.
 *
 * Two execution paths:
 *  - If `options.operations.glob` is provided, matching is delegated to it
 *    (e.g. remote/SSH filesystems).
 *  - Otherwise the `fd` binary is resolved via `ensureTool` and run
 *    synchronously via spawnSync, with all discovered .gitignore files passed
 *    as `--ignore-file` arguments.
 *
 * Output is a newline-joined list of paths relative to the search directory,
 * with notices appended when the result cap or byte cap was hit.
 */
export function createFindTool(
	cwd: string,
	options?: FindToolOptions,
): AgentTool<typeof findSchema> {
	const customOps = options?.operations;

	return {
		name: "find",
		label: "find",
		description: `Search for files by glob pattern. Returns matching file paths relative to the search directory. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} results or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`,
		parameters: findSchema,
		execute: async (
			_toolCallId: string,
			{
				pattern,
				path: searchDir,
				limit,
			}: { pattern: string; path?: string; limit?: number },
			signal?: AbortSignal,
		) => {
			return new Promise((resolve, reject) => {
				// Fast-fail if the caller already aborted before we started.
				if (signal?.aborted) {
					reject(new Error("Operation aborted"));
					return;
				}

				// Abort rejects the promise directly; a later resolve/reject is a no-op.
				const onAbort = () => reject(new Error("Operation aborted"));
				signal?.addEventListener("abort", onAbort, { once: true });

				(async () => {
					try {
						const searchPath = resolveToCwd(searchDir || ".", cwd);
						const effectiveLimit = limit ?? DEFAULT_LIMIT;
						const ops = customOps ?? defaultFindOperations;

						// If custom operations provided with glob, use that
						if (customOps?.glob) {
							if (!(await ops.exists(searchPath))) {
								reject(new Error(`Path not found: ${searchPath}`));
								return;
							}

							const results = await ops.glob(pattern, searchPath, {
								ignore: ["**/node_modules/**", "**/.git/**"],
								limit: effectiveLimit,
							});

							signal?.removeEventListener("abort", onAbort);

							if (results.length === 0) {
								resolve({
									content: [
										{ type: "text", text: "No files found matching pattern" },
									],
									details: undefined,
								});
								return;
							}

							// Relativize paths (custom glob may return absolute paths).
							const relativized = results.map((p) => {
								if (p.startsWith(searchPath)) {
									return p.slice(searchPath.length + 1);
								}
								return path.relative(searchPath, p);
							});

							// Hitting exactly the limit is treated as "there may be more".
							const resultLimitReached = relativized.length >= effectiveLimit;
							const rawOutput = relativized.join("\n");
							// Byte-cap only; the result count is already capped above.
							const truncation = truncateHead(rawOutput, {
								maxLines: Number.MAX_SAFE_INTEGER,
							});

							let resultOutput = truncation.content;
							const details: FindToolDetails = {};
							const notices: string[] = [];

							if (resultLimitReached) {
								notices.push(`${effectiveLimit} results limit reached`);
								details.resultLimitReached = effectiveLimit;
							}

							if (truncation.truncated) {
								notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
								details.truncation = truncation;
							}

							if (notices.length > 0) {
								resultOutput += `\n\n[${notices.join(". ")}]`;
							}

							resolve({
								content: [{ type: "text", text: resultOutput }],
								details: Object.keys(details).length > 0 ? details : undefined,
							});
							return;
						}

						// Default: use fd
						// NOTE(review): ensureTool("fd", true) appears to resolve (and
						// possibly download) the fd binary, returning a falsy value on
						// failure — confirm against utils/tools-manager.
						const fdPath = await ensureTool("fd", true);
						if (!fdPath) {
							reject(
								new Error("fd is not available and could not be downloaded"),
							);
							return;
						}

						// Build fd arguments
						const args: string[] = [
							"--glob",
							"--color=never",
							"--hidden",
							"--max-results",
							String(effectiveLimit),
						];

						// Collect .gitignore files (root + nested) so fd honors them
						// even when searching outside a git checkout.
						const gitignoreFiles = new Set<string>();
						const rootGitignore = path.join(searchPath, ".gitignore");
						if (existsSync(rootGitignore)) {
							gitignoreFiles.add(rootGitignore);
						}

						try {
							const nestedGitignores = globSync("**/.gitignore", {
								cwd: searchPath,
								dot: true,
								absolute: true,
								ignore: ["**/node_modules/**", "**/.git/**"],
							});
							for (const file of nestedGitignores) {
								gitignoreFiles.add(file);
							}
						} catch {
							// Ignore glob errors
						}

						for (const gitignorePath of gitignoreFiles) {
							args.push("--ignore-file", gitignorePath);
						}

						args.push(pattern, searchPath);

						// Synchronous run: fd is expected to finish quickly thanks to
						// --max-results. An abort during this call cannot interrupt it.
						const result = spawnSync(fdPath, args, {
							encoding: "utf-8",
							maxBuffer: 10 * 1024 * 1024,
						});

						signal?.removeEventListener("abort", onAbort);

						if (result.error) {
							reject(new Error(`Failed to run fd: ${result.error.message}`));
							return;
						}

						const output = result.stdout?.trim() || "";

						// Non-zero exit with partial output is tolerated; only fail
						// when fd produced nothing at all.
						if (result.status !== 0) {
							const errorMsg =
								result.stderr?.trim() || `fd exited with code ${result.status}`;
							if (!output) {
								reject(new Error(errorMsg));
								return;
							}
						}

						if (!output) {
							resolve({
								content: [
									{ type: "text", text: "No files found matching pattern" },
								],
								details: undefined,
							});
							return;
						}

						const lines = output.split("\n");
						const relativized: string[] = [];

						for (const rawLine of lines) {
							const line = rawLine.replace(/\r$/, "").trim();
							if (!line) continue;

							// Remember fd's directory marker so it survives relativization.
							const hadTrailingSlash =
								line.endsWith("/") || line.endsWith("\\");
							let relativePath = line;
							if (line.startsWith(searchPath)) {
								relativePath = line.slice(searchPath.length + 1);
							} else {
								relativePath = path.relative(searchPath, line);
							}

							if (hadTrailingSlash && !relativePath.endsWith("/")) {
								relativePath += "/";
							}

							relativized.push(relativePath);
						}

						const resultLimitReached = relativized.length >= effectiveLimit;
						const rawOutput = relativized.join("\n");
						// Byte-cap only; count is already capped by fd's --max-results.
						const truncation = truncateHead(rawOutput, {
							maxLines: Number.MAX_SAFE_INTEGER,
						});

						let resultOutput = truncation.content;
						const details: FindToolDetails = {};
						const notices: string[] = [];

						if (resultLimitReached) {
							notices.push(
								`${effectiveLimit} results limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
							);
							details.resultLimitReached = effectiveLimit;
						}

						if (truncation.truncated) {
							notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
							details.truncation = truncation;
						}

						if (notices.length > 0) {
							resultOutput += `\n\n[${notices.join(". ")}]`;
						}

						resolve({
							content: [{ type: "text", text: resultOutput }],
							details: Object.keys(details).length > 0 ? details : undefined,
						});
					} catch (e: any) {
						signal?.removeEventListener("abort", onAbort);
						reject(e);
					}
				})();
			});
		},
	};
}

/** Default find tool using process.cwd() - for backwards compatibility */
export const findTool = createFindTool(process.cwd());
|
||||
412
packages/coding-agent/src/core/tools/grep.ts
Normal file
412
packages/coding-agent/src/core/tools/grep.ts
Normal file
|
|
@ -0,0 +1,412 @@
|
|||
import { createInterface } from "node:readline";
|
||||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { spawn } from "child_process";
|
||||
import { readFileSync, statSync } from "fs";
|
||||
import path from "path";
|
||||
import { ensureTool } from "../../utils/tools-manager.js";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
import {
|
||||
DEFAULT_MAX_BYTES,
|
||||
formatSize,
|
||||
GREP_MAX_LINE_LENGTH,
|
||||
type TruncationResult,
|
||||
truncateHead,
|
||||
truncateLine,
|
||||
} from "./truncate.js";
|
||||
|
||||
// Input schema for the grep tool.
const grepSchema = Type.Object({
	pattern: Type.String({
		description: "Search pattern (regex or literal string)",
	}),
	path: Type.Optional(
		Type.String({
			description: "Directory or file to search (default: current directory)",
		}),
	),
	glob: Type.Optional(
		Type.String({
			description:
				"Filter files by glob pattern, e.g. '*.ts' or '**/*.spec.ts'",
		}),
	),
	ignoreCase: Type.Optional(
		Type.Boolean({ description: "Case-insensitive search (default: false)" }),
	),
	literal: Type.Optional(
		Type.Boolean({
			description:
				"Treat pattern as literal string instead of regex (default: false)",
		}),
	),
	context: Type.Optional(
		Type.Number({
			description:
				"Number of lines to show before and after each match (default: 0)",
		}),
	),
	limit: Type.Optional(
		Type.Number({
			description: "Maximum number of matches to return (default: 100)",
		}),
	),
});

export type GrepToolInput = Static<typeof grepSchema>;

// Default cap on returned matches; callers can override via the `limit` input.
const DEFAULT_LIMIT = 100;

// Extra metadata attached to a result when output was cut short.
export interface GrepToolDetails {
	// Present when byte-size truncation was applied to the output.
	truncation?: TruncationResult;
	// Present when the match-count cap was hit; holds the effective limit.
	matchLimitReached?: number;
	// True when at least one matched/context line was clipped to GREP_MAX_LINE_LENGTH.
	linesTruncated?: boolean;
}

/**
 * Pluggable operations for the grep tool.
 * Override these to delegate search to remote systems (e.g., SSH).
 */
export interface GrepOperations {
	/** Check if path is a directory. Throws if path doesn't exist. */
	isDirectory: (absolutePath: string) => Promise<boolean> | boolean;
	/** Read file contents for context lines */
	readFile: (absolutePath: string) => Promise<string> | string;
}

// Local-filesystem defaults; the ripgrep process itself is spawned in execute().
const defaultGrepOperations: GrepOperations = {
	isDirectory: (p) => statSync(p).isDirectory(),
	readFile: (p) => readFileSync(p, "utf-8"),
};

export interface GrepToolOptions {
	/** Custom operations for grep. Default: local filesystem + ripgrep */
	operations?: GrepOperations;
}
|
||||
|
||||
/**
 * Create a `grep` tool bound to `cwd`.
 *
 * Spawns ripgrep with `--json`, streams its event output line by line,
 * records match locations up to the limit (killing the child early once the
 * limit is hit), then re-reads the matched files via `ops.readFile` to render
 * match/context lines. The `settle` wrapper guarantees the promise resolves
 * or rejects at most once across the racing error/close/abort paths.
 */
export function createGrepTool(
	cwd: string,
	options?: GrepToolOptions,
): AgentTool<typeof grepSchema> {
	const customOps = options?.operations;

	return {
		name: "grep",
		label: "grep",
		description: `Search file contents for a pattern. Returns matching lines with file paths and line numbers. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} matches or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Long lines are truncated to ${GREP_MAX_LINE_LENGTH} chars.`,
		parameters: grepSchema,
		execute: async (
			_toolCallId: string,
			{
				pattern,
				path: searchDir,
				glob,
				ignoreCase,
				literal,
				context,
				limit,
			}: {
				pattern: string;
				path?: string;
				glob?: string;
				ignoreCase?: boolean;
				literal?: boolean;
				context?: number;
				limit?: number;
			},
			signal?: AbortSignal,
		) => {
			return new Promise((resolve, reject) => {
				// Fast-fail if the caller already aborted before we started.
				if (signal?.aborted) {
					reject(new Error("Operation aborted"));
					return;
				}

				// Settle-once guard: error/close/abort paths may race.
				let settled = false;
				const settle = (fn: () => void) => {
					if (!settled) {
						settled = true;
						fn();
					}
				};

				(async () => {
					try {
						// NOTE(review): ensureTool("rg", true) appears to resolve (and
						// possibly download) the ripgrep binary, returning a falsy value
						// on failure — confirm against utils/tools-manager.
						const rgPath = await ensureTool("rg", true);
						if (!rgPath) {
							settle(() =>
								reject(
									new Error(
										"ripgrep (rg) is not available and could not be downloaded",
									),
								),
							);
							return;
						}

						const searchPath = resolveToCwd(searchDir || ".", cwd);
						const ops = customOps ?? defaultGrepOperations;

						// isDirectory throws for missing paths — map that to a clearer error.
						let isDirectory: boolean;
						try {
							isDirectory = await ops.isDirectory(searchPath);
						} catch (_err) {
							settle(() => reject(new Error(`Path not found: ${searchPath}`)));
							return;
						}
						const contextValue = context && context > 0 ? context : 0;
						const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT);

						// Directory searches show paths relative to the search root;
						// single-file searches show just the basename.
						const formatPath = (filePath: string): string => {
							if (isDirectory) {
								const relative = path.relative(searchPath, filePath);
								if (relative && !relative.startsWith("..")) {
									return relative.replace(/\\/g, "/");
								}
							}
							return path.basename(filePath);
						};

						// Per-file line cache so context rendering reads each file once.
						const fileCache = new Map<string, string[]>();
						const getFileLines = async (
							filePath: string,
						): Promise<string[]> => {
							let lines = fileCache.get(filePath);
							if (!lines) {
								try {
									const content = await ops.readFile(filePath);
									// Normalize CRLF/CR so line numbers align with rg's.
									lines = content
										.replace(/\r\n/g, "\n")
										.replace(/\r/g, "\n")
										.split("\n");
								} catch {
									// Unreadable files cache as empty -> "(unable to read file)".
									lines = [];
								}
								fileCache.set(filePath, lines);
							}
							return lines;
						};

						const args: string[] = [
							"--json",
							"--line-number",
							"--color=never",
							"--hidden",
						];

						if (ignoreCase) {
							args.push("--ignore-case");
						}

						if (literal) {
							args.push("--fixed-strings");
						}

						if (glob) {
							args.push("--glob", glob);
						}

						args.push(pattern, searchPath);

						const child = spawn(rgPath, args, {
							stdio: ["ignore", "pipe", "pipe"],
						});
						const rl = createInterface({ input: child.stdout });
						let stderr = "";
						let matchCount = 0;
						let matchLimitReached = false;
						let linesTruncated = false;
						let aborted = false;
						// Distinguishes our own kill (limit hit) from a real rg failure.
						let killedDueToLimit = false;
						const outputLines: string[] = [];

						const cleanup = () => {
							rl.close();
							signal?.removeEventListener("abort", onAbort);
						};

						const stopChild = (dueToLimit: boolean = false) => {
							if (!child.killed) {
								killedDueToLimit = dueToLimit;
								child.kill();
							}
						};

						const onAbort = () => {
							aborted = true;
							stopChild();
						};

						signal?.addEventListener("abort", onAbort, { once: true });

						child.stderr?.on("data", (chunk) => {
							stderr += chunk.toString();
						});

						// Render one match (plus surrounding context lines) as
						// "path:N: text" for the match and "path-N- text" for context.
						const formatBlock = async (
							filePath: string,
							lineNumber: number,
						): Promise<string[]> => {
							const relativePath = formatPath(filePath);
							const lines = await getFileLines(filePath);
							if (!lines.length) {
								return [`${relativePath}:${lineNumber}: (unable to read file)`];
							}

							const block: string[] = [];
							const start =
								contextValue > 0
									? Math.max(1, lineNumber - contextValue)
									: lineNumber;
							const end =
								contextValue > 0
									? Math.min(lines.length, lineNumber + contextValue)
									: lineNumber;

							for (let current = start; current <= end; current++) {
								const lineText = lines[current - 1] ?? "";
								const sanitized = lineText.replace(/\r/g, "");
								const isMatchLine = current === lineNumber;

								// Truncate long lines
								const { text: truncatedText, wasTruncated } =
									truncateLine(sanitized);
								if (wasTruncated) {
									linesTruncated = true;
								}

								if (isMatchLine) {
									block.push(`${relativePath}:${current}: ${truncatedText}`);
								} else {
									block.push(`${relativePath}-${current}- ${truncatedText}`);
								}
							}

							return block;
						};

						// Collect matches during streaming, format after
						const matches: Array<{ filePath: string; lineNumber: number }> = [];

						rl.on("line", (line) => {
							// Ignore blanks and anything past the limit (kill is async,
							// so a few extra lines may still arrive).
							if (!line.trim() || matchCount >= effectiveLimit) {
								return;
							}

							// Each line is one rg --json event; skip unparseable lines.
							let event: any;
							try {
								event = JSON.parse(line);
							} catch {
								return;
							}

							if (event.type === "match") {
								matchCount++;
								const filePath = event.data?.path?.text;
								const lineNumber = event.data?.line_number;

								if (filePath && typeof lineNumber === "number") {
									matches.push({ filePath, lineNumber });
								}

								if (matchCount >= effectiveLimit) {
									matchLimitReached = true;
									stopChild(true);
								}
							}
						});

						child.on("error", (error) => {
							cleanup();
							settle(() =>
								reject(new Error(`Failed to run ripgrep: ${error.message}`)),
							);
						});

						child.on("close", async (code) => {
							cleanup();

							if (aborted) {
								settle(() => reject(new Error("Operation aborted")));
								return;
							}

							// rg exits 1 for "no matches"; only other codes are errors,
							// unless we killed it ourselves after hitting the limit.
							if (!killedDueToLimit && code !== 0 && code !== 1) {
								const errorMsg =
									stderr.trim() || `ripgrep exited with code ${code}`;
								settle(() => reject(new Error(errorMsg)));
								return;
							}

							if (matchCount === 0) {
								settle(() =>
									resolve({
										content: [{ type: "text", text: "No matches found" }],
										details: undefined,
									}),
								);
								return;
							}

							// Format matches (async to support remote file reading)
							for (const match of matches) {
								const block = await formatBlock(
									match.filePath,
									match.lineNumber,
								);
								outputLines.push(...block);
							}

							// Apply byte truncation (no line limit since we already have match limit)
							const rawOutput = outputLines.join("\n");
							const truncation = truncateHead(rawOutput, {
								maxLines: Number.MAX_SAFE_INTEGER,
							});

							let output = truncation.content;
							const details: GrepToolDetails = {};

							// Build notices
							const notices: string[] = [];

							if (matchLimitReached) {
								notices.push(
									`${effectiveLimit} matches limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
								);
								details.matchLimitReached = effectiveLimit;
							}

							if (truncation.truncated) {
								notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
								details.truncation = truncation;
							}

							if (linesTruncated) {
								notices.push(
									`Some lines truncated to ${GREP_MAX_LINE_LENGTH} chars. Use read tool to see full lines`,
								);
								details.linesTruncated = true;
							}

							if (notices.length > 0) {
								output += `\n\n[${notices.join(". ")}]`;
							}

							settle(() =>
								resolve({
									content: [{ type: "text", text: output }],
									details:
										Object.keys(details).length > 0 ? details : undefined,
								}),
							);
						});
					} catch (err) {
						settle(() => reject(err as Error));
					}
				})();
			});
		},
	};
}

/** Default grep tool using process.cwd() - for backwards compatibility */
export const grepTool = createGrepTool(process.cwd());
|
||||
150
packages/coding-agent/src/core/tools/index.ts
Normal file
150
packages/coding-agent/src/core/tools/index.ts
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
export {
|
||||
type BashOperations,
|
||||
type BashSpawnContext,
|
||||
type BashSpawnHook,
|
||||
type BashToolDetails,
|
||||
type BashToolInput,
|
||||
type BashToolOptions,
|
||||
bashTool,
|
||||
createBashTool,
|
||||
} from "./bash.js";
|
||||
export {
|
||||
createEditTool,
|
||||
type EditOperations,
|
||||
type EditToolDetails,
|
||||
type EditToolInput,
|
||||
type EditToolOptions,
|
||||
editTool,
|
||||
} from "./edit.js";
|
||||
export {
|
||||
createFindTool,
|
||||
type FindOperations,
|
||||
type FindToolDetails,
|
||||
type FindToolInput,
|
||||
type FindToolOptions,
|
||||
findTool,
|
||||
} from "./find.js";
|
||||
export {
|
||||
createGrepTool,
|
||||
type GrepOperations,
|
||||
type GrepToolDetails,
|
||||
type GrepToolInput,
|
||||
type GrepToolOptions,
|
||||
grepTool,
|
||||
} from "./grep.js";
|
||||
export {
|
||||
createLsTool,
|
||||
type LsOperations,
|
||||
type LsToolDetails,
|
||||
type LsToolInput,
|
||||
type LsToolOptions,
|
||||
lsTool,
|
||||
} from "./ls.js";
|
||||
export {
|
||||
createReadTool,
|
||||
type ReadOperations,
|
||||
type ReadToolDetails,
|
||||
type ReadToolInput,
|
||||
type ReadToolOptions,
|
||||
readTool,
|
||||
} from "./read.js";
|
||||
export {
|
||||
DEFAULT_MAX_BYTES,
|
||||
DEFAULT_MAX_LINES,
|
||||
formatSize,
|
||||
type TruncationOptions,
|
||||
type TruncationResult,
|
||||
truncateHead,
|
||||
truncateLine,
|
||||
truncateTail,
|
||||
} from "./truncate.js";
|
||||
export {
|
||||
createWriteTool,
|
||||
type WriteOperations,
|
||||
type WriteToolInput,
|
||||
type WriteToolOptions,
|
||||
writeTool,
|
||||
} from "./write.js";
|
||||
|
||||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type BashToolOptions, bashTool, createBashTool } from "./bash.js";
|
||||
import { createEditTool, editTool } from "./edit.js";
|
||||
import { createFindTool, findTool } from "./find.js";
|
||||
import { createGrepTool, grepTool } from "./grep.js";
|
||||
import { createLsTool, lsTool } from "./ls.js";
|
||||
import { createReadTool, type ReadToolOptions, readTool } from "./read.js";
|
||||
import { createWriteTool, writeTool } from "./write.js";
|
||||
|
||||
/** Tool type (AgentTool from pi-ai) */
export type Tool = AgentTool<any>;

// Default tools for full access mode (using process.cwd())
export const codingTools: Tool[] = [readTool, bashTool, editTool, writeTool];

// Read-only tools for exploration without modification (using process.cwd())
export const readOnlyTools: Tool[] = [readTool, grepTool, findTool, lsTool];

// All available tools (using process.cwd())
export const allTools = {
	read: readTool,
	bash: bashTool,
	edit: editTool,
	write: writeTool,
	grep: grepTool,
	find: findTool,
	ls: lsTool,
};

// Union of valid tool names ("read" | "bash" | "edit" | ...), derived from allTools.
export type ToolName = keyof typeof allTools;

// Per-tool configuration accepted by the createXxxTools factory functions.
export interface ToolsOptions {
	/** Options for the read tool */
	read?: ReadToolOptions;
	/** Options for the bash tool */
	bash?: BashToolOptions;
}
|
||||
|
||||
/**
|
||||
* Create coding tools configured for a specific working directory.
|
||||
*/
|
||||
export function createCodingTools(cwd: string, options?: ToolsOptions): Tool[] {
|
||||
return [
|
||||
createReadTool(cwd, options?.read),
|
||||
createBashTool(cwd, options?.bash),
|
||||
createEditTool(cwd),
|
||||
createWriteTool(cwd),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Create read-only tools configured for a specific working directory.
|
||||
*/
|
||||
export function createReadOnlyTools(
|
||||
cwd: string,
|
||||
options?: ToolsOptions,
|
||||
): Tool[] {
|
||||
return [
|
||||
createReadTool(cwd, options?.read),
|
||||
createGrepTool(cwd),
|
||||
createFindTool(cwd),
|
||||
createLsTool(cwd),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Create all tools configured for a specific working directory.
|
||||
*/
|
||||
export function createAllTools(
|
||||
cwd: string,
|
||||
options?: ToolsOptions,
|
||||
): Record<ToolName, Tool> {
|
||||
return {
|
||||
read: createReadTool(cwd, options?.read),
|
||||
bash: createBashTool(cwd, options?.bash),
|
||||
edit: createEditTool(cwd),
|
||||
write: createWriteTool(cwd),
|
||||
grep: createGrepTool(cwd),
|
||||
find: createFindTool(cwd),
|
||||
ls: createLsTool(cwd),
|
||||
};
|
||||
}
|
||||
197
packages/coding-agent/src/core/tools/ls.ts
Normal file
197
packages/coding-agent/src/core/tools/ls.ts
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { existsSync, readdirSync, statSync } from "fs";
|
||||
import nodePath from "path";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
import {
|
||||
DEFAULT_MAX_BYTES,
|
||||
formatSize,
|
||||
type TruncationResult,
|
||||
truncateHead,
|
||||
} from "./truncate.js";
|
||||
|
||||
// Input schema for the ls tool (optional directory and entry cap).
const lsSchema = Type.Object({
	path: Type.Optional(
		Type.String({
			description: "Directory to list (default: current directory)",
		}),
	),
	limit: Type.Optional(
		Type.Number({
			description: "Maximum number of entries to return (default: 500)",
		}),
	),
});

export type LsToolInput = Static<typeof lsSchema>;

// Default cap on returned entries; callers can override via the `limit` input.
const DEFAULT_LIMIT = 500;

// Extra metadata attached to a result when output was cut short.
export interface LsToolDetails {
	// Present when byte-size truncation was applied to the output.
	truncation?: TruncationResult;
	// Present when the entry-count cap was hit; holds the effective limit.
	entryLimitReached?: number;
}

/**
 * Pluggable operations for the ls tool.
 * Override these to delegate directory listing to remote systems (e.g., SSH).
 */
export interface LsOperations {
	/** Check if path exists */
	exists: (absolutePath: string) => Promise<boolean> | boolean;
	/** Get file/directory stats. Throws if not found. */
	stat: (
		absolutePath: string,
	) => Promise<{ isDirectory: () => boolean }> | { isDirectory: () => boolean };
	/** Read directory entries */
	readdir: (absolutePath: string) => Promise<string[]> | string[];
}

// Local-filesystem defaults backed by node:fs synchronous calls.
const defaultLsOperations: LsOperations = {
	exists: existsSync,
	stat: statSync,
	readdir: readdirSync,
};

export interface LsToolOptions {
	/** Custom operations for directory listing. Default: local filesystem */
	operations?: LsOperations;
}
|
||||
|
||||
/**
 * Create an `ls` tool bound to `cwd`.
 *
 * Lists a directory via the pluggable LsOperations, sorts entries
 * case-insensitively, appends '/' to directories, caps the entry count, and
 * applies a byte cap to the final output with notices describing any
 * truncation.
 */
export function createLsTool(
	cwd: string,
	options?: LsToolOptions,
): AgentTool<typeof lsSchema> {
	const ops = options?.operations ?? defaultLsOperations;

	return {
		name: "ls",
		label: "ls",
		description: `List directory contents. Returns entries sorted alphabetically, with '/' suffix for directories. Includes dotfiles. Output is truncated to ${DEFAULT_LIMIT} entries or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`,
		parameters: lsSchema,
		execute: async (
			_toolCallId: string,
			{ path, limit }: { path?: string; limit?: number },
			signal?: AbortSignal,
		) => {
			return new Promise((resolve, reject) => {
				// Fast-fail if the caller already aborted before we started.
				if (signal?.aborted) {
					reject(new Error("Operation aborted"));
					return;
				}

				// Abort rejects the promise directly; a later resolve/reject is a no-op.
				const onAbort = () => reject(new Error("Operation aborted"));
				signal?.addEventListener("abort", onAbort, { once: true });

				(async () => {
					try {
						const dirPath = resolveToCwd(path || ".", cwd);
						const effectiveLimit = limit ?? DEFAULT_LIMIT;

						// Check if path exists
						if (!(await ops.exists(dirPath))) {
							reject(new Error(`Path not found: ${dirPath}`));
							return;
						}

						// Check if path is a directory
						const stat = await ops.stat(dirPath);
						if (!stat.isDirectory()) {
							reject(new Error(`Not a directory: ${dirPath}`));
							return;
						}

						// Read directory entries
						let entries: string[];
						try {
							entries = await ops.readdir(dirPath);
						} catch (e: any) {
							reject(new Error(`Cannot read directory: ${e.message}`));
							return;
						}

						// Sort alphabetically (case-insensitive)
						entries.sort((a, b) =>
							a.toLowerCase().localeCompare(b.toLowerCase()),
						);

						// Format entries with directory indicators
						const results: string[] = [];
						let entryLimitReached = false;

						for (const entry of entries) {
							if (results.length >= effectiveLimit) {
								entryLimitReached = true;
								break;
							}

							const fullPath = nodePath.join(dirPath, entry);
							let suffix = "";

							try {
								const entryStat = await ops.stat(fullPath);
								if (entryStat.isDirectory()) {
									suffix = "/";
								}
							} catch {
								// Skip entries we can't stat
								continue;
							}

							results.push(entry + suffix);
						}

						signal?.removeEventListener("abort", onAbort);

						if (results.length === 0) {
							resolve({
								content: [{ type: "text", text: "(empty directory)" }],
								details: undefined,
							});
							return;
						}

						// Apply byte truncation (no line limit since we already have entry limit)
						const rawOutput = results.join("\n");
						const truncation = truncateHead(rawOutput, {
							maxLines: Number.MAX_SAFE_INTEGER,
						});

						let output = truncation.content;
						const details: LsToolDetails = {};

						// Build notices
						const notices: string[] = [];

						if (entryLimitReached) {
							notices.push(
								`${effectiveLimit} entries limit reached. Use limit=${effectiveLimit * 2} for more`,
							);
							details.entryLimitReached = effectiveLimit;
						}

						if (truncation.truncated) {
							notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
							details.truncation = truncation;
						}

						if (notices.length > 0) {
							output += `\n\n[${notices.join(". ")}]`;
						}

						resolve({
							content: [{ type: "text", text: output }],
							details: Object.keys(details).length > 0 ? details : undefined,
						});
					} catch (e: any) {
						signal?.removeEventListener("abort", onAbort);
						reject(e);
					}
				})();
			});
		},
	};
}

/** Default ls tool using process.cwd() - for backwards compatibility */
export const lsTool = createLsTool(process.cwd());
|
||||
94
packages/coding-agent/src/core/tools/path-utils.ts
Normal file
94
packages/coding-agent/src/core/tools/path-utils.ts
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
import { accessSync, constants } from "node:fs";
|
||||
import * as os from "node:os";
|
||||
import { isAbsolute, resolve as resolvePath } from "node:path";
|
||||
|
||||
const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g;
|
||||
const NARROW_NO_BREAK_SPACE = "\u202F";
|
||||
function normalizeUnicodeSpaces(str: string): string {
|
||||
return str.replace(UNICODE_SPACES, " ");
|
||||
}
|
||||
|
||||
function tryMacOSScreenshotPath(filePath: string): string {
|
||||
return filePath.replace(/ (AM|PM)\./g, `${NARROW_NO_BREAK_SPACE}$1.`);
|
||||
}
|
||||
|
||||
function tryNFDVariant(filePath: string): string {
|
||||
// macOS stores filenames in NFD (decomposed) form, try converting user input to NFD
|
||||
return filePath.normalize("NFD");
|
||||
}
|
||||
|
||||
function tryCurlyQuoteVariant(filePath: string): string {
|
||||
// macOS uses U+2019 (right single quotation mark) in screenshot names like "Capture d'écran"
|
||||
// Users typically type U+0027 (straight apostrophe)
|
||||
return filePath.replace(/'/g, "\u2019");
|
||||
}
|
||||
|
||||
function fileExists(filePath: string): boolean {
|
||||
try {
|
||||
accessSync(filePath, constants.F_OK);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeAtPrefix(filePath: string): string {
|
||||
return filePath.startsWith("@") ? filePath.slice(1) : filePath;
|
||||
}
|
||||
|
||||
export function expandPath(filePath: string): string {
|
||||
const normalized = normalizeUnicodeSpaces(normalizeAtPrefix(filePath));
|
||||
if (normalized === "~") {
|
||||
return os.homedir();
|
||||
}
|
||||
if (normalized.startsWith("~/")) {
|
||||
return os.homedir() + normalized.slice(1);
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a path relative to the given cwd.
|
||||
* Handles ~ expansion and absolute paths.
|
||||
*/
|
||||
export function resolveToCwd(filePath: string, cwd: string): string {
|
||||
const expanded = expandPath(filePath);
|
||||
if (isAbsolute(expanded)) {
|
||||
return expanded;
|
||||
}
|
||||
return resolvePath(cwd, expanded);
|
||||
}
|
||||
|
||||
export function resolveReadPath(filePath: string, cwd: string): string {
|
||||
const resolved = resolveToCwd(filePath, cwd);
|
||||
|
||||
if (fileExists(resolved)) {
|
||||
return resolved;
|
||||
}
|
||||
|
||||
// Try macOS AM/PM variant (narrow no-break space before AM/PM)
|
||||
const amPmVariant = tryMacOSScreenshotPath(resolved);
|
||||
if (amPmVariant !== resolved && fileExists(amPmVariant)) {
|
||||
return amPmVariant;
|
||||
}
|
||||
|
||||
// Try NFD variant (macOS stores filenames in NFD form)
|
||||
const nfdVariant = tryNFDVariant(resolved);
|
||||
if (nfdVariant !== resolved && fileExists(nfdVariant)) {
|
||||
return nfdVariant;
|
||||
}
|
||||
|
||||
// Try curly quote variant (macOS uses U+2019 in screenshot names)
|
||||
const curlyVariant = tryCurlyQuoteVariant(resolved);
|
||||
if (curlyVariant !== resolved && fileExists(curlyVariant)) {
|
||||
return curlyVariant;
|
||||
}
|
||||
|
||||
// Try combined NFD + curly quote (for French macOS screenshots like "Capture d'écran")
|
||||
const nfdCurlyVariant = tryCurlyQuoteVariant(nfdVariant);
|
||||
if (nfdCurlyVariant !== resolved && fileExists(nfdCurlyVariant)) {
|
||||
return nfdCurlyVariant;
|
||||
}
|
||||
|
||||
return resolved;
|
||||
}
|
||||
265
packages/coding-agent/src/core/tools/read.ts
Normal file
265
packages/coding-agent/src/core/tools/read.ts
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import type { ImageContent, TextContent } from "@mariozechner/pi-ai";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { constants } from "fs";
|
||||
import { access as fsAccess, readFile as fsReadFile } from "fs/promises";
|
||||
import { formatDimensionNote, resizeImage } from "../../utils/image-resize.js";
|
||||
import { detectSupportedImageMimeTypeFromFile } from "../../utils/mime.js";
|
||||
import { resolveReadPath } from "./path-utils.js";
|
||||
import {
|
||||
DEFAULT_MAX_BYTES,
|
||||
DEFAULT_MAX_LINES,
|
||||
formatSize,
|
||||
type TruncationResult,
|
||||
truncateHead,
|
||||
} from "./truncate.js";
|
||||
|
||||
/** Input schema for the read tool: target path plus an optional line window. */
const readSchema = Type.Object({
  path: Type.String({
    description: "Path to the file to read (relative or absolute)",
  }),
  offset: Type.Optional(
    Type.Number({
      description: "Line number to start reading from (1-indexed)",
    }),
  ),
  limit: Type.Optional(
    Type.Number({ description: "Maximum number of lines to read" }),
  ),
});

/** Inferred TypeScript type of the read tool's validated input. */
export type ReadToolInput = Static<typeof readSchema>;
|
||||
|
||||
/** Extra diagnostics attached to a read result. */
export interface ReadToolDetails {
  /** Present when the returned text was truncated by line/byte limits. */
  truncation?: TruncationResult;
}

/**
 * Pluggable operations for the read tool.
 * Override these to delegate file reading to remote systems (e.g., SSH).
 */
export interface ReadOperations {
  /** Read file contents as a Buffer */
  readFile: (absolutePath: string) => Promise<Buffer>;
  /** Check if file is readable (throw if not) */
  access: (absolutePath: string) => Promise<void>;
  /** Detect image MIME type, return null/undefined for non-images */
  detectImageMimeType?: (
    absolutePath: string,
  ) => Promise<string | null | undefined>;
}

/** Local-filesystem defaults used when no custom operations are supplied. */
const defaultReadOperations: ReadOperations = {
  readFile: (path) => fsReadFile(path),
  access: (path) => fsAccess(path, constants.R_OK),
  detectImageMimeType: detectSupportedImageMimeTypeFromFile,
};

/** Construction options for createReadTool. */
export interface ReadToolOptions {
  /** Whether to auto-resize images to 2000x2000 max. Default: true */
  autoResizeImages?: boolean;
  /** Custom operations for file reading. Default: local filesystem */
  operations?: ReadOperations;
}
|
||||
|
||||
/**
 * Create a read tool bound to `cwd`.
 *
 * Reads text files (with offset/limit windowing and head truncation) and
 * supported images (returned as attachments, optionally auto-resized).
 * Relative paths are resolved against `cwd`; macOS filename variants are
 * tried via resolveReadPath. File access can be overridden through
 * `options.operations` (e.g. to read over SSH).
 *
 * @param cwd Base directory for resolving relative paths.
 * @param options Image resizing and operation overrides.
 */
export function createReadTool(
  cwd: string,
  options?: ReadToolOptions,
): AgentTool<typeof readSchema> {
  const autoResizeImages = options?.autoResizeImages ?? true;
  const ops = options?.operations ?? defaultReadOperations;

  return {
    name: "read",
    label: "read",
    description: `Read the contents of a file. Supports text files and images (jpg, png, gif, webp). Images are sent as attachments. For text files, output is truncated to ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Use offset/limit for large files. When you need the full file, continue with offset until complete.`,
    parameters: readSchema,
    execute: async (
      _toolCallId: string,
      {
        path,
        offset,
        limit,
      }: { path: string; offset?: number; limit?: number },
      signal?: AbortSignal,
    ) => {
      const absolutePath = resolveReadPath(path, cwd);

      return new Promise<{
        content: (TextContent | ImageContent)[];
        details: ReadToolDetails | undefined;
      }>((resolve, reject) => {
        // Check if already aborted
        if (signal?.aborted) {
          reject(new Error("Operation aborted"));
          return;
        }

        let aborted = false;

        // After abort the promise is already rejected; this flag stops the
        // async work below from settling the promise a second time.
        const onAbort = () => {
          aborted = true;
          reject(new Error("Operation aborted"));
        };

        if (signal) {
          signal.addEventListener("abort", onAbort, { once: true });
        }

        // Perform the read operation
        (async () => {
          try {
            // Check if file exists (throws if not readable)
            await ops.access(absolutePath);

            // Check if aborted before reading
            if (aborted) {
              return;
            }

            const mimeType = ops.detectImageMimeType
              ? await ops.detectImageMimeType(absolutePath)
              : undefined;

            // Read the file based on type
            let content: (TextContent | ImageContent)[];
            let details: ReadToolDetails | undefined;

            if (mimeType) {
              // Read as image (binary)
              const buffer = await ops.readFile(absolutePath);
              const base64 = buffer.toString("base64");

              if (autoResizeImages) {
                // Resize image if needed
                const resized = await resizeImage({
                  type: "image",
                  data: base64,
                  mimeType,
                });
                const dimensionNote = formatDimensionNote(resized);

                let textNote = `Read image file [${resized.mimeType}]`;
                if (dimensionNote) {
                  textNote += `\n${dimensionNote}`;
                }

                content = [
                  { type: "text", text: textNote },
                  {
                    type: "image",
                    data: resized.data,
                    mimeType: resized.mimeType,
                  },
                ];
              } else {
                const textNote = `Read image file [${mimeType}]`;
                content = [
                  { type: "text", text: textNote },
                  { type: "image", data: base64, mimeType },
                ];
              }
            } else {
              // Read as text
              const buffer = await ops.readFile(absolutePath);
              const textContent = buffer.toString("utf-8");
              const allLines = textContent.split("\n");
              const totalFileLines = allLines.length;

              // Apply offset if specified (1-indexed to 0-indexed)
              const startLine = offset ? Math.max(0, offset - 1) : 0;
              const startLineDisplay = startLine + 1; // For display (1-indexed)

              // Check if offset is out of bounds
              if (startLine >= allLines.length) {
                throw new Error(
                  `Offset ${offset} is beyond end of file (${allLines.length} lines total)`,
                );
              }

              // If limit is specified by user, use it; otherwise we'll let truncateHead decide
              let selectedContent: string;
              let userLimitedLines: number | undefined;
              if (limit !== undefined) {
                const endLine = Math.min(startLine + limit, allLines.length);
                selectedContent = allLines.slice(startLine, endLine).join("\n");
                userLimitedLines = endLine - startLine;
              } else {
                selectedContent = allLines.slice(startLine).join("\n");
              }

              // Apply truncation (respects both line and byte limits)
              const truncation = truncateHead(selectedContent);

              let outputText: string;

              if (truncation.firstLineExceedsLimit) {
                // First line at offset alone exceeds the byte limit
                // (DEFAULT_MAX_BYTES) - tell the model to use bash instead
                const firstLineSize = formatSize(
                  Buffer.byteLength(allLines[startLine], "utf-8"),
                );
                outputText = `[Line ${startLineDisplay} is ${firstLineSize}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path} | head -c ${DEFAULT_MAX_BYTES}]`;
                details = { truncation };
              } else if (truncation.truncated) {
                // Truncation occurred - build actionable notice with the
                // next offset so the model can continue reading
                const endLineDisplay =
                  startLineDisplay + truncation.outputLines - 1;
                const nextOffset = endLineDisplay + 1;

                outputText = truncation.content;

                if (truncation.truncatedBy === "lines") {
                  outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines}. Use offset=${nextOffset} to continue.]`;
                } else {
                  outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Use offset=${nextOffset} to continue.]`;
                }
                details = { truncation };
              } else if (
                userLimitedLines !== undefined &&
                startLine + userLimitedLines < allLines.length
              ) {
                // User specified limit, there's more content, but no truncation
                const remaining =
                  allLines.length - (startLine + userLimitedLines);
                const nextOffset = startLine + userLimitedLines + 1;

                outputText = truncation.content;
                outputText += `\n\n[${remaining} more lines in file. Use offset=${nextOffset} to continue.]`;
              } else {
                // No truncation, no user limit exceeded
                outputText = truncation.content;
              }

              content = [{ type: "text", text: outputText }];
            }

            // Check if aborted after reading
            if (aborted) {
              return;
            }

            // Clean up abort handler
            if (signal) {
              signal.removeEventListener("abort", onAbort);
            }

            resolve({ content, details });
          } catch (error: any) {
            // Clean up abort handler
            if (signal) {
              signal.removeEventListener("abort", onAbort);
            }

            if (!aborted) {
              reject(error);
            }
          }
        })();
      });
    },
  };
}
|
||||
|
||||
/** Default read tool bound to process.cwd() at module load - kept for backwards compatibility */
export const readTool = createReadTool(process.cwd());
|
||||
279
packages/coding-agent/src/core/tools/truncate.ts
Normal file
279
packages/coding-agent/src/core/tools/truncate.ts
Normal file
|
|
@ -0,0 +1,279 @@
|
|||
/**
|
||||
* Shared truncation utilities for tool outputs.
|
||||
*
|
||||
* Truncation is based on two independent limits - whichever is hit first wins:
|
||||
* - Line limit (default: 2000 lines)
|
||||
* - Byte limit (default: 50KB)
|
||||
*
|
||||
* Never returns partial lines (except bash tail truncation edge case).
|
||||
*/
|
||||
|
||||
/** Default cap on the number of lines kept by head/tail truncation. */
export const DEFAULT_MAX_LINES = 2000;
/** Default cap on output size in bytes. */
export const DEFAULT_MAX_BYTES = 50 * 1024; // 50KB
export const GREP_MAX_LINE_LENGTH = 500; // Max chars per grep match line

/** Describes the outcome of a head/tail truncation pass. */
export interface TruncationResult {
  /** The truncated content */
  content: string;
  /** Whether truncation occurred */
  truncated: boolean;
  /** Which limit was hit: "lines", "bytes", or null if not truncated */
  truncatedBy: "lines" | "bytes" | null;
  /** Total number of lines in the original content */
  totalLines: number;
  /** Total number of bytes in the original content */
  totalBytes: number;
  /** Number of complete lines in the truncated output */
  outputLines: number;
  /** Number of bytes in the truncated output */
  outputBytes: number;
  /** Whether the last line was partially truncated (only for tail truncation edge case) */
  lastLinePartial: boolean;
  /** Whether the first line exceeded the byte limit (for head truncation) */
  firstLineExceedsLimit: boolean;
  /** The max lines limit that was applied */
  maxLines: number;
  /** The max bytes limit that was applied */
  maxBytes: number;
}

/** Optional overrides for the truncation limits. */
export interface TruncationOptions {
  /** Maximum number of lines (default: 2000) */
  maxLines?: number;
  /** Maximum number of bytes (default: 50KB) */
  maxBytes?: number;
}
|
||||
|
||||
/**
|
||||
* Format bytes as human-readable size.
|
||||
*/
|
||||
export function formatSize(bytes: number): string {
|
||||
if (bytes < 1024) {
|
||||
return `${bytes}B`;
|
||||
} else if (bytes < 1024 * 1024) {
|
||||
return `${(bytes / 1024).toFixed(1)}KB`;
|
||||
} else {
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)}MB`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate content from the head (keep first N lines/bytes).
|
||||
* Suitable for file reads where you want to see the beginning.
|
||||
*
|
||||
* Never returns partial lines. If first line exceeds byte limit,
|
||||
* returns empty content with firstLineExceedsLimit=true.
|
||||
*/
|
||||
export function truncateHead(
|
||||
content: string,
|
||||
options: TruncationOptions = {},
|
||||
): TruncationResult {
|
||||
const maxLines = options.maxLines ?? DEFAULT_MAX_LINES;
|
||||
const maxBytes = options.maxBytes ?? DEFAULT_MAX_BYTES;
|
||||
|
||||
const totalBytes = Buffer.byteLength(content, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
const totalLines = lines.length;
|
||||
|
||||
// Check if no truncation needed
|
||||
if (totalLines <= maxLines && totalBytes <= maxBytes) {
|
||||
return {
|
||||
content,
|
||||
truncated: false,
|
||||
truncatedBy: null,
|
||||
totalLines,
|
||||
totalBytes,
|
||||
outputLines: totalLines,
|
||||
outputBytes: totalBytes,
|
||||
lastLinePartial: false,
|
||||
firstLineExceedsLimit: false,
|
||||
maxLines,
|
||||
maxBytes,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if first line alone exceeds byte limit
|
||||
const firstLineBytes = Buffer.byteLength(lines[0], "utf-8");
|
||||
if (firstLineBytes > maxBytes) {
|
||||
return {
|
||||
content: "",
|
||||
truncated: true,
|
||||
truncatedBy: "bytes",
|
||||
totalLines,
|
||||
totalBytes,
|
||||
outputLines: 0,
|
||||
outputBytes: 0,
|
||||
lastLinePartial: false,
|
||||
firstLineExceedsLimit: true,
|
||||
maxLines,
|
||||
maxBytes,
|
||||
};
|
||||
}
|
||||
|
||||
// Collect complete lines that fit
|
||||
const outputLinesArr: string[] = [];
|
||||
let outputBytesCount = 0;
|
||||
let truncatedBy: "lines" | "bytes" = "lines";
|
||||
|
||||
for (let i = 0; i < lines.length && i < maxLines; i++) {
|
||||
const line = lines[i];
|
||||
const lineBytes = Buffer.byteLength(line, "utf-8") + (i > 0 ? 1 : 0); // +1 for newline
|
||||
|
||||
if (outputBytesCount + lineBytes > maxBytes) {
|
||||
truncatedBy = "bytes";
|
||||
break;
|
||||
}
|
||||
|
||||
outputLinesArr.push(line);
|
||||
outputBytesCount += lineBytes;
|
||||
}
|
||||
|
||||
// If we exited due to line limit
|
||||
if (outputLinesArr.length >= maxLines && outputBytesCount <= maxBytes) {
|
||||
truncatedBy = "lines";
|
||||
}
|
||||
|
||||
const outputContent = outputLinesArr.join("\n");
|
||||
const finalOutputBytes = Buffer.byteLength(outputContent, "utf-8");
|
||||
|
||||
return {
|
||||
content: outputContent,
|
||||
truncated: true,
|
||||
truncatedBy,
|
||||
totalLines,
|
||||
totalBytes,
|
||||
outputLines: outputLinesArr.length,
|
||||
outputBytes: finalOutputBytes,
|
||||
lastLinePartial: false,
|
||||
firstLineExceedsLimit: false,
|
||||
maxLines,
|
||||
maxBytes,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate content from the tail (keep last N lines/bytes).
|
||||
* Suitable for bash output where you want to see the end (errors, final results).
|
||||
*
|
||||
* May return partial first line if the last line of original content exceeds byte limit.
|
||||
*/
|
||||
export function truncateTail(
|
||||
content: string,
|
||||
options: TruncationOptions = {},
|
||||
): TruncationResult {
|
||||
const maxLines = options.maxLines ?? DEFAULT_MAX_LINES;
|
||||
const maxBytes = options.maxBytes ?? DEFAULT_MAX_BYTES;
|
||||
|
||||
const totalBytes = Buffer.byteLength(content, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
const totalLines = lines.length;
|
||||
|
||||
// Check if no truncation needed
|
||||
if (totalLines <= maxLines && totalBytes <= maxBytes) {
|
||||
return {
|
||||
content,
|
||||
truncated: false,
|
||||
truncatedBy: null,
|
||||
totalLines,
|
||||
totalBytes,
|
||||
outputLines: totalLines,
|
||||
outputBytes: totalBytes,
|
||||
lastLinePartial: false,
|
||||
firstLineExceedsLimit: false,
|
||||
maxLines,
|
||||
maxBytes,
|
||||
};
|
||||
}
|
||||
|
||||
// Work backwards from the end
|
||||
const outputLinesArr: string[] = [];
|
||||
let outputBytesCount = 0;
|
||||
let truncatedBy: "lines" | "bytes" = "lines";
|
||||
let lastLinePartial = false;
|
||||
|
||||
for (
|
||||
let i = lines.length - 1;
|
||||
i >= 0 && outputLinesArr.length < maxLines;
|
||||
i--
|
||||
) {
|
||||
const line = lines[i];
|
||||
const lineBytes =
|
||||
Buffer.byteLength(line, "utf-8") + (outputLinesArr.length > 0 ? 1 : 0); // +1 for newline
|
||||
|
||||
if (outputBytesCount + lineBytes > maxBytes) {
|
||||
truncatedBy = "bytes";
|
||||
// Edge case: if we haven't added ANY lines yet and this line exceeds maxBytes,
|
||||
// take the end of the line (partial)
|
||||
if (outputLinesArr.length === 0) {
|
||||
const truncatedLine = truncateStringToBytesFromEnd(line, maxBytes);
|
||||
outputLinesArr.unshift(truncatedLine);
|
||||
outputBytesCount = Buffer.byteLength(truncatedLine, "utf-8");
|
||||
lastLinePartial = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
outputLinesArr.unshift(line);
|
||||
outputBytesCount += lineBytes;
|
||||
}
|
||||
|
||||
// If we exited due to line limit
|
||||
if (outputLinesArr.length >= maxLines && outputBytesCount <= maxBytes) {
|
||||
truncatedBy = "lines";
|
||||
}
|
||||
|
||||
const outputContent = outputLinesArr.join("\n");
|
||||
const finalOutputBytes = Buffer.byteLength(outputContent, "utf-8");
|
||||
|
||||
return {
|
||||
content: outputContent,
|
||||
truncated: true,
|
||||
truncatedBy,
|
||||
totalLines,
|
||||
totalBytes,
|
||||
outputLines: outputLinesArr.length,
|
||||
outputBytes: finalOutputBytes,
|
||||
lastLinePartial,
|
||||
firstLineExceedsLimit: false,
|
||||
maxLines,
|
||||
maxBytes,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate a string to fit within a byte limit (from the end).
|
||||
* Handles multi-byte UTF-8 characters correctly.
|
||||
*/
|
||||
function truncateStringToBytesFromEnd(str: string, maxBytes: number): string {
|
||||
const buf = Buffer.from(str, "utf-8");
|
||||
if (buf.length <= maxBytes) {
|
||||
return str;
|
||||
}
|
||||
|
||||
// Start from the end, skip maxBytes back
|
||||
let start = buf.length - maxBytes;
|
||||
|
||||
// Find a valid UTF-8 boundary (start of a character)
|
||||
while (start < buf.length && (buf[start] & 0xc0) === 0x80) {
|
||||
start++;
|
||||
}
|
||||
|
||||
return buf.slice(start).toString("utf-8");
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate a single line to max characters, adding [truncated] suffix.
|
||||
* Used for grep match lines.
|
||||
*/
|
||||
export function truncateLine(
|
||||
line: string,
|
||||
maxChars: number = GREP_MAX_LINE_LENGTH,
|
||||
): { text: string; wasTruncated: boolean } {
|
||||
if (line.length <= maxChars) {
|
||||
return { text: line, wasTruncated: false };
|
||||
}
|
||||
return {
|
||||
text: `${line.slice(0, maxChars)}... [truncated]`,
|
||||
wasTruncated: true,
|
||||
};
|
||||
}
|
||||
129
packages/coding-agent/src/core/tools/write.ts
Normal file
129
packages/coding-agent/src/core/tools/write.ts
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { type Static, Type } from "@sinclair/typebox";
|
||||
import { mkdir as fsMkdir, writeFile as fsWriteFile } from "fs/promises";
|
||||
import { dirname } from "path";
|
||||
import { resolveToCwd } from "./path-utils.js";
|
||||
|
||||
/** Input schema for the write tool: target path and full file content. */
const writeSchema = Type.Object({
  path: Type.String({
    description: "Path to the file to write (relative or absolute)",
  }),
  content: Type.String({ description: "Content to write to the file" }),
});

/** Inferred TypeScript type of the write tool's validated input. */
export type WriteToolInput = Static<typeof writeSchema>;
|
||||
|
||||
/**
 * Pluggable operations for the write tool.
 * Override these to delegate file writing to remote systems (e.g., SSH).
 */
export interface WriteOperations {
  /** Write content to a file */
  writeFile: (absolutePath: string, content: string) => Promise<void>;
  /** Create directory (recursively) */
  mkdir: (dir: string) => Promise<void>;
}

/** Local-filesystem defaults used when no custom operations are supplied. */
const defaultWriteOperations: WriteOperations = {
  writeFile: (path, content) => fsWriteFile(path, content, "utf-8"),
  // .then(() => {}) discards mkdir's string|undefined result to satisfy Promise<void>
  mkdir: (dir) => fsMkdir(dir, { recursive: true }).then(() => {}),
};

/** Construction options for createWriteTool. */
export interface WriteToolOptions {
  /** Custom operations for file writing. Default: local filesystem */
  operations?: WriteOperations;
}
|
||||
|
||||
export function createWriteTool(
|
||||
cwd: string,
|
||||
options?: WriteToolOptions,
|
||||
): AgentTool<typeof writeSchema> {
|
||||
const ops = options?.operations ?? defaultWriteOperations;
|
||||
|
||||
return {
|
||||
name: "write",
|
||||
label: "write",
|
||||
description:
|
||||
"Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Automatically creates parent directories.",
|
||||
parameters: writeSchema,
|
||||
execute: async (
|
||||
_toolCallId: string,
|
||||
{ path, content }: { path: string; content: string },
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
const absolutePath = resolveToCwd(path, cwd);
|
||||
const dir = dirname(absolutePath);
|
||||
|
||||
return new Promise<{
|
||||
content: Array<{ type: "text"; text: string }>;
|
||||
details: undefined;
|
||||
}>((resolve, reject) => {
|
||||
// Check if already aborted
|
||||
if (signal?.aborted) {
|
||||
reject(new Error("Operation aborted"));
|
||||
return;
|
||||
}
|
||||
|
||||
let aborted = false;
|
||||
|
||||
// Set up abort handler
|
||||
const onAbort = () => {
|
||||
aborted = true;
|
||||
reject(new Error("Operation aborted"));
|
||||
};
|
||||
|
||||
if (signal) {
|
||||
signal.addEventListener("abort", onAbort, { once: true });
|
||||
}
|
||||
|
||||
// Perform the write operation
|
||||
(async () => {
|
||||
try {
|
||||
// Create parent directories if needed
|
||||
await ops.mkdir(dir);
|
||||
|
||||
// Check if aborted before writing
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Write the file
|
||||
await ops.writeFile(absolutePath, content);
|
||||
|
||||
// Check if aborted after writing
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Clean up abort handler
|
||||
if (signal) {
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
}
|
||||
|
||||
resolve({
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Successfully wrote ${content.length} bytes to ${path}`,
|
||||
},
|
||||
],
|
||||
details: undefined,
|
||||
});
|
||||
} catch (error: any) {
|
||||
// Clean up abort handler
|
||||
if (signal) {
|
||||
signal.removeEventListener("abort", onAbort);
|
||||
}
|
||||
|
||||
if (!aborted) {
|
||||
reject(error);
|
||||
}
|
||||
}
|
||||
})();
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Default write tool bound to process.cwd() at module load - kept for backwards compatibility */
export const writeTool = createWriteTool(process.cwd());
|
||||
Loading…
Add table
Add a link
Reference in a new issue