Fix SDK tools to respect cwd option

Core tools now properly use the cwd passed to createAgentSession().
Added tool factory functions for SDK users who specify custom cwd with explicit tools.

Fixes #279
This commit is contained in:
Mario Zechner 2025-12-22 16:17:55 +01:00
parent 42bc368e70
commit face745f3d
16 changed files with 1243 additions and 1044 deletions

View file

@@ -2,6 +2,10 @@
## [Unreleased] ## [Unreleased]
### Fixed
- **SDK tools respect cwd**: Core tools (bash, read, edit, write, grep, find, ls) now properly use the `cwd` option from `createAgentSession()`. Added tool factory functions (`createBashTool`, `createReadTool`, etc.) for SDK users who specify custom `cwd` with explicit tools. ([#279](https://github.com/badlogic/pi-mono/issues/279))
## [0.26.0] - 2025-12-22 ## [0.26.0] - 2025-12-22
### Added ### Added

View file

@@ -307,7 +307,7 @@ const { session } = await createAgentSession({
```typescript ```typescript
import { import {
codingTools, // read, bash, edit, write (default) codingTools, // read, bash, edit, write (default)
readOnlyTools, // read, bash readOnlyTools, // read, grep, find, ls
readTool, bashTool, editTool, writeTool, readTool, bashTool, editTool, writeTool,
grepTool, findTool, lsTool, grepTool, findTool, lsTool,
} from "@mariozechner/pi-coding-agent"; } from "@mariozechner/pi-coding-agent";
@@ -323,6 +323,45 @@ const { session } = await createAgentSession({
}); });
``` ```
#### Tools with Custom cwd
**Important:** The pre-built tool instances (`readTool`, `bashTool`, etc.) use `process.cwd()` for path resolution. When you specify a custom `cwd` AND provide explicit `tools`, you must use the tool factory functions to ensure paths resolve correctly:
```typescript
import {
createCodingTools, // Creates [read, bash, edit, write] for specific cwd
createReadOnlyTools, // Creates [read, grep, find, ls] for specific cwd
createReadTool,
createBashTool,
createEditTool,
createWriteTool,
createGrepTool,
createFindTool,
createLsTool,
} from "@mariozechner/pi-coding-agent";
const cwd = "/path/to/project";
// Use factory for tool sets
const { session } = await createAgentSession({
cwd,
tools: createCodingTools(cwd), // Tools resolve paths relative to cwd
});
// Or pick specific tools
const { session } = await createAgentSession({
cwd,
tools: [createReadTool(cwd), createBashTool(cwd), createGrepTool(cwd)],
});
```
**When you don't need factories:**
- If you omit `tools`, pi automatically creates them with the correct `cwd`
- If you use `process.cwd()` as your `cwd`, the pre-built instances work fine
**When you must use factories:**
- When you specify both `cwd` (different from `process.cwd()`) AND `tools`
> See [examples/sdk/05-tools.ts](../examples/sdk/05-tools.ts) > See [examples/sdk/05-tools.ts](../examples/sdk/05-tools.ts)
### Custom Tools ### Custom Tools
@@ -788,12 +827,18 @@ buildSystemPrompt
SessionManager SessionManager
SettingsManager SettingsManager
// Built-in tools // Built-in tools (use process.cwd())
codingTools codingTools
readOnlyTools readOnlyTools
readTool, bashTool, editTool, writeTool readTool, bashTool, editTool, writeTool
grepTool, findTool, lsTool grepTool, findTool, lsTool
// Tool factories (for custom cwd)
createCodingTools
createReadOnlyTools
createReadTool, createBashTool, createEditTool, createWriteTool
createGrepTool, createFindTool, createLsTool
// Types // Types
type CreateAgentSessionOptions type CreateAgentSessionOptions
type CreateAgentSessionResult type CreateAgentSessionResult

View file

@@ -2,6 +2,10 @@
* Tools Configuration * Tools Configuration
* *
* Use built-in tool sets, individual tools, or add custom tools. * Use built-in tool sets, individual tools, or add custom tools.
*
* IMPORTANT: When using a custom `cwd`, you must use the tool factory functions
* (createCodingTools, createReadOnlyTools, createReadTool, etc.) to ensure
* tools resolve paths relative to your cwd, not process.cwd().
*/ */
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
@@ -9,28 +13,50 @@ import {
createAgentSession, createAgentSession,
discoverCustomTools, discoverCustomTools,
SessionManager, SessionManager,
codingTools, // read, bash, edit, write (default) codingTools, // read, bash, edit, write - uses process.cwd()
readOnlyTools, // read, bash readOnlyTools, // read, grep, find, ls - uses process.cwd()
createCodingTools, // Factory: creates tools for specific cwd
createReadOnlyTools, // Factory: creates tools for specific cwd
createReadTool,
createBashTool,
createGrepTool,
readTool, readTool,
bashTool, bashTool,
grepTool, grepTool,
type CustomAgentTool, type CustomAgentTool,
} from "../../src/index.js"; } from "../../src/index.js";
// Read-only mode (no edit/write) // Read-only mode (no edit/write) - uses process.cwd()
const { session: readOnly } = await createAgentSession({ const { session: readOnly } = await createAgentSession({
tools: readOnlyTools, tools: readOnlyTools,
sessionManager: SessionManager.inMemory(), sessionManager: SessionManager.inMemory(),
}); });
console.log("Read-only session created"); console.log("Read-only session created");
// Custom tool selection // Custom tool selection - uses process.cwd()
const { session: custom } = await createAgentSession({ const { session: custom } = await createAgentSession({
tools: [readTool, bashTool, grepTool], tools: [readTool, bashTool, grepTool],
sessionManager: SessionManager.inMemory(), sessionManager: SessionManager.inMemory(),
}); });
console.log("Custom tools session created"); console.log("Custom tools session created");
// With custom cwd - MUST use factory functions!
const customCwd = "/path/to/project";
const { session: customCwdSession } = await createAgentSession({
cwd: customCwd,
tools: createCodingTools(customCwd), // Tools resolve paths relative to customCwd
sessionManager: SessionManager.inMemory(),
});
console.log("Custom cwd session created");
// Or pick specific tools for custom cwd
const { session: specificTools } = await createAgentSession({
cwd: customCwd,
tools: [createReadTool(customCwd), createBashTool(customCwd), createGrepTool(customCwd)],
sessionManager: SessionManager.inMemory(),
});
console.log("Specific tools with custom cwd session created");
// Inline custom tool (needs TypeBox schema) // Inline custom tool (needs TypeBox schema)
const weatherTool: CustomAgentTool = { const weatherTool: CustomAgentTool = {
name: "get_weather", name: "get_weather",

View file

@@ -3,6 +3,10 @@
* *
* Replace everything - no discovery, explicit configuration. * Replace everything - no discovery, explicit configuration.
* Still uses OAuth from ~/.pi/agent for convenience. * Still uses OAuth from ~/.pi/agent for convenience.
*
* IMPORTANT: When providing `tools` with a custom `cwd`, use the tool factory
* functions (createReadTool, createBashTool, etc.) to ensure tools resolve
* paths relative to your cwd.
*/ */
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
@@ -13,8 +17,8 @@ import {
findModel, findModel,
SessionManager, SessionManager,
SettingsManager, SettingsManager,
readTool, createReadTool,
bashTool, createBashTool,
type HookFactory, type HookFactory,
type CustomAgentTool, type CustomAgentTool,
} from "../../src/index.js"; } from "../../src/index.js";
@@ -60,8 +64,11 @@ const settingsManager = SettingsManager.inMemory({
retry: { enabled: true, maxRetries: 2 }, retry: { enabled: true, maxRetries: 2 },
}); });
// When using a custom cwd with explicit tools, use the factory functions
const cwd = process.cwd();
const { session } = await createAgentSession({ const { session } = await createAgentSession({
cwd: process.cwd(), cwd,
agentDir: "/tmp/my-agent", agentDir: "/tmp/my-agent",
model, model,
@@ -71,7 +78,8 @@ const { session } = await createAgentSession({
systemPrompt: `You are a minimal assistant. systemPrompt: `You are a minimal assistant.
Available: read, bash, status. Be concise.`, Available: read, bash, status. Be concise.`,
tools: [readTool, bashTool], // Use factory functions with the same cwd to ensure path resolution works correctly
tools: [createReadTool(cwd), createBashTool(cwd)],
customTools: [{ tool: statusTool }], customTools: [{ tool: statusTool }],
hooks: [{ factory: auditHook }], hooks: [{ factory: auditHook }],
skills: [], skills: [],

View file

@@ -21,7 +21,7 @@ export async function processFileArguments(fileArgs: string[]): Promise<Processe
for (const fileArg of fileArgs) { for (const fileArg of fileArgs) {
// Expand and resolve path (handles ~ expansion and macOS screenshot Unicode spaces) // Expand and resolve path (handles ~ expansion and macOS screenshot Unicode spaces)
const absolutePath = resolve(resolveReadPath(fileArg)); const absolutePath = resolve(resolveReadPath(fileArg, process.cwd()));
// Check if file exists // Check if file exists
try { try {

View file

@@ -58,6 +58,15 @@ import {
allTools, allTools,
bashTool, bashTool,
codingTools, codingTools,
createBashTool,
createCodingTools,
createEditTool,
createFindTool,
createGrepTool,
createLsTool,
createReadOnlyTools,
createReadTool,
createWriteTool,
editTool, editTool,
findTool, findTool,
grepTool, grepTool,
@@ -138,6 +147,7 @@ export type { FileSlashCommand } from "./slash-commands.js";
export type { Tool } from "./tools/index.js"; export type { Tool } from "./tools/index.js";
export { export {
// Pre-built tools (use process.cwd())
readTool, readTool,
bashTool, bashTool,
editTool, editTool,
@@ -148,6 +158,16 @@ export {
codingTools, codingTools,
readOnlyTools, readOnlyTools,
allTools as allBuiltInTools, allTools as allBuiltInTools,
// Tool factories (for custom cwd)
createCodingTools,
createReadOnlyTools,
createReadTool,
createBashTool,
createEditTool,
createWriteTool,
createGrepTool,
createFindTool,
createLsTool,
}; };
// Helper Functions // Helper Functions
@@ -526,7 +546,7 @@ export async function createAgentSession(options: CreateAgentSessionOptions = {}
const contextFiles = options.contextFiles ?? discoverContextFiles(cwd, agentDir); const contextFiles = options.contextFiles ?? discoverContextFiles(cwd, agentDir);
const builtInTools = options.tools ?? codingTools; const builtInTools = options.tools ?? createCodingTools(cwd);
let customToolsResult: { tools: LoadedCustomTool[]; setUIContext: (ctx: any, hasUI: boolean) => void }; let customToolsResult: { tools: LoadedCustomTool[]; setUIContext: (ctx: any, hasUI: boolean) => void };
if (options.customTools !== undefined) { if (options.customTools !== undefined) {

View file

@@ -26,181 +26,187 @@ export interface BashToolDetails {
fullOutputPath?: string; fullOutputPath?: string;
} }
export const bashTool: AgentTool<typeof bashSchema> = { export function createBashTool(cwd: string): AgentTool<typeof bashSchema> {
name: "bash", return {
label: "bash", name: "bash",
description: `Execute a bash command in the current working directory. Returns stdout and stderr. Output is truncated to last ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). If truncated, full output is saved to a temp file. Optionally provide a timeout in seconds.`, label: "bash",
parameters: bashSchema, description: `Execute a bash command in the current working directory. Returns stdout and stderr. Output is truncated to last ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). If truncated, full output is saved to a temp file. Optionally provide a timeout in seconds.`,
execute: async ( parameters: bashSchema,
_toolCallId: string, execute: async (
{ command, timeout }: { command: string; timeout?: number }, _toolCallId: string,
signal?: AbortSignal, { command, timeout }: { command: string; timeout?: number },
onUpdate?, signal?: AbortSignal,
) => { onUpdate?,
return new Promise((resolve, reject) => { ) => {
const { shell, args } = getShellConfig(); return new Promise((resolve, reject) => {
const child = spawn(shell, [...args, command], { const { shell, args } = getShellConfig();
detached: true, const child = spawn(shell, [...args, command], {
stdio: ["ignore", "pipe", "pipe"], cwd,
}); detached: true,
stdio: ["ignore", "pipe", "pipe"],
});
// We'll stream to a temp file if output gets large // We'll stream to a temp file if output gets large
let tempFilePath: string | undefined; let tempFilePath: string | undefined;
let tempFileStream: ReturnType<typeof createWriteStream> | undefined; let tempFileStream: ReturnType<typeof createWriteStream> | undefined;
let totalBytes = 0; let totalBytes = 0;
// Keep a rolling buffer of the last chunk for tail truncation // Keep a rolling buffer of the last chunk for tail truncation
const chunks: Buffer[] = []; const chunks: Buffer[] = [];
let chunksBytes = 0; let chunksBytes = 0;
// Keep more than we need so we have enough for truncation // Keep more than we need so we have enough for truncation
const maxChunksBytes = DEFAULT_MAX_BYTES * 2; const maxChunksBytes = DEFAULT_MAX_BYTES * 2;
let timedOut = false; let timedOut = false;
// Set timeout if provided // Set timeout if provided
let timeoutHandle: NodeJS.Timeout | undefined; let timeoutHandle: NodeJS.Timeout | undefined;
if (timeout !== undefined && timeout > 0) { if (timeout !== undefined && timeout > 0) {
timeoutHandle = setTimeout(() => { timeoutHandle = setTimeout(() => {
timedOut = true; timedOut = true;
onAbort(); onAbort();
}, timeout * 1000); }, timeout * 1000);
} }
const handleData = (data: Buffer) => { const handleData = (data: Buffer) => {
totalBytes += data.length; totalBytes += data.length;
// Start writing to temp file once we exceed the threshold // Start writing to temp file once we exceed the threshold
if (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) { if (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {
tempFilePath = getTempFilePath(); tempFilePath = getTempFilePath();
tempFileStream = createWriteStream(tempFilePath); tempFileStream = createWriteStream(tempFilePath);
// Write all buffered chunks to the file // Write all buffered chunks to the file
for (const chunk of chunks) { for (const chunk of chunks) {
tempFileStream.write(chunk); tempFileStream.write(chunk);
}
} }
// Write to temp file if we have one
if (tempFileStream) {
tempFileStream.write(data);
}
// Keep rolling buffer of recent data
chunks.push(data);
chunksBytes += data.length;
// Trim old chunks if buffer is too large
while (chunksBytes > maxChunksBytes && chunks.length > 1) {
const removed = chunks.shift()!;
chunksBytes -= removed.length;
}
// Stream partial output to callback (truncated rolling buffer)
if (onUpdate) {
const fullBuffer = Buffer.concat(chunks);
const fullText = fullBuffer.toString("utf-8");
const truncation = truncateTail(fullText);
onUpdate({
content: [{ type: "text", text: truncation.content || "" }],
details: {
truncation: truncation.truncated ? truncation : undefined,
fullOutputPath: tempFilePath,
},
});
}
};
// Collect stdout and stderr together
if (child.stdout) {
child.stdout.on("data", handleData);
}
if (child.stderr) {
child.stderr.on("data", handleData);
} }
// Write to temp file if we have one // Handle process exit
if (tempFileStream) { child.on("close", (code) => {
tempFileStream.write(data); if (timeoutHandle) {
} clearTimeout(timeoutHandle);
}
if (signal) {
signal.removeEventListener("abort", onAbort);
}
// Keep rolling buffer of recent data // Close temp file stream
chunks.push(data); if (tempFileStream) {
chunksBytes += data.length; tempFileStream.end();
}
// Trim old chunks if buffer is too large // Combine all buffered chunks
while (chunksBytes > maxChunksBytes && chunks.length > 1) {
const removed = chunks.shift()!;
chunksBytes -= removed.length;
}
// Stream partial output to callback (truncated rolling buffer)
if (onUpdate) {
const fullBuffer = Buffer.concat(chunks); const fullBuffer = Buffer.concat(chunks);
const fullText = fullBuffer.toString("utf-8"); const fullOutput = fullBuffer.toString("utf-8");
const truncation = truncateTail(fullText);
onUpdate({ if (signal?.aborted) {
content: [{ type: "text", text: truncation.content || "" }], let output = fullOutput;
details: { if (output) output += "\n\n";
truncation: truncation.truncated ? truncation : undefined, output += "Command aborted";
reject(new Error(output));
return;
}
if (timedOut) {
let output = fullOutput;
if (output) output += "\n\n";
output += `Command timed out after ${timeout} seconds`;
reject(new Error(output));
return;
}
// Apply tail truncation
const truncation = truncateTail(fullOutput);
let outputText = truncation.content || "(no output)";
// Build details with truncation info
let details: BashToolDetails | undefined;
if (truncation.truncated) {
details = {
truncation,
fullOutputPath: tempFilePath, fullOutputPath: tempFilePath,
}, };
});
}
};
// Collect stdout and stderr together // Build actionable notice
if (child.stdout) { const startLine = truncation.totalLines - truncation.outputLines + 1;
child.stdout.on("data", handleData); const endLine = truncation.totalLines;
}
if (child.stderr) {
child.stderr.on("data", handleData);
}
// Handle process exit if (truncation.lastLinePartial) {
child.on("close", (code) => { // Edge case: last line alone > 30KB
if (timeoutHandle) { const lastLineSize = formatSize(Buffer.byteLength(fullOutput.split("\n").pop() || "", "utf-8"));
clearTimeout(timeoutHandle); outputText += `\n\n[Showing last ${formatSize(truncation.outputBytes)} of line ${endLine} (line is ${lastLineSize}). Full output: ${tempFilePath}]`;
} } else if (truncation.truncatedBy === "lines") {
if (signal) { outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines}. Full output: ${tempFilePath}]`;
signal.removeEventListener("abort", onAbort); } else {
} outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Full output: ${tempFilePath}]`;
}
}
// Close temp file stream if (code !== 0 && code !== null) {
if (tempFileStream) { outputText += `\n\nCommand exited with code ${code}`;
tempFileStream.end(); reject(new Error(outputText));
}
// Combine all buffered chunks
const fullBuffer = Buffer.concat(chunks);
const fullOutput = fullBuffer.toString("utf-8");
if (signal?.aborted) {
let output = fullOutput;
if (output) output += "\n\n";
output += "Command aborted";
reject(new Error(output));
return;
}
if (timedOut) {
let output = fullOutput;
if (output) output += "\n\n";
output += `Command timed out after ${timeout} seconds`;
reject(new Error(output));
return;
}
// Apply tail truncation
const truncation = truncateTail(fullOutput);
let outputText = truncation.content || "(no output)";
// Build details with truncation info
let details: BashToolDetails | undefined;
if (truncation.truncated) {
details = {
truncation,
fullOutputPath: tempFilePath,
};
// Build actionable notice
const startLine = truncation.totalLines - truncation.outputLines + 1;
const endLine = truncation.totalLines;
if (truncation.lastLinePartial) {
// Edge case: last line alone > 30KB
const lastLineSize = formatSize(Buffer.byteLength(fullOutput.split("\n").pop() || "", "utf-8"));
outputText += `\n\n[Showing last ${formatSize(truncation.outputBytes)} of line ${endLine} (line is ${lastLineSize}). Full output: ${tempFilePath}]`;
} else if (truncation.truncatedBy === "lines") {
outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines}. Full output: ${tempFilePath}]`;
} else { } else {
outputText += `\n\n[Showing lines ${startLine}-${endLine} of ${truncation.totalLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Full output: ${tempFilePath}]`; resolve({ content: [{ type: "text", text: outputText }], details });
}
});
// Handle abort signal - kill entire process tree
const onAbort = () => {
if (child.pid) {
killProcessTree(child.pid);
}
};
if (signal) {
if (signal.aborted) {
onAbort();
} else {
signal.addEventListener("abort", onAbort, { once: true });
} }
} }
if (code !== 0 && code !== null) {
outputText += `\n\nCommand exited with code ${code}`;
reject(new Error(outputText));
} else {
resolve({ content: [{ type: "text", text: outputText }], details });
}
}); });
},
};
}
// Handle abort signal - kill entire process tree /** Default bash tool using process.cwd() - for backwards compatibility */
const onAbort = () => { export const bashTool = createBashTool(process.cwd());
if (child.pid) {
killProcessTree(child.pid);
}
};
if (signal) {
if (signal.aborted) {
onAbort();
} else {
signal.addEventListener("abort", onAbort, { once: true });
}
}
});
},
};

View file

@@ -3,8 +3,7 @@ import { Type } from "@sinclair/typebox";
import * as Diff from "diff"; import * as Diff from "diff";
import { constants } from "fs"; import { constants } from "fs";
import { access, readFile, writeFile } from "fs/promises"; import { access, readFile, writeFile } from "fs/promises";
import { resolve as resolvePath } from "path"; import { resolveToCwd } from "./path-utils.js";
import { expandPath } from "./path-utils.js";
/** /**
* Generate a unified diff string with line numbers and context * Generate a unified diff string with line numbers and context
@@ -107,151 +106,156 @@ const editSchema = Type.Object({
newText: Type.String({ description: "New text to replace the old text with" }), newText: Type.String({ description: "New text to replace the old text with" }),
}); });
export const editTool: AgentTool<typeof editSchema> = { export function createEditTool(cwd: string): AgentTool<typeof editSchema> {
name: "edit", return {
label: "edit", name: "edit",
description: label: "edit",
"Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.", description:
parameters: editSchema, "Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
execute: async ( parameters: editSchema,
_toolCallId: string, execute: async (
{ path, oldText, newText }: { path: string; oldText: string; newText: string }, _toolCallId: string,
signal?: AbortSignal, { path, oldText, newText }: { path: string; oldText: string; newText: string },
) => { signal?: AbortSignal,
const absolutePath = resolvePath(expandPath(path)); ) => {
const absolutePath = resolveToCwd(path, cwd);
return new Promise<{ return new Promise<{
content: Array<{ type: "text"; text: string }>; content: Array<{ type: "text"; text: string }>;
details: { diff: string } | undefined; details: { diff: string } | undefined;
}>((resolve, reject) => { }>((resolve, reject) => {
// Check if already aborted // Check if already aborted
if (signal?.aborted) { if (signal?.aborted) {
reject(new Error("Operation aborted")); reject(new Error("Operation aborted"));
return; return;
}
let aborted = false;
// Set up abort handler
const onAbort = () => {
aborted = true;
reject(new Error("Operation aborted"));
};
if (signal) {
signal.addEventListener("abort", onAbort, { once: true });
}
// Perform the edit operation
(async () => {
try {
// Check if file exists
try {
await access(absolutePath, constants.R_OK | constants.W_OK);
} catch {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(new Error(`File not found: ${path}`));
return;
}
// Check if aborted before reading
if (aborted) {
return;
}
// Read the file
const content = await readFile(absolutePath, "utf-8");
// Check if aborted after reading
if (aborted) {
return;
}
// Check if old text exists
if (!content.includes(oldText)) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
),
);
return;
}
// Count occurrences
const occurrences = content.split(oldText).length - 1;
if (occurrences > 1) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
),
);
return;
}
// Check if aborted before writing
if (aborted) {
return;
}
// Perform replacement using indexOf + substring (raw string replace, no special character interpretation)
// String.replace() interprets $ in the replacement string, so we do manual replacement
const index = content.indexOf(oldText);
const newContent = content.substring(0, index) + newText + content.substring(index + oldText.length);
// Verify the replacement actually changed something
if (content === newContent) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`,
),
);
return;
}
await writeFile(absolutePath, newContent, "utf-8");
// Check if aborted after writing
if (aborted) {
return;
}
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve({
content: [
{
type: "text",
text: `Successfully replaced text in ${path}. Changed ${oldText.length} characters to ${newText.length} characters.`,
},
],
details: { diff: generateDiffString(content, newContent) },
});
} catch (error: any) {
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) {
reject(error);
}
} }
})();
}); let aborted = false;
},
}; // Set up abort handler
const onAbort = () => {
aborted = true;
reject(new Error("Operation aborted"));
};
if (signal) {
signal.addEventListener("abort", onAbort, { once: true });
}
// Perform the edit operation
(async () => {
try {
// Check if file exists
try {
await access(absolutePath, constants.R_OK | constants.W_OK);
} catch {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(new Error(`File not found: ${path}`));
return;
}
// Check if aborted before reading
if (aborted) {
return;
}
// Read the file
const content = await readFile(absolutePath, "utf-8");
// Check if aborted after reading
if (aborted) {
return;
}
// Check if old text exists
if (!content.includes(oldText)) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
),
);
return;
}
// Count occurrences
const occurrences = content.split(oldText).length - 1;
if (occurrences > 1) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
),
);
return;
}
// Check if aborted before writing
if (aborted) {
return;
}
// Perform replacement using indexOf + substring (raw string replace, no special character interpretation)
// String.replace() interprets $ in the replacement string, so we do manual replacement
const index = content.indexOf(oldText);
const newContent = content.substring(0, index) + newText + content.substring(index + oldText.length);
// Verify the replacement actually changed something
if (content === newContent) {
if (signal) {
signal.removeEventListener("abort", onAbort);
}
reject(
new Error(
`No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`,
),
);
return;
}
await writeFile(absolutePath, newContent, "utf-8");
// Check if aborted after writing
if (aborted) {
return;
}
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve({
content: [
{
type: "text",
text: `Successfully replaced text in ${path}. Changed ${oldText.length} characters to ${newText.length} characters.`,
},
],
details: { diff: generateDiffString(content, newContent) },
});
} catch (error: any) {
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) {
reject(error);
}
}
})();
});
},
};
}
/** Default edit tool using process.cwd() - for backwards compatibility */
export const editTool = createEditTool(process.cwd());

View file

@@ -5,7 +5,7 @@ import { existsSync } from "fs";
import { globSync } from "glob"; import { globSync } from "glob";
import path from "path"; import path from "path";
import { ensureTool } from "../../utils/tools-manager.js"; import { ensureTool } from "../../utils/tools-manager.js";
import { expandPath } from "./path-utils.js"; import { resolveToCwd } from "./path-utils.js";
import { DEFAULT_MAX_BYTES, formatSize, type TruncationResult, truncateHead } from "./truncate.js"; import { DEFAULT_MAX_BYTES, formatSize, type TruncationResult, truncateHead } from "./truncate.js";
const findSchema = Type.Object({ const findSchema = Type.Object({
@@ -23,168 +23,173 @@ export interface FindToolDetails {
resultLimitReached?: number; resultLimitReached?: number;
} }
export const findTool: AgentTool<typeof findSchema> = { export function createFindTool(cwd: string): AgentTool<typeof findSchema> {
name: "find", return {
label: "find", name: "find",
description: `Search for files by glob pattern. Returns matching file paths relative to the search directory. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} results or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`, label: "find",
parameters: findSchema, description: `Search for files by glob pattern. Returns matching file paths relative to the search directory. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} results or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`,
execute: async ( parameters: findSchema,
_toolCallId: string, execute: async (
{ pattern, path: searchDir, limit }: { pattern: string; path?: string; limit?: number }, _toolCallId: string,
signal?: AbortSignal, { pattern, path: searchDir, limit }: { pattern: string; path?: string; limit?: number },
) => { signal?: AbortSignal,
return new Promise((resolve, reject) => { ) => {
if (signal?.aborted) { return new Promise((resolve, reject) => {
reject(new Error("Operation aborted")); if (signal?.aborted) {
return; reject(new Error("Operation aborted"));
} return;
}
const onAbort = () => reject(new Error("Operation aborted")); const onAbort = () => reject(new Error("Operation aborted"));
signal?.addEventListener("abort", onAbort, { once: true }); signal?.addEventListener("abort", onAbort, { once: true });
(async () => {
try {
// Ensure fd is available
const fdPath = await ensureTool("fd", true);
if (!fdPath) {
reject(new Error("fd is not available and could not be downloaded"));
return;
}
const searchPath = path.resolve(expandPath(searchDir || "."));
const effectiveLimit = limit ?? DEFAULT_LIMIT;
// Build fd arguments
const args: string[] = [
"--glob", // Use glob pattern
"--color=never", // No ANSI colors
"--hidden", // Search hidden files (but still respect .gitignore)
"--max-results",
String(effectiveLimit),
];
// Include .gitignore files (root + nested) so fd respects them even outside git repos
const gitignoreFiles = new Set<string>();
const rootGitignore = path.join(searchPath, ".gitignore");
if (existsSync(rootGitignore)) {
gitignoreFiles.add(rootGitignore);
}
(async () => {
try { try {
const nestedGitignores = globSync("**/.gitignore", { // Ensure fd is available
cwd: searchPath, const fdPath = await ensureTool("fd", true);
dot: true, if (!fdPath) {
absolute: true, reject(new Error("fd is not available and could not be downloaded"));
ignore: ["**/node_modules/**", "**/.git/**"],
});
for (const file of nestedGitignores) {
gitignoreFiles.add(file);
}
} catch {
// Ignore glob errors
}
for (const gitignorePath of gitignoreFiles) {
args.push("--ignore-file", gitignorePath);
}
// Pattern and path
args.push(pattern, searchPath);
// Run fd
const result = spawnSync(fdPath, args, {
encoding: "utf-8",
maxBuffer: 10 * 1024 * 1024, // 10MB
});
signal?.removeEventListener("abort", onAbort);
if (result.error) {
reject(new Error(`Failed to run fd: ${result.error.message}`));
return;
}
const output = result.stdout?.trim() || "";
if (result.status !== 0) {
const errorMsg = result.stderr?.trim() || `fd exited with code ${result.status}`;
// fd returns non-zero for some errors but may still have partial output
if (!output) {
reject(new Error(errorMsg));
return; return;
} }
}
if (!output) { const searchPath = resolveToCwd(searchDir || ".", cwd);
resolve({ const effectiveLimit = limit ?? DEFAULT_LIMIT;
content: [{ type: "text", text: "No files found matching pattern" }],
details: undefined, // Build fd arguments
const args: string[] = [
"--glob", // Use glob pattern
"--color=never", // No ANSI colors
"--hidden", // Search hidden files (but still respect .gitignore)
"--max-results",
String(effectiveLimit),
];
// Include .gitignore files (root + nested) so fd respects them even outside git repos
const gitignoreFiles = new Set<string>();
const rootGitignore = path.join(searchPath, ".gitignore");
if (existsSync(rootGitignore)) {
gitignoreFiles.add(rootGitignore);
}
try {
const nestedGitignores = globSync("**/.gitignore", {
cwd: searchPath,
dot: true,
absolute: true,
ignore: ["**/node_modules/**", "**/.git/**"],
});
for (const file of nestedGitignores) {
gitignoreFiles.add(file);
}
} catch {
// Ignore glob errors
}
for (const gitignorePath of gitignoreFiles) {
args.push("--ignore-file", gitignorePath);
}
// Pattern and path
args.push(pattern, searchPath);
// Run fd
const result = spawnSync(fdPath, args, {
encoding: "utf-8",
maxBuffer: 10 * 1024 * 1024, // 10MB
}); });
return;
}
const lines = output.split("\n"); signal?.removeEventListener("abort", onAbort);
const relativized: string[] = [];
for (const rawLine of lines) { if (result.error) {
const line = rawLine.replace(/\r$/, "").trim(); reject(new Error(`Failed to run fd: ${result.error.message}`));
if (!line) { return;
continue;
} }
const hadTrailingSlash = line.endsWith("/") || line.endsWith("\\"); const output = result.stdout?.trim() || "";
let relativePath = line;
if (line.startsWith(searchPath)) { if (result.status !== 0) {
relativePath = line.slice(searchPath.length + 1); // +1 for the / const errorMsg = result.stderr?.trim() || `fd exited with code ${result.status}`;
} else { // fd returns non-zero for some errors but may still have partial output
relativePath = path.relative(searchPath, line); if (!output) {
reject(new Error(errorMsg));
return;
}
} }
if (hadTrailingSlash && !relativePath.endsWith("/")) { if (!output) {
relativePath += "/"; resolve({
content: [{ type: "text", text: "No files found matching pattern" }],
details: undefined,
});
return;
} }
relativized.push(relativePath); const lines = output.split("\n");
const relativized: string[] = [];
for (const rawLine of lines) {
const line = rawLine.replace(/\r$/, "").trim();
if (!line) {
continue;
}
const hadTrailingSlash = line.endsWith("/") || line.endsWith("\\");
let relativePath = line;
if (line.startsWith(searchPath)) {
relativePath = line.slice(searchPath.length + 1); // +1 for the /
} else {
relativePath = path.relative(searchPath, line);
}
if (hadTrailingSlash && !relativePath.endsWith("/")) {
relativePath += "/";
}
relativized.push(relativePath);
}
// Check if we hit the result limit
const resultLimitReached = relativized.length >= effectiveLimit;
// Apply byte truncation (no line limit since we already have result limit)
const rawOutput = relativized.join("\n");
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let resultOutput = truncation.content;
const details: FindToolDetails = {};
// Build notices
const notices: string[] = [];
if (resultLimitReached) {
notices.push(
`${effectiveLimit} results limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
);
details.resultLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (notices.length > 0) {
resultOutput += `\n\n[${notices.join(". ")}]`;
}
resolve({
content: [{ type: "text", text: resultOutput }],
details: Object.keys(details).length > 0 ? details : undefined,
});
} catch (e: any) {
signal?.removeEventListener("abort", onAbort);
reject(e);
} }
})();
});
},
};
}
// Check if we hit the result limit /** Default find tool using process.cwd() - for backwards compatibility */
const resultLimitReached = relativized.length >= effectiveLimit; export const findTool = createFindTool(process.cwd());
// Apply byte truncation (no line limit since we already have result limit)
const rawOutput = relativized.join("\n");
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let resultOutput = truncation.content;
const details: FindToolDetails = {};
// Build notices
const notices: string[] = [];
if (resultLimitReached) {
notices.push(
`${effectiveLimit} results limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
);
details.resultLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (notices.length > 0) {
resultOutput += `\n\n[${notices.join(". ")}]`;
}
resolve({
content: [{ type: "text", text: resultOutput }],
details: Object.keys(details).length > 0 ? details : undefined,
});
} catch (e: any) {
signal?.removeEventListener("abort", onAbort);
reject(e);
}
})();
});
},
};

View file

@ -5,7 +5,7 @@ import { spawn } from "child_process";
import { readFileSync, type Stats, statSync } from "fs"; import { readFileSync, type Stats, statSync } from "fs";
import path from "path"; import path from "path";
import { ensureTool } from "../../utils/tools-manager.js"; import { ensureTool } from "../../utils/tools-manager.js";
import { expandPath } from "./path-utils.js"; import { resolveToCwd } from "./path-utils.js";
import { import {
DEFAULT_MAX_BYTES, DEFAULT_MAX_BYTES,
formatSize, formatSize,
@ -37,271 +37,276 @@ export interface GrepToolDetails {
linesTruncated?: boolean; linesTruncated?: boolean;
} }
export const grepTool: AgentTool<typeof grepSchema> = { export function createGrepTool(cwd: string): AgentTool<typeof grepSchema> {
name: "grep", return {
label: "grep", name: "grep",
description: `Search file contents for a pattern. Returns matching lines with file paths and line numbers. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} matches or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Long lines are truncated to ${GREP_MAX_LINE_LENGTH} chars.`, label: "grep",
parameters: grepSchema, description: `Search file contents for a pattern. Returns matching lines with file paths and line numbers. Respects .gitignore. Output is truncated to ${DEFAULT_LIMIT} matches or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Long lines are truncated to ${GREP_MAX_LINE_LENGTH} chars.`,
execute: async ( parameters: grepSchema,
_toolCallId: string, execute: async (
{ _toolCallId: string,
pattern, {
path: searchDir, pattern,
glob, path: searchDir,
ignoreCase, glob,
literal, ignoreCase,
context, literal,
limit, context,
}: { limit,
pattern: string; }: {
path?: string; pattern: string;
glob?: string; path?: string;
ignoreCase?: boolean; glob?: string;
literal?: boolean; ignoreCase?: boolean;
context?: number; literal?: boolean;
limit?: number; context?: number;
}, limit?: number;
signal?: AbortSignal, },
) => { signal?: AbortSignal,
return new Promise((resolve, reject) => { ) => {
if (signal?.aborted) { return new Promise((resolve, reject) => {
reject(new Error("Operation aborted")); if (signal?.aborted) {
return; reject(new Error("Operation aborted"));
} return;
let settled = false;
const settle = (fn: () => void) => {
if (!settled) {
settled = true;
fn();
} }
};
(async () => { let settled = false;
try { const settle = (fn: () => void) => {
const rgPath = await ensureTool("rg", true); if (!settled) {
if (!rgPath) { settled = true;
settle(() => reject(new Error("ripgrep (rg) is not available and could not be downloaded"))); fn();
return;
} }
};
const searchPath = path.resolve(expandPath(searchDir || ".")); (async () => {
let searchStat: Stats;
try { try {
searchStat = statSync(searchPath); const rgPath = await ensureTool("rg", true);
} catch (_err) { if (!rgPath) {
settle(() => reject(new Error(`Path not found: ${searchPath}`))); settle(() => reject(new Error("ripgrep (rg) is not available and could not be downloaded")));
return;
}
const isDirectory = searchStat.isDirectory();
const contextValue = context && context > 0 ? context : 0;
const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT);
const formatPath = (filePath: string): string => {
if (isDirectory) {
const relative = path.relative(searchPath, filePath);
if (relative && !relative.startsWith("..")) {
return relative.replace(/\\/g, "/");
}
}
return path.basename(filePath);
};
const fileCache = new Map<string, string[]>();
const getFileLines = (filePath: string): string[] => {
let lines = fileCache.get(filePath);
if (!lines) {
try {
const content = readFileSync(filePath, "utf-8");
lines = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n").split("\n");
} catch {
lines = [];
}
fileCache.set(filePath, lines);
}
return lines;
};
const args: string[] = ["--json", "--line-number", "--color=never", "--hidden"];
if (ignoreCase) {
args.push("--ignore-case");
}
if (literal) {
args.push("--fixed-strings");
}
if (glob) {
args.push("--glob", glob);
}
args.push(pattern, searchPath);
const child = spawn(rgPath, args, { stdio: ["ignore", "pipe", "pipe"] });
const rl = createInterface({ input: child.stdout });
let stderr = "";
let matchCount = 0;
let matchLimitReached = false;
let linesTruncated = false;
let aborted = false;
let killedDueToLimit = false;
const outputLines: string[] = [];
const cleanup = () => {
rl.close();
signal?.removeEventListener("abort", onAbort);
};
const stopChild = (dueToLimit: boolean = false) => {
if (!child.killed) {
killedDueToLimit = dueToLimit;
child.kill();
}
};
const onAbort = () => {
aborted = true;
stopChild();
};
signal?.addEventListener("abort", onAbort, { once: true });
child.stderr?.on("data", (chunk) => {
stderr += chunk.toString();
});
const formatBlock = (filePath: string, lineNumber: number): string[] => {
const relativePath = formatPath(filePath);
const lines = getFileLines(filePath);
if (!lines.length) {
return [`${relativePath}:${lineNumber}: (unable to read file)`];
}
const block: string[] = [];
const start = contextValue > 0 ? Math.max(1, lineNumber - contextValue) : lineNumber;
const end = contextValue > 0 ? Math.min(lines.length, lineNumber + contextValue) : lineNumber;
for (let current = start; current <= end; current++) {
const lineText = lines[current - 1] ?? "";
const sanitized = lineText.replace(/\r/g, "");
const isMatchLine = current === lineNumber;
// Truncate long lines
const { text: truncatedText, wasTruncated } = truncateLine(sanitized);
if (wasTruncated) {
linesTruncated = true;
}
if (isMatchLine) {
block.push(`${relativePath}:${current}: ${truncatedText}`);
} else {
block.push(`${relativePath}-${current}- ${truncatedText}`);
}
}
return block;
};
rl.on("line", (line) => {
if (!line.trim() || matchCount >= effectiveLimit) {
return; return;
} }
let event: any; const searchPath = resolveToCwd(searchDir || ".", cwd);
let searchStat: Stats;
try { try {
event = JSON.parse(line); searchStat = statSync(searchPath);
} catch { } catch (_err) {
settle(() => reject(new Error(`Path not found: ${searchPath}`)));
return; return;
} }
if (event.type === "match") { const isDirectory = searchStat.isDirectory();
matchCount++; const contextValue = context && context > 0 ? context : 0;
const filePath = event.data?.path?.text; const effectiveLimit = Math.max(1, limit ?? DEFAULT_LIMIT);
const lineNumber = event.data?.line_number;
if (filePath && typeof lineNumber === "number") { const formatPath = (filePath: string): string => {
outputLines.push(...formatBlock(filePath, lineNumber)); if (isDirectory) {
const relative = path.relative(searchPath, filePath);
if (relative && !relative.startsWith("..")) {
return relative.replace(/\\/g, "/");
}
}
return path.basename(filePath);
};
const fileCache = new Map<string, string[]>();
const getFileLines = (filePath: string): string[] => {
let lines = fileCache.get(filePath);
if (!lines) {
try {
const content = readFileSync(filePath, "utf-8");
lines = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n").split("\n");
} catch {
lines = [];
}
fileCache.set(filePath, lines);
}
return lines;
};
const args: string[] = ["--json", "--line-number", "--color=never", "--hidden"];
if (ignoreCase) {
args.push("--ignore-case");
}
if (literal) {
args.push("--fixed-strings");
}
if (glob) {
args.push("--glob", glob);
}
args.push(pattern, searchPath);
const child = spawn(rgPath, args, { stdio: ["ignore", "pipe", "pipe"] });
const rl = createInterface({ input: child.stdout });
let stderr = "";
let matchCount = 0;
let matchLimitReached = false;
let linesTruncated = false;
let aborted = false;
let killedDueToLimit = false;
const outputLines: string[] = [];
const cleanup = () => {
rl.close();
signal?.removeEventListener("abort", onAbort);
};
const stopChild = (dueToLimit: boolean = false) => {
if (!child.killed) {
killedDueToLimit = dueToLimit;
child.kill();
}
};
const onAbort = () => {
aborted = true;
stopChild();
};
signal?.addEventListener("abort", onAbort, { once: true });
child.stderr?.on("data", (chunk) => {
stderr += chunk.toString();
});
const formatBlock = (filePath: string, lineNumber: number): string[] => {
const relativePath = formatPath(filePath);
const lines = getFileLines(filePath);
if (!lines.length) {
return [`${relativePath}:${lineNumber}: (unable to read file)`];
} }
if (matchCount >= effectiveLimit) { const block: string[] = [];
matchLimitReached = true; const start = contextValue > 0 ? Math.max(1, lineNumber - contextValue) : lineNumber;
stopChild(true); const end = contextValue > 0 ? Math.min(lines.length, lineNumber + contextValue) : lineNumber;
for (let current = start; current <= end; current++) {
const lineText = lines[current - 1] ?? "";
const sanitized = lineText.replace(/\r/g, "");
const isMatchLine = current === lineNumber;
// Truncate long lines
const { text: truncatedText, wasTruncated } = truncateLine(sanitized);
if (wasTruncated) {
linesTruncated = true;
}
if (isMatchLine) {
block.push(`${relativePath}:${current}: ${truncatedText}`);
} else {
block.push(`${relativePath}-${current}- ${truncatedText}`);
}
} }
}
});
child.on("error", (error) => { return block;
cleanup(); };
settle(() => reject(new Error(`Failed to run ripgrep: ${error.message}`)));
});
child.on("close", (code) => { rl.on("line", (line) => {
cleanup(); if (!line.trim() || matchCount >= effectiveLimit) {
return;
}
if (aborted) { let event: any;
settle(() => reject(new Error("Operation aborted"))); try {
return; event = JSON.parse(line);
} } catch {
return;
}
if (!killedDueToLimit && code !== 0 && code !== 1) { if (event.type === "match") {
const errorMsg = stderr.trim() || `ripgrep exited with code ${code}`; matchCount++;
settle(() => reject(new Error(errorMsg))); const filePath = event.data?.path?.text;
return; const lineNumber = event.data?.line_number;
}
if (filePath && typeof lineNumber === "number") {
outputLines.push(...formatBlock(filePath, lineNumber));
}
if (matchCount >= effectiveLimit) {
matchLimitReached = true;
stopChild(true);
}
}
});
child.on("error", (error) => {
cleanup();
settle(() => reject(new Error(`Failed to run ripgrep: ${error.message}`)));
});
child.on("close", (code) => {
cleanup();
if (aborted) {
settle(() => reject(new Error("Operation aborted")));
return;
}
if (!killedDueToLimit && code !== 0 && code !== 1) {
const errorMsg = stderr.trim() || `ripgrep exited with code ${code}`;
settle(() => reject(new Error(errorMsg)));
return;
}
if (matchCount === 0) {
settle(() =>
resolve({ content: [{ type: "text", text: "No matches found" }], details: undefined }),
);
return;
}
// Apply byte truncation (no line limit since we already have match limit)
const rawOutput = outputLines.join("\n");
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let output = truncation.content;
const details: GrepToolDetails = {};
// Build notices
const notices: string[] = [];
if (matchLimitReached) {
notices.push(
`${effectiveLimit} matches limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
);
details.matchLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (linesTruncated) {
notices.push(
`Some lines truncated to ${GREP_MAX_LINE_LENGTH} chars. Use read tool to see full lines`,
);
details.linesTruncated = true;
}
if (notices.length > 0) {
output += `\n\n[${notices.join(". ")}]`;
}
if (matchCount === 0) {
settle(() => settle(() =>
resolve({ content: [{ type: "text", text: "No matches found" }], details: undefined }), resolve({
content: [{ type: "text", text: output }],
details: Object.keys(details).length > 0 ? details : undefined,
}),
); );
return; });
} } catch (err) {
settle(() => reject(err as Error));
}
})();
});
},
};
}
// Apply byte truncation (no line limit since we already have match limit) /** Default grep tool using process.cwd() - for backwards compatibility */
const rawOutput = outputLines.join("\n"); export const grepTool = createGrepTool(process.cwd());
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let output = truncation.content;
const details: GrepToolDetails = {};
// Build notices
const notices: string[] = [];
if (matchLimitReached) {
notices.push(
`${effectiveLimit} matches limit reached. Use limit=${effectiveLimit * 2} for more, or refine pattern`,
);
details.matchLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (linesTruncated) {
notices.push(
`Some lines truncated to ${GREP_MAX_LINE_LENGTH} chars. Use read tool to see full lines`,
);
details.linesTruncated = true;
}
if (notices.length > 0) {
output += `\n\n[${notices.join(". ")}]`;
}
settle(() =>
resolve({
content: [{ type: "text", text: output }],
details: Object.keys(details).length > 0 ? details : undefined,
}),
);
});
} catch (err) {
settle(() => reject(err as Error));
}
})();
});
},
};

View file

@ -1,32 +1,32 @@
import type { AgentTool } from "@mariozechner/pi-ai"; import type { AgentTool } from "@mariozechner/pi-ai";
export { type BashToolDetails, bashTool } from "./bash.js"; export { type BashToolDetails, bashTool, createBashTool } from "./bash.js";
export { editTool } from "./edit.js"; export { createEditTool, editTool } from "./edit.js";
export { type FindToolDetails, findTool } from "./find.js"; export { createFindTool, type FindToolDetails, findTool } from "./find.js";
export { type GrepToolDetails, grepTool } from "./grep.js"; export { createGrepTool, type GrepToolDetails, grepTool } from "./grep.js";
export { type LsToolDetails, lsTool } from "./ls.js"; export { createLsTool, type LsToolDetails, lsTool } from "./ls.js";
export { type ReadToolDetails, readTool } from "./read.js"; export { createReadTool, type ReadToolDetails, readTool } from "./read.js";
export type { TruncationResult } from "./truncate.js"; export type { TruncationResult } from "./truncate.js";
export { writeTool } from "./write.js"; export { createWriteTool, writeTool } from "./write.js";
import { bashTool } from "./bash.js"; import { bashTool, createBashTool } from "./bash.js";
import { editTool } from "./edit.js"; import { createEditTool, editTool } from "./edit.js";
import { findTool } from "./find.js"; import { createFindTool, findTool } from "./find.js";
import { grepTool } from "./grep.js"; import { createGrepTool, grepTool } from "./grep.js";
import { lsTool } from "./ls.js"; import { createLsTool, lsTool } from "./ls.js";
import { readTool } from "./read.js"; import { createReadTool, readTool } from "./read.js";
import { writeTool } from "./write.js"; import { createWriteTool, writeTool } from "./write.js";
/** Tool type (AgentTool from pi-ai) */ /** Tool type (AgentTool from pi-ai) */
export type Tool = AgentTool<any>; export type Tool = AgentTool<any>;
// Default tools for full access mode // Default tools for full access mode (using process.cwd())
export const codingTools: Tool[] = [readTool, bashTool, editTool, writeTool]; export const codingTools: Tool[] = [readTool, bashTool, editTool, writeTool];
// Read-only tools for exploration without modification // Read-only tools for exploration without modification (using process.cwd())
export const readOnlyTools: Tool[] = [readTool, grepTool, findTool, lsTool]; export const readOnlyTools: Tool[] = [readTool, grepTool, findTool, lsTool];
// All available tools (including read-only exploration tools) // All available tools (using process.cwd())
export const allTools = { export const allTools = {
read: readTool, read: readTool,
bash: bashTool, bash: bashTool,
@ -38,3 +38,32 @@ export const allTools = {
}; };
export type ToolName = keyof typeof allTools; export type ToolName = keyof typeof allTools;
/**
* Create coding tools configured for a specific working directory.
*/
export function createCodingTools(cwd: string): Tool[] {
return [createReadTool(cwd), createBashTool(cwd), createEditTool(cwd), createWriteTool(cwd)];
}
/**
* Create read-only tools configured for a specific working directory.
*/
export function createReadOnlyTools(cwd: string): Tool[] {
return [createReadTool(cwd), createGrepTool(cwd), createFindTool(cwd), createLsTool(cwd)];
}
/**
* Create all tools configured for a specific working directory.
*/
export function createAllTools(cwd: string): Record<ToolName, Tool> {
return {
read: createReadTool(cwd),
bash: createBashTool(cwd),
edit: createEditTool(cwd),
write: createWriteTool(cwd),
grep: createGrepTool(cwd),
find: createFindTool(cwd),
ls: createLsTool(cwd),
};
}

View file

@ -2,7 +2,7 @@ import type { AgentTool } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
import { existsSync, readdirSync, statSync } from "fs"; import { existsSync, readdirSync, statSync } from "fs";
import nodePath from "path"; import nodePath from "path";
import { expandPath } from "./path-utils.js"; import { resolveToCwd } from "./path-utils.js";
import { DEFAULT_MAX_BYTES, formatSize, type TruncationResult, truncateHead } from "./truncate.js"; import { DEFAULT_MAX_BYTES, formatSize, type TruncationResult, truncateHead } from "./truncate.js";
const lsSchema = Type.Object({ const lsSchema = Type.Object({
@ -17,115 +17,124 @@ export interface LsToolDetails {
entryLimitReached?: number; entryLimitReached?: number;
} }
export const lsTool: AgentTool<typeof lsSchema> = { export function createLsTool(cwd: string): AgentTool<typeof lsSchema> {
name: "ls", return {
label: "ls", name: "ls",
description: `List directory contents. Returns entries sorted alphabetically, with '/' suffix for directories. Includes dotfiles. Output is truncated to ${DEFAULT_LIMIT} entries or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`, label: "ls",
parameters: lsSchema, description: `List directory contents. Returns entries sorted alphabetically, with '/' suffix for directories. Includes dotfiles. Output is truncated to ${DEFAULT_LIMIT} entries or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first).`,
execute: async (_toolCallId: string, { path, limit }: { path?: string; limit?: number }, signal?: AbortSignal) => { parameters: lsSchema,
return new Promise((resolve, reject) => { execute: async (
if (signal?.aborted) { _toolCallId: string,
reject(new Error("Operation aborted")); { path, limit }: { path?: string; limit?: number },
return; signal?: AbortSignal,
} ) => {
return new Promise((resolve, reject) => {
const onAbort = () => reject(new Error("Operation aborted")); if (signal?.aborted) {
signal?.addEventListener("abort", onAbort, { once: true }); reject(new Error("Operation aborted"));
try {
const dirPath = nodePath.resolve(expandPath(path || "."));
const effectiveLimit = limit ?? DEFAULT_LIMIT;
// Check if path exists
if (!existsSync(dirPath)) {
reject(new Error(`Path not found: ${dirPath}`));
return; return;
} }
// Check if path is a directory const onAbort = () => reject(new Error("Operation aborted"));
const stat = statSync(dirPath); signal?.addEventListener("abort", onAbort, { once: true });
if (!stat.isDirectory()) {
reject(new Error(`Not a directory: ${dirPath}`));
return;
}
// Read directory entries
let entries: string[];
try { try {
entries = readdirSync(dirPath); const dirPath = resolveToCwd(path || ".", cwd);
} catch (e: any) { const effectiveLimit = limit ?? DEFAULT_LIMIT;
reject(new Error(`Cannot read directory: ${e.message}`));
return;
}
// Sort alphabetically (case-insensitive) // Check if path exists
entries.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase())); if (!existsSync(dirPath)) {
reject(new Error(`Path not found: ${dirPath}`));
// Format entries with directory indicators return;
const results: string[] = [];
let entryLimitReached = false;
for (const entry of entries) {
if (results.length >= effectiveLimit) {
entryLimitReached = true;
break;
} }
const fullPath = nodePath.join(dirPath, entry); // Check if path is a directory
let suffix = ""; const stat = statSync(dirPath);
if (!stat.isDirectory()) {
reject(new Error(`Not a directory: ${dirPath}`));
return;
}
// Read directory entries
let entries: string[];
try { try {
const entryStat = statSync(fullPath); entries = readdirSync(dirPath);
if (entryStat.isDirectory()) { } catch (e: any) {
suffix = "/"; reject(new Error(`Cannot read directory: ${e.message}`));
} return;
} catch {
// Skip entries we can't stat
continue;
} }
results.push(entry + suffix); // Sort alphabetically (case-insensitive)
entries.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()));
// Format entries with directory indicators
const results: string[] = [];
let entryLimitReached = false;
for (const entry of entries) {
if (results.length >= effectiveLimit) {
entryLimitReached = true;
break;
}
const fullPath = nodePath.join(dirPath, entry);
let suffix = "";
try {
const entryStat = statSync(fullPath);
if (entryStat.isDirectory()) {
suffix = "/";
}
} catch {
// Skip entries we can't stat
continue;
}
results.push(entry + suffix);
}
signal?.removeEventListener("abort", onAbort);
if (results.length === 0) {
resolve({ content: [{ type: "text", text: "(empty directory)" }], details: undefined });
return;
}
// Apply byte truncation (no line limit since we already have entry limit)
const rawOutput = results.join("\n");
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let output = truncation.content;
const details: LsToolDetails = {};
// Build notices
const notices: string[] = [];
if (entryLimitReached) {
notices.push(`${effectiveLimit} entries limit reached. Use limit=${effectiveLimit * 2} for more`);
details.entryLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (notices.length > 0) {
output += `\n\n[${notices.join(". ")}]`;
}
resolve({
content: [{ type: "text", text: output }],
details: Object.keys(details).length > 0 ? details : undefined,
});
} catch (e: any) {
signal?.removeEventListener("abort", onAbort);
reject(e);
} }
});
},
};
}
signal?.removeEventListener("abort", onAbort); /** Default ls tool using process.cwd() - for backwards compatibility */
export const lsTool = createLsTool(process.cwd());
if (results.length === 0) {
resolve({ content: [{ type: "text", text: "(empty directory)" }], details: undefined });
return;
}
// Apply byte truncation (no line limit since we already have entry limit)
const rawOutput = results.join("\n");
const truncation = truncateHead(rawOutput, { maxLines: Number.MAX_SAFE_INTEGER });
let output = truncation.content;
const details: LsToolDetails = {};
// Build notices
const notices: string[] = [];
if (entryLimitReached) {
notices.push(`${effectiveLimit} entries limit reached. Use limit=${effectiveLimit * 2} for more`);
details.entryLimitReached = effectiveLimit;
}
if (truncation.truncated) {
notices.push(`${formatSize(DEFAULT_MAX_BYTES)} limit reached`);
details.truncation = truncation;
}
if (notices.length > 0) {
output += `\n\n[${notices.join(". ")}]`;
}
resolve({
content: [{ type: "text", text: output }],
details: Object.keys(details).length > 0 ? details : undefined,
});
} catch (e: any) {
signal?.removeEventListener("abort", onAbort);
reject(e);
}
});
},
};

View file

@ -1,5 +1,6 @@
import { accessSync, constants } from "node:fs"; import { accessSync, constants } from "node:fs";
import * as os from "node:os"; import * as os from "node:os";
import { isAbsolute, resolve as resolvePath } from "node:path";
const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g; const UNICODE_SPACES = /[\u00A0\u2000-\u200A\u202F\u205F\u3000]/g;
const NARROW_NO_BREAK_SPACE = "\u202F"; const NARROW_NO_BREAK_SPACE = "\u202F";
@ -32,17 +33,29 @@ export function expandPath(filePath: string): string {
return normalized; return normalized;
} }
export function resolveReadPath(filePath: string): string { /**
* Resolve a path relative to the given cwd.
* Handles ~ expansion and absolute paths.
*/
export function resolveToCwd(filePath: string, cwd: string): string {
const expanded = expandPath(filePath); const expanded = expandPath(filePath);
if (isAbsolute(expanded)) {
if (fileExists(expanded)) {
return expanded; return expanded;
} }
return resolvePath(cwd, expanded);
}
const macOSVariant = tryMacOSScreenshotPath(expanded); export function resolveReadPath(filePath: string, cwd: string): string {
if (macOSVariant !== expanded && fileExists(macOSVariant)) { const resolved = resolveToCwd(filePath, cwd);
if (fileExists(resolved)) {
return resolved;
}
const macOSVariant = tryMacOSScreenshotPath(resolved);
if (macOSVariant !== resolved && fileExists(macOSVariant)) {
return macOSVariant; return macOSVariant;
} }
return expanded; return resolved;
} }

View file

@ -2,7 +2,6 @@ import type { AgentTool, ImageContent, TextContent } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
import { constants } from "fs"; import { constants } from "fs";
import { access, readFile } from "fs/promises"; import { access, readFile } from "fs/promises";
import { resolve as resolvePath } from "path";
import { detectSupportedImageMimeTypeFromFile } from "../../utils/mime.js"; import { detectSupportedImageMimeTypeFromFile } from "../../utils/mime.js";
import { resolveReadPath } from "./path-utils.js"; import { resolveReadPath } from "./path-utils.js";
import { DEFAULT_MAX_BYTES, DEFAULT_MAX_LINES, formatSize, type TruncationResult, truncateHead } from "./truncate.js"; import { DEFAULT_MAX_BYTES, DEFAULT_MAX_LINES, formatSize, type TruncationResult, truncateHead } from "./truncate.js";
@ -17,151 +16,156 @@ export interface ReadToolDetails {
truncation?: TruncationResult; truncation?: TruncationResult;
} }
export const readTool: AgentTool<typeof readSchema> = { export function createReadTool(cwd: string): AgentTool<typeof readSchema> {
name: "read", return {
label: "read", name: "read",
description: `Read the contents of a file. Supports text files and images (jpg, png, gif, webp). Images are sent as attachments. For text files, output is truncated to ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Use offset/limit for large files.`, label: "read",
parameters: readSchema, description: `Read the contents of a file. Supports text files and images (jpg, png, gif, webp). Images are sent as attachments. For text files, output is truncated to ${DEFAULT_MAX_LINES} lines or ${DEFAULT_MAX_BYTES / 1024}KB (whichever is hit first). Use offset/limit for large files.`,
execute: async ( parameters: readSchema,
_toolCallId: string, execute: async (
{ path, offset, limit }: { path: string; offset?: number; limit?: number }, _toolCallId: string,
signal?: AbortSignal, { path, offset, limit }: { path: string; offset?: number; limit?: number },
) => { signal?: AbortSignal,
const absolutePath = resolvePath(resolveReadPath(path)); ) => {
const absolutePath = resolveReadPath(path, cwd);
return new Promise<{ content: (TextContent | ImageContent)[]; details: ReadToolDetails | undefined }>( return new Promise<{ content: (TextContent | ImageContent)[]; details: ReadToolDetails | undefined }>(
(resolve, reject) => { (resolve, reject) => {
// Check if already aborted // Check if already aborted
if (signal?.aborted) { if (signal?.aborted) {
reject(new Error("Operation aborted")); reject(new Error("Operation aborted"));
return; return;
}
let aborted = false;
// Set up abort handler
const onAbort = () => {
aborted = true;
reject(new Error("Operation aborted"));
};
if (signal) {
signal.addEventListener("abort", onAbort, { once: true });
}
// Perform the read operation
(async () => {
try {
// Check if file exists
await access(absolutePath, constants.R_OK);
// Check if aborted before reading
if (aborted) {
return;
}
const mimeType = await detectSupportedImageMimeTypeFromFile(absolutePath);
// Read the file based on type
let content: (TextContent | ImageContent)[];
let details: ReadToolDetails | undefined;
if (mimeType) {
// Read as image (binary)
const buffer = await readFile(absolutePath);
const base64 = buffer.toString("base64");
content = [
{ type: "text", text: `Read image file [${mimeType}]` },
{ type: "image", data: base64, mimeType },
];
} else {
// Read as text
const textContent = await readFile(absolutePath, "utf-8");
const allLines = textContent.split("\n");
const totalFileLines = allLines.length;
// Apply offset if specified (1-indexed to 0-indexed)
const startLine = offset ? Math.max(0, offset - 1) : 0;
const startLineDisplay = startLine + 1; // For display (1-indexed)
// Check if offset is out of bounds
if (startLine >= allLines.length) {
throw new Error(`Offset ${offset} is beyond end of file (${allLines.length} lines total)`);
}
// If limit is specified by user, use it; otherwise we'll let truncateHead decide
let selectedContent: string;
let userLimitedLines: number | undefined;
if (limit !== undefined) {
const endLine = Math.min(startLine + limit, allLines.length);
selectedContent = allLines.slice(startLine, endLine).join("\n");
userLimitedLines = endLine - startLine;
} else {
selectedContent = allLines.slice(startLine).join("\n");
}
// Apply truncation (respects both line and byte limits)
const truncation = truncateHead(selectedContent);
let outputText: string;
if (truncation.firstLineExceedsLimit) {
// First line at offset exceeds 30KB - tell model to use bash
const firstLineSize = formatSize(Buffer.byteLength(allLines[startLine], "utf-8"));
outputText = `[Line ${startLineDisplay} is ${firstLineSize}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path} | head -c ${DEFAULT_MAX_BYTES}]`;
details = { truncation };
} else if (truncation.truncated) {
// Truncation occurred - build actionable notice
const endLineDisplay = startLineDisplay + truncation.outputLines - 1;
const nextOffset = endLineDisplay + 1;
outputText = truncation.content;
if (truncation.truncatedBy === "lines") {
outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines}. Use offset=${nextOffset} to continue]`;
} else {
outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Use offset=${nextOffset} to continue]`;
}
details = { truncation };
} else if (userLimitedLines !== undefined && startLine + userLimitedLines < allLines.length) {
// User specified limit, there's more content, but no truncation
const remaining = allLines.length - (startLine + userLimitedLines);
const nextOffset = startLine + userLimitedLines + 1;
outputText = truncation.content;
outputText += `\n\n[${remaining} more lines in file. Use offset=${nextOffset} to continue]`;
} else {
// No truncation, no user limit exceeded
outputText = truncation.content;
}
content = [{ type: "text", text: outputText }];
}
// Check if aborted after reading
if (aborted) {
return;
}
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve({ content, details });
} catch (error: any) {
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) {
reject(error);
}
} }
})();
}, let aborted = false;
);
}, // Set up abort handler
}; const onAbort = () => {
aborted = true;
reject(new Error("Operation aborted"));
};
if (signal) {
signal.addEventListener("abort", onAbort, { once: true });
}
// Perform the read operation
(async () => {
try {
// Check if file exists
await access(absolutePath, constants.R_OK);
// Check if aborted before reading
if (aborted) {
return;
}
const mimeType = await detectSupportedImageMimeTypeFromFile(absolutePath);
// Read the file based on type
let content: (TextContent | ImageContent)[];
let details: ReadToolDetails | undefined;
if (mimeType) {
// Read as image (binary)
const buffer = await readFile(absolutePath);
const base64 = buffer.toString("base64");
content = [
{ type: "text", text: `Read image file [${mimeType}]` },
{ type: "image", data: base64, mimeType },
];
} else {
// Read as text
const textContent = await readFile(absolutePath, "utf-8");
const allLines = textContent.split("\n");
const totalFileLines = allLines.length;
// Apply offset if specified (1-indexed to 0-indexed)
const startLine = offset ? Math.max(0, offset - 1) : 0;
const startLineDisplay = startLine + 1; // For display (1-indexed)
// Check if offset is out of bounds
if (startLine >= allLines.length) {
throw new Error(`Offset ${offset} is beyond end of file (${allLines.length} lines total)`);
}
// If limit is specified by user, use it; otherwise we'll let truncateHead decide
let selectedContent: string;
let userLimitedLines: number | undefined;
if (limit !== undefined) {
const endLine = Math.min(startLine + limit, allLines.length);
selectedContent = allLines.slice(startLine, endLine).join("\n");
userLimitedLines = endLine - startLine;
} else {
selectedContent = allLines.slice(startLine).join("\n");
}
// Apply truncation (respects both line and byte limits)
const truncation = truncateHead(selectedContent);
let outputText: string;
if (truncation.firstLineExceedsLimit) {
// First line at offset exceeds 30KB - tell model to use bash
const firstLineSize = formatSize(Buffer.byteLength(allLines[startLine], "utf-8"));
outputText = `[Line ${startLineDisplay} is ${firstLineSize}, exceeds ${formatSize(DEFAULT_MAX_BYTES)} limit. Use bash: sed -n '${startLineDisplay}p' ${path} | head -c ${DEFAULT_MAX_BYTES}]`;
details = { truncation };
} else if (truncation.truncated) {
// Truncation occurred - build actionable notice
const endLineDisplay = startLineDisplay + truncation.outputLines - 1;
const nextOffset = endLineDisplay + 1;
outputText = truncation.content;
if (truncation.truncatedBy === "lines") {
outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines}. Use offset=${nextOffset} to continue]`;
} else {
outputText += `\n\n[Showing lines ${startLineDisplay}-${endLineDisplay} of ${totalFileLines} (${formatSize(DEFAULT_MAX_BYTES)} limit). Use offset=${nextOffset} to continue]`;
}
details = { truncation };
} else if (userLimitedLines !== undefined && startLine + userLimitedLines < allLines.length) {
// User specified limit, there's more content, but no truncation
const remaining = allLines.length - (startLine + userLimitedLines);
const nextOffset = startLine + userLimitedLines + 1;
outputText = truncation.content;
outputText += `\n\n[${remaining} more lines in file. Use offset=${nextOffset} to continue]`;
} else {
// No truncation, no user limit exceeded
outputText = truncation.content;
}
content = [{ type: "text", text: outputText }];
}
// Check if aborted after reading
if (aborted) {
return;
}
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve({ content, details });
} catch (error: any) {
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) {
reject(error);
}
}
})();
},
);
},
};
}
/** Default read tool using process.cwd() - for backwards compatibility */
export const readTool = createReadTool(process.cwd());

View file

@ -1,82 +1,93 @@
import type { AgentTool } from "@mariozechner/pi-ai"; import type { AgentTool } from "@mariozechner/pi-ai";
import { Type } from "@sinclair/typebox"; import { Type } from "@sinclair/typebox";
import { mkdir, writeFile } from "fs/promises"; import { mkdir, writeFile } from "fs/promises";
import { dirname, resolve as resolvePath } from "path"; import { dirname } from "path";
import { expandPath } from "./path-utils.js"; import { resolveToCwd } from "./path-utils.js";
const writeSchema = Type.Object({ const writeSchema = Type.Object({
path: Type.String({ description: "Path to the file to write (relative or absolute)" }), path: Type.String({ description: "Path to the file to write (relative or absolute)" }),
content: Type.String({ description: "Content to write to the file" }), content: Type.String({ description: "Content to write to the file" }),
}); });
export const writeTool: AgentTool<typeof writeSchema> = { export function createWriteTool(cwd: string): AgentTool<typeof writeSchema> {
name: "write", return {
label: "write", name: "write",
description: label: "write",
"Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Automatically creates parent directories.", description:
parameters: writeSchema, "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Automatically creates parent directories.",
execute: async (_toolCallId: string, { path, content }: { path: string; content: string }, signal?: AbortSignal) => { parameters: writeSchema,
const absolutePath = resolvePath(expandPath(path)); execute: async (
const dir = dirname(absolutePath); _toolCallId: string,
{ path, content }: { path: string; content: string },
signal?: AbortSignal,
) => {
const absolutePath = resolveToCwd(path, cwd);
const dir = dirname(absolutePath);
return new Promise<{ content: Array<{ type: "text"; text: string }>; details: undefined }>((resolve, reject) => { return new Promise<{ content: Array<{ type: "text"; text: string }>; details: undefined }>(
// Check if already aborted (resolve, reject) => {
if (signal?.aborted) { // Check if already aborted
reject(new Error("Operation aborted")); if (signal?.aborted) {
return; reject(new Error("Operation aborted"));
}
let aborted = false;
// Set up abort handler
const onAbort = () => {
aborted = true;
reject(new Error("Operation aborted"));
};
if (signal) {
signal.addEventListener("abort", onAbort, { once: true });
}
// Perform the write operation
(async () => {
try {
// Create parent directories if needed
await mkdir(dir, { recursive: true });
// Check if aborted before writing
if (aborted) {
return; return;
} }
// Write the file let aborted = false;
await writeFile(absolutePath, content, "utf-8");
// Check if aborted after writing // Set up abort handler
if (aborted) { const onAbort = () => {
return; aborted = true;
} reject(new Error("Operation aborted"));
};
// Clean up abort handler
if (signal) { if (signal) {
signal.removeEventListener("abort", onAbort); signal.addEventListener("abort", onAbort, { once: true });
} }
resolve({ // Perform the write operation
content: [{ type: "text", text: `Successfully wrote ${content.length} bytes to ${path}` }], (async () => {
details: undefined, try {
}); // Create parent directories if needed
} catch (error: any) { await mkdir(dir, { recursive: true });
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) { // Check if aborted before writing
reject(error); if (aborted) {
} return;
} }
})();
}); // Write the file
}, await writeFile(absolutePath, content, "utf-8");
};
// Check if aborted after writing
if (aborted) {
return;
}
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
resolve({
content: [{ type: "text", text: `Successfully wrote ${content.length} bytes to ${path}` }],
details: undefined,
});
} catch (error: any) {
// Clean up abort handler
if (signal) {
signal.removeEventListener("abort", onAbort);
}
if (!aborted) {
reject(error);
}
}
})();
},
);
},
};
}
/** Default write tool using process.cwd() - for backwards compatibility */
export const writeTool = createWriteTool(process.cwd());

View file

@ -93,6 +93,16 @@ export {
configureOAuthStorage, configureOAuthStorage,
// Factory // Factory
createAgentSession, createAgentSession,
createBashTool,
// Tool factories (for custom cwd)
createCodingTools,
createEditTool,
createFindTool,
createGrepTool,
createLsTool,
createReadOnlyTools,
createReadTool,
createWriteTool,
// Helpers // Helpers
defaultGetApiKey, defaultGetApiKey,
discoverAvailableModels, discoverAvailableModels,
@ -106,7 +116,7 @@ export {
type FileSlashCommand, type FileSlashCommand,
findModel as findModelByProviderAndId, findModel as findModelByProviderAndId,
loadSettings, loadSettings,
// Tools // Pre-built tools (use process.cwd())
readOnlyTools, readOnlyTools,
} from "./core/sdk.js"; } from "./core/sdk.js";
export { export {