Enhance file handling in list_files.js and patch_files.js by auto-creating chroot directories, improving path normalization, and refining error handling for file operations.
@@ -132,9 +132,10 @@ export async function run(args) {
   let chrootResolved;
   try {
     chrootResolved = path.resolve(chrootPath);
-    await fs.access(chrootResolved); // Ensure chroot path exists
+    // Auto-create the chroot base directory if it does not exist
+    await fs.mkdir(chrootResolved, { recursive: true });
   } catch (err) {
-    return { err: `Invalid chroot path: ${chrootPath} (${err?.message || String(err)})` };
+    return { err: `Failed to prepare chroot path: ${chrootPath} (${err?.message || String(err)})` };
   }
 
   let resolvedBase;
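The fs.access check is dropped because fs.mkdir with { recursive: true } is already safe to call on an existing directory: it creates any missing parents and does not reject when the target is present. A minimal sketch, with a hypothetical chroot path:

    import fs from 'fs/promises';
    import path from 'path';

    // Hypothetical base directory; both calls succeed, the second is a no-op.
    const chrootResolved = path.resolve('/tmp/example-chroot');
    await fs.mkdir(chrootResolved, { recursive: true });
    await fs.mkdir(chrootResolved, { recursive: true });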
@@ -102,39 +102,53 @@ class Patch {
 // --------------------------------------------------------------------------- //
 // Path utilities for chroot functionality
 // --------------------------------------------------------------------------- //
-function normalizePath(path) {
-  return path.replace(/\\/g, '/').replace(/\/+/g, '/');
+function normalizePath(p) {
+  return (p || '').replace(/\\/g, '/').replace(/\/+/g, '/');
 }
 
-function joinPaths(...paths) {
-  return normalizePath(paths.filter(p => p).join('/'));
+function joinPaths(...parts) {
+  const joined = parts.filter(Boolean).join('/');
+  return normalizePath(joined);
 }
 
 function resolvePath(chroot, filepath) {
-  if (!chroot) return filepath;
+  const file = normalizePath(filepath);
+  if (!chroot) return file;
 
-  // Remove leading slash from filepath if present
-  const cleanFilepath = filepath.startsWith('/') ? filepath.substring(1) : filepath;
+  const root = normalizePath(chroot);
 
-  // Join chroot and filepath
-  const resolved = joinPaths(chroot, cleanFilepath);
+  // If file is absolute, treat it as relative to chroot
+  if (file.startsWith('/')) {
+    // Remove leading slash and join with chroot
+    const relativePath = file.substring(1);
+    const resolved = joinPaths(root, relativePath);
+    return resolved.startsWith('/') ? resolved : '/' + resolved;
+  }
 
-  // Ensure it starts with /
+  // If file is relative, join with chroot
+  const resolved = joinPaths(root, file);
   return resolved.startsWith('/') ? resolved : '/' + resolved;
 }
 
 function unresolvePath(chroot, filepath) {
-  if (!chroot) return filepath;
+  const file = normalizePath(filepath);
+  if (!chroot) return file;
 
-  const chrootPath = chroot.startsWith('/') ? chroot : '/' + chroot;
-  const cleanFilepath = filepath.startsWith('/') ? filepath : '/' + filepath;
+  const root = normalizePath(chroot);
+  const rootWithSlash = root.endsWith('/') ? root : root + '/';
 
-  if (cleanFilepath.startsWith(chrootPath)) {
-    const relativePath = cleanFilepath.substring(chrootPath.length);
-    return relativePath.startsWith('/') ? relativePath : '/' + relativePath;
+  // Convert absolute path back to what user would expect
+  if (file.startsWith(rootWithSlash)) {
+    // Return path relative to chroot (without leading slash for user expectation)
+    return file.substring(rootWithSlash.length);
   }
 
-  return filepath;
+  if (file === root) {
+    return '';
+  }
+
+  // If somehow outside chroot, return as-is
+  return file;
 }
 
 // --------------------------------------------------------------------------- //
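To make the new path semantics concrete, a few expected values worked out from the helpers above, using hypothetical paths:

    normalizePath('src\\lib//utils.js');                  // -> 'src/lib/utils.js'

    // Absolute and relative inputs both land under the chroot:
    resolvePath('/srv/work', '/src/app.js');              // -> '/srv/work/src/app.js'
    resolvePath('/srv/work', 'src/app.js');               // -> '/srv/work/src/app.js'

    // unresolvePath maps a resolved path back to what the user supplied:
    unresolvePath('/srv/work', '/srv/work/src/app.js');   // -> 'src/app.js'
    unresolvePath('/srv/work', '/srv/work');              // -> '' (the chroot itself)
    unresolvePath('/srv/work', '/elsewhere/x.js');        // -> '/elsewhere/x.js' (outside chroot, returned as-is)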
@@ -409,6 +423,11 @@ function find_context_core(lines, context, start) {
 }
 
 function find_context(lines, context, start, eof) {
+  // Special case: if context is empty, return start position
+  if (!context || context.length === 0) {
+    return [start, 0];
+  }
+
   if (eof) {
     let [new_index, fuzz] = find_context_core(lines, context, Math.max(0, lines.length - context.length));
     if (new_index !== -1) {
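The new guard also makes the return shape explicit: find_context yields an [index, fuzz] pair, and an empty context simply anchors at the requested position with zero fuzz. A small illustration with hypothetical inputs:

    // No context lines to match: anchor directly at index 1, fuzz 0.
    find_context(['line 1', 'line 2', 'line 3'], [], 1, false);  // -> [1, 0]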
@@ -461,16 +480,20 @@ function peek_next_section(lines, index) {
     }
     s = s.substring(1);
 
-    if (mode === "keep" && last_mode !== mode) {
-      if (ins_lines.length > 0 || del_lines.length > 0) {
-        chunks.push(
-          new Chunk(
-            old.length - del_lines.length,
-            [...del_lines],
-            [...ins_lines]
-          )
-        );
-      }
+    // Handle the case where we're at the beginning and have content
+    if (index === orig_index + 1 && old.length === 0 && (del_lines.length > 0 || ins_lines.length > 0)) {
+      // This is the first content line, start collecting
+    }
+
+    if (mode === "keep" && last_mode !== mode && (ins_lines.length > 0 || del_lines.length > 0)) {
+      const chunk_orig_index = old.length - del_lines.length;
+      chunks.push(
+        new Chunk(
+          chunk_orig_index,
+          [...del_lines],
+          [...ins_lines]
+        )
+      );
       del_lines = [];
       ins_lines = [];
     }
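The chunk is now flushed only when there are pending insertions or deletions, and its origin index is computed once as old.length - del_lines.length. A worked trace, assuming (as that expression implies) that old accumulates both kept and deleted original lines:

    // Section lines: ' a', '-b', '+c', ' d'
    // After consuming ' a', '-b', '+c':
    //   old       = ['a', 'b']     (kept + deleted original lines)
    //   del_lines = ['b']
    //   ins_lines = ['c']
    // The keep line ' d' then triggers the flush:
    //   chunk_orig_index = old.length - del_lines.length = 2 - 1 = 1
    //   chunks.push(new Chunk(1, ['b'], ['c']));  // the change starts right after 'a'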
@@ -485,10 +508,12 @@ function peek_next_section(lines, index) {
     }
   }
 
+  // Handle any remaining content
   if (ins_lines.length > 0 || del_lines.length > 0) {
+    const chunk_orig_index = old.length - del_lines.length;
     chunks.push(
       new Chunk(
-        old.length - del_lines.length,
+        chunk_orig_index,
         [...del_lines],
         [...ins_lines]
       )
@@ -654,8 +679,16 @@ function identify_files_added(text, chroot = null) {
 // --------------------------------------------------------------------------- //
 function load_files(paths, open_fn) {
   const result = {};
-  for (const path of paths) {
-    result[path] = open_fn(path);
+  for (const p of paths) {
+    try {
+      result[p] = open_fn(p);
+    } catch (err) {
+      // Skip truly missing files so parser can emit precise DiffErrors
+      if (err && (err.code === 'ENOENT' || /ENOENT/.test(String(err)))) {
+        continue;
+      }
+      throw err;
+    }
  }
   return result;
 }
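Since open_fn is called synchronously here, a reader such as fs.readFileSync fits; a missing file then surfaces as an ENOENT error, which the new loop swallows so the parser can later name the missing file in a DiffError instead of crashing. A sketch with a hypothetical reader and file names:

    import fs from 'fs';

    // Hypothetical open_fn: returns file contents, throws ENOENT when absent.
    const open_fn = (p) => fs.readFileSync(p, 'utf8');

    // 'missing.txt' is simply left out of the result map; everything else loads.
    const orig = load_files(['existing.txt', 'missing.txt'], open_fn);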
@@ -686,7 +719,12 @@ function process_patch(text, open_fn, write_fn, remove_fn, chroot = null) {
   if (!text.startsWith("*** Begin Patch")) {
     throw new DiffError("Patch text must start with *** Begin Patch");
   }
-  const paths = identify_files_needed(text, chroot);
+  // Load update/delete targets and also attempt to load add targets
+  // so existing files are detected during parsing
+  const paths = [
+    ...identify_files_needed(text, chroot),
+    ...identify_files_added(text, chroot),
+  ];
   const orig = load_files(paths, open_fn);
   const [patch, _fuzz] = text_to_patch(text, orig, chroot);
   const commit = patch_to_commit(patch, orig, chroot);
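Loading the add targets up front lets parsing notice when a file an incoming patch tries to add already exists on disk. A sketch of how the combined list would be built, assuming the usual '*** Add File:' marker that identify_files_added presumably scans for (file name hypothetical):

    // Hypothetical patch text in the envelope this parser checks for.
    const text = [
      '*** Begin Patch',
      '*** Add File: notes/todo.txt',
      '+first line of the new file',
      '*** End Patch',
    ].join('\n');

    // If notes/todo.txt already exists, it now shows up in orig, so the
    // parsing step can detect the conflict (per the comment in the diff above).
    const paths = [
      ...identify_files_needed(text, null),
      ...identify_files_added(text, null),
    ];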
@@ -755,4 +793,4 @@ export async function run(args) {
     }
     throw error;
   }
-}
+}