diff --git a/docs/cli/patch-commit.md b/docs/cli/patch-commit.md
index 159022d08bbd31..e20dbaeb4f93f0 100644
--- a/docs/cli/patch-commit.md
+++ b/docs/cli/patch-commit.md
@@ -1,4 +1,6 @@
-After having prepared a package for patching with [`bun patch`](/docs/cli/patch), you can install with `bun patch-commit <path>`.
+An alias for `bun patch --commit` to maintain compatibility with pnpm.
+
+You must prepare the package for patching with [`bun patch <pkg>`](/docs/cli/patch) first.

 ### `--patches-dir`

diff --git a/docs/cli/patch.md b/docs/cli/patch.md
index 922446543d898f..a3873197112f01 100644
--- a/docs/cli/patch.md
+++ b/docs/cli/patch.md
@@ -1,10 +1,42 @@
-If you need to modify the contents of a package, call `bun patch <pkg>` with the package's name (and optionally a version),
-for example:
+Bun lets you make quick fixes to packages and have those changes apply consistently across installs and machines, without forking and publishing a new version of the package.
+
+To get started, use `bun patch <pkg>` to prepare the package for patching:

 ```bash
+# you can supply the package name
 $ bun patch react
+
+# ...and a precise version in case multiple versions are installed
+$ bun patch react@17.0.2
+
+# or the path to the package
+$ bun patch node_modules/react
 ```

-This will copy the package to a temporary directory, where you can make changes to the package's contents.
+This command prints the path to the package's folder in `node_modules/`, where you can make your changes.
+
+This allows you to test your changes before committing them.
+
+{% callout %}
+**Note** — Don't forget to call `bun patch <pkg>` first! This ensures the package folder in `node_modules/` contains a fresh copy of the package with no symlinks/hardlinks to Bun's cache.
+
+If you forget to do this, you might end up editing the package globally in the cache!
+{% /callout %}
+
+Once you're happy with your changes, run `bun patch --commit <path>`.
-Once you're done making changes, run `bun patch-commit <path>` to have Bun install the patched package.
+Bun will generate a patch file in `patches/`, update your `package.json` and lockfile, and start using the patched package:
+
+```bash
+# you can supply the path to the patched package
+$ bun patch --commit node_modules/react
+
+# ... or the package name and optionally the version
+$ bun patch --commit react@17.0.2
+
+# choose the directory to store the patch files
+$ bun patch --commit react --patches-dir=mypatches
+
+# `patch-commit` is available for compatibility with pnpm
+$ bun patch-commit react
+```
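For context before the implementation diffs below: committing a patch records it in `package.json` under `patchedDependencies`, keyed by the resolved `name@version` and pointing at the generated patch file (this is the entry that `editPatchedDependencies` writes later in this diff). A minimal sketch of the resulting entry — the key and filename shown are illustrative; Bun derives both from the resolved version:

```json
{
  "patchedDependencies": {
    "react@17.0.2": "patches/react@17.0.2.patch"
  }
}
```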
diff --git a/src/bun.js/api/bun/process.zig b/src/bun.js/api/bun/process.zig
index 699734afcd8c58..3138bcf7a39dc9 100644
--- a/src/bun.js/api/bun/process.zig
+++ b/src/bun.js/api/bun/process.zig
@@ -1953,6 +1953,7 @@ pub const sync = struct {
         var this = SyncWindowsProcess.new(.{
             .process = spawned.toProcess(undefined, true),
         });
+        this.process.ref();
         this.process.setExitHandler(this);
         defer this.destroy();
         this.process.enableKeepingEventLoopAlive();
@@ -2068,11 +2069,11 @@ pub const sync = struct {
             process.stderr orelse bun.invalid_fd,
         };

-        if (process.memfds[0]) {
+        if (process.memfds[1]) {
             out_fds_to_wait_for[0] = bun.invalid_fd;
         }

-        if (process.memfds[1]) {
+        if (process.memfds[2]) {
             out_fds_to_wait_for[1] = bun.invalid_fd;
         }

diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig
index 3795cf5628ed7f..31476fa3ac7e67 100644
--- a/src/bun.js/bindings/bindings.zig
+++ b/src/bun.js/bindings/bindings.zig
@@ -1638,6 +1638,23 @@ pub const SystemError = extern struct {
     }

     pub fn format(self: SystemError, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+        if (!self.path.isEmpty()) {
+            // TODO: remove this hardcoding
+            switch (bun.Output.enable_ansi_colors_stderr) {
+                inline else => |enable_colors| try writer.print(
+                    comptime bun.Output.prettyFmt(
+                        "{}: {s}: {} ({}())",
+                        enable_colors,
+                    ),
+                    .{
+                        self.code,
+                        self.path,
+                        self.message,
+                        self.syscall,
+                    },
+                ),
+            }
+        } else
         // TODO: remove this hardcoding
         switch (bun.Output.enable_ansi_colors_stderr) {
             inline else => |enable_colors| try writer.print(
diff --git a/src/cli.zig b/src/cli.zig
index 6f7b39726ec022..36a6d3e24b7574 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -1005,6 +1005,7 @@ pub const HelpCommand = struct {
        \\      update    {s:<16} Update outdated dependencies
        \\      link      [\<package\>]   Register or link a local npm package
        \\      unlink                    Unregister a local npm package
+        \\      patch     \<pkg\>         Prepare a package for patching
        \\      pm        \<subcommand\>  Additional package management utilities
        \\
        \\      build     ./a.ts ./b.jsx  Bundle TypeScript & JavaScript into a single file
@@ -2293,6 +2294,12 @@ pub const Command = struct {
                    Output.pretty("Usage: bun completions", .{});
                    Output.flush();
                },
+                Command.Tag.PatchCommand => {
+                    Install.PackageManager.CommandLineArguments.printHelp(.patch);
+                },
+                Command.Tag.PatchCommitCommand => {
+                    Install.PackageManager.CommandLineArguments.printHelp(.@"patch-commit");
+                },
                Command.Tag.ExecCommand => {
                    Output.pretty(
                        \\Usage: bun exec \<script\>
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index c4adda98d2dad7..984fd6257272b4 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -259,6 +259,21 @@ pub inline fn isRemoteTarball(dependency: string) bool {
     return strings.hasPrefixComptime(dependency, "https://") or strings.hasPrefixComptime(dependency, "http://");
 }

+/// Splits `foo@1.1.1` into `foo` and `1.1.1`, `@foo/bar@1.1.1` into `@foo/bar` and `1.1.1`, and a bare `foo` into `foo` and `null`.
+pub fn splitNameAndVersion(str: string) struct { string, ?string } {
+    if (strings.indexOfChar(str, '@')) |at_index| {
+        if (at_index != 0) {
+            return .{ str[0..at_index], if (at_index + 1 < str.len) str[at_index + 1 ..] 
else null }; + } + + const second_at_index = (strings.indexOfChar(str[1..], '@') orelse return .{ str, null }) + 1; + + return .{ str[0..second_at_index], if (second_at_index + 1 < str.len) str[second_at_index + 1 ..] else null }; + } + + return .{ str, null }; +} + pub const Version = struct { tag: Tag = .uninitialized, literal: String = .{}, diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index ba6ad508583f8b..c4ac31829e633c 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -289,7 +289,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD else => unreachable, }; if (folder_name.len == 0 or (folder_name.len == 1 and folder_name[0] == '/')) @panic("Tried to delete root and stopped it"); - var cache_dir = this.cache_dir; + const cache_dir = this.cache_dir; // e.g. @next // if it's a namespace package, we need to make sure the @name folder exists @@ -398,52 +398,15 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD } } - var did_atomically_replace = false; - if (did_atomically_replace and PackageManager.using_fallback_temp_dir) tmpdir.deleteTree(src) catch {}; - - attempt_atomic_rename_and_fallback_to_racy_delete: { - { - // Happy path: the folder doesn't exist in the cache dir, so we can - // just rename it. We don't need to delete anything. - var err = switch (bun.sys.renameat2(bun.toFD(tmpdir.fd), src, bun.toFD(cache_dir.fd), folder_name, .{ - .exclude = true, - })) { - .err => |err| err, - .result => break :attempt_atomic_rename_and_fallback_to_racy_delete, - }; - - // Fallback path: the folder exists in the cache dir, it might be in a strange state - // let's attempt to atomically replace it with the temporary folder's version - if (switch (err.getErrno()) { - .EXIST, .NOTEMPTY, .OPNOTSUPP => true, - else => false, - }) { - did_atomically_replace = true; - switch (bun.sys.renameat2(bun.toFD(tmpdir.fd), src, bun.toFD(cache_dir.fd), folder_name, .{ - .exchange = true, - })) { - .err => {}, - .result => break :attempt_atomic_rename_and_fallback_to_racy_delete, - } - did_atomically_replace = false; - } - } - - // sad path: let's try to delete the folder and then rename it - cache_dir.deleteTree(src) catch {}; - switch (bun.sys.renameat(bun.toFD(tmpdir.fd), src, bun.toFD(cache_dir.fd), folder_name)) { - .err => |err| { - this.package_manager.log.addErrorFmt( - null, - logger.Loc.Empty, - this.package_manager.allocator, - "moving \"{s}\" to cache dir failed: {}\n From: {s}\n To: {s}", - .{ name, err, tmpname, folder_name }, - ) catch unreachable; - return error.InstallFailed; - }, - .result => {}, - } + if (bun.sys.renameatConcurrently(bun.toFD(tmpdir.fd), src, bun.toFD(cache_dir.fd), folder_name).asErr()) |err| { + this.package_manager.log.addErrorFmt( + null, + logger.Loc.Empty, + this.package_manager.allocator, + "moving \"{s}\" to cache dir failed: {}\n From: {s}\n To: {s}", + .{ name, err, tmpname, folder_name }, + ) catch unreachable; + return error.InstallFailed; } } diff --git a/src/install/install.zig b/src/install/install.zig index 2f429130bab48b..772e149b75ad81 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -69,6 +69,14 @@ pub const max_hex_hash_len: comptime_int = brk: { pub const max_buntag_hash_buf_len: comptime_int = max_hex_hash_len + bun_hash_tag.len + 1; pub const BuntagHashBuf = [max_buntag_hash_buf_len]u8; +pub fn buntaghashbuf_make(buf: *BuntagHashBuf, patch_hash: u64) [:0]u8 { + @memcpy(buf[0..bun_hash_tag.len], 
bun_hash_tag);
+    const digits = std.fmt.bufPrint(buf[bun_hash_tag.len..], "{x}", .{patch_hash}) catch bun.outOfMemory();
+    buf[bun_hash_tag.len + digits.len] = 0;
+    const bunhashtag = buf[0 .. bun_hash_tag.len + digits.len :0];
+    return bunhashtag;
+}
+
 pub const patch = @import("./patch_install.zig");
 pub const PatchTask = patch.PatchTask;

@@ -1018,19 +1026,22 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
             // hash from the .patch file, to be checked against bun tag
             const patchfile_contents_hash = this.patch.patch_contents_hash;
             var buf: BuntagHashBuf = undefined;
-            @memcpy(buf[0..bun_hash_tag.len], bun_hash_tag);
-            const digits = std.fmt.bufPrint(buf[bun_hash_tag.len..], "{x}", .{patchfile_contents_hash}) catch bun.outOfMemory();
-            const bunhashtag = buf[0 .. bun_hash_tag.len + digits.len];
+            const bunhashtag = buntaghashbuf_make(&buf, patchfile_contents_hash);

             const patch_tag_path = bun.path.joinZ(&[_][]const u8{
                 this.destination_dir_subpath,
                 bunhashtag,
             }, .posix);

+            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false;
+            defer {
+                if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close();
+            }
+
             if (comptime bun.Environment.isPosix) {
-                _ = bun.sys.fstatat(bun.toFD(root_node_modules_dir.fd), patch_tag_path).unwrap() catch return false;
+                _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return false;
             } else {
-                switch (bun.sys.openat(bun.toFD(root_node_modules_dir.fd), patch_tag_path, std.os.O.RDONLY, 0)) {
+                switch (bun.sys.openat(bun.toFD(destination_dir.fd), patch_tag_path, std.os.O.RDONLY, 0)) {
                     .err => return false,
                     .result => |fd| _ = bun.sys.close(fd),
                 }
@@ -1068,7 +1079,6 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
         return strings.eqlLong(repo.resolved.slice(buf), bun_tag_file, true);
     }

-    // TODO: patched dependencies
     pub fn verify(
         this: *@This(),
         resolution: *const Resolution,
@@ -2272,9 +2282,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
             };
         }

-        fn patchedPackageMissingFromCache(this: *@This(), manager: *PackageManager, package_id: PackageID, patchfile_hash: u64) bool {
-            _ = patchfile_hash; // autofix
-
+        fn patchedPackageMissingFromCache(this: *@This(), manager: *PackageManager, package_id: PackageID) bool {
             // const patch_hash_prefix = "_patch_hash=";
             // var patch_hash_part_buf: [patch_hash_prefix.len + max_buntag_hash_buf_len + 1]u8 = undefined;
             // @memcpy(patch_hash_part_buf[0..patch_hash_prefix.len], patch_hash_prefix);
@@ -2598,6 +2606,9 @@ pub const PackageManager = struct {
     patch_calc_hash_batch: ThreadPool.Batch = .{},
     patch_task_fifo: PatchTaskFifo = PatchTaskFifo.init(),
     patch_task_queue: PatchTaskQueue = .{},
+    /// We need to calculate the patch file hashes every single time,
+    /// because someone could edit the patch file at any time
+    pending_pre_calc_hashes: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),
     pending_tasks: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),
     total_tasks: u32 = 0,
     preallocated_network_tasks: PreallocatedNetworkTasks = PreallocatedNetworkTasks.init(bun.default_allocator),
@@ -2660,6 +2671,8 @@ pub const PackageManager = struct {
     // dependency name -> original version information
     updating_packages: bun.StringArrayHashMapUnmanaged(PackageUpdateInfo) = .{},

+    patched_dependencies_to_remove: std.ArrayHashMapUnmanaged(PackageNameAndVersionHash, void, ArrayIdentityContext.U64, false) = .{},
+
     pub const PackageUpdateInfo = struct {
         original_version_literal: string,
         is_alias: bool,
@@ 
-3265,7 +3278,7 @@ pub const PackageManager = struct { // 3. apply patch to temp dir // 4. rename temp dir to `folder_path` if (patch_hash != null) { - const non_patched_path_ = folder_path[0 .. std.mem.indexOf(u8, folder_path, "_patch_hash=") orelse @panic("todo this is bad")]; + const non_patched_path_ = folder_path[0 .. std.mem.indexOf(u8, folder_path, "_patch_hash=") orelse @panic("Expected folder path to contain `patch_hash=`, this is a bug in Bun. Please file a GitHub issue.")]; const non_patched_path = manager.lockfile.allocator.dupeZ(u8, non_patched_path_) catch bun.outOfMemory(); defer manager.lockfile.allocator.free(non_patched_path); if (manager.isFolderInCache(non_patched_path)) { @@ -3831,11 +3844,11 @@ pub const PackageManager = struct { manager: *PackageManager, pkg_name: string, resolution: *const Resolution, + folder_path_buf: *bun.PathBuffer, patch_hash: ?u64, ) struct { cache_dir: std.fs.Dir, cache_dir_subpath: stringZ } { const name = pkg_name; const buf = manager.lockfile.buffers.string_bytes.items; - _ = buf; // autofix var cache_dir = std.fs.cwd(); var cache_dir_subpath: stringZ = ""; @@ -3856,19 +3869,18 @@ pub const PackageManager = struct { cache_dir = manager.getCacheDirectory(); }, .folder => { - @panic("TODO @zack fix"); - // const folder = resolution.value.folder.slice(buf); - // // Handle when a package depends on itself via file: - // // example: - // // "mineflayer": "file:." - // if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - // cache_dir_subpath = "."; - // } else { - // @memcpy(manager.folder_path_buf[0..folder.len], folder); - // this.folder_path_buf[folder.len] = 0; - // cache_dir_subpath = this.folder_path_buf[0..folder.len :0]; - // } - // cache_dir = std.fs.cwd(); + const folder = resolution.value.folder.slice(buf); + // Handle when a package depends on itself via file: + // example: + // "mineflayer": "file:." 
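+                // When `folder` is empty or ".", the dependency is the project root
+                // itself, so we install it from the current working directory.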
+ if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { + cache_dir_subpath = "."; + } else { + @memcpy(folder_path_buf[0..folder.len], folder); + folder_path_buf[folder.len] = 0; + cache_dir_subpath = folder_path_buf[0..folder.len :0]; + } + cache_dir = std.fs.cwd(); }, .local_tarball => { cache_dir_subpath = manager.cachedTarballFolderName(resolution.value.local_tarball, patch_hash); @@ -3879,77 +3891,51 @@ pub const PackageManager = struct { cache_dir = manager.getCacheDirectory(); }, .workspace => { - @panic("TODO @zack fix"); - // const folder = resolution.value.workspace.slice(buf); - // // Handle when a package depends on itself - // if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - // cache_dir_subpath = "."; - // } else { - // @memcpy(this.folder_path_buf[0..folder.len], folder); - // this.folder_path_buf[folder.len] = 0; - // cache_dir_subpath = this.folder_path_buf[0..folder.len :0]; - // } - // cache_dir = std.fs.cwd(); + const folder = resolution.value.workspace.slice(buf); + // Handle when a package depends on itself + if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { + cache_dir_subpath = "."; + } else { + @memcpy(folder_path_buf[0..folder.len], folder); + folder_path_buf[folder.len] = 0; + cache_dir_subpath = folder_path_buf[0..folder.len :0]; + } + cache_dir = std.fs.cwd(); }, .symlink => { - @panic("TODO @zack fix"); - // const directory = manager.globalLinkDir() catch |err| { - // if (comptime log_level != .silent) { - // const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; - // const args = .{ name, @errorName(err) }; - - // if (comptime log_level.showProgress()) { - // switch (Output.enable_ansi_colors) { - // inline else => |enable_ansi_colors| { - // this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - // }, - // } - // } else { - // Output.prettyErrorln(fmt, args); - // } - // } - - // if (manager.options.enable.fail_early) { - // Global.exit(1); - // } - - // Output.flush(); - // this.summary.fail += 1; - // this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); - // return; - // }; - - // const folder = resolution.value.symlink.slice(buf); - - // if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { - // cache_dir_subpath = "."; - // cache_dir = std.fs.cwd(); - // } else { - // const global_link_dir = manager.globalLinkDirPath() catch unreachable; - // var ptr = &this.folder_path_buf; - // var remain: []u8 = this.folder_path_buf[0..]; - // @memcpy(ptr[0..global_link_dir.len], global_link_dir); - // remain = remain[global_link_dir.len..]; - // if (global_link_dir[global_link_dir.len - 1] != std.fs.path.sep) { - // remain[0] = std.fs.path.sep; - // remain = remain[1..]; - // } - // @memcpy(remain[0..folder.len], folder); - // remain = remain[folder.len..]; - // remain[0] = 0; - // const len = @intFromPtr(remain.ptr) - @intFromPtr(ptr); - // cache_dir_subpath = this.folder_path_buf[0..len :0]; - // cache_dir = directory; - // } - }, - else => { - @panic("TODO @zack fix"); - // if (comptime Environment.allow_assert) { - // @panic("Internal assertion failure: unexpected resolution tag"); - // } - // this.incrementTreeInstallCount(this.current_tree_id, !is_pending_package_install, log_level); - // return; + const directory = manager.globalLinkDir() catch |err| { + const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; + const args = .{ name, @errorName(err) }; + + Output.prettyErrorln(fmt, 
args); + + Global.exit(1); + }; + + const folder = resolution.value.symlink.slice(buf); + + if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { + cache_dir_subpath = "."; + cache_dir = std.fs.cwd(); + } else { + const global_link_dir = manager.globalLinkDirPath() catch unreachable; + var ptr = folder_path_buf; + var remain: []u8 = folder_path_buf[0..]; + @memcpy(ptr[0..global_link_dir.len], global_link_dir); + remain = remain[global_link_dir.len..]; + if (global_link_dir[global_link_dir.len - 1] != std.fs.path.sep) { + remain[0] = std.fs.path.sep; + remain = remain[1..]; + } + @memcpy(remain[0..folder.len], folder); + remain = remain[folder.len..]; + remain[0] = 0; + const len = @intFromPtr(remain.ptr) - @intFromPtr(ptr); + cache_dir_subpath = folder_path_buf[0..len :0]; + cache_dir = directory; + } }, + else => {}, } return .{ @@ -4279,6 +4265,18 @@ pub const PackageManager = struct { this.patch_task_fifo.writeItemAssumeCapacity(task); } + /// We need to calculate all the patchfile hashes at the beginning so we don't run into problems with stale hashes + pub fn enqueuePatchTaskPre(this: *PackageManager, task: *PatchTask) void { + debug("Enqueue patch task pre: 0x{x} {s}", .{ @intFromPtr(task), @tagName(task.callback) }); + task.pre = true; + if (this.patch_task_fifo.writableLength() == 0) { + this.flushPatchTaskQueue(); + } + + this.patch_task_fifo.writeItemAssumeCapacity(task); + _ = this.pending_pre_calc_hashes.fetchAdd(1, .Monotonic); + } + const SuccessFn = *const fn (*PackageManager, DependencyID, PackageID) void; const FailFn = *const fn (*PackageManager, *const Dependency, PackageID, anyerror) void; fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void { @@ -5619,7 +5617,6 @@ pub const PackageManager = struct { manager.network_resolve_batch = .{}; manager.patch_apply_batch = .{}; manager.patch_calc_hash_batch = .{}; - // TODO probably have to put patch tasks here return count; } @@ -6052,34 +6049,30 @@ pub const PackageManager = struct { defer ptask.deinit(); try ptask.runFromMainThread(manager, log_level); if (ptask.callback == .apply) { - if (comptime @TypeOf(callbacks.onExtract) != void) { - if (ptask.callback.apply.task_id) |task_id| { - _ = task_id; // autofix - - // const name = manager.lockfile.packages.items(.name)[ptask.callback.apply.pkg_id].slice(manager.lockfile.buffers.string_bytes.items); - // if (!callbacks.onPatch(extract_ctx, name, task_id, log_level)) { - // if (comptime Environment.allow_assert) { - // Output.panic("Ran callback to install enqueued packages, but there was no task associated with it.", .{}); - // } - // } - } else if (ExtractCompletionContext == *PackageInstaller) { - if (ptask.callback.apply.install_context) |*ctx| { - var installer: *PackageInstaller = extract_ctx; - const path = ctx.path; - ctx.path = std.ArrayList(u8).init(bun.default_allocator); - installer.node_modules.path = path; - installer.current_tree_id = ctx.tree_id; - const pkg_id = ptask.callback.apply.pkg_id; - - installer.installPackageWithNameAndResolution( - ctx.dependency_id, - pkg_id, - log_level, - ptask.callback.apply.pkgname, - ptask.callback.apply.resolution, - false, - false, - ); + if (ptask.callback.apply.logger.errors == 0) { + if (comptime @TypeOf(callbacks.onExtract) != void) { + if (ptask.callback.apply.task_id) |task_id| { + _ = task_id; // autofix + + } else if (ExtractCompletionContext == *PackageInstaller) { + if (ptask.callback.apply.install_context) |*ctx| { + var installer: *PackageInstaller = 
extract_ctx; + const path = ctx.path; + ctx.path = std.ArrayList(u8).init(bun.default_allocator); + installer.node_modules.path = path; + installer.current_tree_id = ctx.tree_id; + const pkg_id = ptask.callback.apply.pkg_id; + + installer.installPackageWithNameAndResolution( + ctx.dependency_id, + pkg_id, + log_level, + ptask.callback.apply.pkgname, + ptask.callback.apply.resolution, + false, + false, + ); + } } } } @@ -6790,11 +6783,12 @@ pub const PackageManager = struct { .workspaces = true, }, patch_features: union(enum) { + nothing: struct {}, patch: struct {}, commit: struct { patches_dir: string, }, - } = .{ .patch = .{} }, + } = .{ .nothing = .{} }, // The idea here is: // 1. package has a platform-specific binary to install // 2. To prevent downloading & installing incompatible versions, they stick the "real" one in optionalDependencies @@ -7256,14 +7250,18 @@ pub const PackageManager = struct { this.update.development = cli.development; if (!this.update.development) this.update.optional = cli.optional; - if (subcommand == .patch) { - // TODO args - } else if (subcommand == .patch_commit) { - this.patch_features = .{ - .commit = .{ - .patches_dir = cli.patch_commit.patches_dir, - }, - }; + switch (cli.patch) { + .nothing => {}, + .patch => { + this.patch_features = .{ .patch = .{} }; + }, + .commit => { + this.patch_features = .{ + .commit = .{ + .patches_dir = cli.patch.commit.patches_dir, + }, + }; + }, } } else { this.log_level = if (default_disable_progress_bar) LogLevel.default_no_progress else LogLevel.default; @@ -8208,7 +8206,7 @@ pub const PackageManager = struct { link, unlink, patch, - patch_commit, + @"patch-commit", }; pub fn init(ctx: Command.Context, comptime subcommand: Subcommand) !*PackageManager { @@ -8697,7 +8695,7 @@ pub const PackageManager = struct { } pub inline fn patchCommit(ctx: Command.Context) !void { - try updatePackageJSONAndInstallCatchError(ctx, .patch_commit); + try updatePackageJSONAndInstallCatchError(ctx, .@"patch-commit"); } pub inline fn update(ctx: Command.Context) !void { @@ -9083,6 +9081,8 @@ pub const PackageManager = struct { const patch_params = install_params_ ++ [_]ParamType{ clap.parseParam(" ... 
\"name\" of the package to patch") catch unreachable, + clap.parseParam("--commit Install a package containing modifications in `dir`") catch unreachable, + clap.parseParam("--patches-dir The directory to put the patch file in (only if --commit is used)") catch unreachable, }; const patch_commit_params = install_params_ ++ [_]ParamType{ @@ -9130,16 +9130,14 @@ pub const PackageManager = struct { concurrent_scripts: ?usize = null, - patch: PatchOpts = .{}, - patch_commit: PatchCommitOpts = .{}, - - const PatchOpts = struct { - edit_dir: ?[]const u8 = null, - ignore_existing: bool = false, - }; + patch: PatchOpts = .{ .nothing = .{} }, - const PatchCommitOpts = struct { - patches_dir: []const u8 = "patches", + const PatchOpts = union(enum) { + nothing: struct {}, + patch: struct {}, + commit: struct { + patches_dir: []const u8 = "patches", + }, }; const Omit = struct { @@ -9226,7 +9224,7 @@ pub const PackageManager = struct { // Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, - Subcommand.patch_commit => { + Subcommand.@"patch-commit" => { const intro_text = \\Usage: bun patch-commit \ \\ @@ -9363,7 +9361,7 @@ pub const PackageManager = struct { .link => link_params, .unlink => unlink_params, .patch => patch_params, - .patch_commit => patch_commit_params, + .@"patch-commit" => patch_commit_params, }; var diag = clap.Diagnostic{}; @@ -9401,23 +9399,36 @@ pub const PackageManager = struct { // link and unlink default to not saving, all others default to // saving. - // TODO: I think `bun patch` command goes here if (comptime subcommand == .link or subcommand == .unlink) { cli.no_save = !args.flag("--save"); } else { cli.no_save = args.flag("--no-save"); } - if (comptime subcommand == .patch) { - cli.patch = .{}; + if (subcommand == .patch) { + const patch_commit = args.flag("--commit"); + if (patch_commit) { + cli.patch = .{ + .commit = .{ + .patches_dir = args.option("--patches-dir") orelse "patches", + }, + }; + } else { + cli.patch = .{ + .patch = .{}, + }; + } } - - if (comptime subcommand == .patch_commit) { - cli.patch_commit = .{ - .patches_dir = args.option("--patches-dir") orelse "patches", + if (subcommand == .@"patch-commit") { + cli.patch = .{ + .commit = .{ + .patches_dir = args.option("--patches-dir") orelse "patches", + }, }; } + if (comptime subcommand == .@"patch-commit") {} + if (args.option("--config")) |opt| { cli.config = opt; } @@ -9493,7 +9504,7 @@ pub const PackageManager = struct { Global.crash(); } - if (subcommand == .patch_commit and cli.positionals.len < 2) { + if (subcommand == .@"patch-commit" and cli.positionals.len < 2) { Output.errGeneric("Missing pkg folder to patch\n", .{}); Global.crash(); } @@ -9668,7 +9679,7 @@ pub const PackageManager = struct { Output.prettyErrorln("No package.json, so nothing to remove\n", .{}); Global.crash(); }, - .patch => { + .patch, .@"patch-commit" => { Output.prettyErrorln("No package.json, so nothing to patch\n", .{}); Global.crash(); }, @@ -9691,7 +9702,7 @@ pub const PackageManager = struct { inline else => |log_level| try manager.updatePackageJSONAndInstallWithManager(ctx, log_level), } - if (comptime subcommand == .patch) { + if (manager.options.patch_features == .patch) { try manager.preparePatch(); } @@ -9728,7 +9739,10 @@ pub const PackageManager = struct { } } - const updates = UpdateRequest.parse(ctx.allocator, ctx.log, manager.options.positionals[1..], &update_requests, manager.subcommand); + const updates: []UpdateRequest = if (manager.subcommand == .@"patch-commit" or manager.subcommand == 
.patch) + &[_]UpdateRequest{} + else + UpdateRequest.parse(ctx.allocator, ctx.log, manager.options.positionals[1..], &update_requests, manager.subcommand); switch (manager.subcommand) { inline else => |subcommand| try manager.updatePackageJSONAndInstallWithManagerWithUpdates( ctx, @@ -9865,23 +9879,7 @@ pub const PackageManager = struct { } } }, - .patch_commit => { - _ = manager.lockfile.loadFromDisk( - manager, - manager.allocator, - manager.log, - manager.options.lockfile_path, - true, - ); - var pathbuf: bun.PathBuffer = undefined; - const stuff = try manager.doPatchCommit(&pathbuf, log_level); - try PackageJSONEditor.editPatchedDependencies( - manager, - ¤t_package_json.root, - stuff.patch_key, - stuff.patchfile_path, - ); - }, + .link, .add, .update => { // `bun update ` is basically the same as `bun add `, except // update will not exceed the current dependency range if it exists @@ -9908,7 +9906,19 @@ pub const PackageManager = struct { ); } }, - else => {}, + else => { + if (manager.options.patch_features == .commit) { + var pathbuf: bun.PathBuffer = undefined; + if (try manager.doPatchCommit(&pathbuf, log_level)) |stuff| { + try PackageJSONEditor.editPatchedDependencies( + manager, + ¤t_package_json.root, + stuff.patch_key, + stuff.patchfile_path, + ); + } + } + }, } manager.to_update = subcommand == .update; @@ -10093,162 +10103,438 @@ pub const PackageManager = struct { } } - /// - Arg is name and possibly version (e.g. "is-even" or "is-even@1.0.0") - /// - Find package that satisfies name and version - /// - Copy contents of package into temp dir - /// - Give that to user - fn preparePatch(manager: *PackageManager) !void { - const @"pkg + maybe version to patch" = manager.options.positionals[1]; - const name: []const u8, const version: ?[]const u8 = brk: { - if (std.mem.indexOfScalar(u8, @"pkg + maybe version to patch", '@')) |version_delimiter| { - break :brk .{ - @"pkg + maybe version to patch"[0..version_delimiter], - @"pkg + maybe version to patch"[version_delimiter + 1 ..], - }; - } - break :brk .{ - @"pkg + maybe version to patch", - null, - }; - }; + fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator, dependency_id: DependencyID) !?Lockfile.Tree.NodeModulesFolder { + while (iterator.nextNodeModulesFolder(null)) |node_modules| { + _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue; + return node_modules; + } + + return null; + } + fn pkgDepIdForNameAndVersion( + lockfile: *Lockfile, + pkg_maybe_version_to_patch: []const u8, + name: []const u8, + version: ?[]const u8, + ) struct { PackageID, DependencyID } { const name_hash = String.Builder.stringHash(name); - const strbuf = manager.lockfile.buffers.string_bytes.items; + const strbuf = lockfile.buffers.string_bytes.items; - const pkg_id: u64 = brk: { + const dependency_id: DependencyID, const pkg_id: PackageID = brk: { var buf: [1024]u8 = undefined; - var i: usize = 0; - - const pkg_hashes = manager.lockfile.packages.items(.name_hash); - var matches_count: u32 = 0; - var first_match: ?u64 = null; - while (i < manager.lockfile.packages.len) { - if (std.mem.indexOfScalar(u64, pkg_hashes[i..], name_hash)) |idx| { - defer i += idx + 1; - const pkg_id = i + idx; - const pkg = manager.lockfile.packages.get(pkg_id); - const pkg_name = pkg.name.slice(strbuf); - if (!std.mem.eql(u8, pkg_name, name)) continue; - matches_count += 1; - - // if they supplied a version it needs to match it, - // otherwise we'll just pick the first one we see, if there are multiple we 
throw error - if (version) |v| { - const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long"); - if (std.mem.eql(u8, label, v)) break :brk pkg_id; - } else { - first_match = pkg_id; - } - } else break; - } - if (first_match) |id| { - if (matches_count > 1) { - Output.prettyErrorln( - "\nerror: please specify a precise version:", - .{}, - ); - i = 0; - while (i < manager.lockfile.packages.len) { - if (std.mem.indexOfScalar(u64, pkg_hashes[i..], name_hash)) |idx| { - defer i += idx + 1; - const pkg_id = i + idx; - const pkg = manager.lockfile.packages.get(pkg_id); - if (!std.mem.eql(u8, pkg.name.slice(strbuf), name)) continue; + const dependencies = lockfile.buffers.dependencies.items; + + var matches_found: u32 = 0; + var maybe_first_match: ?struct { DependencyID, PackageID } = null; + for (dependencies, 0..) |dep, dep_id| { + if (dep.name_hash != name_hash) continue; + matches_found += 1; + const pkg_id = lockfile.buffers.resolutions.items[dep_id]; + if (pkg_id == invalid_package_id) continue; + const pkg = lockfile.packages.get(pkg_id); + if (version) |v| { + const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long"); + if (std.mem.eql(u8, label, v)) break :brk .{ @intCast(dep_id), pkg_id }; + } else maybe_first_match = .{ @intCast(dep_id), pkg_id }; + } + + const first_match = maybe_first_match orelse { + Output.prettyErrorln("\nerror: package {s} not found", .{pkg_maybe_version_to_patch}); + Global.crash(); + return; + }; - Output.prettyError(" {s}@{}\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) }); - } else break; - } - Output.flush(); - Global.crash(); - return; + if (matches_found > 1) { + Output.prettyErrorln( + "\nerror: Found multiple versions of {s}, please specify a precise version from the following list:\n", + .{name}, + ); + var i: usize = 0; + const pkg_hashes = lockfile.packages.items(.name_hash); + while (i < lockfile.packages.len) { + if (std.mem.indexOfScalar(u64, pkg_hashes[i..], name_hash)) |idx| { + defer i += idx + 1; + const pkg_id = i + idx; + const pkg = lockfile.packages.get(pkg_id); + if (!std.mem.eql(u8, pkg.name.slice(strbuf), name)) continue; + + Output.prettyError(" {s}@{}\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) }); + } else break; } - break :brk id; + Global.crash(); + return; } - Output.prettyErrorln( - "\nerror: could not find package: {s}\n", - .{@"pkg + maybe version to patch"}, - ); - Output.flush(); - return; + + break :brk .{ first_match[0], first_match[1] }; }; - const pkg = manager.lockfile.packages.get(pkg_id); + return .{ pkg_id, dependency_id }; + } - const resolution: *const Resolution = &manager.lockfile.packages.items(.resolution)[pkg_id]; - const stuff = manager.computeCacheDirAndSubpath(name, resolution, null); - const cache_dir_subpath: [:0]const u8 = stuff.cache_dir_subpath; - const cache_dir: std.fs.Dir = stuff.cache_dir; + const PatchArgKind = enum { + path, + name_and_version, - // copy the contents into a tempdir - var tmpname_buf: [1024]u8 = undefined; - const tempdir_name = bun.span(try bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom())); - const tmpdir = try bun.fs.FileSystem.instance.tmpdir(); - var destination_dir = try tmpdir.makeOpenPath(tempdir_name, .{}); - defer destination_dir.close(); - - var resolution_buf: [512]u8 = undefined; - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", 
.{pkg.resolution.fmt(strbuf, .posix)}) catch unreachable; - const dummy_node_modules = .{ - .path = std.ArrayList(u8).init(manager.allocator), - .tree_id = 0, - }; - var pkg_install = PreparePatchPackageInstall{ - .allocator = manager.allocator, - .cache_dir = cache_dir, - .cache_dir_subpath = cache_dir_subpath, - .destination_dir_subpath = tempdir_name, - .destination_dir_subpath_buf = tmpname_buf[0..], - .progress = .{}, - .package_name = name, - .package_version = resolution_label, - // dummy value - .node_modules = &dummy_node_modules, - .lockfile = manager.lockfile, - }; + pub fn fromArg(argument: []const u8) PatchArgKind { + if (bun.strings.hasPrefixComptime(argument, "node_modules/")) return .path; + if (bun.path.Platform.auto.isAbsolute(argument) and bun.strings.contains(argument, "node_modules/")) return .path; + if (comptime bun.Environment.isWindows) { + if (bun.strings.hasPrefix(argument, "node_modules\\")) return .path; + if (bun.path.Platform.auto.isAbsolute(argument) and bun.strings.contains(argument, "node_modules\\")) return .path; + } + return .name_and_version; + } + }; - switch (pkg_install.installWithMethod(true, tmpdir, .copyfile, pkg.resolution.tag)) { - .success => {}, - .fail => |reason| { - Output.prettyErrorln( - "\nerror: failed to copy package to temp directory: {s}, during step: {s}\n", - .{ - @errorName(reason.err), - reason.step.name(), + /// 1. Arg is either: + /// - name and possibly version (e.g. "is-even" or "is-even@1.0.0") + /// - path to package in node_modules + /// 2. Calculate cache dir for package + /// 3. Overwrite the input package with the one from the cache (cuz it could be hardlinked) + /// 4. Print to user + fn preparePatch(manager: *PackageManager) !void { + const strbuf = manager.lockfile.buffers.string_bytes.items; + const argument = manager.options.positionals[1]; + + const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); + + var folder_path_buf: bun.PathBuffer = undefined; + var iterator = Lockfile.Tree.Iterator.init(manager.lockfile); + var resolution_buf: [1024]u8 = undefined; + + var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined; + + const cache_dir: std.fs.Dir, const cache_dir_subpath: []const u8, const module_folder: []const u8, const pkg_name: []const u8 = switch (arg_kind) { + .path => brk: { + var lockfile = manager.lockfile; + const package_json_source: logger.Source = src: { + const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); + + switch (bun.sys.File.toSource(package_json_path, manager.allocator)) { + .result => |s| break :src s, + .err => |e| { + Output.prettyError( + "error: failed to read package.json: {}\n", + .{e.withPath(package_json_path).toSystemError()}, + ); + Global.crash(); + }, + } + }; + defer manager.allocator.free(package_json_source.contents); + + initializeStore(); + const json = json_parser.ParsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}; + }, + } + Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); + Global.crash(); + }; + + const version = version: { + if (json.asProperty("version")) |v| { + if (v.expr.asString(manager.allocator)) |s| break :version s; + } + Output.prettyError( + "error: invalid package.json, 
missing or invalid property \"version\": {s}\n", + .{package_json_source.path.text}, + ); + Global.crash(); + }; + + var package = Lockfile.Package{}; + try package.parseWithJSON(lockfile, manager.allocator, manager.log, package_json_source, json, void, {}, Features.folder); + + const name = lockfile.str(&package.name); + const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { + Output.prettyError( + "error: failed to find package in lockfile package index, this is a bug in Bun. Please file a GitHub issue.\n", + .{}, + ); + Global.crash(); + }) { + .PackageID => |id| lockfile.packages.get(id), + .PackageIDMultiple => |ids| id: { + for (ids.items) |id| { + const pkg = lockfile.packages.get(id); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; + if (std.mem.eql(u8, resolution_label, version)) { + break :id pkg; + } + } + Output.prettyError("error: could not find package with name: {s}\n", .{ + package.name.slice(lockfile.buffers.string_bytes.items), + }); + Global.crash(); }, + }; + + const existing_patchfile_hash = existing_patchfile_hash: { + var __sfb = std.heap.stackFallback(1024, manager.allocator); + const allocator = __sfb.get(); + const name_and_version = std.fmt.allocPrint(allocator, "{s}@{}", .{ name, actual_package.resolution.fmt(strbuf, .posix) }) catch unreachable; + defer allocator.free(name_and_version); + const name_and_version_hash = String.Builder.stringHash(name_and_version); + if (lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { + if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; + } + break :existing_patchfile_hash null; + }; + + const cache_result = manager.computeCacheDirAndSubpath( + name, + &actual_package.resolution, + &folder_path_buf, + existing_patchfile_hash, ); - Output.flush(); - return; + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, argument, win_normalizer[0..]) else argument; + + break :brk .{ + cache_dir, + cache_dir_subpath, + buf, + name, + }; }, - } + .name_and_version => brk: { + const pkg_maybe_version_to_patch = argument; + const name, const version = Dependency.splitNameAndVersion(pkg_maybe_version_to_patch); + const result = pkgDepIdForNameAndVersion(manager.lockfile, pkg_maybe_version_to_patch, name, version); + const pkg_id = result[0]; + const dependency_id = result[1]; + + const folder = (try nodeModulesFolderForDependencyID(&iterator, dependency_id)) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{pkg_maybe_version_to_patch}, + ); + Global.crash(); + }; - var pathbuf: bun.PathBuffer = undefined; - const pkg_to_patch_dir = switch (bun.sys.getFdPath(bun.toFD(destination_dir.fd), &pathbuf)) { - .result => |fd| fd, - .err => |e| { - Output.prettyErrorln( - "\nerror: {}\n", - .{ - e.toSystemError(), - }, + const pkg = manager.lockfile.packages.get(pkg_id); + const pkg_name = pkg.name.slice(strbuf); + + const existing_patchfile_hash = existing_patchfile_hash: { + var __sfb = std.heap.stackFallback(1024, manager.allocator); + const sfballoc = __sfb.get(); + const name_and_version = std.fmt.allocPrint(sfballoc, "{s}@{}", .{ name, pkg.resolution.fmt(strbuf, .posix) }) catch unreachable; + defer sfballoc.free(name_and_version); + const name_and_version_hash = 
String.Builder.stringHash(name_and_version); + if (manager.lockfile.patched_dependencies.get(name_and_version_hash)) |patched_dep| { + if (patched_dep.patchfileHash()) |hash| break :existing_patchfile_hash hash; + } + break :existing_patchfile_hash null; + }; + + const cache_result = manager.computeCacheDirAndSubpath( + pkg_name, + &pkg.resolution, + &folder_path_buf, + existing_patchfile_hash, ); - Output.flush(); - return; + + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const module_folder_ = bun.path.join(&[_][]const u8{ folder.relative_path, name }, .auto); + const buf = if (comptime bun.Environment.isWindows) bun.path.pathToPosixBuf(u8, module_folder_, win_normalizer[0..]) else module_folder_; + + break :brk .{ + cache_dir, + cache_dir_subpath, + buf, + pkg_name, + }; }, }; - Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ name, pkg_to_patch_dir }); - Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch-commit '{s}'\n", .{pkg_to_patch_dir}); + // The package may be installed using the hard link method, + // meaning that changes to the folder will also change the package in the cache. + // + // So we will overwrite the folder by directly copying the package in cache into it + manager.overwritePackageInNodeModulesFolder(cache_dir, cache_dir_subpath, module_folder) catch |e| { + Output.prettyError( + "error: error overwriting folder in node_modules: {s}\n", + .{@errorName(e)}, + ); + Global.crash(); + }; + + Output.pretty("\nTo patch {s}, edit the following folder:\n\n {s}\n", .{ pkg_name, module_folder }); + Output.pretty("\nOnce you're done with your changes, run:\n\n bun patch --commit '{s}'\n", .{module_folder}); return; } + fn overwritePackageInNodeModulesFolder( + manager: *PackageManager, + cache_dir: std.fs.Dir, + cache_dir_subpath: []const u8, + node_modules_folder_path: []const u8, + ) !void { + var node_modules_folder = try std.fs.cwd().openDir(node_modules_folder_path, .{ .iterate = true }); + defer node_modules_folder.close(); + + const IGNORED_PATHS: []const bun.OSPathSlice = &[_][]const bun.OSPathChar{ + bun.OSPathLiteral("node_modules"), + bun.OSPathLiteral(".git"), + bun.OSPathLiteral("CMakeFiles"), + }; + + const FileCopier = struct { + pub fn copy( + destination_dir_: std.fs.Dir, + walker: *Walker, + in_dir: if (bun.Environment.isWindows) []const u16 else void, + out_dir: if (bun.Environment.isWindows) []const u16 else void, + buf1: if (bun.Environment.isWindows) []u16 else void, + buf2: if (bun.Environment.isWindows) []u16 else void, + ) !u32 { + var real_file_count: u32 = 0; + + var copy_file_state: bun.CopyFileState = .{}; + var pathbuf: bun.PathBuffer = undefined; + var pathbuf2: bun.PathBuffer = undefined; + // _ = pathbuf; // autofix + + while (try walker.next()) |entry| { + if (entry.kind != .file) continue; + real_file_count += 1; + const openFile = std.fs.Dir.openFile; + const createFile = std.fs.Dir.createFile; + + // - rename node_modules/$PKG/$FILE -> node_modules/$PKG/$TMPNAME + // - create node_modules/$PKG/$FILE + // - copy: cache/$PKG/$FILE -> node_modules/$PKG/$FILE + // - unlink: $TMPDIR/$FILE + if (comptime bun.Environment.isWindows) { + var tmpbuf: [1024]u8 = undefined; + const basename = bun.strings.fromWPath(pathbuf2[0..], entry.basename); + const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname(basename, tmpbuf[0..], bun.fastRandom()) catch |e| { + Output.prettyError("error: copying file {s}", .{@errorName(e)}); + Global.crash(); + }); + 
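+                        // The file in node_modules may be a hardlink into Bun's cache, so
+                        // writing through it in place would corrupt the cached copy: rename
+                        // it out of the way, then create a fresh file and copy the cached
+                        // contents into it.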
+ const entrypath = bun.strings.fromWPath(pathbuf[0..], entry.path); + pathbuf[entrypath.len] = 0; + const entrypathZ = pathbuf[0..entrypath.len :0]; + + if (bun.sys.renameatConcurrently( + bun.toFD(destination_dir_.fd), + entrypathZ, + bun.toFD(destination_dir_.fd), + tmpname, + ).asErr()) |e| { + Output.prettyError("error: copying file {}", .{e}); + Global.crash(); + } + + var outfile = createFile(destination_dir_, entrypath, .{}) catch |e| { + Output.prettyError("error: failed to create file {s} ({s})", .{ entrypath, @errorName(e) }); + Global.crash(); + }; + outfile.close(); + + const infile_path = bun.path.joinStringBufWZ(buf1, &[_][]const u16{ in_dir, entry.path }, .auto); + const outfile_path = bun.path.joinStringBufWZ(buf2, &[_][]const u16{ out_dir, entry.path }, .auto); + + bun.copyFileWithState(infile_path, outfile_path, ©_file_state) catch |err| { + Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); + Global.crash(); + }; + } else if (comptime Environment.isPosix) { + var in_file = try openFile(entry.dir, entry.basename, .{ .mode = .read_only }); + defer in_file.close(); + + @memcpy(pathbuf[0..entry.path.len], entry.path); + pathbuf[entry.path.len] = 0; + + if (bun.sys.unlinkat( + bun.toFD(destination_dir_.fd), + pathbuf[0..entry.path.len :0], + ).asErr()) |e| { + Output.prettyError("error: copying file {}", .{e.withPath(entry.path)}); + Global.crash(); + } + + var outfile = try createFile(destination_dir_, entry.path, .{}); + defer outfile.close(); + + const stat = in_file.stat() catch continue; + _ = C.fchmod(outfile.handle, @intCast(stat.mode)); + + bun.copyFileWithState(in_file.handle, outfile.handle, ©_file_state) catch |err| { + Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); + Global.crash(); + }; + } + } + + return real_file_count; + } + }; + + var pkg_in_cache_dir = try cache_dir.openDir(cache_dir_subpath, .{ .iterate = true }); + defer pkg_in_cache_dir.close(); + var walker = Walker.walk(pkg_in_cache_dir, manager.allocator, &.{}, IGNORED_PATHS) catch bun.outOfMemory(); + defer walker.deinit(); + + var buf1: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; + var buf2: if (bun.Environment.isWindows) bun.WPathBuffer else void = undefined; + var in_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; + var out_dir: if (bun.Environment.isWindows) []const u16 else void = undefined; + + if (comptime bun.Environment.isWindows) { + const inlen = bun.windows.kernel32.GetFinalPathNameByHandleW(pkg_in_cache_dir.fd, &buf1, buf1.len, 0); + if (inlen == 0) { + const e = bun.windows.Win32Error.get(); + const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; + Output.prettyError("error: copying file {}", .{err}); + Global.crash(); + } + in_dir = buf1[0..inlen]; + const outlen = bun.windows.kernel32.GetFinalPathNameByHandleW(node_modules_folder.fd, &buf2, buf2.len, 0); + if (outlen == 0) { + const e = bun.windows.Win32Error.get(); + const err = if (e.toSystemErrno()) |sys_err| bun.errnoToZigErr(sys_err) else error.Unexpected; + Output.prettyError("error: copying file {}", .{err}); + Global.crash(); + } + out_dir = buf2[0..outlen]; + _ = try FileCopier.copy( + node_modules_folder, + &walker, + in_dir, + out_dir, + &buf1, + &buf2, + ); + } else if (Environment.isPosix) { + _ = try FileCopier.copy( + node_modules_folder, + &walker, + {}, + {}, + {}, + {}, + ); + } + } + const PatchCommitResult = struct { 
patch_key: []const u8, patchfile_path: []const u8, }; - /// - Arg is the tempdir containing the package with changes + /// - Arg is the dir containing the package with changes OR name and version /// - Get the patch file contents by running git diff on the temp dir and the original package dir /// - Write the patch file to $PATCHES_DIR/$PKG_NAME_AND_VERSION.patch /// - Update "patchedDependencies" in package.json @@ -10257,12 +10543,14 @@ pub const PackageManager = struct { manager: *PackageManager, pathbuf: *bun.PathBuffer, comptime log_level: Options.LogLevel, - ) !PatchCommitResult { + ) !?PatchCommitResult { + var folder_path_buf: bun.PathBuffer = undefined; var lockfile: *Lockfile = try manager.allocator.create(Lockfile); defer lockfile.deinit(); switch (lockfile.loadFromDisk(manager, manager.allocator, manager.log, manager.options.lockfile_path, true)) { .not_found => { - Output.panic("Lockfile not found", .{}); + Output.errGeneric("Cannot find lockfile. Install packages with `bun install` before patching them.", .{}); + Global.crash(); }, .err => |cause| { if (log_level != .silent) { @@ -10294,106 +10582,321 @@ pub const PackageManager = struct { .ok => {}, } - const patched_pkg_folder = manager.options.positionals[1]; - if (patched_pkg_folder.len >= bun.MAX_PATH_BYTES) { - Output.prettyError("error: argument provided is too long\n", .{}); - Output.flush(); - Global.crash(); - } - @memcpy(pathbuf[0..patched_pkg_folder.len], patched_pkg_folder); - pathbuf[patched_pkg_folder.len] = 0; + const argument = manager.options.positionals[1]; - var versionbuf: [1024]u8 = undefined; - const version = switch (patchCommitGetVersion( - &versionbuf, - bun.path.joinZ(&[_][]const u8{ patched_pkg_folder, ".bun-patch-tag" }, .auto), - )) { - .result => |v| v, + const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); + + // Attempt to open the existing node_modules folder + var root_node_modules = switch (bun.sys.openatOSPath(bun.FD.cwd(), bun.OSPathLiteral("node_modules"), std.os.O.DIRECTORY | std.os.O.RDONLY, 0o755)) { + .result => |fd| std.fs.Dir{ .fd = fd.cast() }, .err => |e| { - Output.prettyError("error: failed to get bun patch tag: {}\n", .{e.toSystemError()}); - Output.flush(); + Output.prettyError( + "error: failed to open root node_modules folder: {}\n", + .{e}, + ); Global.crash(); }, }; + defer root_node_modules.close(); + + var iterator = Lockfile.Tree.Iterator.init(lockfile); + var resolution_buf: [1024]u8 = undefined; + const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { + .path => result: { + const package_json_source: logger.Source = brk: { + const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); + + switch (bun.sys.File.toSource(package_json_path, manager.allocator)) { + .result => |s| break :brk s, + .err => |e| { + Output.prettyError( + "error: failed to read package.json: {}\n", + .{e.withPath(package_json_path).toSystemError()}, + ); + Global.crash(); + }, + } + }; + defer manager.allocator.free(package_json_source.contents); - const package_json_source: logger.Source = brk: { - const patched_pkg_folderZ = pathbuf[0..patched_pkg_folder.len :0]; - const pkgjsonpath = bun.path.joinZ(&[_][]const u8{ - patched_pkg_folderZ, - "package.json", - }, .auto); + initializeStore(); + const json = json_parser.ParsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + switch (Output.enable_ansi_colors) { + inline else => 
|enable_ansi_colors| { + manager.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}; + }, + } + Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); + Global.crash(); + }; - switch (bun.sys.File.toSource(pkgjsonpath, manager.allocator)) { - .result => |s| break :brk s, - .err => |e| { + const version = version: { + if (json.asProperty("version")) |v| { + if (v.expr.asString(manager.allocator)) |s| break :version s; + } Output.prettyError( - "error: failed to read package.json: {}\n", - .{e.withPath(pkgjsonpath).toSystemError()}, + "error: invalid package.json, missing or invalid property \"version\": {s}\n", + .{package_json_source.path.text}, ); - Output.flush(); Global.crash(); - }, - } - }; - defer manager.allocator.free(package_json_source.contents); + }; - var package = Lockfile.Package{}; - try package.parse(lockfile, manager.allocator, manager.log, package_json_source, void, {}, Features.folder); - const name = lockfile.str(&package.name); - var resolution_buf: [1024]u8 = undefined; - const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { - Output.prettyError( - "error: failed to find package in lockfile package index, this is a bug in Bun. Please file a GitHub issue.\n", - .{}, - ); - Output.flush(); - Global.crash(); - }) { - .PackageID => |id| lockfile.packages.get(id), - .PackageIDMultiple => |ids| brk: { - for (ids.items) |id| { - const pkg = lockfile.packages.get(id); - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; - if (std.mem.eql(u8, resolution_label, version)) { - break :brk pkg; - } - } - Output.prettyError("error: could not find package with name: {s}\n", .{ - package.name.slice(lockfile.buffers.string_bytes.items), - }); - Output.flush(); - Global.crash(); + var package = Lockfile.Package{}; + try package.parseWithJSON(lockfile, manager.allocator, manager.log, package_json_source, json, void, {}, Features.folder); + + const name = lockfile.str(&package.name); + const actual_package = switch (lockfile.package_index.get(package.name_hash) orelse { + Output.prettyError( + "error: failed to find package in lockfile package index, this is a bug in Bun. 
Please file a GitHub issue.\n", + .{}, + ); + Global.crash(); + }) { + .PackageID => |id| lockfile.packages.get(id), + .PackageIDMultiple => |ids| brk: { + for (ids.items) |id| { + const pkg = lockfile.packages.get(id); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; + if (std.mem.eql(u8, resolution_label, version)) { + break :brk pkg; + } + } + Output.prettyError("error: could not find package with name: {s}\n", .{ + package.name.slice(lockfile.buffers.string_bytes.items), + }); + Global.crash(); + }, + }; + + const cache_result = manager.computeCacheDirAndSubpath( + name, + &actual_package.resolution, + &folder_path_buf, + null, + ); + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + + const changes_dir = argument; + + break :result .{ cache_dir, cache_dir_subpath, changes_dir, actual_package }; + }, + .name_and_version => brk: { + const name, const version = Dependency.splitNameAndVersion(argument); + const result = pkgDepIdForNameAndVersion(lockfile, argument, name, version); + const pkg_id: PackageID = result[0]; + const dependency_id: DependencyID = result[1]; + const node_modules = (try nodeModulesFolderForDependencyID( + &iterator, + dependency_id, + )) orelse { + Output.prettyError( + "error: could not find the folder for {s} in node_modules\n", + .{argument}, + ); + Global.crash(); + }; + const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{ + node_modules.relative_path, + name, + }, .auto); + const pkg = lockfile.packages.get(pkg_id); + + const cache_result = manager.computeCacheDirAndSubpath( + pkg.name.slice(lockfile.buffers.string_bytes.items), + &pkg.resolution, + &folder_path_buf, + null, + ); + const cache_dir = cache_result.cache_dir; + const cache_dir_subpath = cache_result.cache_dir_subpath; + break :brk .{ cache_dir, cache_dir_subpath, changes_dir, pkg }; }, }; - const resolution_label = std.fmt.bufPrint(&resolution_buf, "{s}@{}", .{ name, actual_package.resolution.fmt(lockfile.buffers.string_bytes.items, .posix) }) catch unreachable; - const stuff = manager.computeCacheDirAndSubpath(name, &actual_package.resolution, null); + + // zls + const cache_dir: std.fs.Dir = _cache_dir; + const cache_dir_subpath: stringZ = _cache_dir_subpath; + const changes_dir: []const u8 = _changes_dir; + const pkg: Package = _pkg; + + const name = pkg.name.slice(lockfile.buffers.string_bytes.items); + const resolution_label = std.fmt.bufPrint(&resolution_buf, "{s}@{}", .{ name, pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix) }) catch unreachable; const patchfile_contents = brk: { - const new_folder = patched_pkg_folder; + const new_folder = changes_dir; var buf2: bun.PathBuffer = undefined; + var buf3: bun.PathBuffer = undefined; const old_folder = old_folder: { - const cache_dir_path = switch (bun.sys.getFdPath(bun.toFD(stuff.cache_dir.fd), &buf2)) { + const cache_dir_path = switch (bun.sys.getFdPath(bun.toFD(cache_dir.fd), &buf2)) { .result => |s| s, .err => |e| { Output.prettyError( "error: failed to read from cache {}\n", .{e.toSystemError()}, ); - Output.flush(); Global.crash(); }, }; break :old_folder bun.path.join(&[_][]const u8{ cache_dir_path, - stuff.cache_dir_subpath, + cache_dir_subpath, }, .posix); }; - break :brk switch (bun.patch.gitDiff(manager.allocator, old_folder, new_folder) catch |e| { + + const random_tempdir = bun.span(bun.fs.FileSystem.instance.tmpname(name, buf2[0..], 
bun.fastRandom()) catch |e| {
+                Output.prettyError(
+                    "error: failed to make tempdir {s}\n",
+                    .{@errorName(e)},
+                );
+                Global.crash();
+            });
+
+            // If the package has a nested node_modules folder, we don't want this to
+            // appear in the patch file when we run git diff.
+            //
+            // There isn't an option to exclude it with `git diff --no-index`, so we
+            // will `rename()` it out and back again.
+            const has_nested_node_modules = has_nested_node_modules: {
+                var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| {
+                    Output.prettyError(
+                        "error: failed to open directory {s} {s}\n",
+                        .{ new_folder, @errorName(e) },
+                    );
+                    Global.crash();
+                };
+                defer new_folder_handle.close();
+
+                if (bun.sys.renameatConcurrently(
+                    bun.toFD(new_folder_handle.fd),
+                    "node_modules",
+                    bun.toFD(root_node_modules.fd),
+                    random_tempdir,
+                ).asErr()) |_| break :has_nested_node_modules false;
+
+                break :has_nested_node_modules true;
+            };
+
+            const patch_tag_tmpname = bun.span(bun.fs.FileSystem.instance.tmpname(name, buf3[0..], bun.fastRandom()) catch |e| {
+                Output.prettyError(
+                    "error: failed to make tempdir {s}\n",
+                    .{@errorName(e)},
+                );
+                Global.crash();
+            });
+
+            var bunpatchtagbuf: BuntagHashBuf = undefined;
+            // If the package was already patched then it might have a ".bun-tag-XXXXXXXX" file;
+            // we need to rename this out and back too.
+            const bun_patch_tag: ?[:0]const u8 = has_bun_patch_tag: {
+                const name_and_version_hash = String.Builder.stringHash(resolution_label);
+                const patch_tag = patch_tag: {
+                    if (lockfile.patched_dependencies.get(name_and_version_hash)) |patchdep| {
+                        if (patchdep.patchfileHash()) |hash| {
+                            break :patch_tag buntaghashbuf_make(&bunpatchtagbuf, hash);
+                        }
+                    }
+                    break :has_bun_patch_tag null;
+                };
+                var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| {
+                    Output.prettyError(
+                        "error: failed to open directory {s} {s}\n",
+                        .{ new_folder, @errorName(e) },
+                    );
+                    Global.crash();
+                };
+                defer new_folder_handle.close();
+
+                if (bun.sys.renameatConcurrently(
+                    bun.toFD(new_folder_handle.fd),
+                    patch_tag,
+                    bun.toFD(root_node_modules.fd),
+                    patch_tag_tmpname,
+                ).asErr()) |e| {
+                    Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e});
+                    break :has_bun_patch_tag null;
+                }
+                break :has_bun_patch_tag patch_tag;
+            };
+            defer {
+                if (has_nested_node_modules or bun_patch_tag != null) {
+                    var new_folder_handle = std.fs.cwd().openDir(new_folder, .{}) catch |e| {
+                        Output.prettyError(
+                            "error: failed to open directory {s} {s}\n",
+                            .{ new_folder, @errorName(e) },
+                        );
+                        Global.crash();
+                    };
+                    defer new_folder_handle.close();
+
+                    if (has_nested_node_modules) {
+                        if (bun.sys.renameatConcurrently(
+                            bun.toFD(root_node_modules.fd),
+                            random_tempdir,
+                            bun.toFD(new_folder_handle.fd),
+                            "node_modules",
+                        ).asErr()) |e| {
+                            Output.warn("failed renaming nested node_modules folder, this may cause issues: {}", .{e});
+                        }
+                    }
+
+                    if (bun_patch_tag) |patch_tag| {
+                        if (bun.sys.renameatConcurrently(
+                            bun.toFD(root_node_modules.fd),
+                            patch_tag_tmpname,
+                            bun.toFD(new_folder_handle.fd),
+                            patch_tag,
+                        ).asErr()) |e| {
+                            Output.warn("failed renaming the bun patch tag, this may cause issues: {}", .{e});
+                        }
+                    }
+                }
+            }
+
+            var cwdbuf: bun.PathBuffer = undefined;
+            const cwd = switch (bun.sys.getcwdZ(&cwdbuf)) {
+                .result => |fd| fd,
+                .err => |e| {
+                    Output.prettyError(
+                        "error: failed to get cwd path {}\n",
+                        .{e},
+                    );
+                    Global.crash();
+                },
+            };
+            var gitbuf: bun.PathBuffer = undefined;
+            const git = bun.which(&gitbuf, bun.getenvZ("PATH") orelse
"", cwd, "git") orelse { + Output.prettyError( + "error: git must be installed to use `bun patch --commit` \n", + .{}, + ); + Global.crash(); + }; + const paths = bun.patch.gitDiffPreprocessPaths(bun.default_allocator, old_folder, new_folder, false); + const opts = bun.patch.spawnOpts(paths[0], paths[1], cwd, git, &manager.event_loop); + + var spawn_result = switch (bun.spawnSync(&opts) catch |e| { + Output.prettyError( + "error: failed to make diff {s}\n", + .{@errorName(e)}, + ); + Global.crash(); + }) { + .result => |r| r, + .err => |e| { + Output.prettyError( + "error: failed to make diff {}\n", + .{e}, + ); + Global.crash(); + }, + }; + + const contents = switch (bun.patch.diffPostProcess(&spawn_result, paths[0], paths[1]) catch |e| { Output.prettyError( "error: failed to make diff {s}\n", .{@errorName(e)}, ); - Output.flush(); Global.crash(); }) { .result => |stdout| stdout, @@ -10420,17 +10923,25 @@ pub const PackageManager = struct { Truncate{ .stderr = stderr }, }, ); - Output.flush(); Global.crash(); }, }; + + if (contents.items.len == 0) { + Output.pretty("\nNo changes detected, comparing {s} to {s}\n", .{ old_folder, new_folder }); + Output.flush(); + contents.deinit(); + return null; + } + + break :brk contents; }; defer patchfile_contents.deinit(); // write the patch contents to temp file then rename var tmpname_buf: [1024]u8 = undefined; const tempfile_name = bun.span(try bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom())); - const tmpdir = try bun.fs.FileSystem.instance.tmpdir(); + const tmpdir = manager.getTemporaryDirectory(); const tmpfd = switch (bun.sys.openat( bun.toFD(tmpdir.fd), tempfile_name, @@ -10443,7 +10954,6 @@ pub const PackageManager = struct { "error: failed to open temp file {}\n", .{e.toSystemError()}, ); - Output.flush(); Global.crash(); }, }; @@ -10454,7 +10964,6 @@ pub const PackageManager = struct { "error: failed to write patch to temp file {}\n", .{e.toSystemError()}, ); - Output.flush(); Global.crash(); } @@ -10484,29 +10993,26 @@ pub const PackageManager = struct { "error: failed to make patches dir {}\n", .{e.toSystemError()}, ); - Output.flush(); Global.crash(); } // rename to patches dir - if (bun.sys.renameat2( + if (bun.sys.renameatConcurrently( bun.toFD(tmpdir.fd), tempfile_name, bun.FD.cwd(), path_in_patches_dir, - .{ .exclude = true }, ).asErr()) |e| { Output.prettyError( - "error: failed to renaming patch file to patches dir {}\n", + "error: failed renaming patch file to patches dir {}\n", .{e.toSystemError()}, ); - Output.flush(); Global.crash(); } const patch_key = std.fmt.allocPrint(manager.allocator, "{s}", .{resolution_label}) catch bun.outOfMemory(); const patchfile_path = manager.allocator.dupe(u8, path_in_patches_dir) catch bun.outOfMemory(); - _ = bun.sys.unlink(bun.path.joinZ(&[_][]const u8{ patched_pkg_folder, ".bun-patch-tag" }, .auto)); + _ = bun.sys.unlink(bun.path.joinZ(&[_][]const u8{ changes_dir, ".bun-patch-tag" }, .auto)); return .{ .patch_key = patch_key, @@ -11120,14 +11626,25 @@ pub const PackageManager = struct { break :brk ""; } else std.fmt.bufPrint(&resolution_buf, "{}", .{resolution.fmt(buf, .posix)}) catch unreachable; - const patch_patch, const patch_contents_hash, const patch_name_and_version_hash = brk: { - if (this.manager.lockfile.patched_dependencies.entries.len == 0) break :brk .{ null, null, null }; + const patch_patch, const patch_contents_hash, const patch_name_and_version_hash, const remove_patch = brk: { + if (this.manager.lockfile.patched_dependencies.entries.len == 0 and 
this.manager.patched_dependencies_to_remove.entries.len == 0) break :brk .{ null, null, null, false }; var sfb = std.heap.stackFallback(1024, this.lockfile.allocator); const name_and_version = std.fmt.allocPrint(sfb.get(), "{s}@{s}", .{ name, package_version }) catch unreachable; defer sfb.get().free(name_and_version); const name_and_version_hash = String.Builder.stringHash(name_and_version); - const patchdep = this.lockfile.patched_dependencies.get(name_and_version_hash) orelse break :brk .{ null, null, null }; + const patchdep = this.lockfile.patched_dependencies.get(name_and_version_hash) orelse { + const to_remove = this.manager.patched_dependencies_to_remove.contains(name_and_version_hash); + if (to_remove) { + break :brk .{ + null, + null, + name_and_version_hash, + true, + }; + } + break :brk .{ null, null, null, false }; + }; bun.assert(!patchdep.patchfile_hash_is_null); // if (!patchdep.patchfile_hash_is_null) { // this.manager.enqueuePatchTask(PatchTask.newCalcPatchHash(this, package_id, name_and_version_hash, dependency_id, url: string)) @@ -11136,6 +11653,7 @@ pub const PackageManager = struct { patchdep.path.slice(this.lockfile.buffers.string_bytes.items), patchdep.patchfileHash().?, name_and_version_hash, + false, }; }; @@ -11276,7 +11794,7 @@ pub const PackageManager = struct { }, } - const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or !installer.verify( + const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or !installer.verify( resolution, buf, this.root_node_modules_folder, @@ -11284,7 +11802,7 @@ pub const PackageManager = struct { this.summary.skipped += @intFromBool(!needs_install); if (needs_install) { - if (resolution.tag.canEnqueueInstallTask() and installer.packageMissingFromCache(this.manager, package_id)) { + if (!remove_patch and resolution.tag.canEnqueueInstallTask() and installer.packageMissingFromCache(this.manager, package_id)) { if (comptime Environment.allow_assert) { bun.assert(resolution.canEnqueueInstallTask()); } @@ -11368,7 +11886,7 @@ pub const PackageManager = struct { // above checks if unpatched package is in cache, if not null apply patch in temp directory, copy // into cache, then install into node_modules if (!installer.patch.isNull()) { - if (installer.patchedPackageMissingFromCache(this.manager, package_id, installer.patch.patch_contents_hash)) { + if (installer.patchedPackageMissingFromCache(this.manager, package_id)) { const task = PatchTask.newApplyPatchHash( this.manager, package_id, @@ -12668,7 +13186,6 @@ pub const PackageManager = struct { // Update patched dependencies { var iter = lockfile.patched_dependencies.iterator(); - // TODO: if one key is present in manager.lockfile and not present in lockfile we should get rid of it while (iter.next()) |entry| { const pkg_name_and_version_hash = entry.key_ptr.*; bun.debugAssert(entry.value_ptr.patchfile_hash_is_null); @@ -12687,6 +13204,26 @@ pub const PackageManager = struct { gop.value_ptr.setPatchfileHash(null); } } + + var count: usize = 0; + iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { + count += 1; + } + } + if (count > 0) { + try manager.patched_dependencies_to_remove.ensureTotalCapacity(manager.allocator, count); + iter = manager.lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if 
(!lockfile.patched_dependencies.contains(entry.key_ptr.*)) { + try manager.patched_dependencies_to_remove.put(manager.allocator, entry.key_ptr.*, {}); + } + } + for (manager.patched_dependencies_to_remove.keys()) |hash| { + _ = manager.lockfile.patched_dependencies.orderedRemove(hash); + } + } } builder.clamp(); @@ -12761,15 +13298,15 @@ pub const PackageManager = struct { _ = manager.getCacheDirectory(); _ = manager.getTemporaryDirectory(); } - manager.enqueueDependencyList(root.dependencies); { var iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| if (entry.value_ptr.patchfile_hash_is_null) manager.enqueuePatchTask(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); + while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); } + manager.enqueueDependencyList(root.dependencies); } else { { var iter = manager.lockfile.patched_dependencies.iterator(); - while (iter.next()) |entry| if (entry.value_ptr.patchfile_hash_is_null) manager.enqueuePatchTask(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); + while (iter.next()) |entry| manager.enqueuePatchTaskPre(PatchTask.newCalcPatchHash(manager, entry.key_ptr.*, null)); } // Anything that needs to be downloaded from an update needs to be scheduled here manager.drainDependencyList(); @@ -12789,7 +13326,7 @@ pub const PackageManager = struct { } const runAndWaitFn = struct { - pub fn runAndWaitFn(comptime check_peers: bool) *const fn (*PackageManager) anyerror!void { + pub fn runAndWaitFn(comptime check_peers: bool, comptime only_pre_patch: bool) *const fn (*PackageManager) anyerror!void { return struct { manager: *PackageManager, err: ?anyerror = null, @@ -12827,6 +13364,11 @@ pub const PackageManager = struct { } } + if (comptime only_pre_patch) { + const pending_patch = this.pending_pre_calc_hashes.load(.Monotonic); + return pending_patch == 0; + } + const pending_tasks = this.pendingTaskCount(); if (PackageManager.verbose_install and pending_tasks > 0) { @@ -12851,8 +13393,13 @@ pub const PackageManager = struct { } }.runAndWaitFn; - const waitForEverythingExceptPeers = runAndWaitFn(false); - const waitForPeers = runAndWaitFn(true); + const waitForCalcingPatchHashes = runAndWaitFn(false, true); + const waitForEverythingExceptPeers = runAndWaitFn(false, false); + const waitForPeers = runAndWaitFn(true, false); + + if (manager.lockfile.patched_dependencies.entries.len > 0) { + try waitForCalcingPatchHashes(manager); + } if (manager.pendingTaskCount() > 0) { try waitForEverythingExceptPeers(manager); diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index c05fd17f7786ed..d640ed780c9511 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -3812,6 +3812,10 @@ pub const Package = extern struct { )) break :patched_dependencies_changed true; } else break :patched_dependencies_changed true; } + iter = from_lockfile.patched_dependencies.iterator(); + while (iter.next()) |entry| { + if (!to_lockfile.patched_dependencies.contains(entry.key_ptr.*)) break :patched_dependencies_changed true; + } break :patched_dependencies_changed false; }; diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index 9278552b04d2a2..8ba1d94f9b4b60 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -10,6 +10,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const logger = bun.logger; +const Loc = logger.Loc; const PackageManager = bun.PackageManager; pub 
const PackageID = bun.install.PackageID; @@ -40,6 +41,7 @@ pub const BuntagHashBuf = [max_buntag_hash_buf_len]u8; pub const PatchTask = struct { manager: *PackageManager, + tempdir: std.fs.Dir, project_dir: []const u8, callback: union(enum) { calc_hash: CalcPatchHash, @@ -48,15 +50,11 @@ pub const PatchTask = struct { task: ThreadPool.Task = .{ .callback = runFromThreadPool, }, + pre: bool = false, next: ?*PatchTask = null, const debug = bun.Output.scoped(.InstallPatch, false); - fn errDupePath(e: bun.sys.Error) bun.sys.Error { - if (e.path.len > 0) return e.withPath(bun.default_allocator.dupe(u8, e.path) catch bun.outOfMemory()); - return e; - } - const Maybe = bun.sys.Maybe; const CalcPatchHash = struct { @@ -65,7 +63,9 @@ pub const PatchTask = struct { state: ?EnqueueAfterState = null, - result: ?Maybe(u64) = null, + result: ?u64 = null, + + logger: logger.Log, const EnqueueAfterState = struct { pkg_id: PackageID, @@ -105,15 +105,12 @@ pub const PatchTask = struct { this.manager.allocator.free(this.callback.apply.cache_dir_subpath); this.manager.allocator.free(this.callback.apply.pkgname); if (this.callback.apply.install_context) |ictx| ictx.path.deinit(); + this.callback.apply.logger.deinit(); }, .calc_hash => { // TODO: how to deinit `this.callback.calc_hash.network_task` if (this.callback.calc_hash.state) |state| this.manager.allocator.free(state.url); - if (this.callback.calc_hash.result) |r| { - if (r.asErr()) |e| { - if (e.path.len > 0) bun.default_allocator.free(e.path); - } - } + this.callback.calc_hash.logger.deinit(); this.manager.allocator.free(this.callback.calc_hash.patchfile_path); }, } @@ -147,6 +144,9 @@ pub const PatchTask = struct { comptime log_level: PackageManager.Options.LogLevel, ) !void { debug("runFromThreadMainThread {s}", .{@tagName(this.callback)}); + defer { + if (this.pre) _ = manager.pending_pre_calc_hashes.fetchSub(1, .Monotonic); + } switch (this.callback) { .calc_hash => try this.runFromMainThreadCalcHash(manager, log_level), .apply => this.runFromMainThreadApply(manager), @@ -157,7 +157,7 @@ pub const PatchTask = struct { _ = manager; // autofix if (this.callback.apply.logger.errors > 0) { defer this.callback.apply.logger.deinit(); - // this.log.addErrorFmt(null, logger.Loc.Empty, bun.default_allocator, "failed to apply patch: {}", .{e}) catch unreachable; + Output.errGeneric("failed to apply patchfile ({s})", .{this.callback.apply.patchfilepath}); this.callback.apply.logger.printForLogLevel(Output.writer()) catch {}; } } @@ -170,42 +170,23 @@ pub const PatchTask = struct { // TODO only works for npm package // need to switch on version.tag and handle each case appropriately const calc_hash = &this.callback.calc_hash; - const hash = switch (calc_hash.result orelse @panic("Calc hash didn't run, this is a bug in Bun.")) { - .result => |h| h, - .err => |e| { - if (e.getErrno() == bun.C.E.NOENT) { - const fmt = "\n\nerror: could not find patch file {s}\n\nPlease make sure it exists.\n\nTo create a new patch file run:\n\n bun patch {s}\n"; - const args = .{ - this.callback.calc_hash.patchfile_path, - manager.lockfile.patched_dependencies.get(calc_hash.name_and_version_hash).?.path.slice(manager.lockfile.buffers.string_bytes.items), - }; - if (comptime log_level.showProgress()) { - Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress); - } else { - Output.prettyErrorln( - fmt, - args, - ); - Output.flush(); - } - Global.crash(); - } - - const fmt = "\n\nerror: {s}{s} while calculating hash for patchfile: {s}\n"; - const args = .{ 
@tagName(e.getErrno()), e.path, this.callback.calc_hash.patchfile_path };
-                    if (comptime log_level.showProgress()) {
-                        Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
-                    } else {
-                        Output.prettyErrorln(
-                            fmt,
-                            args,
-                        );
-                        Output.flush();
-                    }
-                    Global.crash();
-
-                    return;
-                },
+            const hash = calc_hash.result orelse {
+                const fmt = "\n\nErrors occurred while calculating hash for {s}:\n\n";
+                const args = .{this.callback.calc_hash.patchfile_path};
+                if (comptime log_level.showProgress()) {
+                    Output.prettyWithPrinterFn(fmt, args, Progress.log, &manager.progress);
+                } else {
+                    Output.prettyErrorln(
+                        fmt,
+                        args,
+                    );
+                }
+                if (calc_hash.logger.errors > 0) {
+                    Output.prettyErrorln("\n\n", .{});
+                    calc_hash.logger.printForLogLevel(Output.writer()) catch {};
+                }
+                Output.flush();
+                Global.crash();
             };

             var gop = manager.lockfile.patched_dependencies.getOrPut(manager.allocator, calc_hash.name_and_version_hash) catch bun.outOfMemory();
@@ -272,7 +253,7 @@ pub const PatchTask = struct {
         // 5. Add bun tag for patch hash
         // 6. rename() newly patched pkg to cache
         pub fn apply(this: *PatchTask) !void {
-            var log = this.callback.apply.logger;
+            var log = &this.callback.apply.logger;
             debug("apply patch task", .{});
             bun.assert(this.callback == .apply);
@@ -318,14 +299,7 @@ pub const PatchTask = struct {
            // 2. Create temp dir to do all the modifications
            var tmpname_buf: [1024]u8 = undefined;
            const tempdir_name = bun.span(bun.fs.FileSystem.instance.tmpname("tmp", &tmpname_buf, bun.fastRandom()) catch bun.outOfMemory());
-            const system_tmpdir = bun.fs.FileSystem.instance.tmpdir() catch |e| {
-                try log.addErrorFmtNoLoc(
-                    this.manager.allocator,
-                    "failed to creating temp dir: {s}",
-                    .{@errorName(e)},
-                );
-                return;
-            };
+            const system_tmpdir = this.tempdir;

            const pkg_name = this.callback.apply.pkgname;
@@ -401,57 +375,18 @@ pub const PatchTask = struct {
                },
                .auto,
            );
-            // var allocated = false;
-            // const package_name_z = brk: {
-            //     if (this.package_name.len < tmpname_buf.len) {
-            //         @memcpy(tmpname_buf[0..this.package_name.len], this.package_name);
-            //         tmpname_buf[this.package_name.len] = 0;
-            //         break :brk tmpname_buf[0..this.package_name.len :0];
-            //     }
-            //     allocated = true;
-            //     break :brk this.manager.allocator.dupeZ(u8, this.package_name) catch bun.outOfMemory();
-            // };
-            // defer if (allocated) this.manager.allocator.free(package_name_z);
-
-            worked: {
-                if (bun.sys.renameat2(
-                    bun.toFD(system_tmpdir.fd),
-                    path_in_tmpdir,
-                    bun.toFD(this.callback.apply.cache_dir.fd),
-                    this.callback.apply.cache_dir_subpath,
-                    .{
-                        .exclude = true,
-                    },
-                ).asErr()) |e_| {
-                    var e = e_;
-
-                    if (if (comptime bun.Environment.isWindows) switch (e.getErrno()) {
-                        bun.C.E.NOTEMPTY, bun.C.E.EXIST => true,
-                        else => false,
-                    } else switch (e.getErrno()) {
-                        bun.C.E.NOTEMPTY, bun.C.E.EXIST, bun.C.E.OPNOTSUPP => true,
-                        else => false,
-                    }) {
-                        switch (bun.sys.renameat2(
-                            bun.toFD(system_tmpdir.fd),
-                            path_in_tmpdir,
-                            bun.toFD(this.callback.apply.cache_dir.fd),
-                            this.callback.apply.cache_dir_subpath,
-                            .{
-                                .exchange = true,
-                            },
-                        )) {
-                            .err => |ee| e = ee,
-                            .result => break :worked,
-                        }
-                    }
-                    return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
-                }
-            }
+
+            if (bun.sys.renameatConcurrently(
+                bun.toFD(system_tmpdir.fd),
+                path_in_tmpdir,
+                bun.toFD(this.callback.apply.cache_dir.fd),
+                this.callback.apply.cache_dir_subpath,
+            ).asErr()) |e| return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
        }

-        pub fn calcHash(this: *PatchTask) Maybe(u64) {
+        pub fn calcHash(this: *PatchTask) ?u64 {
            bun.assert(this.callback == .calc_hash);

+            var log = &this.callback.calc_hash.logger;
            const dir = this.project_dir;
            const patchfile_path = this.callback.calc_hash.patchfile_path;
@@ -463,13 +398,50 @@ pub const PatchTask = struct {
            }, .auto);

            const stat: bun.Stat = switch (bun.sys.stat(absolute_patchfile_path)) {
-                .err => |e| return .{ .err = errDupePath(e) },
+                .err => |e| {
+                    if (e.getErrno() == bun.C.E.NOENT) {
+                        const fmt = "\n\nerror: could not find patch file {s}\n\nPlease make sure it exists.\n\nTo create a new patch file run:\n\n bun patch {s}\n";
+                        const args = .{
+                            this.callback.calc_hash.patchfile_path,
+                            this.manager.lockfile.patched_dependencies.get(this.callback.calc_hash.name_and_version_hash).?.path.slice(this.manager.lockfile.buffers.string_bytes.items),
+                        };
+                        log.addErrorFmt(null, Loc.Empty, this.manager.allocator, fmt, args) catch bun.outOfMemory();
+                        return null;
+                    }
+                    log.addErrorFmt(
+                        null,
+                        Loc.Empty,
+                        this.manager.allocator,
+                        "failed to stat patch file {s}: {}",
+                        .{ absolute_patchfile_path, e },
+                    ) catch bun.outOfMemory();
+                    return null;
+                },
                .result => |s| s,
            };

            const size: u64 = @intCast(stat.size);
+            if (size == 0) {
+                log.addErrorFmt(
+                    null,
+                    Loc.Empty,
+                    this.manager.allocator,
+                    "patchfile {s} is empty, please restore or delete it.",
+                    .{absolute_patchfile_path},
+                ) catch bun.outOfMemory();
+                return null;
+            }

            const fd = switch (bun.sys.open(absolute_patchfile_path, std.os.O.RDONLY, 0)) {
-                .err => |e| return .{ .err = errDupePath(e) },
+                .err => |e| {
+                    log.addErrorFmt(
+                        null,
+                        Loc.Empty,
+                        this.manager.allocator,
+                        "failed to open patch file: {}",
+                        .{e},
+                    ) catch bun.outOfMemory();
+                    return null;
+                },
                .result => |fd| fd,
            };
            defer _ = bun.sys.close(fd);
@@ -479,21 +451,29 @@ pub const PatchTask = struct {
            // what's a good number for this?
page size i guess const STACK_SIZE = 16384; + var file = bun.sys.File{ .handle = fd }; var stack: [STACK_SIZE]u8 = undefined; var read: usize = 0; while (read < size) { - var i: usize = 0; - while (i < STACK_SIZE and i < size) { - switch (bun.sys.read(fd, stack[i..])) { - .result => |w| i += w, - .err => |e| return .{ .err = errDupePath(e) }, - } - } - read += i; - hasher.update(stack[0..i]); + const slice = switch (file.readFillBuf(stack[0..])) { + .result => |slice| slice, + .err => |e| { + log.addErrorFmt( + null, + Loc.Empty, + this.manager.allocator, + "failed to read from patch file: {} ({s})", + .{ e, absolute_patchfile_path }, + ) catch bun.outOfMemory(); + return null; + }, + }; + if (slice.len == 0) break; + hasher.update(slice); + read += slice.len; } - return .{ .result = hasher.final() }; + return hasher.final(); } pub fn notify(this: *PatchTask) void { @@ -511,15 +491,16 @@ pub const PatchTask = struct { state: ?CalcPatchHash.EnqueueAfterState, ) *PatchTask { const patchdep = manager.lockfile.patched_dependencies.get(name_and_version_hash) orelse @panic("This is a bug"); - bun.debugAssert(patchdep.patchfile_hash_is_null); const patchfile_path = manager.allocator.dupeZ(u8, patchdep.path.slice(manager.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(); const pt = bun.new(PatchTask, .{ + .tempdir = manager.getTemporaryDirectory(), .callback = .{ .calc_hash = .{ .state = state, .patchfile_path = patchfile_path, .name_and_version_hash = name_and_version_hash, + .logger = logger.Log.init(manager.allocator), }, }, .manager = manager, @@ -538,15 +519,18 @@ pub const PatchTask = struct { const pkg_name = pkg_manager.lockfile.packages.items(.name)[pkg_id]; const resolution: *const Resolution = &pkg_manager.lockfile.packages.items(.resolution)[pkg_id]; + var folder_path_buf: bun.PathBuffer = undefined; const stuff = pkg_manager.computeCacheDirAndSubpath( pkg_name.slice(pkg_manager.lockfile.buffers.string_bytes.items), resolution, + &folder_path_buf, patch_hash, ); const patchfilepath = pkg_manager.allocator.dupe(u8, pkg_manager.lockfile.patched_dependencies.get(name_and_version_hash).?.path.slice(pkg_manager.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(); const pt = bun.new(PatchTask, .{ + .tempdir = pkg_manager.getTemporaryDirectory(), .callback = .{ .apply = .{ .pkg_id = pkg_id, diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 106d4f1c010de1..0e22d5b775da12 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -20,6 +20,7 @@ export const SQL = $cpp("JSSQLStatement.cpp", "createJSSQLStatementConstructor") export const patchInternals = { parse: $newZigFunction("patch.zig", "TestingAPIs.parse", 1), apply: $newZigFunction("patch.zig", "TestingAPIs.apply", 2), + makeDiff: $newZigFunction("patch.zig", "TestingAPIs.makeDiff", 2), }; export const shellInternals = { diff --git a/src/patch.zig b/src/patch.zig index 3c7e0f08f67bf1..f4d039ec615a37 100644 --- a/src/patch.zig +++ b/src/patch.zig @@ -1,3 +1,5 @@ +const Output = bun.Output; +const Global = bun.Global; const std = @import("std"); const bun = @import("root").bun; const JSC = bun.JSC; @@ -9,6 +11,8 @@ const WHITESPACE: []const u8 = " \t\n\r"; // TODO: calculate this for different systems const PAGE_SIZE = 16384; +const debug = bun.Output.scoped(.patch, false); + /// All strings point to the original patch file text pub const PatchFilePart = union(enum) { file_patch: *FilePatch, @@ -534,7 +538,6 @@ pub const PatchFilePartKind = enum { }; const 
ParseErr = error{ - empty_patchfile, unrecognized_pragma, no_newline_at_eof_pragma_encountered_without_context, hunk_lines_encountered_before_hunk_header, @@ -755,7 +758,7 @@ const PatchLinesParser = struct { file_: []const u8, opts: struct { support_legacy_diffs: bool = false }, ) ParseErr!void { - if (file_.len == 0) return ParseErr.empty_patchfile; + if (file_.len == 0) return; const end = brk: { var iter = std.mem.splitBackwardsScalar(u8, file_, '\n'); var prev: usize = file_.len; @@ -1088,6 +1091,45 @@ const PatchLinesParser = struct { }; pub const TestingAPIs = struct { + pub fn makeDiff(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue { + const arguments_ = callframe.arguments(2); + var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + + const old_folder_jsval = arguments.nextEat() orelse { + globalThis.throw("expected 2 strings", .{}); + return .undefined; + }; + const old_folder_bunstr = old_folder_jsval.toBunString(globalThis); + defer old_folder_bunstr.deref(); + + const new_folder_jsval = arguments.nextEat() orelse { + globalThis.throw("expected 2 strings", .{}); + return .undefined; + }; + const new_folder_bunstr = new_folder_jsval.toBunString(globalThis); + defer new_folder_bunstr.deref(); + + const old_folder = old_folder_bunstr.toUTF8(bun.default_allocator); + defer old_folder.deinit(); + + const new_folder = new_folder_bunstr.toUTF8(bun.default_allocator); + defer new_folder.deinit(); + + return switch (gitDiffInternal(bun.default_allocator, old_folder.slice(), new_folder.slice()) catch |e| { + globalThis.throwError(e, "failed to make diff"); + return .undefined; + }) { + .result => |s| { + defer s.deinit(); + return bun.String.fromBytes(s.items).toJS(globalThis); + }, + .err => |e| { + defer e.deinit(); + globalThis.throw("failed to make diff: {s}", .{e.items}); + return .undefined; + }, + }; + } const ApplyArgs = struct { patchfile_txt: JSC.ZigString.Slice, patchfile: PatchFile, @@ -1192,25 +1234,140 @@ pub const TestingAPIs = struct { } }; -pub fn gitDiff( +pub fn spawnOpts( + old_folder: []const u8, + new_folder: []const u8, + cwd: [:0]const u8, + git: [:0]const u8, + loop: *JSC.AnyEventLoop, +) bun.spawn.sync.Options { + const argv: []const []const u8 = brk: { + const ARGV = &[_][:0]const u8{ + "git", + "-c", + "core.safecrlf=false", + "diff", + "--src-prefix=a/", + "--dst-prefix=b/", + "--ignore-cr-at-eol", + "--irreversible-delete", + "--full-index", + "--no-index", + }; + const argv_buf = bun.default_allocator.alloc([]const u8, ARGV.len + 2) catch bun.outOfMemory(); + argv_buf[0] = git; + for (1..ARGV.len) |i| { + argv_buf[i] = ARGV[i]; + } + argv_buf[ARGV.len] = old_folder; + argv_buf[ARGV.len + 1] = new_folder; + break :brk argv_buf; + }; + + const envp: [:null]?[*:0]const u8 = brk: { + const env_arr = &[_][:0]const u8{ + "GIT_CONFIG_NOSYSTEM", + "HOME", + "XDG_CONFIG_HOME", + "USERPROFILE", + }; + const PATH = bun.getenvZ("PATH"); + const envp_buf = bun.default_allocator.allocSentinel(?[*:0]const u8, env_arr.len + @as(usize, if (PATH != null) 1 else 0), null) catch bun.outOfMemory(); + for (0..env_arr.len) |i| { + envp_buf[i] = env_arr[i].ptr; + } + if (PATH) |p| { + envp_buf[envp_buf.len - 1] = @ptrCast(p.ptr); + } + break :brk envp_buf; + }; + + return bun.spawn.sync.Options{ + .stdout = .buffer, + .stderr = .buffer, + .cwd = cwd, + .envp = envp, + .argv = argv, + .windows = if (bun.Environment.isWindows) .{ .loop = switch (loop.*) { + .js => |x| .{ .js = x }, + .mini => |*x| .{ .mini = x }, 
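+            // (A sketch of what this function ends up spawning, with hypothetical
+            // folder names:
+            //
+            //   git -c core.safecrlf=false diff --src-prefix=a/ --dst-prefix=b/
+            //       --ignore-cr-at-eol --irreversible-delete --full-index
+            //       --no-index old_pkg new_pkg
+            //
+            // `--no-index` is what lets git diff two plain folders that are not
+            // inside a repository. On Windows the child process also has to be
+            // attached to whichever event loop flavor the caller runs, JS or
+            // mini; on POSIX no loop handle is needed, hence the empty struct in
+            // the `else` branch.)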
+        } } else {},
+    };
+}
+
+pub fn diffPostProcess(result: *bun.spawn.sync.Result, old_folder: []const u8, new_folder: []const u8) !bun.JSC.Node.Maybe(std.ArrayList(u8), std.ArrayList(u8)) {
+    var stdout = std.ArrayList(u8).init(bun.default_allocator);
+    var stderr = std.ArrayList(u8).init(bun.default_allocator);
+
+    std.mem.swap(std.ArrayList(u8), &stdout, &result.stdout);
+    std.mem.swap(std.ArrayList(u8), &stderr, &result.stderr);
+
+    var deinit_stdout = true;
+    var deinit_stderr = true;
+    defer {
+        if (deinit_stdout) stdout.deinit();
+        if (deinit_stderr) stderr.deinit();
+    }
+
+    if (stderr.items.len > 0) {
+        deinit_stderr = false;
+        return .{ .err = stderr };
+    }
+
+    debug("Before postprocess: {s}\n", .{stdout.items});
+    try gitDiffPostprocess(&stdout, old_folder, new_folder);
+    deinit_stdout = false;
+    return .{ .result = stdout };
+}
+
+pub fn gitDiffPreprocessPaths(
    allocator: std.mem.Allocator,
    old_folder_: []const u8,
    new_folder_: []const u8,
-) !bun.JSC.Node.Maybe(std.ArrayList(u8), std.ArrayList(u8)) {
-    const old_folder: []const u8 = if (comptime bun.Environment.isWindows) brk: {
+    comptime sentinel: bool,
+) [2]if (sentinel) [:0]const u8 else []const u8 {
+    const bump = if (sentinel) 1 else 0;
+    const old_folder = if (comptime bun.Environment.isWindows) brk: {
        // backslash in the path fucks everything up
-        const cpy = allocator.alloc(u8, old_folder_.len) catch bun.outOfMemory();
-        @memcpy(cpy, old_folder_);
+        const cpy = allocator.alloc(u8, old_folder_.len + bump) catch bun.outOfMemory();
+        @memcpy(cpy[0..old_folder_.len], old_folder_);
        std.mem.replaceScalar(u8, cpy, '\\', '/');
+        if (sentinel) {
+            cpy[old_folder_.len] = 0;
+            break :brk cpy[0..old_folder_.len :0];
+        }
        break :brk cpy;
    } else old_folder_;

    const new_folder = if (comptime bun.Environment.isWindows) brk: {
-        const cpy = allocator.alloc(u8, new_folder_.len) catch bun.outOfMemory();
-        @memcpy(cpy, new_folder_);
+        const cpy = allocator.alloc(u8, new_folder_.len + bump) catch bun.outOfMemory();
+        @memcpy(cpy[0..new_folder_.len], new_folder_);
        std.mem.replaceScalar(u8, cpy, '\\', '/');
+        if (sentinel) {
+            cpy[new_folder_.len] = 0;
+            break :brk cpy[0..new_folder_.len :0];
+        }
        break :brk cpy;
    } else new_folder_;

+    if (bun.Environment.isPosix and sentinel) {
+        return .{
+            allocator.dupeZ(u8, old_folder) catch bun.outOfMemory(),
+            allocator.dupeZ(u8, new_folder) catch bun.outOfMemory(),
+        };
+    }
+
+    return .{ old_folder, new_folder };
+}
+
+pub fn gitDiffInternal(
+    allocator: std.mem.Allocator,
+    old_folder_: []const u8,
+    new_folder_: []const u8,
+) !bun.JSC.Node.Maybe(std.ArrayList(u8), std.ArrayList(u8)) {
+    const paths = gitDiffPreprocessPaths(allocator, old_folder_, new_folder_, false);
+    const old_folder = paths[0];
+    const new_folder = paths[1];
+
    defer if (comptime bun.Environment.isWindows) {
        allocator.free(old_folder);
        allocator.free(new_folder);
@@ -1262,6 +1419,7 @@ pub fn gitDiff(
        return .{ .err = stderr };
    }

+    debug("Before postprocess: {s}\n", .{stdout.items});
    try gitDiffPostprocess(&stdout, old_folder, new_folder);
    deinit_stdout = false;
    return .{ .result = stdout };
@@ -1288,8 +1446,10 @@ pub fn gitDiff(
 /// - b/src/index.js
 ///
 /// The operations look roughly like the following sequence of substitutions and regexes:
-/// .replace(new RegExp(`(a|b)(${escapeStringRegexp(`/${removeTrailingAndLeadingSlash(aFolder)}/`)})`, "g"), "$1/")
-/// .replace(new RegExp(`(a|b)${escapeStringRegexp(`/${removeTrailingAndLeadingSlash(bFolder)}/`)}`, "g"), "$1/")
+/// .replace(new RegExp(`(a|b)(${escapeStringRegexp(`/${removeTrailingAndLeadingSlash(aFolder)}/`)})`, "g"), "$1/")
+/// .replace(new RegExp(`(a|b)${escapeStringRegexp(`/${removeTrailingAndLeadingSlash(bFolder)}/`)}`, "g"), "$1/")
+/// .replace(new RegExp(escapeStringRegexp(`${aFolder}/`), "g"), "")
+/// .replace(new RegExp(escapeStringRegexp(`${bFolder}/`), "g"), "");
 fn gitDiffPostprocess(stdout: *std.ArrayList(u8), old_folder: []const u8, new_folder: []const u8) !void {
    const old_folder_trimmed = std.mem.trim(u8, old_folder, "/");
    const new_folder_trimmed = std.mem.trim(u8, new_folder, "/");
@@ -1308,25 +1468,65 @@ fn gitDiffPostprocess(stdout: *std.ArrayList(u8), old_folder: []const u8, new_fo
        @memcpy(new_buf[2..][0..new_folder_trimmed.len], new_folder_trimmed);
        new_buf[2 + new_folder_trimmed.len] = '/';

-        break :brk .{ old_buf[0 .. 2 + old_folder_trimmed.len + 1], new_buf[0 .. 2 + old_folder_trimmed.len + 1] };
+        break :brk .{ old_buf[0 .. 2 + old_folder_trimmed.len + 1], new_buf[0 .. 2 + new_folder_trimmed.len + 1] };
    };

+    // const @"$old_folder/" = @"a/$old_folder/"[2..];
+    // const @"$new_folder/" = @"b/$new_folder/"[2..];
+
+    // these vars are here to disambiguate `a/$OLD_FOLDER` when $OLD_FOLDER itself contains "a/";
+    // if a line was already rewritten by the prefixed replacement above, we must not
+    // run the bare-path replacement on it as well
+    var saw_a_folder: ?usize = null;
+    var saw_b_folder: ?usize = null;
+    var line_idx: u32 = 0;
+
    var line_iter = std.mem.splitScalar(u8, stdout.items, '\n');
    while (line_iter.next()) |line| {
-        if (shouldSkipLine(line)) continue;
-        if (std.mem.indexOf(u8, line, @"a/$old_folder/")) |idx| {
-            const @"$old_folder/ start" = idx + 2;
-            const line_start = line_iter.index.? - 1 - line.len;
-            line_iter.index.? -= 1 + line.len;
-            try stdout.replaceRange(line_start + @"$old_folder/ start", old_folder_trimmed.len + 1, "");
-            continue;
-        }
-        if (std.mem.indexOf(u8, line, @"b/$new_folder/")) |idx| {
-            const @"$new_folder/ start" = idx + 2;
-            const line_start = line_iter.index.? - 1 - line.len;
-            try stdout.replaceRange(line_start + @"$new_folder/ start", new_folder_trimmed.len + 1, "");
-            line_iter.index.? -= new_folder_trimmed.len + 1;
+        if (!shouldSkipLine(line)) {
+            if (std.mem.indexOf(u8, line, @"a/$old_folder/")) |idx| {
+                const @"$old_folder/ start" = idx + 2;
+                const line_start = line_iter.index.? - 1 - line.len;
+                line_iter.index.? -= 1 + line.len;
+                try stdout.replaceRange(line_start + @"$old_folder/ start", old_folder_trimmed.len + 1, "");
+                saw_a_folder = line_idx;
+                continue;
+            }
+            if (std.mem.indexOf(u8, line, @"b/$new_folder/")) |idx| {
+                const @"$new_folder/ start" = idx + 2;
+                const line_start = line_iter.index.? - 1 - line.len;
+                try stdout.replaceRange(line_start + @"$new_folder/ start", new_folder_trimmed.len + 1, "");
+                line_iter.index.? -= new_folder_trimmed.len + 1;
+                saw_b_folder = line_idx;
+                continue;
+            }
+            if (saw_a_folder == null or saw_a_folder.? != line_idx) {
+                if (std.mem.indexOf(u8, line, old_folder)) |idx| {
+                    if (idx + old_folder.len < line.len and line[idx + old_folder.len] == '/') {
+                        const line_start = line_iter.index.? - 1 - line.len;
+                        line_iter.index.? -= 1 + line.len;
+                        try stdout.replaceRange(line_start + idx, old_folder.len + 1, "");
+                        saw_a_folder = line_idx;
+                        continue;
+                    }
+                }
+            }
+            if (saw_b_folder == null or saw_b_folder.? != line_idx) {
+                if (std.mem.indexOf(u8, line, new_folder)) |idx| {
+                    if (idx + new_folder.len < line.len and line[idx + new_folder.len] == '/') {
+                        const line_start = line_iter.index.? - 1 - line.len;
+                        line_iter.index.? -= 1 + line.len;
+                        try stdout.replaceRange(line_start + idx, new_folder.len + 1, "");
+                        saw_b_folder = line_idx;
+                        continue;
+                    }
+                }
+            }
        }
+
+        line_idx += 1;
+        saw_a_folder = null;
+        saw_b_folder = null;
    }
 }
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index 53af0d083e6c9a..f2af9c97c7fb92 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -1258,6 +1258,14 @@ pub fn joinStringBufW(buf: []u16, parts: anytype, comptime _platform: Platform)
    return joinStringBufT(u16, buf, parts, _platform);
 }

+pub fn joinStringBufWZ(buf: []u16, parts: anytype, comptime _platform: Platform) [:0]const u16 {
+    const joined = joinStringBufT(u16, buf[0 .. buf.len - 1], parts, _platform);
+    assert(bun.isSliceInBufferT(u16, joined, buf));
+    const start_offset = @intFromPtr(joined.ptr) / 2 - @intFromPtr(buf.ptr) / 2;
+    buf[joined.len + start_offset] = 0;
+    return buf[start_offset..][0..joined.len :0];
+}
+
 pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime _platform: Platform) []const T {
    const platform = comptime _platform.resolve();

diff --git a/src/sys.zig b/src/sys.zig
index e5c70c782cc3a0..8873919f82d926 100644
--- a/src/sys.zig
+++ b/src/sys.zig
@@ -353,6 +353,17 @@ pub fn getcwd(buf: *bun.PathBuffer) Maybe([]const u8) {
        Result.errnoSys(0, .getcwd).?;
 }

+pub fn getcwdZ(buf: *bun.PathBuffer) Maybe([:0]const u8) {
+    const Result = Maybe([:0]const u8);
+    buf[0] = 0;
+    buf[buf.len - 1] = 0;
+    const rc: ?[*:0]u8 = @ptrCast(std.c.getcwd(buf, bun.MAX_PATH_BYTES));
+    return if (rc != null)
+        Result{ .result = rc.?[0..std.mem.len(rc.?) :0] }
+    else
+        Result.errnoSys(0, .getcwd).?;
+}
+
 pub fn fchmod(fd: bun.FileDescriptor, mode: bun.Mode) Maybe(void) {
    if (comptime Environment.isWindows) {
        return sys_uv.fchmod(fd, mode);
@@ -1684,6 +1695,55 @@ pub const RenameAt2Flags = packed struct {
    }
 };

+// NOTE: this _does not_ handle moving across filesystems. For that, check if the returned error is XDEV and then use `bun.C.moveFileZWithHandle`.
+pub fn renameatConcurrently(from_dir_fd: bun.FileDescriptor, from: [:0]const u8, to_dir_fd: bun.FileDescriptor, to: [:0]const u8) Maybe(void) {
+    var did_atomically_replace = false;
+
+    attempt_atomic_rename_and_fallback_to_racy_delete: {
+        {
+            // Happy path: the folder doesn't exist in the cache dir, so we can
+            // just rename it. We don't need to delete anything.
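+            //
+            // The full fallback ladder (a sketch of the control flow below; each
+            // later step only runs when the previous one failed):
+            //   1. renameat2 with `.exclude`: fails if `to` already exists.
+            //   2. renameat2 with `.exchange`: atomically swaps `from` and `to`;
+            //      attempted when step 1 fails with EEXIST/ENOTEMPTY (or, on
+            //      POSIX, also OPNOTSUPP).
+            //   3. deleteTree(to) followed by a plain renameat: a best-effort,
+            //      racy last resort.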
+            var err = switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
+                .exclude = true,
+            })) {
+                .err => |err| err,
+                .result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
+            };
+
+            // Fallback path: the folder exists in the cache dir, it might be in a strange state
+            // let's attempt to atomically replace it with the temporary folder's version
+            if (if (comptime bun.Environment.isPosix) switch (err.getErrno()) {
+                .EXIST, .NOTEMPTY, .OPNOTSUPP => true,
+                else => false,
+            } else switch (err.getErrno()) {
+                .EXIST, .NOTEMPTY => true,
+                else => false,
+            }) {
+                did_atomically_replace = true;
+                switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
+                    .exchange = true,
+                })) {
+                    .err => {},
+                    .result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
+                }
+                did_atomically_replace = false;
+            }
+        }
+
+        // sad path: let's try to delete the destination folder and then rename into it
+        var to_dir = to_dir_fd.asDir();
+        to_dir.deleteTree(to) catch {};
+        switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) {
+            .err => |err| {
+                return .{ .err = err };
+            },
+            .result => {},
+        }
+    }
+
+    return Maybe(void).success;
+}
+
 pub fn renameat2(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.FileDescriptor, to: [:0]const u8, flags: RenameAt2Flags) Maybe(void) {
    if (Environment.isWindows) {
        return renameat(from_dir, from, to_dir, to);
@@ -2897,6 +2957,7 @@ pub const File = struct {
            return self.bytes.items;
        }
    };
+
    pub fn readFillBuf(this: File, buf: []u8) Maybe([]u8) {
        var read_amount: usize = 0;
        while (read_amount < buf.len) {
@@ -2919,6 +2980,7 @@ pub const File = struct {
        return .{ .result = buf[0..read_amount] };
    }

+
    pub fn readToEndWithArrayList(this: File, list: *std.ArrayList(u8)) Maybe(usize) {
        const size = switch (this.getEndPos()) {
            .err => |err| {
diff --git a/test/cli/install/bun-install-patch.test.ts b/test/cli/install/bun-install-patch.test.ts
index da972c13b73b0c..332aed1d1c5c88 100644
--- a/test/cli/install/bun-install-patch.test.ts
+++ b/test/cli/install/bun-install-patch.test.ts
@@ -2,6 +2,10 @@ import { $ } from "bun";
 import { bunExe, bunEnv as env, toBeValidBin, toHaveBins, toBeWorkspaceLink, tempDirWithFiles, bunEnv } from "harness";
 import { afterAll, afterEach, beforeAll, beforeEach, expect, it, describe, test, setDefaultTimeout } from "bun:test";

+beforeAll(() => {
+  setDefaultTimeout(1000 * 60 * 5);
+});
+
 describe("patch", async () => {
  const is_even_patch = /* patch */ `diff --git a/index.js b/index.js
 index 832d92223a9ec491364ee10dcbe3ad495446ab80..bc652e496c165a7415880ef4520c0ab302bf0765 100644
@@ -351,6 +355,57 @@ index c8950c17b265104bcf27f8c345df1a1b13a78950..7ce57ab96400ab0ff4fac7e06f6e02c2
    }
  });

+  describe("should work when patches are removed", async () => {
+    for (const [version, patchVersion_] of versions) {
+      const patchFilename = filepathEscape(`is-even@${version}.patch`);
+      const patchVersion = patchVersion_ ??
version; + test(version, async () => { + const filedir = tempDirWithFiles("patch1", { + "package.json": JSON.stringify({ + "name": "bun-patch-test", + "module": "index.ts", + "type": "module", + "patchedDependencies": { + [`is-even@${patchVersion}`]: `patches/${patchFilename}`, + }, + "dependencies": { + "is-even": version, + }, + }), + patches: { + [patchFilename]: is_even_patch2, + }, + "index.ts": /* ts */ `import isEven from 'is-even'; isEven(2); console.log('lol')`, + }); + + console.log("FILEDIR", filedir); + + await $`${bunExe()} i`.env(bunEnv).cwd(filedir); + + await $`echo ${JSON.stringify({ + "name": "bun-patch-test", + "module": "index.ts", + "type": "module", + "patchedDependencies": { + [`is-odd@0.1.2`]: `patches/is-odd@0.1.2.patch`, + }, + "dependencies": { + "is-even": version, + }, + })} > package.json` + .env(bunEnv) + .cwd(filedir); + + await $`echo ${is_odd_patch} > patches/is-odd@0.1.2.patch; ${bunExe()} i`.env(bunEnv).cwd(filedir); + + const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir); + expect(stderr.toString()).toBe(""); + expect(stdout.toString()).toContain("Hi from isOdd!\n"); + expect(stdout.toString()).not.toContain("lmao\n"); + }); + } + }); + it("should update a transitive dependency when the patchfile changes", async () => { $.throws(true); const filedir = tempDirWithFiles("patch1", { @@ -379,4 +434,103 @@ index c8950c17b265104bcf27f8c345df1a1b13a78950..7ce57ab96400ab0ff4fac7e06f6e02c2 expect(stderr.toString()).toBe(""); expect(stdout.toString()).toContain("lmao\n"); }); + + it("should update a scoped package", async () => { + const patchfile = /* patch */ `diff --git a/private/var/folders/wy/3969rv2x63g63jf8jwlcb2x40000gn/T/.b7f7d77b9ffdd3ee-00000000.tmp/index.js b/index.js +new file mode 100644 +index 0000000000000000000000000000000000000000..6edc0598a84632c41d9c770cfbbad7d99e2ab624 +--- /dev/null ++++ b/index.js +@@ -0,0 +1,4 @@ ++ ++module.exports = () => { ++ return 'PATCHED!' 
++} +diff --git a/package.json b/package.json +index aa7c7012cda790676032d1b01d78c0b69ec06360..6048e7cb462b3f9f6ac4dc21aacf9a09397cd4be 100644 +--- a/package.json ++++ b/package.json +@@ -2,7 +2,7 @@ + "name": "@zackradisic/hls-dl", + "version": "0.0.1", + "description": "", +- "main": "dist/hls-dl.commonjs2.js", ++ "main": "./index.js", + "dependencies": { + "m3u8-parser": "^4.5.0", + "typescript": "^4.0.5" +`; + + $.throws(true); + const filedir = tempDirWithFiles("patch1", { + "package.json": JSON.stringify({ + "name": "bun-patch-test", + "module": "index.ts", + "type": "module", + "patchedDependencies": { + "@zackradisic/hls-dl@0.0.1": "patches/thepatch.patch", + }, + "dependencies": { + "@zackradisic/hls-dl": "0.0.1", + }, + }), + patches: { + ["thepatch.patch"]: patchfile, + }, + "index.ts": /* ts */ `import hlsDl from '@zackradisic/hls-dl'; console.log(hlsDl())`, + }); + + await $`${bunExe()} i`.env(bunEnv).cwd(filedir); + + const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir); + expect(stderr.toString()).toBe(""); + expect(stdout.toString()).toContain("PATCHED!\n"); + }); + + it("shouldn't infinite loop on failure to apply patch", async () => { + const badPatch = /* patch */ `diff --git a/index.js b/node_modules/is-even/index.js +index 832d92223a9ec491364ee10dcbe3ad495446ab80..7e079a817825de4b8c3d01898490dc7e960172bb 100644 +--- a/index.js ++++ b/node_modules/is-even/index.js +@@ -10,5 +10,6 @@ + var isOdd = require('is-odd'); + + module.exports = function isEven(i) { ++ console.log('hi') + return !isOdd(i); + }; +`; + + const filedir = tempDirWithFiles("patch1", { + "package.json": JSON.stringify({ + "name": "bun-patch-test", + "module": "index.ts", + "type": "module", + "dependencies": { + "is-even": "1.0.0", + }, + }), + patches: { + "is-even@1.0.0.patch": badPatch, + }, + "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven())`, + }); + + await $`${bunExe()} i`.env(bunEnv).cwd(filedir); + + const pkgjsonWithPatch = { + "name": "bun-patch-test", + "module": "index.ts", + "type": "module", + "patchedDependencies": { + "is-even@1.0.0": "patches/is-even@1.0.0.patch", + }, + "dependencies": { + "is-even": "1.0.0", + }, + }; + + await $`echo ${JSON.stringify(pkgjsonWithPatch)} > package.json`.cwd(filedir).env(bunEnv); + await $`${bunExe()} i`.env(bunEnv).cwd(filedir); + }); }); diff --git a/test/cli/install/bun-patch.test.ts b/test/cli/install/bun-patch.test.ts new file mode 100644 index 00000000000000..e4655b7660c149 --- /dev/null +++ b/test/cli/install/bun-patch.test.ts @@ -0,0 +1,501 @@ +import { $, ShellOutput, ShellPromise } from "bun"; +import { bunExe, bunEnv as env, toBeValidBin, toHaveBins, toBeWorkspaceLink, tempDirWithFiles, bunEnv } from "harness"; +import { afterAll, afterEach, beforeAll, beforeEach, expect, it, describe, test, setDefaultTimeout } from "bun:test"; +import { join, sep } from "path"; + +const expectNoError = (o: ShellOutput) => expect(o.stderr.toString()).not.toContain("error"); +// const platformPath = (path: string) => (process.platform === "win32" ? 
path.replaceAll("/", sep) : path);
+const platformPath = (path: string) => path;
+
+beforeAll(() => {
+  setDefaultTimeout(1000 * 60 * 5);
+});
+
+describe("bun patch <pkg>", async () => {
+  // Tests to make sure that patching popular packages works end to end.
+  describe("popular pkg", async () => {
+    const dummyCode = /* ts */ `
+    module.exports = function lmao() {
+      return 420;
+    }
+    `;
+
+    function makeTest(pkgName: string, version: string, folder_in_node_modules: string = `${pkgName}`) {
+      test(
+        `${pkgName}@${version}`,
+        async () => {
+          const tempdir = tempDirWithFiles("popular", {
+            "package.json": JSON.stringify({
+              "name": "bun-patch-test",
+              "module": "index.ts",
+              "type": "module",
+              "dependencies": {
+                [pkgName]: version,
+              },
+            }),
+            "index.ts": /* ts */ `import lmao from '${pkgName}'; console.log(lmao())`,
+          });
+
+          console.log("TEMPDIR", tempdir);
+          expectNoError(await $`${bunExe()} i`.env(bunEnv).cwd(tempdir));
+          expectNoError(await $`${bunExe()} patch ${pkgName}@${version}`.env(bunEnv).cwd(tempdir));
+          await $`echo ${dummyCode} > node_modules/${folder_in_node_modules}/index.js`.env(bunEnv).cwd(tempdir);
+          const { type, module, exports, ...package_json }: Record<string, any> =
+            await $`cat node_modules/${folder_in_node_modules}/package.json`.env(bunEnv).cwd(tempdir).json();
+          package_json["main"] = "index.js";
+          await $`echo ${JSON.stringify(package_json)} > node_modules/${folder_in_node_modules}/package.json`
+            .env(bunEnv)
+            .cwd(tempdir);
+
+          expectNoError(
+            await $`${bunExe()} patch --commit node_modules/${folder_in_node_modules}`.env(bunEnv).cwd(tempdir),
+          );
+
+          const { stdout } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir);
+          expect(stdout.toString()).toBe("420\n");
+        },
+        30 * 1000,
+      );
+    }
+
+    makeTest("lodash", "4.17.21");
+    makeTest("react", "18.3.1");
+    makeTest("react-dom", "18.3.1");
+    makeTest("axios", "1.7.2");
+    // makeTest("tslib", "2.6.3");
+    // makeTest("chalk", "5.3.0");
+    // makeTest("next", "14.2.4");
+    // makeTest("express", "4.19.2");
+    // makeTest("inquirer", "9.2.23");
+    // makeTest("commander", "12.1.0");
+
+    // // vercel/next.js
+    // makeTest("webpack-sources", "3.2.3");
+
+    // // vitejs/vite
+    // makeTest("acorn", "8.11.3");
+    // makeTest("chokidar", "3.6.0");
+    // makeTest("http-proxy", "1.18.1");
+    // makeTest("sirv", "2.0.4");
+
+    // // mermaid-js/mermaid
+    // makeTest("cytoscape", "3.28.1");
+
+    // // remix-run/react-router
+    // makeTest("@changesets/get-dependents-graph", "1.3.6", "@changesets/get-dependents-graph");
+
+    // // n8n-io/n8n
+    // makeTest("typedi", "0.10.0");
+    // makeTest("@sentry/cli", "2.17.0", "@sentry/cli");
+    // makeTest("pkce-challenge", "3.0.0");
+    // makeTest("pyodide", "0.23.4");
+    makeTest("@types/express-serve-static-core", "4.17.43", "@types/express-serve-static-core");
+    makeTest("@types/ws", "8.5.4", "@types/ws");
+    makeTest("@types/uuencode", "0.0.3", "@types/uuencode");
+  });
+
+  test("should patch a package when it is already patched", async () => {
+    const tempdir = tempDirWithFiles("lol", {
+      "package.json": JSON.stringify({
+        "name": "bun-patch-test",
+        "module": "index.ts",
+        "type": "module",
+        "dependencies": {
+          "is-even": "1.0.0",
+          "is-odd": "3.0.1",
+        },
+      }),
+      "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
+    });
+
+    await $`${bunExe()} i`.env(bunEnv).cwd(tempdir);
+    const { stderr } = await $`${bunExe()} patch is-odd@0.1.2`.env(bunEnv).cwd(tempdir).throws(false);
+    expect(stderr.toString()).not.toContain("error");
+
+    const firstChange = /* ts */ `/*!
+* is-odd
+*
+* Copyright (c) 2015-2017, Jon Schlinkert.
+* Released under the MIT License.
+*/
+
+'use strict';
+
+var isNumber = require('is-number');
+
+module.exports = function isOdd(i) {
+  if (!isNumber(i)) {
+    throw new TypeError('is-odd expects a number.');
+  }
+  if (Number(i) !== Math.floor(i)) {
+    throw new RangeError('is-odd expects an integer.');
+  }
+  console.log('hi')
+  return !!(~~i & 1);
+};`;
+
+    await $`echo ${firstChange} > node_modules/is-even/node_modules/is-odd/index.js`.env(bunEnv).cwd(tempdir);
+
+    const { stderr: stderr2 } = await $`${bunExe()} patch --commit node_modules/is-even/node_modules/is-odd`
+      .env(bunEnv)
+      .cwd(tempdir)
+      .throws(false);
+    expect(stderr2.toString()).not.toContain("error");
+
+    const { stderr: stderr3 } = await $`${bunExe()} patch is-odd@0.1.2`.env(bunEnv).cwd(tempdir).throws(false);
+    expect(stderr3.toString()).not.toContain("error");
+
+    const secondChange = /* ts */ `/*!
+* is-odd
+*
+* Copyright (c) 2015-2017, Jon Schlinkert.
+* Released under the MIT License.
+*/
+
+'use strict';
+
+var isNumber = require('is-number');
+
+module.exports = function isOdd(i) {
+  if (!isNumber(i)) {
+    throw new TypeError('is-odd expects a number.');
+  }
+  if (Number(i) !== Math.floor(i)) {
+    throw new RangeError('is-odd expects an integer.');
+  }
+  console.log('hi')
+  console.log('hello')
+  return !!(~~i & 1);
+};`;
+
+    await $`echo ${secondChange} > node_modules/is-even/node_modules/is-odd/index.js`.env(bunEnv).cwd(tempdir);
+    const { stderr: stderr4 } = await $`${bunExe()} patch --commit node_modules/is-even/node_modules/is-odd`
+      .env(bunEnv)
+      .cwd(tempdir)
+      .throws(false);
+    expect(stderr4.toString()).not.toContain("error");
+
+    await $`cat patches/is-odd@0.1.2.patch`.env(bunEnv).cwd(tempdir);
+
+    await $`${bunExe()} i`.env(bunEnv).cwd(tempdir).throws(false);
+    const { stdout } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir).throws(false);
+    expect(stdout.toString()).toContain("hi\nhello\n");
+  });
+
+  test("bad patch arg", async () => {
+    const tempdir = tempDirWithFiles("lol", {
+      "package.json": JSON.stringify({
+        "name": "bun-patch-test",
+        "module": "index.ts",
+        "type": "module",
+        "dependencies": {
+          "is-even": "1.0.0",
+        },
+      }),
+      "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
+    });
+
+    await $`${bunExe()} i`.env(bunEnv).cwd(tempdir);
+    const { stderr, exitCode } = await $`${bunExe()} patch lkflksdkfj`.env(bunEnv).cwd(tempdir).throws(false);
+    expect(exitCode).toBe(1);
+    expect(stderr.toString()).toContain("error: package lkflksdkfj not found");
+  });
+
+  test("bad patch commit arg", async () => {
+    const tempdir = tempDirWithFiles("lol", {
+      "package.json": JSON.stringify({
+        "name": "bun-patch-test",
+        "module": "index.ts",
+        "type": "module",
+        "dependencies": {
+          "is-even": "1.0.0",
+        },
+      }),
+      "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
+    });
+
+    await $`${bunExe()} i`.env(bunEnv).cwd(tempdir);
+    const { stderr } = await $`${bunExe()} patch is-even`.env(bunEnv).cwd(tempdir);
+    expect(stderr.toString()).not.toContain("error");
+
+    const { stderr: stderr2 } = await $`${bunExe()} patch --commit lskfjdslkfjsldkfjlsdkfj`
+      .env(bunEnv)
+      .cwd(tempdir)
+      .throws(false);
+    expect(stderr2.toString()).toContain("error: package lskfjdslkfjsldkfjlsdkfj not found");
+  });
+
+  function makeTest(
+    name: string,
+    {
+      dependencies,
+      mainScript,
+      patchArg,
+      patchedCode,
+      expected,
+    }: {
+      dependencies: Record<string, string>;
+      mainScript: string;
+      patchArg: string;
+      patchedCode: string;
+      expected: { patchName: string; patchPath: string; stdout: string };
+      extra?: (filedir: string) => Promise<void>;
+    },
+  ) {
+    expected.patchPath = platformPath(expected.patchPath);
+    test(name, async () => {
+      $.throws(true);
+
+      const filedir = tempDirWithFiles("patch1", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": dependencies,
+        }),
+        "index.ts": mainScript,
+      });
+
+      {
+        const { stderr } = await $`${bunExe()} i`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).not.toContain("error");
+      }
+
+      {
+        const { stderr, stdout } = await $`${bunExe()} patch ${patchArg}`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).not.toContain("error");
+        expect(stdout.toString()).toContain(
+          `To patch ${platformPath(expected.patchName)}, edit the following folder:
+
+  ${platformPath(expected.patchPath)}
+
+Once you're done with your changes, run:
+
+  bun patch --commit '${platformPath(expected.patchPath)}'`,
+        );
+      }
+
+      {
+        const newCode = patchedCode;
+
+        await $`echo ${newCode} > ${expected.patchPath}/index.js`.env(bunEnv).cwd(filedir);
+        const { stderr, stdout } = await $`${bunExe()} patch --commit ${expected.patchPath}`.env(bunEnv).cwd(filedir);
+      }
+
+      const output = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir).text();
+      expect(output).toBe(expected.stdout);
+    });
+  }
+
+  test(
+    "overwriting module with multiple levels of directories",
+    async () => {
+      const filedir = tempDirWithFiles("patch1", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": { lodash: "4.17.21" },
+        }),
+        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven())`,
+      });
+
+      {
+        const { stderr } = await $`${bunExe()} i`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).not.toContain("error");
+      }
+
+      {
+        const { stderr, stdout } = await $`${bunExe()} patch lodash`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).not.toContain("error");
+      }
+
+      // run it again to make sure we didn't mess something up
+      {
+        const { stderr, stdout } = await $`${bunExe()} patch lodash`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).not.toContain("error");
+      }
+    },
+    15 * 1000,
+  );
+
+  ["is-even@1.0.0", "node_modules/is-even"].map(patchArg =>
+    makeTest("should patch a node_modules package", {
+      dependencies: { "is-even": "1.0.0" },
+      mainScript: /* ts */ `import isEven from 'is-even'; isEven(420)`,
+      patchArg,
+      patchedCode: /* ts */ `/*!
+ * is-even
+ *
+ * Copyright (c) 2015, 2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+ 'use strict';
+
+ var isOdd = require('is-odd');
+
+ module.exports = function isEven(i) {
+  console.log("If you're reading this, the patch worked!")
+  return !isOdd(i);
+ };
+ `,
+      expected: {
+        patchName: "is-even",
+        patchPath: "node_modules/is-even",
+        stdout: "If you're reading this, the patch worked!\n",
+      },
+    }),
+  );
+
+  ["is-odd@0.1.2", "node_modules/is-even/node_modules/is-odd"].map(patchArg =>
+    makeTest("should patch a nested node_modules package", {
+      dependencies: { "is-even": "1.0.0", "is-odd": "3.0.1" },
+      mainScript: /* ts */ `import isEven from 'is-even'; isEven(420)`,
+      patchArg,
+      patchedCode: /* ts */ `/*!
+ * is-odd
+ *
+ * Copyright (c) 2015-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+'use strict';
+
+var isNumber = require('is-number');
+
+module.exports = function isOdd(i) {
+  if (!isNumber(i)) {
+    throw new TypeError('is-odd expects a number.');
+  }
+  if (Number(i) !== Math.floor(i)) {
+    throw new RangeError('is-odd expects an integer.');
+  }
+  console.log("If you're reading this, the patch worked.")
+  return !!(~~i & 1);
+};
+`,
+      expected: {
+        patchName: "is-odd",
+        patchPath: "node_modules/is-even/node_modules/is-odd",
+        stdout: "If you're reading this, the patch worked.\n",
+      },
+      extra: async filedir => {
+        const patchfile = await $`cat ${join(filedir, "patches", "is-odd@0.1.2.patch")}`.cwd(filedir).text();
+        // ensure node_modules is not in the patch
+        expect(patchfile).not.toContain("node_modules");
+      },
+    }),
+  );
+
+  test("should overwrite the node_modules folder of the package", async () => {
+    const patchArgs = ["is-even@1.0.0", "node_modules/is-even"];
+
+    for (const patchArg of patchArgs) {
+      $.throws(true);
+
+      const filedir = tempDirWithFiles("patch1", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": {
+            "is-even": "1.0.0",
+          },
+        }),
+        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven())`,
+      });
+
+      {
+        const { stderr } = await $`${bunExe()} i --backend hardlink`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).toContain("Saved lockfile");
+
+        const newCode = /* ts */ `
+module.exports = function isEven() {
+  return 'LOL'
+}
+`;
+
+        // `bun patch` must replace the hardlinked folder with a fresh copy,
+        // so this edit cannot leak into Bun's global cache
+        await $`${bunExe()} patch ${patchArg}`.env(bunEnv).cwd(filedir);
+        await $`echo ${newCode} > node_modules/is-even/index.js`.env(bunEnv).cwd(filedir);
+      }
+
+      const tempdir = tempDirWithFiles("unpatched", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": {
+            "is-even": "1.0.0",
+          },
+        }),
+        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
+      });
+
+      await $`${bunExe()} run index.ts`
+        .env(bunEnv)
+        .cwd(filedir)
+        .then(o => expect(o.stderr.toString()).toBe(""));
+
+      // a fresh project drawing from the same cache should still see the unpatched package
+      const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir);
+      expect(stderr.toString()).toBe("");
+      expect(stdout.toString()).toBe("true\n");
+    }
+  });
+
+  test("should overwrite nested node_modules folder of the package", async () => {
+    const patchArgs = ["is-odd@0.1.2", "node_modules/is-even/node_modules/is-odd"];
+
+    for (const patchArg of patchArgs) {
+      $.throws(true);
+
+      const filedir = tempDirWithFiles("patch1", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": {
+            "is-even": "1.0.0",
+            "is-odd": "3.0.1",
+          },
+        }),
+        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven())`,
+      });
+
+      {
+        const { stderr } = await $`${bunExe()} i --backend hardlink`.env(bunEnv).cwd(filedir);
+        expect(stderr.toString()).toContain("Saved lockfile");
+
+        const newCode = /* ts */ `
+module.exports = function isOdd() {
+  return 'LOL'
+}
+`;
+
+        await $`ls -d node_modules/is-even/node_modules/is-odd`.cwd(filedir);
+        await $`${bunExe()} patch ${patchArg}`.env(bunEnv).cwd(filedir);
+        await $`echo ${newCode} > node_modules/is-even/node_modules/is-odd/index.js`.env(bunEnv).cwd(filedir);
+      }
+
+      const tempdir = tempDirWithFiles("unpatched", {
+        "package.json": JSON.stringify({
+          "name": "bun-patch-test",
+          "module": "index.ts",
+          "type": "module",
+          "dependencies": {
+            "is-even": "1.0.0",
+          },
+        }),
+        "index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(420))`,
+      });
+
+      await $`${bunExe()} run index.ts`
+        .env(bunEnv)
+        .cwd(filedir)
+        .then(o => expect(o.stderr.toString()).toBe(""));
+
+      const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(tempdir);
+      expect(stderr.toString()).toBe("");
+      expect(stdout.toString()).toBe("true\n");
+    }
+  });
+});
diff --git a/test/js/bun/patch/patch.test.ts b/test/js/bun/patch/patch.test.ts
index e3352f212ec734..e11c1f076e8d31 100644
--- a/test/js/bun/patch/patch.test.ts
+++ b/test/js/bun/patch/patch.test.ts
@@ -4,9 +4,9 @@ import { patchInternals } from "bun:internal-for-testing";
 import { tempDirWithFiles as __tempDirWithFiles } from "harness";
 import { join as __join } from "node:path";
 import fs from "fs/promises";
-const { parse, apply } = patchInternals;
+const { parse, apply, makeDiff } = patchInternals;
 
-const makeDiff = async (aFolder: string, bFolder: string, cwd: string): Promise<string> => {
+const makeDiffJs = async (aFolder: string, bFolder: string, cwd: string): Promise<string> => {
   const { stdout, stderr } = await $`git -c core.safecrlf=false diff --src-prefix=a/ --dst-prefix=b/ --ignore-cr-at-eol --irreversible-delete --full-index --no-index ${aFolder} ${bFolder}`
     .env(
@@ -51,6 +51,47 @@ const join =
   : __join;
 
 describe("apply", () => {
+  test("edgecase", async () => {
+    const newcontents = "module.exports = x => x % 420 === 0;";
+    const tempdir2 = tempDirWithFiles("patch-test2", {
+      ".bun/install/cache/is-even@1.0.0": {
+        "index.js": "module.exports = x => x % 2 === 0;",
+      },
+    });
+    const tempdir = tempDirWithFiles("patch-test", {
+      a: {},
+      ["node_modules/is-even"]: {
+        "index.js": newcontents,
+      },
+    });
+
+    const patchfile = await makeDiff(
+      `${tempdir2}/.bun/install/cache/is-even@1.0.0`,
+      `${tempdir}/node_modules/is-even`,
+      tempdir,
+    );
+
+    await apply(patchfile, `${tempdir}/node_modules/is-even`);
+    expect(await fs.readFile(`${tempdir}/node_modules/is-even/index.js`).then(b => b.toString())).toBe(newcontents);
+  });
+
+  test("empty", async () => {
+    const tempdir = tempDirWithFiles("patch-test", {
+      a: {},
+      b: {},
+    });
+
+    const afolder = join(tempdir, "a");
+    const bfolder = join(tempdir, "b");
+
+    const patchfile = await makeDiff(afolder, bfolder, tempdir);
+    expect(patchfile).toBe("");
+
+    await apply(patchfile, afolder);
+
+    expect(await fs.readdir(afolder)).toEqual([]);
+  });
+
   describe("deletion", () => {
     test("simple", async () => {
       const files = {