From 6e4790ceb15b50644357ec09ee66df26d8bd2c2b Mon Sep 17 00:00:00 2001 From: ShlomoCode <78599753+ShlomoCode@users.noreply.github.com> Date: Sun, 12 Oct 2025 04:49:05 +0300 Subject: [PATCH] feat: implement `bun update --recursive` --- src/cli/outdated_command.zig | 103 +------- src/cli/update_interactive_command.zig | 142 ++--------- .../PackageManager/WorkspaceHelpers.zig | 150 ++++++++++++ .../updatePackageJSONAndInstall.zig | 227 ++++++++++++++++++ 4 files changed, 396 insertions(+), 226 deletions(-) create mode 100644 src/install/PackageManager/WorkspaceHelpers.zig diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig index c3b742568df88e..35786d88601c7a 100644 --- a/src/cli/outdated_command.zig +++ b/src/cli/outdated_command.zig @@ -119,105 +119,6 @@ pub const OutdatedCommand = struct { pub fn deinit(_: @This(), _: std.mem.Allocator) void {} }; - fn getAllWorkspaces( - allocator: std.mem.Allocator, - manager: *PackageManager, - ) OOM![]const PackageID { - const lockfile = manager.lockfile; - const packages = lockfile.packages.slice(); - const pkg_resolutions = packages.items(.resolution); - - var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; - for (pkg_resolutions, 0..) |resolution, pkg_id| { - if (resolution.tag != .workspace and resolution.tag != .root) continue; - try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); - } - - return workspace_pkg_ids.toOwnedSlice(allocator); - } - - fn findMatchingWorkspaces( - allocator: std.mem.Allocator, - original_cwd: string, - manager: *PackageManager, - filters: []const string, - ) OOM![]const PackageID { - const lockfile = manager.lockfile; - const packages = lockfile.packages.slice(); - const pkg_names = packages.items(.name); - const pkg_resolutions = packages.items(.resolution); - const string_buf = lockfile.buffers.string_bytes.items; - - var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; - for (pkg_resolutions, 0..) 
|resolution, pkg_id| { - if (resolution.tag != .workspace and resolution.tag != .root) continue; - try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); - } - - var path_buf: bun.PathBuffer = undefined; - - const converted_filters = converted_filters: { - const buf = try allocator.alloc(WorkspaceFilter, filters.len); - for (filters, buf) |filter, *converted| { - converted.* = try WorkspaceFilter.init(allocator, filter, original_cwd, &path_buf); - } - break :converted_filters buf; - }; - defer { - for (converted_filters) |filter| { - filter.deinit(allocator); - } - allocator.free(converted_filters); - } - - // move all matched workspaces to front of array - var i: usize = 0; - while (i < workspace_pkg_ids.items.len) { - const workspace_pkg_id = workspace_pkg_ids.items[i]; - - const matched = matched: { - for (converted_filters) |filter| { - switch (filter) { - .path => |pattern| { - if (pattern.len == 0) continue; - const res = pkg_resolutions[workspace_pkg_id]; - - const res_path = switch (res.tag) { - .workspace => res.value.workspace.slice(string_buf), - .root => FileSystem.instance.top_level_dir, - else => unreachable, - }; - - const abs_res_path = path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &path_buf, &[_]string{res_path}, .posix); - - if (!glob.match(pattern, strings.withoutTrailingSlash(abs_res_path)).matches()) { - break :matched false; - } - }, - .name => |pattern| { - const name = pkg_names[workspace_pkg_id].slice(string_buf); - - if (!glob.match(pattern, name).matches()) { - break :matched false; - } - }, - .all => {}, - } - } - - break :matched true; - }; - - if (matched) { - i += 1; - } else { - _ = workspace_pkg_ids.swapRemove(i); - } - } - - return workspace_pkg_ids.items; - } - const GroupedOutdatedInfo = struct { package_id: PackageID, dep_id: DependencyID, @@ -712,3 +613,7 @@ const Behavior = Install.Dependency.Behavior; const PackageManager = Install.PackageManager; const WorkspaceFilter = PackageManager.WorkspaceFilter; + +const WorkspaceHelpers = @import("../install/PackageManager/WorkspaceHelpers.zig"); +const getAllWorkspaces = WorkspaceHelpers.getAllWorkspaces; +const findMatchingWorkspaces = WorkspaceHelpers.findMatchingWorkspaces; diff --git a/src/cli/update_interactive_command.zig b/src/cli/update_interactive_command.zig index 7380fce640ac5a..fe23e2fef5eab5 100644 --- a/src/cli/update_interactive_command.zig +++ b/src/cli/update_interactive_command.zig @@ -49,24 +49,6 @@ pub const UpdateInteractiveCommand = struct { // Common utility functions to reduce duplication - fn buildPackageJsonPath(root_dir: []const u8, workspace_path: []const u8, path_buf: *bun.PathBuffer) []const u8 { - if (workspace_path.len > 0) { - return bun.path.joinAbsStringBuf( - root_dir, - path_buf, - &[_]string{ workspace_path, "package.json" }, - .auto, - ); - } else { - return bun.path.joinAbsStringBuf( - root_dir, - path_buf, - &[_]string{"package.json"}, - .auto, - ); - } - } - // Helper to update a catalog entry at a specific path in the package.json AST fn savePackageJson( manager: *PackageManager, @@ -173,21 +155,21 @@ pub const UpdateInteractiveCommand = struct { // Build the package.json path for this workspace const root_dir = FileSystem.instance.top_level_dir; var path_buf: bun.PathBuffer = undefined; - const package_json_path = buildPackageJsonPath(root_dir, workspace_path, &path_buf); + const package_json_paths = buildWorkspacePackageJsonPath(root_dir, workspace_path, &path_buf); // Load and parse the package.json var package_json = switch 
(manager.workspace_package_json_cache.getWithPath( manager.allocator, manager.log, - package_json_path, + package_json_paths.path_z, .{ .guess_indentation = true }, )) { .parse_err => |err| { - Output.errGeneric("Failed to parse package.json at {s}: {s}", .{ package_json_path, @errorName(err) }); + Output.errGeneric("Failed to parse package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); continue; }, .read_err => |err| { - Output.errGeneric("Failed to read package.json at {s}: {s}", .{ package_json_path, @errorName(err) }); + Output.errGeneric("Failed to read package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); continue; }, .entry => |package_entry| package_entry, @@ -227,7 +209,7 @@ pub const UpdateInteractiveCommand = struct { // Write the updated package.json if modified if (modified) { - try savePackageJson(manager, &package_json, package_json_path); + try savePackageJson(manager, &package_json, package_json_paths.path); } } } @@ -279,21 +261,21 @@ pub const UpdateInteractiveCommand = struct { // Build the package.json path for this workspace const root_dir = FileSystem.instance.top_level_dir; var path_buf: bun.PathBuffer = undefined; - const package_json_path = buildPackageJsonPath(root_dir, workspace_path, &path_buf); + const package_json_paths = buildWorkspacePackageJsonPath(root_dir, workspace_path, &path_buf); // Load and parse the package.json properly var package_json = switch (manager.workspace_package_json_cache.getWithPath( manager.allocator, manager.log, - package_json_path, + package_json_paths.path_z, .{ .guess_indentation = true }, )) { .parse_err => |err| { - Output.errGeneric("Failed to parse package.json at {s}: {s}", .{ package_json_path, @errorName(err) }); + Output.errGeneric("Failed to parse package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); continue; }, .read_err => |err| { - Output.errGeneric("Failed to read package.json at {s}: {s}", .{ package_json_path, @errorName(err) }); + Output.errGeneric("Failed to read package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); continue; }, .entry => |entry| entry, @@ -303,7 +285,7 @@ pub const UpdateInteractiveCommand = struct { try editCatalogDefinitions(manager, updates_for_workspace.items, &package_json.root); // Save the updated package.json - try savePackageJson(manager, &package_json, package_json_path); + try savePackageJson(manager, &package_json, package_json_paths.path); } } @@ -531,105 +513,6 @@ pub const UpdateInteractiveCommand = struct { } } - fn getAllWorkspaces( - allocator: std.mem.Allocator, - manager: *PackageManager, - ) OOM![]const PackageID { - const lockfile = manager.lockfile; - const packages = lockfile.packages.slice(); - const pkg_resolutions = packages.items(.resolution); - - var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; - for (pkg_resolutions, 0..) 
|resolution, pkg_id| { - if (resolution.tag != .workspace and resolution.tag != .root) continue; - try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); - } - - return workspace_pkg_ids.toOwnedSlice(allocator); - } - - fn findMatchingWorkspaces( - allocator: std.mem.Allocator, - original_cwd: string, - manager: *PackageManager, - filters: []const string, - ) OOM![]const PackageID { - const lockfile = manager.lockfile; - const packages = lockfile.packages.slice(); - const pkg_names = packages.items(.name); - const pkg_resolutions = packages.items(.resolution); - const string_buf = lockfile.buffers.string_bytes.items; - - var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; - for (pkg_resolutions, 0..) |resolution, pkg_id| { - if (resolution.tag != .workspace and resolution.tag != .root) continue; - try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); - } - - var path_buf: bun.PathBuffer = undefined; - - const converted_filters = converted_filters: { - const buf = try allocator.alloc(WorkspaceFilter, filters.len); - for (filters, buf) |filter, *converted| { - converted.* = try WorkspaceFilter.init(allocator, filter, original_cwd, &path_buf); - } - break :converted_filters buf; - }; - defer { - for (converted_filters) |filter| { - filter.deinit(allocator); - } - allocator.free(converted_filters); - } - - // move all matched workspaces to front of array - var i: usize = 0; - while (i < workspace_pkg_ids.items.len) { - const workspace_pkg_id = workspace_pkg_ids.items[i]; - - const matched = matched: { - for (converted_filters) |filter| { - switch (filter) { - .path => |pattern| { - if (pattern.len == 0) continue; - const res = pkg_resolutions[workspace_pkg_id]; - - const res_path = switch (res.tag) { - .workspace => res.value.workspace.slice(string_buf), - .root => FileSystem.instance.top_level_dir, - else => unreachable, - }; - - const abs_res_path = path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &path_buf, &[_]string{res_path}, .posix); - - if (!glob.match(pattern, strings.withoutTrailingSlash(abs_res_path)).matches()) { - break :matched false; - } - }, - .name => |pattern| { - const name = pkg_names[workspace_pkg_id].slice(string_buf); - - if (!glob.match(pattern, name).matches()) { - break :matched false; - } - }, - .all => {}, - } - } - - break :matched true; - }; - - if (matched) { - i += 1; - } else { - _ = workspace_pkg_ids.swapRemove(i); - } - } - - return workspace_pkg_ids.items; - } - fn groupCatalogDependencies( allocator: std.mem.Allocator, packages: []OutdatedPackage, @@ -2018,3 +1901,8 @@ const Behavior = Install.Dependency.Behavior; const PackageManager = Install.PackageManager; const PackageJSONEditor = PackageManager.PackageJSONEditor; const WorkspaceFilter = PackageManager.WorkspaceFilter; + +const WorkspaceHelpers = @import("../install/PackageManager/WorkspaceHelpers.zig"); +const getAllWorkspaces = WorkspaceHelpers.getAllWorkspaces; +const findMatchingWorkspaces = WorkspaceHelpers.findMatchingWorkspaces; +const buildWorkspacePackageJsonPath = WorkspaceHelpers.buildWorkspacePackageJsonPath; diff --git a/src/install/PackageManager/WorkspaceHelpers.zig b/src/install/PackageManager/WorkspaceHelpers.zig new file mode 100644 index 00000000000000..04614206d0d84b --- /dev/null +++ b/src/install/PackageManager/WorkspaceHelpers.zig @@ -0,0 +1,150 @@ +pub fn getAllWorkspaces( + allocator: std.mem.Allocator, + manager: *PackageManager, +) OOM![]const PackageID { + const lockfile = manager.lockfile; + + if (lockfile.packages.len == 0) { + return try 
allocator.alloc(PackageID, 0); + } + + const packages = lockfile.packages.slice(); + const pkg_resolutions = packages.items(.resolution); + + var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; + for (pkg_resolutions, 0..) |resolution, pkg_id| { + if (resolution.tag != .workspace and resolution.tag != .root) continue; + try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); + } + + return workspace_pkg_ids.toOwnedSlice(allocator); +} + +pub fn findMatchingWorkspaces( + allocator: std.mem.Allocator, + original_cwd: string, + manager: *PackageManager, + filters: []const string, +) OOM![]const PackageID { + const lockfile = manager.lockfile; + + if (lockfile.packages.len == 0) { + return try allocator.alloc(PackageID, 0); + } + + const packages = lockfile.packages.slice(); + const pkg_names = packages.items(.name); + const pkg_resolutions = packages.items(.resolution); + const string_buf = lockfile.buffers.string_bytes.items; + + var workspace_pkg_ids: std.ArrayListUnmanaged(PackageID) = .{}; + for (pkg_resolutions, 0..) |resolution, pkg_id| { + if (resolution.tag != .workspace and resolution.tag != .root) continue; + try workspace_pkg_ids.append(allocator, @intCast(pkg_id)); + } + + var path_buf: bun.PathBuffer = undefined; + + const converted_filters = converted_filters: { + const buf = try allocator.alloc(PackageManager.WorkspaceFilter, filters.len); + for (filters, buf) |filter, *converted| { + converted.* = try PackageManager.WorkspaceFilter.init(allocator, filter, original_cwd, &path_buf); + } + break :converted_filters buf; + }; + defer { + for (converted_filters) |filter| { + filter.deinit(allocator); + } + allocator.free(converted_filters); + } + + // move all matched workspaces to front of array + var i: usize = 0; + while (i < workspace_pkg_ids.items.len) { + const workspace_pkg_id = workspace_pkg_ids.items[i]; + + const matched = matched: { + for (converted_filters) |filter| { + switch (filter) { + .path => |pattern| { + if (pattern.len == 0) continue; + const res = pkg_resolutions[workspace_pkg_id]; + + const res_path = switch (res.tag) { + .workspace => res.value.workspace.slice(string_buf), + .root => FileSystem.instance.top_level_dir, + else => unreachable, + }; + + const abs_res_path = path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &path_buf, &[_]string{res_path}, .posix); + + if (!glob.match(pattern, strings.withoutTrailingSlash(abs_res_path)).matches()) { + break :matched false; + } + }, + .name => |pattern| { + const name = pkg_names[workspace_pkg_id].slice(string_buf); + + if (!glob.match(pattern, name).matches()) { + break :matched false; + } + }, + .all => {}, + } + } + + break :matched true; + }; + + if (matched) { + i += 1; + } else { + _ = workspace_pkg_ids.swapRemove(i); + } + } + + return workspace_pkg_ids.toOwnedSlice(allocator); +} + +pub fn buildWorkspacePackageJsonPath( + root_dir: []const u8, + workspace_path: []const u8, + path_buf: *bun.PathBuffer, +) struct { path: []const u8, path_z: [:0]const u8 } { + const package_json_path = if (workspace_path.len > 0) + bun.path.joinAbsStringBuf( + root_dir, + path_buf, + &[_]string{ workspace_path, "package.json" }, + .auto, + ) + else + bun.path.joinAbsStringBuf( + root_dir, + path_buf, + &[_]string{"package.json"}, + .auto, + ); + + path_buf[package_json_path.len] = 0; + const package_json_path_z = path_buf[0..package_json_path.len :0]; + + return .{ + .path = package_json_path, + .path_z = package_json_path_z, + }; +} + +const std = @import("std"); +const bun = @import("bun"); +const 
strings = bun.strings; +const glob = bun.glob; +const path = bun.path; + +const OOM = bun.OOM; +const PackageID = bun.install.PackageID; +const PackageManager = bun.install.PackageManager; +const FileSystem = bun.fs.FileSystem; + +const string = []const u8; diff --git a/src/install/PackageManager/updatePackageJSONAndInstall.zig b/src/install/PackageManager/updatePackageJSONAndInstall.zig index f2932e0349b25b..69d6846eecae61 100644 --- a/src/install/PackageManager/updatePackageJSONAndInstall.zig +++ b/src/install/PackageManager/updatePackageJSONAndInstall.zig @@ -1,3 +1,165 @@ +fn updatePackageJSONForWorkspaces( + manager: *PackageManager, + ctx: Command.Context, + original_cwd: string, + workspace_pkg_ids: []const PackageID, +) !void { + const lockfile = manager.lockfile; + const packages = lockfile.packages.slice(); + const pkg_resolutions = packages.items(.resolution); + const string_buf = lockfile.buffers.string_bytes.items; + const root_dir = FileSystem.instance.top_level_dir; + + for (workspace_pkg_ids) |workspace_pkg_id| { + const workspace_resolution = pkg_resolutions[workspace_pkg_id]; + const workspace_path = if (workspace_resolution.tag == .workspace) + workspace_resolution.value.workspace.slice(string_buf) + else + ""; + + var path_buf: bun.PathBuffer = undefined; + const package_json_paths = buildWorkspacePackageJsonPath(root_dir, workspace_path, &path_buf); + + var package_json = switch (manager.workspace_package_json_cache.getWithPath( + manager.allocator, + manager.log, + package_json_paths.path_z, + .{ .guess_indentation = true }, + )) { + .parse_err => |err| { + Output.errGeneric("Failed to parse package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + continue; + }, + .read_err => |err| { + Output.errGeneric("Failed to read package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + continue; + }, + .entry => |package_entry| package_entry, + }; + + PackageJSONEditor.editUpdateNoArgs( + manager, + &package_json.root, + .{ + .exact_versions = true, + .before_install = true, + }, + ) catch |err| { + Output.errGeneric("Failed to update package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + continue; + }; + + const preserve_trailing_newline = package_json.source.contents.len > 0 and + package_json.source.contents[package_json.source.contents.len - 1] == '\n'; + + var buffer_writer = JSPrinter.BufferWriter.init(manager.allocator); + buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, package_json.source.contents.len + 1) catch |err| bun.handleOom(err); + buffer_writer.append_newline = preserve_trailing_newline; + var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer); + + _ = JSPrinter.printJSON( + @TypeOf(&package_json_writer), + &package_json_writer, + package_json.root, + &package_json.source, + .{ + .indent = package_json.indentation, + .mangled_props = null, + }, + ) catch |err| { + Output.errGeneric("Failed to serialize package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + continue; + }; + + const new_package_json_source = manager.allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero()) catch |err| bun.handleOom(err); + + const write_file = std.fs.cwd().createFile(package_json_paths.path, .{}) catch |err| { + Output.errGeneric("Failed to write package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + manager.allocator.free(new_package_json_source); + continue; + }; + defer write_file.close(); + + 
write_file.writeAll(new_package_json_source) catch |err| { + Output.errGeneric("Failed to write package.json at {s}: {s}", .{ package_json_paths.path, @errorName(err) }); + manager.allocator.free(new_package_json_source); + continue; + }; + + manager.allocator.free(new_package_json_source); + } + + manager.to_update = true; + + var root_path_buf: bun.PathBuffer = undefined; + const root_package_json_path = bun.path.joinAbsStringBuf( + root_dir, + &root_path_buf, + &[_]string{"package.json"}, + .auto, + ); + root_path_buf[root_package_json_path.len] = 0; + const root_package_json_path_z = root_path_buf[0..root_package_json_path.len :0]; + + try manager.installWithManager(ctx, root_package_json_path_z, original_cwd); + + for (workspace_pkg_ids) |workspace_pkg_id| { + const workspace_resolution = pkg_resolutions[workspace_pkg_id]; + const workspace_path = if (workspace_resolution.tag == .workspace) + workspace_resolution.value.workspace.slice(string_buf) + else + ""; + + var path_buf2: bun.PathBuffer = undefined; + const package_json_paths = buildWorkspacePackageJsonPath(root_dir, workspace_path, &path_buf2); + + var package_json = switch (manager.workspace_package_json_cache.getWithPath( + manager.allocator, + manager.log, + package_json_paths.path_z, + .{ .guess_indentation = true }, + )) { + .parse_err, .read_err => continue, + .entry => |package_entry| package_entry, + }; + + PackageJSONEditor.editUpdateNoArgs( + manager, + &package_json.root, + .{ + .exact_versions = manager.options.enable.exact_versions, + }, + ) catch continue; + + const preserve_trailing_newline = package_json.source.contents.len > 0 and + package_json.source.contents[package_json.source.contents.len - 1] == '\n'; + + var buffer_writer = JSPrinter.BufferWriter.init(manager.allocator); + buffer_writer.buffer.list.ensureTotalCapacity(manager.allocator, package_json.source.contents.len + 1) catch |err| bun.handleOom(err); + buffer_writer.append_newline = preserve_trailing_newline; + var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer); + + _ = JSPrinter.printJSON( + @TypeOf(&package_json_writer), + &package_json_writer, + package_json.root, + &package_json.source, + .{ + .indent = package_json.indentation, + .mangled_props = null, + }, + ) catch continue; + + const new_package_json_source = manager.allocator.dupe(u8, package_json_writer.ctx.writtenWithoutTrailingZero()) catch |err| bun.handleOom(err); + defer manager.allocator.free(new_package_json_source); + + const write_file = std.fs.cwd().createFile(package_json_paths.path, .{}) catch continue; + defer write_file.close(); + + write_file.writeAll(new_package_json_source) catch continue; + } +} + pub fn updatePackageJSONAndInstallWithManager( manager: *PackageManager, ctx: Command.Context, @@ -27,6 +189,64 @@ pub fn updatePackageJSONAndInstallWithManager( } } + if (manager.subcommand == .update and manager.options.positionals.len <= 1) { + if (manager.options.do.recursive or manager.options.filter_patterns.len > 0) { + const load_lockfile_result = manager.lockfile.loadFromCwd( + manager, + manager.allocator, + manager.log, + true, + ); + + manager.lockfile = switch (load_lockfile_result) { + .not_found => { + return try updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( + manager, + ctx, + original_cwd, + manager.options.positionals[1..], + &update_requests, + ); + }, + .err => { + return try updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( + manager, + ctx, + original_cwd, + manager.options.positionals[1..], + 
&update_requests, + ); + }, + .ok => |ok| ok.lockfile, + }; + } + + const workspace_pkg_ids = if (manager.options.filter_patterns.len > 0) blk: { + const filters = manager.options.filter_patterns; + break :blk findMatchingWorkspaces( + bun.default_allocator, + original_cwd, + manager, + filters, + ) catch |err| bun.handleOom(err); + } else if (manager.options.do.recursive) blk: { + break :blk bun.handleOom(getAllWorkspaces(bun.default_allocator, manager)); + } else blk: { + break :blk bun.handleOom(bun.default_allocator.alloc(PackageID, 0)); + }; + + defer bun.default_allocator.free(workspace_pkg_ids); + + if (workspace_pkg_ids.len > 1 or (workspace_pkg_ids.len == 1 and manager.options.do.recursive)) { + return try updatePackageJSONForWorkspaces( + manager, + ctx, + original_cwd, + workspace_pkg_ids, + ); + } + } + return try updatePackageJSONAndInstallWithManagerWithUpdatesAndUpdateRequests( manager, ctx, @@ -759,3 +979,10 @@ const PatchCommitResult = PackageManager.PatchCommitResult; const Subcommand = PackageManager.Subcommand; const UpdateRequest = PackageManager.UpdateRequest; const attemptToCreatePackageJSON = PackageManager.attemptToCreatePackageJSON; +const PackageID = bun.install.PackageID; +const OOM = bun.OOM; + +const WorkspaceHelpers = @import("./WorkspaceHelpers.zig"); +const getAllWorkspaces = WorkspaceHelpers.getAllWorkspaces; +const findMatchingWorkspaces = WorkspaceHelpers.findMatchingWorkspaces; +const buildWorkspacePackageJsonPath = WorkspaceHelpers.buildWorkspacePackageJsonPath;
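
Two self-contained sketches of patterns the patch leans on follow; names like
`packageJsonPathZ` and `filterInPlace` below are illustrative, not part of the
patch. For context, `updatePackageJSONForWorkspaces` makes two passes over the
matched workspaces: the first rewrites each package.json via `editUpdateNoArgs`
with `.before_install = true` so that a single root `installWithManager` can
resolve everything at once, and the second rewrites the files again with the
resolved versions, honoring `manager.options.enable.exact_versions`. With that
wiring, `bun update --recursive` updates every workspace with one install (and,
per the `filter_patterns` check, the same path presumably serves filter globs).

`buildWorkspacePackageJsonPath` returns both a plain and a sentinel-terminated
slice over the same caller-owned buffer, because
`workspace_package_json_cache.getWithPath` takes a `[:0]const u8` while the
error messages only need a `[]const u8`. A minimal sketch of that
sentinel-termination trick, with `std.fmt.bufPrint` standing in for
`bun.path.joinAbsStringBuf`:

    const std = @import("std");

    // Join a workspace path with "package.json" into `buf`, then write a 0
    // sentinel so the same bytes can also be passed to APIs that require a
    // null-terminated string. `buf` must have room for the sentinel byte.
    fn packageJsonPathZ(buf: []u8, workspace_path: []const u8) ![:0]const u8 {
        const written = if (workspace_path.len > 0)
            try std.fmt.bufPrint(buf, "{s}/package.json", .{workspace_path})
        else
            try std.fmt.bufPrint(buf, "package.json", .{});
        buf[written.len] = 0;
        return buf[0..written.len :0];
    }

    test "packageJsonPathZ" {
        var buf: [256]u8 = undefined;
        const p = try packageJsonPathZ(&buf, "packages/foo");
        try std.testing.expectEqualStrings("packages/foo/package.json", p);
        try std.testing.expectEqual(@as(u8, 0), p[p.len]);
    }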
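
`findMatchingWorkspaces` filters the candidate list in place with `swapRemove`
instead of allocating a second list. The same loop shape, reduced to a sketch
where the `keep` predicate stands in for the path/name glob matching:

    const std = @import("std");

    // In-place filter in the style of findMatchingWorkspaces: advance past
    // items the predicate keeps, swapRemove the rest. swapRemove is O(1) but
    // moves the last element into the vacated slot, so relative order is not
    // preserved -- acceptable when the result is treated as a set of IDs.
    fn filterInPlace(list: *std.ArrayListUnmanaged(u32), comptime keep: fn (u32) bool) void {
        var i: usize = 0;
        while (i < list.items.len) {
            if (keep(list.items[i])) {
                i += 1;
            } else {
                _ = list.swapRemove(i);
            }
        }
    }

    fn isEven(x: u32) bool {
        return x % 2 == 0;
    }

    test "filterInPlace" {
        var list: std.ArrayListUnmanaged(u32) = .{};
        defer list.deinit(std.testing.allocator);
        try list.appendSlice(std.testing.allocator, &.{ 1, 2, 3, 4, 5, 6 });
        filterInPlace(&list, isEven);
        // 1, 3, 5 were removed; swapRemove reordered the survivors.
        try std.testing.expectEqualSlices(u32, &.{ 6, 2, 4 }, list.items);
    }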