From 8aa451c2dcdfa6f3570c8a39fcc373e70e14b041 Mon Sep 17 00:00:00 2001 From: dave caruso Date: Fri, 29 Nov 2024 19:07:56 -0800 Subject: [PATCH] bake(dev): plugins in dev server, with other fixes (#15467) Co-authored-by: paperdave Co-authored-by: Jarred Sumner --- packages/bun-types/bun.d.ts | 5 +- src/bake/DevServer.zig | 83 ++++--- src/bake/bake.d.ts | 134 ++++++++--- src/bake/bake.zig | 110 +++++++-- src/bake/bun-framework-react/ssr.tsx | 6 +- src/bake/hmr-module.ts | 114 +++++++++- src/bake/hmr-runtime-server.ts | 10 +- src/bake/production.zig | 3 +- src/bun.js/api/JSBundler.zig | 289 ++++++++++-------------- src/bun.js/api/server.zig | 1 + src/bun.js/bindings/JSBundlerPlugin.cpp | 38 +++- src/bun.js/event_loop.zig | 2 +- src/bun.js/module_loader.zig | 3 +- src/bundler/bundle_v2.zig | 253 ++++++++++++++------- src/fs.zig | 7 + src/import_record.zig | 1 - src/js/builtins/BundlerPlugin.ts | 30 +-- src/js_parser.zig | 219 +++++++++--------- src/js_printer.zig | 22 +- src/linker.zig | 4 - src/logger.zig | 25 +- src/options.zig | 5 + src/output.zig | 2 +- src/sourcemap/sourcemap.zig | 3 + test/bake/dev-server-harness.ts | 61 ++++- test/bake/dev/bundle.test.ts | 28 ++- test/bake/dev/css.test.ts | 2 +- test/bake/dev/dev-plugins.test.ts | 149 ++++++++++++ test/bake/dev/ecosystem.test.ts | 11 + test/bake/dev/esm.test.ts | 99 +++++++- 30 files changed, 1192 insertions(+), 527 deletions(-) create mode 100644 test/bake/dev/dev-plugins.test.ts create mode 100644 test/bake/dev/ecosystem.test.ts diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index d6e7b0ee17de61..8fb2da993bae91 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -3873,7 +3873,6 @@ declare module "bun" { * The default loader for this file extension */ loader: Loader; - /** * Defer the execution of this callback until all other modules have been parsed. * @@ -3899,6 +3898,10 @@ declare module "bun" { * The namespace of the importer. */ namespace: string; + /** + * The directory to perform file-based resolutions in. + */ + resolveDir: string; /** * The kind of import this resolve is for. 
*/ diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 64821c5dbba680..63026dab4f0d8c 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -16,10 +16,11 @@ pub const Options = struct { root: []const u8, vm: *VirtualMachine, framework: bake.Framework, + bundler_options: bake.SplitBundlerOptions, // Debugging features dump_sources: ?[]const u8 = if (Environment.isDebug) ".bake-debug" else null, - dump_state_on_crash: bool = false, + dump_state_on_crash: ?bool = false, verbose_watcher: bool = false, }; @@ -93,6 +94,7 @@ generation: usize = 0, bundles_since_last_error: usize = 0, framework: bake.Framework, +bundler_options: bake.SplitBundlerOptions, // Each logical graph gets its own bundler configuration server_bundler: Bundler, client_bundler: Bundler, @@ -238,8 +240,11 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .graph_safety_lock = bun.DebugThreadLock.unlocked, .dump_dir = dump_dir, .framework = options.framework, + .bundler_options = options.bundler_options, .emit_visualizer_events = 0, - .has_pre_crash_handler = options.dump_state_on_crash, + .has_pre_crash_handler = bun.FeatureFlags.bake_debugging_features and + options.dump_state_on_crash orelse + bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"), .css_files = .{}, .route_js_payloads = .{}, // .assets = .{}, @@ -307,7 +312,8 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { } dev.framework = dev.framework.resolve(&dev.server_bundler.resolver, &dev.client_bundler.resolver, options.arena) catch { - try bake.Framework.addReactInstallCommandNote(&dev.log); + if (dev.framework.is_built_in_react) + try bake.Framework.addReactInstallCommandNote(&dev.log); return global.throwValue2(dev.log.toJSAggregateError(global, "Framework is missing required files!")); }; @@ -438,7 +444,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { // after that line. try dev.scanInitialRoutes(); - if (bun.FeatureFlags.bake_debugging_features and options.dump_state_on_crash) + if (bun.FeatureFlags.bake_debugging_features and dev.has_pre_crash_handler) try bun.crash_handler.appendPreCrashHandler(DevServer, dev, dumpStateDueToCrash); return dev; @@ -906,6 +912,7 @@ fn startAsyncBundle( heap, ); bv2.bun_watcher = dev.bun_watcher; + bv2.plugins = dev.bundler_options.plugin; bv2.asynchronous = true; { @@ -1202,10 +1209,10 @@ pub fn finalizeBundle( ); // Create an entry for this file. - const abs_path = ctx.sources[index.get()].path.text; + const key = ctx.sources[index.get()].path.keyForIncrementalGraph(); // Later code needs to retrieve the CSS content // The hack is to use `entry_point_id`, which is otherwise unused, to store an index. 
- chunk.entry_point.entry_point_id = try dev.insertOrUpdateCssAsset(abs_path, code.buffer); + chunk.entry_point.entry_point_id = try dev.insertOrUpdateCssAsset(key, code.buffer); try dev.client_graph.receiveChunk(&ctx, index, "", .css, false); @@ -1216,7 +1223,7 @@ pub fn finalizeBundle( try dev.server_graph.insertCssFileOnServer( &ctx, index, - abs_path, + key, ); } } @@ -1432,8 +1439,8 @@ pub fn finalizeBundle( try w.writeInt(u32, @intCast(css_chunks.len), .little); const sources = bv2.graph.input_files.items(.source); for (css_chunks) |chunk| { - const abs_path = sources[chunk.entry_point.source_index].path.text; - try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&bun.hash(abs_path)), .lower)); + const key = sources[chunk.entry_point.source_index].path.keyForIncrementalGraph(); + try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&bun.hash(key)), .lower)); const css_data = css_values[chunk.entry_point.entry_point_id]; try w.writeInt(u32, @intCast(css_data.len), .little); try w.writeAll(css_data); @@ -1588,12 +1595,13 @@ fn insertOrUpdateCssAsset(dev: *DevServer, abs_path: []const u8, code: []const u return @intCast(gop.index); } +/// Note: The log is not consumed here pub fn handleParseTaskFailure( dev: *DevServer, err: anyerror, graph: bake.Graph, - abs_path: []const u8, - log: *Log, + key: []const u8, + log: *const Log, ) bun.OOM!void { dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); @@ -1605,23 +1613,23 @@ pub fn handleParseTaskFailure( // TODO: this should walk up the graph one level, and queue all of these // files for re-bundling if they aren't already in the BundleV2 graph. switch (graph) { - .server, .ssr => try dev.server_graph.onFileDeleted(abs_path, log), - .client => try dev.client_graph.onFileDeleted(abs_path, log), + .server, .ssr => try dev.server_graph.onFileDeleted(key, log), + .client => try dev.client_graph.onFileDeleted(key, log), } } else { Output.prettyErrorln("Error{s} while bundling \"{s}\":", .{ if (log.errors +| log.warnings != 1) "s" else "", - dev.relativePath(abs_path), + dev.relativePath(key), }); log.print(Output.errorWriterBuffered()) catch {}; Output.flush(); // Do not index css errors - if (!bun.strings.hasSuffixComptime(abs_path, ".css")) { + if (!bun.strings.hasSuffixComptime(key, ".css")) { switch (graph) { - .server => try dev.server_graph.insertFailure(abs_path, log, false), - .ssr => try dev.server_graph.insertFailure(abs_path, log, true), - .client => try dev.client_graph.insertFailure(abs_path, log, false), + .server => try dev.server_graph.insertFailure(key, log, false), + .ssr => try dev.server_graph.insertFailure(key, log, true), + .client => try dev.client_graph.insertFailure(key, log, false), } } } @@ -1851,7 +1859,10 @@ pub fn IncrementalGraph(side: bake.Side) type { return struct { // Unless otherwise mentioned, all data structures use DevServer's allocator. - /// Key contents are owned by `default_allocator` + /// Keys are absolute paths for the "file" namespace, or the + /// pretty-formatted path value that appear in imports. Absolute paths + /// are stored so the watcher can quickly query and invalidate them. + /// Key slices are owned by `default_allocator` bundled_files: bun.StringArrayHashMapUnmanaged(File), /// Track bools for files which are "stale", meaning they should be /// re-bundled before being used. 
Resizing this is usually deferred @@ -2034,7 +2045,8 @@ pub fn IncrementalGraph(side: bake.Side) type { const dev = g.owner(); dev.graph_safety_lock.assertLocked(); - const abs_path = ctx.sources[index.get()].path.text; + const path = ctx.sources[index.get()].path; + const key = path.keyForIncrementalGraph(); if (Environment.allow_assert) { switch (kind) { @@ -2042,7 +2054,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .js => if (bun.strings.isAllWhitespace(code)) { // Should at least contain the function wrapper bun.Output.panic("Empty chunk is impossible: {s} {s}", .{ - abs_path, + key, switch (side) { .client => "client", .server => if (is_ssr_graph) "ssr" else "server", @@ -2060,7 +2072,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const cwd = dev.root; var a: bun.PathBuffer = undefined; var b: [bun.MAX_PATH_BYTES * 2]u8 = undefined; - const rel_path = bun.path.relativeBufZ(&a, cwd, abs_path); + const rel_path = bun.path.relativeBufZ(&a, cwd, key); const size = std.mem.replacementSize(u8, rel_path, "../", "_.._/"); _ = std.mem.replace(u8, rel_path, "../", "_.._/", &b); const rel_path_escaped = b[0..size]; @@ -2073,11 +2085,11 @@ pub fn IncrementalGraph(side: bake.Side) type { }; }; - const gop = try g.bundled_files.getOrPut(dev.allocator, abs_path); + const gop = try g.bundled_files.getOrPut(dev.allocator, key); const file_index = FileIndex.init(@intCast(gop.index)); if (!gop.found_existing) { - gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + gop.key_ptr.* = try bun.default_allocator.dupe(u8, key); try g.first_dep.append(dev.allocator, .none); try g.first_import.append(dev.allocator, .none); } @@ -2117,7 +2129,7 @@ pub fn IncrementalGraph(side: bake.Side) type { gop.value_ptr.* = File.init(try std.fmt.allocPrint( dev.allocator, css_prefix ++ "/{}.css", - .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&bun.hash(abs_path)))}, + .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&bun.hash(key)))}, ), flags); } else { // The key is just the file-path @@ -2301,16 +2313,25 @@ pub fn IncrementalGraph(side: bake.Side) type { const log = bun.Output.scoped(.processChunkDependencies, false); for (ctx.import_records[index.get()].slice()) |import_record| { if (!import_record.source_index.isRuntime()) try_index_record: { + const key = import_record.path.keyForIncrementalGraph(); const imported_file_index = if (import_record.source_index.isInvalid()) - if (std.fs.path.isAbsolute(import_record.path.text)) - FileIndex.init(@intCast( - g.bundled_files.getIndex(import_record.path.text) orelse break :try_index_record, - )) - else - break :try_index_record + FileIndex.init(@intCast( + g.bundled_files.getIndex(key) orelse break :try_index_record, + )) else ctx.getCachedIndex(side, import_record.source_index).*; + if (Environment.isDebug) { + if (imported_file_index.get() > g.bundled_files.count()) { + Output.debugWarn("Invalid mapped source index {x}. {} was not inserted into IncrementalGraph", .{ + imported_file_index.get(), + bun.fmt.quote(key), + }); + Output.flush(); + continue; + } + } + if (quick_lookup.getPtr(imported_file_index)) |lookup| { // If the edge has already been seen, it will be skipped // to ensure duplicate edges never exist. 
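For context on how these options are meant to be consumed: the DevServer changes above thread `bundler_options.plugin` into each bundle, while the type additions below (`Bake.Options.plugins`, `Framework.plugins`, and `resolveDir` on resolve arguments) describe the user-facing surface. The following is a minimal usage sketch, assuming the dev server accepts the same `BunPlugin` shape as `Bun.build` and is reached through the `app` field this patch adds to `GenericServeOptions`; the `env:` namespace plugin itself is hypothetical and not part of this diff.

import type { BunPlugin } from "bun";

// Hypothetical plugin: resolves "env:NAME" imports and inlines the value.
const envPlugin: BunPlugin = {
  name: "env",
  setup(build) {
    build.onResolve({ filter: /^env:/ }, args => ({
      // The resolveDir field added in bun.d.ts above would also be visible here.
      path: args.path.slice("env:".length),
      namespace: "env",
    }));
    build.onLoad({ filter: /.*/, namespace: "env" }, args => ({
      contents: `export default ${JSON.stringify(process.env[args.path] ?? null)};`,
      loader: "js",
    }));
  },
};

Bun.serve({
  fetch: () => new Response("Not Found", { status: 404 }),
  app: {
    framework: "react",
    // Applied after framework.plugins, per the bake.d.ts comment below.
    plugins: [envPlugin],
  },
});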
diff --git a/src/bake/bake.d.ts b/src/bake/bake.d.ts index f5a6d6da90fd53..6836ec63adf1ff 100644 --- a/src/bake/bake.d.ts +++ b/src/bake/bake.d.ts @@ -9,9 +9,6 @@ declare module "bun" { declare namespace Bake { interface Options { - /** Will be replaced by fileSystemRouters */ - routes: {}[]; - /** * Bun provides built-in support for using React as a framework by passing * 'react' as the framework name. Otherwise, frameworks are config objects. @@ -24,29 +21,33 @@ declare module "bun" { framework: Framework | "react"; // Note: To contribute to 'bun-framework-react', it can be run from this file: // https://github.com/oven-sh/bun/blob/main/src/bake/bun-framework-react/index.ts - /** - * A subset of the options from Bun.build can be configured. Keep in mind, - * your framework may set different defaults. + * A subset of the options from Bun.build can be configured. While the framework + * can also set these options, this property overrides and merges with them. * * @default {} */ bundlerOptions?: BundlerOptions | undefined; + /** + * These plugins are applied after `framework.plugins` + */ + plugins?: BunPlugin[] | undefined; } /** Bake only allows a subset of options from `Bun.build` */ type BuildConfigSubset = Pick< BuildConfig, - "conditions" | "plugins" | "define" | "loader" | "ignoreDCEAnnotations" | "drop" + "conditions" | "define" | "loader" | "ignoreDCEAnnotations" | "drop" // - format is not allowed because it is set to an internal "hmr" format // - entrypoints/outfile/outdir doesnt make sense to set // - disabling sourcemap is not allowed because it makes code impossible to debug // - enabling minifyIdentifiers in dev is not allowed because some generated code does not support it - // - publicPath is set elsewhere (TODO:) + // - publicPath is set by the user (TODO: add options.publicPath) // - emitDCEAnnotations is not useful // - banner and footer do not make sense in these multi-file builds // - experimentalCss cannot be disabled // - disabling external would make it exclude imported files. + // - plugins is specified in the framework object, and currently merge between client and server. // TODO: jsx customization // TODO: chunk naming @@ -114,6 +115,8 @@ declare module "bun" { * @default false */ reactFastRefresh?: boolean | ReactFastRefreshOptions | undefined; + /** Framework bundler plugins load before the user-provided ones. */ + plugins?: BunPlugin[]; // /** // * Called after the list of routes is updated. This can be used to @@ -123,6 +126,7 @@ declare module "bun" { // onRouteListUpdate?: (routes: OnRouteListUpdateItem) => void; } + /** Using `code` here will cause import resolution to happen from the root. */ type BuiltInModule = { import: string; code: string } | { import: string; path: string }; /** @@ -167,7 +171,7 @@ declare module "bun" { * where every export calls this export from `serverRuntimeImportSource`. * This is used to implement client components on the server. * - * The call is given three arguments: + * When separateSSRGraph is enabled, the call looks like: * * export const ClientComp = registerClientReference( * // A function which may be passed through, it throws an error @@ -181,6 +185,24 @@ declare module "bun" { * // name the user has given. * "ClientComp", * ); + * + * When separateSSRGraph is disabled, the call looks like: + * + * export const ClientComp = registerClientReference( + * function () { ... original user implementation here ... }, + * + * // The file path of the client-side file to import in the browser. 
+ * "/_bun/d41d8cd0.js", + * + * // The export within the client-side file to load. This is + * // not guaranteed to match the export name the user has given. + * "ClientComp", + * ); + * + * While subtle, the parameters in `separateSSRGraph` mode are opaque + * strings that have to be looked up in the server manifest. While when + * there isn't a separate SSR graph, the two parameters are the actual + * URLs to load on the client; The manifest is not required for anything. * * Additionally, the bundler will assemble a component manifest to be used * during rendering. @@ -426,6 +448,14 @@ declare module "bun" { // No exports } + interface DevServerHookEntryPoint { + default: (dev: DevServerHookAPI) => Awaitable; + } + + interface DevServerHookAPI { + // TODO: + } + /** * This object and it's children may be re-used between invocations, so it * is not safe to mutate it at all. @@ -476,12 +506,31 @@ declare module "bun" { } } - interface GenericServeOptions { + declare interface GenericServeOptions { /** Add a fullstack web app to this server using Bun Bake */ app?: Bake.Options | undefined; } + + declare interface PluginBuilder { + /** + * Inject a module into the development server's runtime, to be loaded + * before all other user code. + */ + addPreload(module: string, side: 'client' | 'server'): void; + } + + declare interface OnLoadArgs { + /** + * When using server-components, the same bundle has both client and server + * files; A single plugin can operate on files from both module graphs. + * Outside of server-components, this will be "client" when the target is + * set to "browser" and "server" otherwise. + */ + side: 'server' | 'client'; + } } +/** Available in server-side files only. */ declare module "bun:bake/server" { // NOTE: The format of these manifests will likely be customizable in the future. @@ -490,55 +539,61 @@ declare module "bun:bake/server" { * is a mapping of component IDs to the client-side file it is exported in. * The specifiers from here are to be imported in the client. * - * To perform SSR with client components, see `clientManifest` + * To perform SSR with client components, see `ssrManifest` */ - declare const serverManifest: ReactServerManifest; + declare const serverManifest: ServerManifest; /** * Entries in this manifest map from client-side files to their respective SSR * bundles. They can be loaded by `await import()` or `require()`. */ - declare const clientManifest: ReactClientManifest; + declare const ssrManifest: SSRManifest; /** (insert teaser trailer) */ declare const actionManifest: never; - declare interface ReactServerManifest { + declare interface ServerManifest { /** * Concatenation of the component file ID and the instance id with '#' * Example: 'components/Navbar.tsx#default' (dev) or 'l2#a' (prod/minified) * * The component file ID and the instance id are both passed to `registerClientReference` */ - [combinedComponentId: string]: { - /** - * The `id` in ReactClientManifest. - * Correlates but is not required to be the filename - */ - id: string; - /** - * The `name` in ReactServerManifest - * Correlates but is not required to be the export name - */ - name: string; - /** Currently not implemented; always an empty array */ - chunks: []; - }; + [combinedComponentId: string]: ServerManifestEntry; } - declare interface ReactClientManifest { - /** ReactServerManifest[...].id */ + declare interface ServerManifestEntry { + /** + * The `id` in ReactClientManifest. 
+ * Correlates but is not required to be the filename + */ + id: string; + /** + * The `name` in ReactServerManifest + * Correlates but is not required to be the export name + */ + name: string; + /** Currently not implemented; always an empty array */ + chunks: []; + } + + declare interface SSRManifest { + /** ServerManifest[...].id */ [id: string]: { - /** ReactServerManifest[...].name */ - [name: string]: { - /** Valid specifier to import */ - specifier: string; - /** Export name */ - name: string; - }; + /** ServerManifest[...].name */ + [name: string]: SSRManifestEntry; }; } + + declare interface SSRManifestEntry { + /** Valid specifier to import */ + specifier: string; + /** Export name */ + name: string; + } + } +/** Available in client-side files. */ declare module "bun:bake/client" { /** * Callback is invoked when server-side code is changed. This can be used to @@ -549,3 +604,8 @@ declare module "bun:bake/client" { */ export function onServerSideReload(cb: () => void | Promise): Promise; } + +/** Available during development */ +declare module "bun:bake/dev" { + +}; diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 7b392cf8420206..fc684376bd5ee6 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -18,13 +18,14 @@ pub const UserOptions = struct { root: []const u8, framework: Framework, bundler_options: SplitBundlerOptions, - // bundler_plugin: ?*Plugin, pub fn deinit(options: *UserOptions) void { options.arena.deinit(); options.allocations.free(); + if (options.bundler_options.plugin) |p| p.deinit(); } + /// Currently, this function must run at the top of the event loop. pub fn fromJS(config: JSValue, global: *JSC.JSGlobalObject) !UserOptions { if (!config.isObject()) { return global.throwInvalidArguments("'" ++ api_name ++ "' is not an object", .{}); @@ -35,7 +36,7 @@ pub const UserOptions = struct { var allocations = StringRefList.empty; errdefer allocations.free(); - var bundler_options: SplitBundlerOptions = .{}; + var bundler_options = SplitBundlerOptions.empty; const framework = try Framework.fromJS( try config.get(global, "framework") orelse { @@ -59,6 +60,10 @@ pub const UserOptions = struct { }, }; + if (try config.get(global, "plugins")) |plugin_array| { + try bundler_options.parsePluginArray(plugin_array, global); + } + return .{ .arena = arena, .allocations = allocations, @@ -86,11 +91,62 @@ const StringRefList = struct { pub const empty: StringRefList = .{ .strings = .{} }; }; -const SplitBundlerOptions = struct { +pub const SplitBundlerOptions = struct { + plugin: ?*Plugin = null, all: BuildConfigSubset = .{}, client: BuildConfigSubset = .{}, server: BuildConfigSubset = .{}, ssr: BuildConfigSubset = .{}, + + pub const empty: SplitBundlerOptions = .{ + .plugin = null, + .all = .{}, + .client = .{}, + .server = .{}, + .ssr = .{}, + }; + + pub fn parsePluginArray(opts: *SplitBundlerOptions, plugin_array: JSValue, global: *JSC.JSGlobalObject) !void { + const plugin = opts.plugin orelse Plugin.create(global, .bun); + opts.plugin = plugin; + const empty_object = JSValue.createEmptyObject(global, 0); + + var iter = plugin_array.arrayIterator(global); + while (iter.next()) |plugin_config| { + if (!plugin_config.isObject()) { + return global.throwInvalidArguments("Expected plugin to be an object", .{}); + } + + if (try plugin_config.getOptional(global, "name", ZigString.Slice)) |slice| { + defer slice.deinit(); + if (slice.len == 0) { + return global.throwInvalidArguments("Expected plugin to have a non-empty name", .{}); + } + } else { + return 
global.throwInvalidArguments("Expected plugin to have a name", .{}); + } + + const function = try plugin_config.getFunction(global, "setup") orelse { + return global.throwInvalidArguments("Expected plugin to have a setup() function", .{}); + }; + const plugin_result = try plugin.addPlugin(function, empty_object, .null, false, true); + if (plugin_result.asAnyPromise()) |promise| { + promise.setHandled(global.vm()); + // TODO: remove this call, replace with a promise list that must + // be resolved before the first bundle task can begin. + global.bunVM().waitForPromise(promise); + switch (promise.unwrap(global.vm(), .mark_handled)) { + .pending => unreachable, + .fulfilled => |val| { + _ = val; + }, + .rejected => |err| { + return global.throwValue2(err); + }, + } + } + } + } }; const BuildConfigSubset = struct { @@ -164,7 +220,7 @@ pub const Framework = struct { }; } - const FileSystemRouterType = struct { + pub const FileSystemRouterType = struct { root: []const u8, prefix: []const u8, entry_server: []const u8, @@ -176,12 +232,12 @@ pub const Framework = struct { allow_layouts: bool, }; - const BuiltInModule = union(enum) { + pub const BuiltInModule = union(enum) { import: []const u8, code: []const u8, }; - const ServerComponents = struct { + pub const ServerComponents = struct { separate_ssr_graph: bool = false, server_runtime_import: []const u8, // client_runtime_import: []const u8, @@ -257,7 +313,6 @@ pub const Framework = struct { bundler_options: *SplitBundlerOptions, arena: Allocator, ) !Framework { - _ = bundler_options; // autofix if (opts.isString()) { const str = try opts.toBunString2(global); defer str.deref(); @@ -330,11 +385,14 @@ pub const Framework = struct { return global.throwInvalidArguments("Missing 'framework.serverComponents.serverRuntimeImportSource'", .{}); }, ), - .server_register_client_reference = refs.track( - try sc.getOptional(global, "serverRegisterClientReferenceExport", ZigString.Slice) orelse { - return global.throwInvalidArguments("Missing 'framework.serverComponents.serverRegisterClientReferenceExport'", .{}); - }, - ), + .server_register_client_reference = if (try sc.getOptional( + global, + "serverRegisterClientReferenceExport", + ZigString.Slice, + )) |slice| + refs.track(slice) + else + "registerClientReference", }; }; const built_in_modules: bun.StringArrayHashMapUnmanaged(BuiltInModule) = built_in_modules: { @@ -406,9 +464,9 @@ pub const Framework = struct { break :exts &.{}; } } else if (exts_js.isArray()) { - var it_2 = array.arrayIterator(global); + var it_2 = exts_js.arrayIterator(global); var i_2: usize = 0; - const extensions = try arena.alloc([]const u8, len); + const extensions = try arena.alloc([]const u8, array.getLength(global)); while (it_2.next()) |array_item| : (i_2 += 1) { const slice = refs.track(try array_item.toSlice2(global, arena)); if (bun.strings.eqlComptime(slice, "*")) @@ -468,9 +526,13 @@ pub const Framework = struct { .built_in_modules = built_in_modules, }; + if (try opts.getOptional(global, "plugins", JSValue)) |plugin_array| { + try bundler_options.parsePluginArray(plugin_array, global); + } + if (try opts.getOptional(global, "bundlerOptions", JSValue)) |js_options| { - _ = js_options; // autofix - // try SplitBundlerOptions.parseInto(global, js_options, bundler_options, .root); + _ = js_options; // TODO: + // try bundler_options.parseInto(global, js_options, .root); } return framework; @@ -521,6 +583,10 @@ pub const Framework = struct { if (renderer == .server and framework.server_components != null) { try 
out.options.conditions.appendSlice(&.{"react-server"}); } + if (mode == .development) { + // Support `esm-env` package using this condition. + try out.options.conditions.appendSlice(&.{"development"}); + } out.options.production = mode != .development; @@ -534,6 +600,10 @@ pub const Framework = struct { out.options.framework = framework; + // In development mode, source maps must always be `linked` + // In production, TODO: follow user configuration + out.options.source_map = .linked; + out.configureLinker(); try out.configureDefines(); @@ -646,6 +716,14 @@ pub fn addImportMetaDefines( "import.meta.env.STATIC", Define.Data.initBoolean(mode == .production_static), ); + + if (mode != .development) { + try define.insert( + allocator, + "import.meta.hot", + Define.Data.initBoolean(false), + ); + } } pub const server_virtual_source: bun.logger.Source = .{ diff --git a/src/bake/bun-framework-react/ssr.tsx b/src/bake/bun-framework-react/ssr.tsx index 228e934fbbf48c..d42c10a2412422 100644 --- a/src/bake/bun-framework-react/ssr.tsx +++ b/src/bake/bun-framework-react/ssr.tsx @@ -2,7 +2,7 @@ // no longer set. This means we can import client components, using `react-dom` // to perform Server-side rendering (creating HTML) out of the RSC payload. import * as React from "react"; -import { clientManifest } from "bun:bake/server"; +import { ssrManifest } from "bun:bake/server"; import type { Readable } from "node:stream"; import { EventEmitter } from "node:events"; import { createFromNodeStream, type Manifest } from "react-server-dom-bun/client.node.unbundled.js"; @@ -15,7 +15,7 @@ if (!React.use) { } const createFromNodeStreamOptions: Manifest = { - moduleMap: clientManifest, + moduleMap: ssrManifest, moduleLoading: { prefix: "/" }, }; @@ -49,7 +49,7 @@ export function renderToHtml( const promise = createFromNodeStream(rscPayload, { // React takes in a manifest mapping client-side assets // to the imports needed for server-side rendering. - moduleMap: clientManifest, + moduleMap: ssrManifest, moduleLoading: { prefix: "/" }, }); // The root is this "Root" component that unwraps the streamed promise diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts index f05adb0d85b82d..15801f5031e2bb 100644 --- a/src/bake/hmr-module.ts +++ b/src/bake/hmr-module.ts @@ -8,7 +8,7 @@ export type ExportsCallbackFunction = (new_exports: any) => void; export const enum State { Loading, - Boundary, + Ready, Error, } @@ -17,6 +17,11 @@ export const enum LoadModuleType { UserDynamic, } +interface DepEntry { + _callback: ExportsCallbackFunction; + _expectedImports: string[] | undefined; +} + /** * This object is passed as the CommonJS "module", but has a bunch of * non-standard properties that are used for implementing hot-module reloading. @@ -33,7 +38,8 @@ export class HotModule { _import_meta: ImportMeta | undefined = undefined; _cached_failure: any = undefined; // modules that import THIS module - _deps: Map = new Map(); + _deps: Map = new Map(); + _onDispose: HotDisposeFunction[] | undefined = undefined; constructor(id: Id) { this.id = id; @@ -41,18 +47,27 @@ export class HotModule { require(id: Id, onReload?: ExportsCallbackFunction) { const mod = loadModule(id, LoadModuleType.UserDynamic); - mod._deps.set(this, onReload); + mod._deps.set(this, onReload ? 
{ _callback: onReload, _expectedImports: undefined } : undefined); return mod.exports; } - importSync(id: Id, onReload?: ExportsCallbackFunction) { + importSync(id: Id, onReload?: ExportsCallbackFunction, expectedImports?: string[]) { const mod = loadModule(id, LoadModuleType.AssertPresent); - // insert into the map if not present - mod._deps.set(this, onReload); + mod._deps.set(this, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined); const { exports, __esModule } = mod; - return __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports }); + const object = __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports }); + + if (expectedImports && mod._state === State.Ready) { + for (const key of expectedImports) { + if (!(key in object)) { + throw new SyntaxError(`The requested module '${id}' does not provide an export named '${key}'`); + } + } + } + return object; } + /// Equivalent to `import()` in ES modules async dynamicImport(specifier: string, opts?: ImportCallOptions) { const mod = loadModule(specifier, LoadModuleType.UserDynamic); // insert into the map if not present @@ -76,7 +91,79 @@ if (side === "server") { } function initImportMeta(m: HotModule): ImportMeta { - throw new Error("TODO: import meta object"); + return { + url: `bun://${m.id}`, + main: false, + // @ts-ignore + get hot() { + const hot = new Hot(m); + Object.defineProperty(this, "hot", { value: hot }); + return hot; + }, + }; +} + +type HotAcceptFunction = (esmExports: any | void) => void; +type HotArrayAcceptFunction = (esmExports: (any | void)[]) => void; +type HotDisposeFunction = (data: any) => void; +type HotEventHandler = (data: any) => void; + +class Hot { + private _module: HotModule; + + data = {}; + + constructor(module: HotModule) { + this._module = module; + } + + accept( + arg1: string | readonly string[] | HotAcceptFunction, + arg2: HotAcceptFunction | HotArrayAcceptFunction | undefined, + ) { + console.warn("TODO: implement ImportMetaHot.accept (called from " + JSON.stringify(this._module.id) + ")"); + } + + decline() {} // Vite: "This is currently a noop and is there for backward compatibility" + + dispose(cb: HotDisposeFunction) { + (this._module._onDispose ??= []).push(cb); + } + + prune(cb: HotDisposeFunction) { + throw new Error("TODO: implement ImportMetaHot.prune"); + } + + invalidate() { + throw new Error("TODO: implement ImportMetaHot.invalidate"); + } + + on(event: string, cb: HotEventHandler) { + if (isUnsupportedViteEventName(event)) { + throw new Error(`Unsupported event name: ${event}`); + } + + throw new Error("TODO: implement ImportMetaHot.on"); + } + + off(event: string, cb: HotEventHandler) { + throw new Error("TODO: implement ImportMetaHot.off"); + } + + send(event: string, cb: HotEventHandler) { + throw new Error("TODO: implement ImportMetaHot.send"); + } +} + +function isUnsupportedViteEventName(str: string) { + return str === 'vite:beforeUpdate' + || str === 'vite:afterUpdate' + || str === 'vite:beforeFullReload' + || str === 'vite:beforePrune' + || str === 'vite:invalidate' + || str === 'vite:error' + || str === 'vite:ws:disconnect' + || str === 'vite:ws:connect'; } /** @@ -107,6 +194,10 @@ export function loadModule(key: Id, type: LoadModuleType): HotModule try { registry.set(key, mod); load(mod); + mod._state = State.Ready; + mod._deps.forEach((entry, dep) => { + entry._callback?.(mod.exports); + }); } catch (err) { console.error(err); mod._cached_failure = err; @@ -121,11 +212,12 @@ export const getModule = 
registry.get.bind(registry); export function replaceModule(key: Id, load: ModuleLoadFunction) { const module = registry.get(key); if (module) { + module._onDispose?.forEach((cb) => cb(null)); module.exports = {}; load(module); const { exports } = module; for (const updater of module._deps.values()) { - updater?.(exports); + updater?._callback?.(exports); } } } @@ -155,14 +247,14 @@ export function replaceModules(modules: any) { } export const serverManifest = {}; -export const clientManifest = {}; +export const ssrManifest = {}; export let onServerSideReload: (() => Promise) | null = null; if (side === "server") { const server_module = new HotModule("bun:bake/server"); server_module.__esModule = true; - server_module.exports = { serverManifest, clientManifest }; + server_module.exports = { serverManifest, ssrManifest, actionManifest: null }; registry.set(server_module.id, server_module); } diff --git a/src/bake/hmr-runtime-server.ts b/src/bake/hmr-runtime-server.ts index df6bf6c07b510f..51f5dc5103e598 100644 --- a/src/bake/hmr-runtime-server.ts +++ b/src/bake/hmr-runtime-server.ts @@ -1,7 +1,7 @@ // This file is the entrypoint to the hot-module-reloading runtime. // On the server, communication is established with `server_exports`. import type { Bake } from "bun"; -import { loadModule, LoadModuleType, replaceModules, clientManifest, serverManifest } from "./hmr-module"; +import { loadModule, LoadModuleType, replaceModules, ssrManifest, serverManifest } from "./hmr-module"; if (typeof IS_BUN_DEVELOPMENT !== "boolean") { throw new Error("DCE is configured incorrectly"); @@ -74,7 +74,7 @@ server_exports = { const client = {}; for (const exportName of Object.keys(exp)) { - serverManifest[uid] = { + serverManifest[uid + '#' + exportName] = { id: uid, name: exportName, chunks: [], @@ -84,7 +84,7 @@ server_exports = { name: exportName, }; } - clientManifest[uid] = client; + ssrManifest[uid] = client; } catch (err) { console.log("caught error"); console.log(err); @@ -94,11 +94,11 @@ server_exports = { if (componentManifestDelete) { for (const fileName of componentManifestDelete) { - const client = clientManifest[fileName]; + const client = ssrManifest[fileName]; for (const exportName in client) { delete serverManifest[`${fileName}#${exportName}`]; } - delete clientManifest[fileName]; + delete ssrManifest[fileName]; } } }, diff --git a/src/bake/production.zig b/src/bake/production.zig index a8de9dc685ad92..295931103e4a2a 100644 --- a/src/bake/production.zig +++ b/src/bake/production.zig @@ -184,7 +184,8 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa bun.assert(server_bundler.env == client_bundler.env); framework.* = framework.resolve(&server_bundler.resolver, &client_bundler.resolver, allocator) catch { - try bake.Framework.addReactInstallCommandNote(server_bundler.log); + if (framework.is_built_in_react) + try bake.Framework.addReactInstallCommandNote(server_bundler.log); Output.errGeneric("Failed to resolve all imports required by the framework", .{}); Output.flush(); server_bundler.log.print(Output.errorWriter()) catch {}; diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 5374f84bebd7b8..36f9b0533e17e4 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -147,7 +147,7 @@ pub const JSBundler = struct { }; const is_last = i == length - 1; - var plugin_result = try bun_plugins.addPlugin(function, config, onstart_promise_array, is_last); + var plugin_result = try bun_plugins.addPlugin(function, config, 
onstart_promise_array, is_last, false); if (!plugin_result.isEmptyOrUndefinedOrNull()) { if (plugin_result.asAnyPromise()) |promise| { @@ -562,15 +562,12 @@ pub const JSBundler = struct { } pub const Resolve = struct { + bv2: *BundleV2, import_record: MiniImportRecord, + value: Value, - /// Null means the Resolve is aborted - completion: ?*bun.BundleV2.JSBundleCompletionTask = null, - - value: Value = .{ .pending = {} }, - - js_task: JSC.AnyTask = undefined, - task: JSC.AnyEventLoop.Task = undefined, + js_task: JSC.AnyTask, + task: JSC.AnyEventLoop.Task, pub const MiniImportRecord = struct { kind: bun.ImportKind, @@ -582,42 +579,19 @@ pub const JSBundler = struct { range: logger.Range = logger.Range.None, original_target: Target, - pub inline fn loader(_: *const MiniImportRecord) ?options.Loader { - return null; - } + // pub inline fn loader(_: *const MiniImportRecord) ?options.Loader { + // return null; + // } }; - pub fn create( - from: union(enum) { - MiniImportRecord: MiniImportRecord, - ImportRecord: struct { - importer_source_index: u32, - import_record_index: u32, - source_file: []const u8 = "", - original_target: Target, - record: *const bun.ImportRecord, - }, - }, - completion: *bun.BundleV2.JSBundleCompletionTask, - ) Resolve { - completion.ref(); - - return Resolve{ - .import_record = switch (from) { - .MiniImportRecord => from.MiniImportRecord, - .ImportRecord => |file| MiniImportRecord{ - .kind = file.record.kind, - .source_file = file.source_file, - .namespace = file.record.path.namespace, - .specifier = file.record.path.text, - .importer_source_index = file.importer_source_index, - .import_record_index = file.import_record_index, - .range = file.record.range, - .original_target = file.original_target, - }, - }, - .completion = completion, - .value = .{ .pending = {} }, + pub fn init(bv2: *bun.BundleV2, record: MiniImportRecord) Resolve { + return .{ + .bv2 = bv2, + .import_record = record, + .value = .pending, + + .task = undefined, + .js_task = undefined, }; } @@ -633,9 +607,9 @@ pub const JSBundler = struct { bun.default_allocator.free(this.namespace); } }, - no_match: void, - pending: void, - consumed: void, + no_match, + pending, + consumed, pub fn consume(this: *Value) Value { const result = this.*; @@ -659,32 +633,18 @@ pub const JSBundler = struct { pub fn deinit(this: *Resolve) void { this.value.deinit(); - if (this.completion) |completion| - completion.deref(); bun.default_allocator.destroy(this); } const AnyTask = JSC.AnyTask.New(@This(), runOnJSThread); pub fn dispatch(this: *Resolve) void { - var completion = this.completion orelse { - this.deinit(); - return; - }; - completion.ref(); - this.js_task = AnyTask.init(this); - completion.jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(this.js_task.task())); + this.bv2.jsLoopForPlugins().enqueueTaskConcurrent(JSC.ConcurrentTask.create(this.js_task.task())); } pub fn runOnJSThread(this: *Resolve) void { - var completion = this.completion orelse { - this.deinit(); - return; - }; - - completion.plugins.?.matchOnResolve( - completion.globalThis, + this.bv2.plugins.?.matchOnResolve( this.import_record.specifier, this.import_record.namespace, this.import_record.source_file, @@ -694,22 +654,19 @@ pub const JSBundler = struct { } export fn JSBundlerPlugin__onResolveAsync( - this: *Resolve, + resolve: *Resolve, _: *anyopaque, path_value: JSValue, namespace_value: JSValue, external_value: JSValue, ) void { - var completion = this.completion orelse { - this.deinit(); - return; - }; if 
(path_value.isEmptyOrUndefinedOrNull() or namespace_value.isEmptyOrUndefinedOrNull()) { - this.value = .{ .no_match = {} }; + resolve.value = .{ .no_match = {} }; } else { - const path = path_value.toSliceCloneWithAllocator(completion.globalThis, bun.default_allocator) orelse @panic("Unexpected: path is not a string"); - const namespace = namespace_value.toSliceCloneWithAllocator(completion.globalThis, bun.default_allocator) orelse @panic("Unexpected: namespace is not a string"); - this.value = .{ + const global = resolve.bv2.plugins.?.globalObject(); + const path = path_value.toSliceCloneWithAllocator(global, bun.default_allocator) orelse @panic("Unexpected: path is not a string"); + const namespace = namespace_value.toSliceCloneWithAllocator(global, bun.default_allocator) orelse @panic("Unexpected: namespace is not a string"); + resolve.value = .{ .success = .{ .path = path.slice(), .namespace = namespace.slice(), @@ -718,7 +675,7 @@ pub const JSBundler = struct { }; } - completion.bundler.onResolveAsync(this); + resolve.bv2.onResolveAsync(resolve); } comptime { @@ -729,53 +686,54 @@ pub const JSBundler = struct { const DeferredTask = bun.bundle_v2.DeferredTask; pub const Load = struct { + bv2: *BundleV2, + source_index: Index, default_loader: options.Loader, - path: []const u8 = "", - namespace: []const u8 = "", - - /// Null means the task was aborted. - completion: ?*bun.BundleV2.JSBundleCompletionTask = null, + path: []const u8, + namespace: []const u8, value: Value, - js_task: JSC.AnyTask = undefined, - task: JSC.AnyEventLoop.Task = undefined, - parse_task: *bun.ParseTask = undefined, - + js_task: JSC.AnyTask, + task: JSC.AnyEventLoop.Task, + parse_task: *bun.ParseTask, /// Faster path: skip the extra threadpool dispatch when the file is not found - was_file: bool = false, - - // We only allow the user to call defer once right now - called_defer: bool = false, + was_file: bool, + /// Defer may only be called once + called_defer: bool, const debug_deferred = bun.Output.scoped(.BUNDLER_DEFERRED, true); - pub fn create( - completion: *bun.BundleV2.JSBundleCompletionTask, - source_index: Index, - default_loader: options.Loader, - path: Fs.Path, - ) Load { - completion.ref(); - return Load{ - .source_index = source_index, - .default_loader = default_loader, - .completion = completion, - .value = .{ .pending = {} }, - .path = path.text, - .namespace = path.namespace, + pub fn init(bv2: *bun.BundleV2, parse: *bun.bundle_v2.ParseTask) Load { + return .{ + .bv2 = bv2, + .parse_task = parse, + .source_index = parse.source_index, + .default_loader = parse.path.loader(&bv2.bundler.options.loaders) orelse .js, + .value = .pending, + .path = parse.path.text, + .namespace = parse.path.namespace, + .was_file = false, + .called_defer = false, + .task = undefined, + .js_task = undefined, }; } + pub fn bakeGraph(load: *const Load) bun.bake.Graph { + return load.parse_task.known_target.bakeGraph(); + } + pub const Value = union(enum) { err: logger.Msg, success: struct { source_code: []const u8 = "", - loader: options.Loader = options.Loader.file, + loader: options.Loader = .file, }, - pending: void, - no_match: void, - consumed: void, + pending, + no_match, + /// The value has been de-initialized or left over from `consume()` + consumed, pub fn deinit(this: *Value) void { switch (this.*) { @@ -790,6 +748,9 @@ pub const JSBundler = struct { this.* = .{ .consumed = {} }; } + /// Moves the value, replacing the original with `.consumed`. 
It is + /// safe to `deinit()` the consumed value, but the memory in `err` + /// and `success` must be freed by the caller. pub fn consume(this: *Value) Value { const result = this.*; this.* = .{ .consumed = {} }; @@ -800,53 +761,40 @@ pub const JSBundler = struct { pub fn deinit(this: *Load) void { debug("Deinit Load(0{x}, {s})", .{ @intFromPtr(this), this.path }); this.value.deinit(); - if (this.completion) |completion| - completion.deref(); } const AnyTask = JSC.AnyTask.New(@This(), runOnJSThread); - pub fn runOnJSThread(this: *Load) void { - var completion: *bun.BundleV2.JSBundleCompletionTask = this.completion orelse { - this.deinit(); - return; - }; - - completion.plugins.?.matchOnLoad( - completion.globalThis, - this.path, - this.namespace, - this, - this.default_loader, + pub fn runOnJSThread(load: *Load) void { + load.bv2.plugins.?.matchOnLoad( + load.path, + load.namespace, + load, + load.default_loader, + load.bakeGraph() != .client, ); } pub fn dispatch(this: *Load) void { - var completion: *bun.BundleV2.JSBundleCompletionTask = this.completion orelse { - this.deinit(); - return; - }; - completion.ref(); - this.js_task = AnyTask.init(this); const concurrent_task = JSC.ConcurrentTask.createFrom(&this.js_task); - completion.jsc_event_loop.enqueueTaskConcurrent(concurrent_task); + this.bv2.jsLoopForPlugins().enqueueTaskConcurrent(concurrent_task); } - export fn JSBundlerPlugin__onDefer( - this: *Load, - globalObject: *JSC.JSGlobalObject, - ) JSValue { + export fn JSBundlerPlugin__onDefer(load: *Load, global: *JSC.JSGlobalObject) JSValue { + return JSC.toJSHostValue(global, load.onDefer(global)); + } + + fn onDefer(this: *Load, globalObject: *JSC.JSGlobalObject) bun.JSError!JSValue { if (this.called_defer) { - globalObject.throw("Can't call .defer() more than once within an onLoad plugin", .{}); - return .zero; + return globalObject.throw2("Can't call .defer() more than once within an onLoad plugin", .{}); } - this.called_defer = true; debug_deferred("JSBundlerPlugin__onDefer(0x{x}, {s})", .{ @intFromPtr(this), this.path }); // Notify the bundler thread about the deferral. This will decrement + // the pending item counter and increment the deferred counter. 
switch (this.parse_task.ctx.loop().*) { .js => |jsc_event_loop| { jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(this.parse_task.ctx, BundleV2.onNotifyDefer)); @@ -862,8 +810,7 @@ pub const JSBundler = struct { }, } - const promise: JSValue = if (this.completion) |c| c.plugins.?.appendDeferPromise() else return .undefined; - return promise; + return this.bv2.plugins.?.appendDeferPromise(); } export fn JSBundlerPlugin__onLoadAsync( @@ -873,22 +820,18 @@ pub const JSBundler = struct { loader_as_int: JSValue, ) void { JSC.markBinding(@src()); - var completion: *bun.BundleV2.JSBundleCompletionTask = this.completion orelse { - this.deinit(); - return; - }; if (source_code_value.isEmptyOrUndefinedOrNull() or loader_as_int.isEmptyOrUndefinedOrNull()) { this.value = .{ .no_match = {} }; if (this.was_file) { // Faster path: skip the extra threadpool dispatch - completion.bundler.graph.pool.pool.schedule(bun.ThreadPool.Batch.from(&this.parse_task.task)); + this.bv2.graph.pool.pool.schedule(bun.ThreadPool.Batch.from(&this.parse_task.task)); this.deinit(); return; } } else { const loader: Api.Loader = @enumFromInt(loader_as_int.to(u8)); - const source_code = JSC.Node.StringOrBuffer.fromJSToOwnedSlice(completion.globalThis, source_code_value, bun.default_allocator) catch + const source_code = JSC.Node.StringOrBuffer.fromJSToOwnedSlice(this.bv2.plugins.?.globalObject(), source_code_value, bun.default_allocator) catch // TODO: @panic("Unexpected: source_code is not a string"); this.value = .{ @@ -899,7 +842,7 @@ pub const JSBundler = struct { }; } - completion.bundler.onLoadAsync(this); + this.bv2.onLoadAsync(this); } comptime { @@ -909,14 +852,22 @@ pub const JSBundler = struct { pub const Plugin = opaque { extern fn JSBundlerPlugin__create(*JSC.JSGlobalObject, JSC.JSGlobalObject.BunPluginTarget) *Plugin; - pub fn create(globalObject: *JSC.JSGlobalObject, target: JSC.JSGlobalObject.BunPluginTarget) *Plugin { + pub fn create(global: *JSC.JSGlobalObject, target: JSC.JSGlobalObject.BunPluginTarget) *Plugin { JSC.markBinding(@src()); - const plugin = JSBundlerPlugin__create(globalObject, target); + const plugin = JSBundlerPlugin__create(global, target); JSC.JSValue.fromCell(plugin).protect(); return plugin; } extern fn JSBundlerPlugin__tombstone(*Plugin) void; + pub fn deinit(this: *Plugin) void { + JSC.markBinding(@src()); + JSBundlerPlugin__tombstone(this); + JSC.JSValue.fromCell(this).unprotect(); + } + + extern fn JSBundlerPlugin__globalObject(*Plugin) *JSC.JSGlobalObject; + pub const globalObject = JSBundlerPlugin__globalObject; extern fn JSBundlerPlugin__anyMatches( *Plugin, @@ -926,16 +877,15 @@ pub const JSBundler = struct { ) bool; extern fn JSBundlerPlugin__matchOnLoad( - *JSC.JSGlobalObject, *Plugin, namespaceString: *const String, path: *const String, context: *anyopaque, u8, + bool, ) void; extern fn JSBundlerPlugin__matchOnResolve( - *JSC.JSGlobalObject, *Plugin, namespaceString: *const String, path: *const String, @@ -945,11 +895,8 @@ pub const JSBundler = struct { ) void; extern fn JSBundlerPlugin__drainDeferred(*Plugin, rejected: bool) void; - extern fn JSBundlerPlugin__appendDeferPromise(*Plugin, rejected: bool) JSValue; - - pub fn appendDeferPromise(this: *Plugin) JSValue { - return JSBundlerPlugin__appendDeferPromise(this, false); - } + extern fn JSBundlerPlugin__appendDeferPromise(*Plugin) JSValue; + pub const appendDeferPromise = JSBundlerPlugin__appendDeferPromise; pub fn hasAnyMatches( this: *Plugin, @@ -972,11 +919,11 @@ pub const JSBundler = struct { pub fn 
matchOnLoad( this: *Plugin, - globalThis: *JSC.JSGlobalObject, path: []const u8, namespace: []const u8, context: *anyopaque, default_loader: options.Loader, + is_server_side: bool, ) void { JSC.markBinding(@src()); const tracer = bun.tracy.traceNamed(@src(), "JSBundler.matchOnLoad"); @@ -989,12 +936,11 @@ pub const JSBundler = struct { const path_string = bun.String.createUTF8(path); defer namespace_string.deref(); defer path_string.deref(); - JSBundlerPlugin__matchOnLoad(globalThis, this, &namespace_string, &path_string, context, @intFromEnum(default_loader)); + JSBundlerPlugin__matchOnLoad(this, &namespace_string, &path_string, context, @intFromEnum(default_loader), is_server_side); } pub fn matchOnResolve( this: *Plugin, - globalThis: *JSC.JSGlobalObject, path: []const u8, namespace: []const u8, importer: []const u8, @@ -1013,7 +959,7 @@ pub const JSBundler = struct { defer namespace_string.deref(); defer path_string.deref(); defer importer_string.deref(); - JSBundlerPlugin__matchOnResolve(globalThis, this, &namespace_string, &path_string, &importer_string, context, @intFromEnum(import_record_kind)); + JSBundlerPlugin__matchOnResolve(this, &namespace_string, &path_string, &importer_string, context, @intFromEnum(import_record_kind)); } pub fn addPlugin( @@ -1022,6 +968,7 @@ pub const JSBundler = struct { config: JSC.JSValue, onstart_promises_array: JSC.JSValue, is_last: bool, + is_bake: bool, ) !JSValue { JSC.markBinding(@src()); const tracer = bun.tracy.traceNamed(@src(), "JSBundler.addPlugin"); @@ -1032,6 +979,7 @@ pub const JSBundler = struct { config, onstart_promises_array, JSValue.jsBoolean(is_last), + JSValue.jsBoolean(is_bake), ).unwrap(); } @@ -1039,12 +987,6 @@ pub const JSBundler = struct { JSBundlerPlugin__drainDeferred(this, rejected); } - pub fn deinit(this: *Plugin) void { - JSC.markBinding(@src()); - JSBundlerPlugin__tombstone(this); - JSC.JSValue.fromCell(this).unprotect(); - } - pub fn setConfig(this: *Plugin, config: *anyopaque) void { JSC.markBinding(@src()); JSBundlerPlugin__setConfig(this, config); @@ -1058,30 +1000,39 @@ pub const JSBundler = struct { JSC.JSValue, JSC.JSValue, JSC.JSValue, + JSC.JSValue, ) JSValue.MaybeException; pub export fn JSBundlerPlugin__addError( ctx: *anyopaque, - _: *Plugin, + plugin: *Plugin, exception: JSValue, which: JSValue, ) void { switch (which.to(i32)) { 0 => { - var this: *JSBundler.Resolve = bun.cast(*Resolve, ctx); - var completion = this.completion orelse return; - this.value = .{ - .err = logger.Msg.fromJS(bun.default_allocator, completion.globalThis, this.import_record.source_file, exception) catch @panic("Out of memory in addError callback"), + const resolve: *JSBundler.Resolve = bun.cast(*Resolve, ctx); + resolve.value = .{ + .err = logger.Msg.fromJS( + bun.default_allocator, + plugin.globalObject(), + resolve.import_record.source_file, + exception, + ) catch bun.outOfMemory(), }; - completion.bundler.onResolveAsync(this); + resolve.bv2.onResolveAsync(resolve); }, 1 => { - var this: *Load = bun.cast(*Load, ctx); - var completion = this.completion orelse return; - this.value = .{ - .err = logger.Msg.fromJS(bun.default_allocator, completion.globalThis, this.path, exception) catch @panic("Out of memory in addError callback"), + const load: *Load = bun.cast(*Load, ctx); + load.value = .{ + .err = logger.Msg.fromJS( + bun.default_allocator, + plugin.globalObject(), + load.path, + exception, + ) catch bun.outOfMemory(), }; - completion.bundler.onLoadAsync(this); + load.bv2.onLoadAsync(load); }, else => @panic("invalid error 
type"), } diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index ee8699c20b31ba..6ab994b9061f23 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -6531,6 +6531,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp .arena = bake_options.arena.allocator(), .root = bake_options.root, .framework = bake_options.framework, + .bundler_options = bake_options.bundler_options, .vm = global.bunVM(), }); } else null; diff --git a/src/bun.js/bindings/JSBundlerPlugin.cpp b/src/bun.js/bindings/JSBundlerPlugin.cpp index 8ff5e9ad29996b..badf2dc59ef38b 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.cpp +++ b/src/bun.js/bindings/JSBundlerPlugin.cpp @@ -27,7 +27,7 @@ #include namespace Bun { -#define WRAP_BUNDLER_PLUGIN(argName) jsNumber(bitwise_cast(reinterpret_cast(argName))) +#define WRAP_BUNDLER_PLUGIN(argName) jsDoubleNumber(bitwise_cast(reinterpret_cast(argName))) #define UNWRAP_BUNDLER_PLUGIN(callFrame) reinterpret_cast(bitwise_cast(callFrame->argument(0).asDouble())) /// These are callbacks defined in Zig and to be run after their associated JS version is run @@ -151,6 +151,7 @@ class JSBundlerPlugin final : public JSC::JSNonFinalObject { [](auto& spaces) { return spaces.m_subspaceForBundlerPlugin.get(); }, [](auto& spaces, auto&& space) { spaces.m_subspaceForBundlerPlugin = std::forward(space); }); } + static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) { return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info()); @@ -164,12 +165,21 @@ class JSBundlerPlugin final : public JSC::JSNonFinalObject { JSC::LazyProperty onResolveFunction; JSC::LazyProperty moduleFunction; JSC::LazyProperty setupFunction; + JSC::JSGlobalObject* m_globalObject; private: - JSBundlerPlugin(JSC::VM& vm, JSC::JSGlobalObject*, JSC::Structure* structure, void* config, BunPluginTarget target, - JSBundlerPluginAddErrorCallback addError, JSBundlerPluginOnLoadAsyncCallback onLoadAsync, JSBundlerPluginOnResolveAsyncCallback onResolveAsync) + JSBundlerPlugin( + JSC::VM& vm, + JSC::JSGlobalObject* global, + JSC::Structure* structure, + void* config, + BunPluginTarget target, + JSBundlerPluginAddErrorCallback addError, + JSBundlerPluginOnLoadAsyncCallback onLoadAsync, + JSBundlerPluginOnResolveAsyncCallback onResolveAsync) : JSC::JSNonFinalObject(vm, structure) , plugin(BundlerPlugin(config, target, addError, onLoadAsync, onResolveAsync)) + , m_globalObject(global) { } @@ -221,7 +231,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_addError, (JSC::JSGlobalObject if (!thisObject->plugin.tombstoned) { thisObject->plugin.addError( UNWRAP_BUNDLER_PLUGIN(callFrame), - thisObject->plugin.config, + thisObject, JSValue::encode(callFrame->argument(1)), JSValue::encode(callFrame->argument(2))); } @@ -256,7 +266,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_onResolveAsync, (JSC::JSGlobalO return JSC::JSValue::encode(JSC::jsUndefined()); } -extern "C" JSC::EncodedJSValue JSBundlerPlugin__appendDeferPromise(Bun::JSBundlerPlugin* pluginObject, bool rejected) +extern "C" JSC::EncodedJSValue JSBundlerPlugin__appendDeferPromise(Bun::JSBundlerPlugin* pluginObject) { JSC::JSGlobalObject* globalObject = pluginObject->globalObject(); Strong strong_promise = JSC::Strong(globalObject->vm(), JSPromise::create(globalObject->vm(), globalObject->promiseStructure())); @@ -314,8 +324,9 @@ extern "C" bool JSBundlerPlugin__anyMatches(Bun::JSBundlerPlugin* pluginObject, 
return pluginObject->plugin.anyMatchesCrossThread(pluginObject->vm(), namespaceString, path, isOnLoad); } -extern "C" void JSBundlerPlugin__matchOnLoad(JSC::JSGlobalObject* globalObject, Bun::JSBundlerPlugin* plugin, const BunString* namespaceString, const BunString* path, void* context, uint8_t defaultLoaderId) +extern "C" void JSBundlerPlugin__matchOnLoad(Bun::JSBundlerPlugin* plugin, const BunString* namespaceString, const BunString* path, void* context, uint8_t defaultLoaderId, bool isServerSide) { + JSC::JSGlobalObject* globalObject = plugin->globalObject(); WTF::String namespaceStringStr = namespaceString ? namespaceString->toWTFString(BunString::ZeroCopy) : WTF::String(); WTF::String pathStr = path ? path->toWTFString(BunString::ZeroCopy) : WTF::String(); @@ -334,6 +345,7 @@ extern "C" void JSBundlerPlugin__matchOnLoad(JSC::JSGlobalObject* globalObject, arguments.append(JSC::jsString(plugin->vm(), pathStr)); arguments.append(JSC::jsString(plugin->vm(), namespaceStringStr)); arguments.append(JSC::jsNumber(defaultLoaderId)); + arguments.append(JSC::jsBoolean(isServerSide)); call(globalObject, function, callData, plugin, arguments); @@ -350,8 +362,9 @@ extern "C" void JSBundlerPlugin__matchOnLoad(JSC::JSGlobalObject* globalObject, } } -extern "C" void JSBundlerPlugin__matchOnResolve(JSC::JSGlobalObject* globalObject, Bun::JSBundlerPlugin* plugin, const BunString* namespaceString, const BunString* path, const BunString* importer, void* context, uint8_t kindId) +extern "C" void JSBundlerPlugin__matchOnResolve(Bun::JSBundlerPlugin* plugin, const BunString* namespaceString, const BunString* path, const BunString* importer, void* context, uint8_t kindId) { + JSC::JSGlobalObject* globalObject = plugin->globalObject(); WTF::String namespaceStringStr = namespaceString ? 
namespaceString->toWTFString(BunString::ZeroCopy) : WTF::String("file"_s); if (namespaceStringStr.length() == 0) { namespaceStringStr = WTF::String("file"_s); @@ -385,7 +398,7 @@ extern "C" void JSBundlerPlugin__matchOnResolve(JSC::JSGlobalObject* globalObjec if (!plugin->plugin.tombstoned) { JSBundlerPlugin__addError( context, - plugin->plugin.config, + plugin, JSC::JSValue::encode(exception), JSValue::encode(jsNumber(1))); } @@ -412,7 +425,8 @@ extern "C" JSC::EncodedJSValue JSBundlerPlugin__runSetupFunction( JSC::EncodedJSValue encodedSetupFunction, JSC::EncodedJSValue encodedConfig, JSC::EncodedJSValue encodedOnstartPromisesArray, - JSC::EncodedJSValue encodedIsLast) + JSC::EncodedJSValue encodedIsLast, + JSC::EncodedJSValue encodedIsBake) { auto& vm = plugin->vm(); auto scope = DECLARE_CATCH_SCOPE(vm); @@ -430,6 +444,7 @@ extern "C" JSC::EncodedJSValue JSBundlerPlugin__runSetupFunction( arguments.append(JSValue::decode(encodedConfig)); arguments.append(JSValue::decode(encodedOnstartPromisesArray)); arguments.append(JSValue::decode(encodedIsLast)); + arguments.append(JSValue::decode(encodedIsBake)); auto* lexicalGlobalObject = jsCast(JSValue::decode(encodedSetupFunction))->globalObject(); return JSC::JSValue::encode(JSC::call(lexicalGlobalObject, setupFunction, callData, plugin, arguments)); @@ -458,4 +473,9 @@ extern "C" void JSBundlerPlugin__tombstone(Bun::JSBundlerPlugin* plugin) plugin->plugin.tombstone(); } +extern "C" JSC::JSGlobalObject* JSBundlerPlugin__globalObject(Bun::JSBundlerPlugin* plugin) +{ + return plugin->m_globalObject; +} + } // namespace Bun diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 9f70b3a48e339b..c80fe890ea8d05 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -2107,7 +2107,7 @@ pub const AnyEventLoop = union(enum) { ) void { switch (this.*) { .js => { - unreachable; // TODO: + bun.todoPanic(@src(), "AnyEventLoop.enqueueTaskConcurrent", .{}); // const TaskType = AnyTask.New(Context, Callback); // @field(ctx, field) = TaskType.init(ctx); // var concurrent = bun.default_allocator.create(ConcurrentTask) catch unreachable; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index b3a0b91da9e4ae..df80522e280979 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -2412,7 +2412,8 @@ pub const ModuleLoader = struct { else specifier[@min(namespace.len + 1, specifier.len)..]; - return globalObject.runOnLoadPlugins(bun.String.init(namespace), bun.String.init(after_namespace), .bun) orelse return JSValue.zero; + return globalObject.runOnLoadPlugins(bun.String.init(namespace), bun.String.init(after_namespace), .bun) orelse + return JSValue.zero; } pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?ResolvedSource { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 717198d6704e2c..4d7d94e190ae5b 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -320,23 +320,48 @@ pub const ThreadPool = struct { const Watcher = bun.JSC.NewHotReloader(BundleV2, EventLoop, true); +/// This assigns a concise, predictable, and unique `.pretty` attribute to a Path. +/// DevServer relies on pretty paths for identifying modules, so they must be unique. 
fn genericPathWithPrettyInitialized(path: Fs.Path, target: options.Target, top_level_dir: string, allocator: std.mem.Allocator) !Fs.Path { // TODO: outbase var buf: bun.PathBuffer = undefined; - const rel = bun.path.relativePlatform(top_level_dir, path.text, .loose, false); - var path_clone = path; - - // stack-allocated temporary is not leaked because dupeAlloc on the path will - // move .pretty into the heap. that function also fixes some slash issues. - if (target == .bake_server_components_ssr) { - // the SSR graph needs different pretty names or else HMR mode will - // confuse the two modules. - path_clone.pretty = std.fmt.bufPrint(&buf, "ssr:{s}", .{rel}) catch buf[0..]; + if (path.isFile()) { + const rel = bun.path.relativePlatform(top_level_dir, path.text, .loose, false); + var path_clone = path; + // stack-allocated temporary is not leaked because dupeAlloc on the path will + // move .pretty into the heap. that function also fixes some slash issues. + if (target == .bake_server_components_ssr) { + // the SSR graph needs different pretty names or else HMR mode will + // confuse the two modules. + path_clone.pretty = std.fmt.bufPrint(&buf, "ssr:{s}", .{rel}) catch buf[0..]; + } else { + path_clone.pretty = rel; + } + return path_clone.dupeAllocFixPretty(allocator); } else { - path_clone.pretty = rel; + // in non-file namespaces, standard filesystem rules do not apply. + var path_clone = path; + path_clone.pretty = std.fmt.bufPrint(&buf, "{s}{}:{s}", .{ + if (target == .bake_server_components_ssr) "ssr:" else "", + // make sure that a namespace including a colon wont collide with anything + std.fmt.Formatter(fmtEscapedNamespace){ .data = path.namespace }, + path.text, + }) catch buf[0..]; + return path_clone.dupeAllocFixPretty(allocator); + } +} + +fn fmtEscapedNamespace(slice: []const u8, comptime fmt: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { + comptime bun.assert(fmt.len == 0); + var rest = slice; + while (bun.strings.indexOfChar(rest, ':')) |i| { + try w.writeAll(rest[0..i]); + try w.writeAll("::"); + rest = rest[i + 1 ..]; } - return path_clone.dupeAllocFixPretty(allocator); + try w.writeAll(rest); } + pub const BundleV2 = struct { bundler: *Bundler, /// When Server Component is enabled, this is used for the client bundles @@ -383,6 +408,21 @@ pub const BundleV2 = struct { return &this.linker.loop; } + /// Returns the JSC.EventLoop where plugin callbacks can be queued up on + pub fn jsLoopForPlugins(this: *BundleV2) *JSC.EventLoop { + bun.assert(this.plugins != null); + if (this.completion) |completion| + // From Bun.build + return completion.jsc_event_loop + else switch (this.loop().*) { + // From bake where the loop running the bundle is also the loop + // running the plugins. + .js => |jsc_event_loop| return jsc_event_loop, + // The CLI currently has no JSC event loop; for now, no plugin support + .mini => @panic("No JavaScript event loop for bundler plugins to run on"), + } + } + /// Most of the time, accessing .bundler directly is OK. 
This is only /// needed when it is important to distinct between client and server /// @@ -1191,7 +1231,7 @@ pub const BundleV2 = struct { .kind = .k_const, .decls = try G.Decl.List.fromSlice(alloc, &.{.{ .binding = Binding.alloc(alloc, B.Identifier{ - .ref = try server.newSymbol(.other, "clientManifest"), + .ref = try server.newSymbol(.other, "ssrManifest"), }, Logger.Loc.Empty), .value = server.newExpr(E.Object{ .properties = G.Property.List.fromList(client_manifest_props), @@ -1749,30 +1789,42 @@ pub const BundleV2 = struct { } }; - pub fn onLoadAsync( - this: *BundleV2, - load: *bun.JSC.API.JSBundler.Load, - ) void { - this.loop().enqueueTaskConcurrent( - bun.JSC.API.JSBundler.Load, - BundleV2, - load, - BundleV2.onLoad, - .task, - ); + pub fn onLoadAsync(this: *BundleV2, load: *bun.JSC.API.JSBundler.Load) void { + switch (this.loop().*) { + .js => |jsc_event_loop| { + jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(load, onLoadFromJsLoop)); + }, + .mini => |*mini| { + mini.enqueueTaskConcurrentWithExtraCtx( + bun.JSC.API.JSBundler.Load, + BundleV2, + load, + BundleV2.onLoad, + .task, + ); + }, + } } - pub fn onResolveAsync( - this: *BundleV2, - resolve: *bun.JSC.API.JSBundler.Resolve, - ) void { - this.loop().enqueueTaskConcurrent( - bun.JSC.API.JSBundler.Resolve, - BundleV2, - resolve, - BundleV2.onResolve, - .task, - ); + pub fn onResolveAsync(this: *BundleV2, resolve: *bun.JSC.API.JSBundler.Resolve) void { + switch (this.loop().*) { + .js => |jsc_event_loop| { + jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(resolve, onResolveFromJsLoop)); + }, + .mini => |*mini| { + mini.enqueueTaskConcurrentWithExtraCtx( + bun.JSC.API.JSBundler.Resolve, + BundleV2, + resolve, + BundleV2.onResolve, + .task, + ); + }, + } + } + + pub fn onLoadFromJsLoop(load: *bun.JSC.API.JSBundler.Load) void { + onLoad(load, load.bv2); } pub fn onLoad(load: *bun.JSC.API.JSBundler.Load, this: *BundleV2) void { @@ -1783,13 +1835,15 @@ pub const BundleV2 = struct { this.graph.heap.gc(true); } } - var log = &load.completion.?.log; + const log = this.bundler.log; + + // TODO: watcher switch (load.value.consume()) { .no_match => { + const source = &this.graph.input_files.items(.source)[load.source_index.get()]; // If it's a file namespace, we should run it through the parser like normal. // The file could be on disk. 
- const source = &this.graph.input_files.items(.source)[load.source_index.get()]; if (source.path.isFile()) { this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&load.parse_task.task)); return; @@ -1824,10 +1878,28 @@ pub const BundleV2 = struct { }; this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&parse_task.task)); }, - .err => |err| { - log.msgs.append(err) catch unreachable; - log.errors += @as(u32, @intFromBool(err.kind == .err)); - log.warnings += @as(u32, @intFromBool(err.kind == .warn)); + .err => |msg| { + if (this.bundler.options.dev_server) |dev| { + const source = &this.graph.input_files.items(.source)[load.source_index.get()]; + // A stack-allocated Log object containing the singular message + var msg_mut = msg; + const temp_log: Logger.Log = .{ + .clone_line_text = false, + .errors = @intFromBool(msg.kind == .err), + .warnings = @intFromBool(msg.kind == .warn), + .msgs = std.ArrayList(Logger.Msg).fromOwnedSlice(this.graph.allocator, (&msg_mut)[0..1]), + }; + dev.handleParseTaskFailure( + error.Plugin, + load.bakeGraph(), + source.path.keyForIncrementalGraph(), + &temp_log, + ) catch bun.outOfMemory(); + } else { + log.msgs.append(msg) catch bun.outOfMemory(); + log.errors += @intFromBool(msg.kind == .err); + log.warnings += @intFromBool(msg.kind == .warn); + } // An error occurred, prevent spinning the event loop forever this.decrementScanCounter(); @@ -1836,6 +1908,10 @@ pub const BundleV2 = struct { } } + pub fn onResolveFromJsLoop(resolve: *bun.JSC.API.JSBundler.Resolve) void { + onResolve(resolve, resolve.bv2); + } + pub fn onResolve(resolve: *bun.JSC.API.JSBundler.Resolve, this: *BundleV2) void { defer resolve.deinit(); defer this.decrementScanCounter(); @@ -1846,7 +1922,7 @@ pub const BundleV2 = struct { this.graph.heap.gc(true); } } - var log = &resolve.completion.?.log; + const log = this.bundler.log; switch (resolve.value.consume()) { .no_match => { @@ -1894,6 +1970,8 @@ pub const BundleV2 = struct { if (!existing.found_existing) { this.free_list.appendSlice(&.{ result.namespace, result.path }) catch {}; + path = this.pathWithPrettyInitialized(path, resolve.import_record.original_target) catch bun.outOfMemory(); + // We need to parse this const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len))); existing.value_ptr.* = source_index.get(); @@ -1908,7 +1986,7 @@ pub const BundleV2 = struct { .index = source_index, }, .loader = loader, - .side_effects = _resolver.SideEffects.has_side_effects, + .side_effects = .has_side_effects, }) catch unreachable; var task = bun.default_allocator.create(ParseTask) catch unreachable; task.* = ParseTask{ @@ -1921,7 +1999,7 @@ pub const BundleV2 = struct { .file = bun.invalid_fd, }, }, - .side_effects = _resolver.SideEffects.has_side_effects, + .side_effects = .has_side_effects, .jsx = this.bundlerForTarget(resolve.import_record.original_target).options.jsx, .source_index = source_index, .module_type = .unknown, @@ -2094,6 +2172,7 @@ pub const BundleV2 = struct { var js_files = try std.ArrayListUnmanaged(Index).initCapacity(this.graph.allocator, this.graph.ast.len - this.graph.css_file_count - 1); const asts = this.graph.ast.slice(); + const loaders = this.graph.input_files.items(.loader); for ( asts.items(.parts)[1..], asts.items(.import_records)[1..], @@ -2116,8 +2195,8 @@ pub const BundleV2 = struct { // Discover all CSS roots. 
for (import_records.slice()) |*record| { - if (record.tag != .css) continue; if (!record.source_index.isValid()) continue; + if (loaders[record.source_index.get()] != .css) continue; if (asts.items(.parts)[record.source_index.get()].len == 0) { record.source_index = Index.invalid; continue; @@ -2240,18 +2319,16 @@ pub const BundleV2 = struct { }); this.incrementScanCounter(); - resolve.* = JSC.API.JSBundler.Resolve.create( - .{ - .ImportRecord = .{ - .record = import_record, - .source_file = source_file, - .import_record_index = import_record_index, - .importer_source_index = source_index, - .original_target = original_target, - }, - }, - this.completion.?, - ); + resolve.* = JSC.API.JSBundler.Resolve.init(this, .{ + .kind = import_record.kind, + .source_file = source_file, + .namespace = import_record.path.namespace, + .specifier = import_record.path.text, + .importer_source_index = source_index, + .import_record_index = import_record_index, + .range = import_record.range, + .original_target = original_target, + }); resolve.dispatch(); return true; } @@ -2271,14 +2348,8 @@ pub const BundleV2 = struct { parse.path.namespace, parse.path.text, }); - const load = bun.default_allocator.create(JSC.API.JSBundler.Load) catch unreachable; - load.* = JSC.API.JSBundler.Load.create( - this.completion.?, - parse.source_index, - parse.path.loader(&this.bundler.options.loaders) orelse options.Loader.js, - parse.path, - ); - load.parse_task = parse; + const load = bun.default_allocator.create(JSC.API.JSBundler.Load) catch bun.outOfMemory(); + load.* = JSC.API.JSBundler.Load.init(this, parse); load.dispatch(); return true; } @@ -2595,7 +2666,7 @@ pub const BundleV2 = struct { import_record.path.pretty = rel; import_record.path = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory(); if (entry.kind == .css) { - import_record.tag = .css; + import_record.path.is_disabled = true; } continue; } @@ -2609,9 +2680,6 @@ pub const BundleV2 = struct { } else { import_record.source_index = Index.init(id); } - if (this.graph.input_files.items(.loader)[id] == .css) { - import_record.tag = .css; - } continue; } @@ -2652,10 +2720,6 @@ pub const BundleV2 = struct { resolve_task.tree_shaking = this.bundler.options.tree_shaking; } - if (resolve_task.loader == .css) { - import_record.tag = .css; - } - resolve_entry.value_ptr.* = resolve_task; } @@ -2876,7 +2940,7 @@ pub const BundleV2 = struct { // For files with use directives, index and prepare the other side. if (result.use_directive != .none and - ((result.use_directive == .client) == (result.ast.target == .browser))) + ((result.use_directive == .client) != (result.ast.target == .browser))) { if (result.use_directive == .server) bun.todoPanic(@src(), "\"use server\"", .{}); @@ -2909,7 +2973,7 @@ pub const BundleV2 = struct { const server_index = this.enqueueParseTask2( server_source, this.graph.input_files.items(.loader)[result.source.index.get()], - .bake_server_components_ssr, + .browser, ) catch bun.outOfMemory(); break :brk .{ server_index, Index.invalid.get() }; @@ -3795,7 +3859,13 @@ pub const ParseTask = struct { else .none; - if ((use_directive == .client and task.known_target != .bake_server_components_ssr) or + if ( + // separate_ssr_graph makes boundaries switch to client because the server file uses that generated file as input. + // this is not done when there is one server graph because it is easier for plugins to deal with. 
+ (use_directive == .client and + task.known_target != .bake_server_components_ssr and + this.ctx.framework.?.server_components.?.separate_ssr_graph) or + // set the target to the client when bundling client-side files (bundler.options.server_components and task.known_target == .browser)) { bundler = this.ctx.client_bundler; @@ -6214,11 +6284,13 @@ pub const LinkerContext = struct { var order: BabyList(Index) = .{}; const all_import_records = this.graph.ast.items(.import_records); + const all_loaders = this.parse_graph.input_files.items(.loader); const visit = struct { fn visit( c: *LinkerContext, import_records: []const BabyList(ImportRecord), + loaders: []const Loader, temp: std.mem.Allocator, visits: *BitSet, o: *BabyList(Index), @@ -6241,11 +6313,12 @@ pub const LinkerContext = struct { visit( c, import_records, + loaders, temp, visits, o, record.source_index, - record.tag == .css, + loaders[record.source_index.get()] == .css, ); } } @@ -6260,6 +6333,7 @@ pub const LinkerContext = struct { visit( this, all_import_records, + all_loaders, temp_allocator, &visited, &order, @@ -10948,6 +11022,21 @@ pub const LinkerContext = struct { .{ .binding = Binding.alloc(allocator, B.Identifier{ .ref = ast.module_ref }, Logger.Loc.Empty) }, }); const module_id = Expr.initIdentifier(ast.module_ref, Logger.Loc.Empty); + + // add a marker for the client runtime to tell that this is an ES module + if (ast.exports_kind == .esm) { + try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.SExpr, .{ + .value = Expr.assign( + Expr.init(E.Dot, .{ + .target = Expr.initIdentifier(ast.module_ref, Loc.Empty), + .name = "__esModule", + .name_loc = Loc.Empty, + }, Loc.Empty), + Expr.init(E.Boolean, .{ .value = true }, Loc.Empty), + ), + }, Loc.Empty)); + } + for (part_stmts) |stmt| { switch (stmt.data) { else => { @@ -10968,8 +11057,14 @@ pub const LinkerContext = struct { const is_bare_import = st.star_name_loc == null and st.items.len == 0 and st.default_name == null; + // CSS files and `is_disabled` records should not generate an import statement + const is_enabled = !record.path.is_disabled and if (record.source_index.isValid()) + c.parse_graph.input_files.items(.loader)[record.source_index.get()] != .css + else + true; + // module.importSync('path', (module) => ns = module, ['dep', 'etc']) - const call = if (record.tag != .css) call: { + const call = if (is_enabled) call: { const path = if (record.source_index.isValid()) c.parse_graph.input_files.items(.source)[record.source_index.get()].path else @@ -11018,9 +11113,7 @@ pub const LinkerContext = struct { }), ), }, stmt.loc); - } else ( - // CSS files just get an empty object - Expr.init(E.Object, .{}, stmt.loc)); + } else Expr.init(E.Object, .{}, stmt.loc); if (is_bare_import) { // the import value is never read diff --git a/src/fs.zig b/src/fs.zig index 11f78d85952d01..eec9ef34a85384 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -1908,6 +1908,13 @@ pub const Path = struct { pub fn isJSXFile(this: *const Path) bool { return strings.hasSuffixComptime(this.name.filename, ".jsx") or strings.hasSuffixComptime(this.name.filename, ".tsx"); } + + pub fn keyForIncrementalGraph(path: *const Path) []const u8 { + return if (path.isFile()) + path.text + else + path.pretty; + } }; // pub fn customRealpath(allocator: std.mem.Allocator, path: string) !string { diff --git a/src/import_record.zig b/src/import_record.zig index e16c6b94dfbb1b..87d1f93ac52781 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -192,7 +192,6 @@ pub const ImportRecord = struct { 
with_type_toml, with_type_file, - css, tailwind, pub fn loader(this: Tag) ?bun.options.Loader { diff --git a/src/js/builtins/BundlerPlugin.ts b/src/js/builtins/BundlerPlugin.ts index 4ca9a8264c5780..7b248081d667f5 100644 --- a/src/js/builtins/BundlerPlugin.ts +++ b/src/js/builtins/BundlerPlugin.ts @@ -1,29 +1,28 @@ import type { - AnyFunction, BuildConfig, BunPlugin, OnLoadCallback, - OnLoadResultObject, - OnLoadResultSourceCode, OnResolveCallback, PluginBuilder, PluginConstraints, } from "bun"; +type AnyFunction = (...args: any[]) => any; -// This API expects 4 functions: -// It should be generic enough to reuse for Bun.plugin() eventually, too. interface BundlerPlugin { onLoad: Map; onResolve: Map; + /** Binding to `JSBundlerPlugin__onLoadAsync` */ onLoadAsync( internalID, sourceCode: string | Uint8Array | ArrayBuffer | DataView | null, loaderKey: number | null, ): void; + /** Binding to `JSBundlerPlugin__onResolveAsync` */ onResolveAsync(internalID, a, b, c): void; - addError(internalID, error, number): void; + /** Binding to `JSBundlerPlugin__addError` */ + addError(internalID: number, error: any, which: number): void; addFilter(filter, namespace, number): void; - generateDeferPromise(): Promise; + generateDeferPromise(id: number): Promise; promises: Array> | undefined; } @@ -31,15 +30,13 @@ interface BundlerPlugin { type Setup = BunPlugin["setup"]; type MinifyObj = Exclude; interface BuildConfigExt extends BuildConfig { - // we support esbuild-style entryPoints + // we support esbuild-style 'entryPoints' capitalization entryPoints?: string[]; // plugins is guaranteed to not be null plugins: BunPlugin[]; } interface PluginBuilderExt extends PluginBuilder { - // these functions aren't implemented yet, so we dont publicly expose them resolve: AnyFunction; - onStart: AnyFunction; onEnd: AnyFunction; onDispose: AnyFunction; // we partially support initialOptions. it's read-only and a subset of @@ -55,6 +52,7 @@ export function runSetupFunction( config: BuildConfigExt, promises: Array> | undefined, is_last: boolean, + isBake: boolean, ) { this.promises = promises; var onLoadPlugins = new Map(); @@ -110,6 +108,9 @@ export function runSetupFunction( const self = this; function onStart(callback) { + if(isBake) { + throw new TypeError("onStart() is not supported in Bake yet"); + } if (!$isCallable(callback)) { throw new TypeError("callback must be a function"); } @@ -248,7 +249,7 @@ export function runOnResolvePlugins(this: BundlerPlugin, specifier, inputNamespa path: inputPath, importer, namespace: inputNamespace, - // resolveDir + resolveDir: inputNamespace === "file" ? 
require("node:path").dirname(importer) : undefined, kind, // pluginData }); @@ -334,12 +335,12 @@ export function runOnResolvePlugins(this: BundlerPlugin, specifier, inputNamespa } } -export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespace, defaultLoaderId) { +export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespace, defaultLoaderId, isServerSide: boolean) { const LOADERS_MAP = $LoaderLabelToId; const loaderName = $LoaderIdToLabel[defaultLoaderId]; const generateDefer = () => this.generateDeferPromise(internalID); - var promiseResult = (async (internalID, path, namespace, defaultLoader, generateDefer) => { + var promiseResult = (async (internalID, path, namespace, isServerSide, defaultLoader, generateDefer) => { var results = this.onLoad.$get(namespace); if (!results) { this.onLoadAsync(internalID, null, null); @@ -355,6 +356,7 @@ export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespac // pluginData loader: defaultLoader, defer: generateDefer, + side: isServerSide ? "server" : "client", }); while ( @@ -406,7 +408,7 @@ export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespac this.onLoadAsync(internalID, null, null); return null; - })(internalID, path, namespace, loaderName, generateDefer); + })(internalID, path, namespace, isServerSide, loaderName, generateDefer); while ( promiseResult && diff --git a/src/js_parser.zig b/src/js_parser.zig index 3c27753894d024..8ed4033187091d 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -8986,64 +8986,62 @@ fn NewParser_( try p.is_import_item.ensureUnusedCapacity(p.allocator, count_excluding_namespace); var remap_count: u32 = 0; // Link the default item to the namespace - if (stmt.default_name) |*name_loc| { - outer: { - const name = p.loadNameFromRef(name_loc.ref.?); - const ref = try p.declareSymbol(.import, name_loc.loc, name); - name_loc.ref = ref; - try p.is_import_item.put(p.allocator, ref, {}); + if (stmt.default_name) |*name_loc| outer: { + const name = p.loadNameFromRef(name_loc.ref.?); + const ref = try p.declareSymbol(.import, name_loc.loc, name); + name_loc.ref = ref; + try p.is_import_item.put(p.allocator, ref, {}); - // ensure every e_import_identifier holds the namespace - if (p.options.features.hot_module_reloading) { - const symbol = &p.symbols.items[ref.inner_index]; - if (symbol.namespace_alias == null) { - symbol.namespace_alias = .{ - .namespace_ref = stmt.namespace_ref, - .alias = "default", - .import_record_index = stmt.import_record_index, - }; - } + // ensure every e_import_identifier holds the namespace + if (p.options.features.hot_module_reloading) { + const symbol = &p.symbols.items[ref.inner_index]; + if (symbol.namespace_alias == null) { + symbol.namespace_alias = .{ + .namespace_ref = stmt.namespace_ref, + .alias = "default", + .import_record_index = stmt.import_record_index, + }; } + } - if (macro_remap) |*remap| { - if (remap.get("default")) |remapped_path| { - const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); - try p.macro.refs.put(ref, new_import_id); + if (macro_remap) |*remap| { + if (remap.get("default")) |remapped_path| { + const new_import_id = p.addImportRecord(.stmt, path.loc, remapped_path); + try p.macro.refs.put(ref, new_import_id); - p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace; - p.import_records.items[new_import_id].is_unused = true; - if (comptime only_scan_imports_and_do_not_visit) { - p.import_records.items[new_import_id].is_internal = true; 
- p.import_records.items[new_import_id].path.is_disabled = true; - } - stmt.default_name = null; - remap_count += 1; - break :outer; + p.import_records.items[new_import_id].path.namespace = js_ast.Macro.namespace; + p.import_records.items[new_import_id].is_unused = true; + if (comptime only_scan_imports_and_do_not_visit) { + p.import_records.items[new_import_id].is_internal = true; + p.import_records.items[new_import_id].path.is_disabled = true; } - } - - if (comptime track_symbol_usage_during_parse_pass) { - p.parse_pass_symbol_uses.put(name, .{ - .ref = ref, - .import_record_index = stmt.import_record_index, - }) catch unreachable; - } - - if (is_macro) { - try p.macro.refs.put(ref, stmt.import_record_index); stmt.default_name = null; + remap_count += 1; break :outer; } + } - if (comptime ParsePassSymbolUsageType != void) { - p.parse_pass_symbol_uses.put(name, .{ - .ref = ref, - .import_record_index = stmt.import_record_index, - }) catch unreachable; - } + if (comptime track_symbol_usage_during_parse_pass) { + p.parse_pass_symbol_uses.put(name, .{ + .ref = ref, + .import_record_index = stmt.import_record_index, + }) catch unreachable; + } + + if (is_macro) { + try p.macro.refs.put(ref, stmt.import_record_index); + stmt.default_name = null; + break :outer; + } - item_refs.putAssumeCapacity(name, name_loc.*); + if (comptime ParsePassSymbolUsageType != void) { + p.parse_pass_symbol_uses.put(name, .{ + .ref = ref, + .import_record_index = stmt.import_record_index, + }) catch unreachable; } + + item_refs.putAssumeCapacity(name, name_loc.*); } var end: usize = 0; @@ -12469,7 +12467,6 @@ fn NewParser_( fn declareSymbolMaybeGenerated(p: *P, kind: Symbol.Kind, loc: logger.Loc, name: string, comptime is_generated: bool) !Ref { // p.checkForNonBMPCodePoint(loc, name) - if (comptime !is_generated) { // Forbid declaring a symbol with a reserved word in strict mode if (p.isStrictMode() and name.ptr != arguments_str.ptr and js_lexer.StrictModeReservedWords.has(name)) { @@ -19046,16 +19043,6 @@ fn NewParser_( } }, .s_export_from => |data| { - // When HMR is enabled, we need to transform this into - // import {foo} from "./foo"; - // export {foo}; - - // From: - // export {foo as default} from './foo'; - // To: - // import {default as foo} from './foo'; - // export {foo}; - // "export {foo} from 'path'" const name = p.loadNameFromRef(data.namespace_ref); @@ -19079,7 +19066,7 @@ fn NewParser_( const _name = p.loadNameFromRef(old_ref); - const ref = try p.newSymbol(.other, _name); + const ref = try p.newSymbol(.import, _name); try p.current_scope.generated.push(p.allocator, ref); try p.recordDeclaredSymbol(ref); data.items[j] = item; @@ -19093,11 +19080,10 @@ fn NewParser_( return; } } else { - // This is a re-export and the symbols created here are used to reference for (data.items) |*item| { const _name = p.loadNameFromRef(item.name.ref.?); - const ref = try p.newSymbol(.other, _name); + const ref = try p.newSymbol(.import, _name); try p.current_scope.generated.push(p.allocator, ref); try p.recordDeclaredSymbol(ref); item.name.ref = ref; @@ -19182,6 +19168,10 @@ fn NewParser_( data.default_name = createDefaultName(p, data.value.expr.loc) catch unreachable; } + if (p.options.features.server_components.wrapsExports()) { + data.value.expr = p.wrapValueForServerComponentReference(data.value.expr, "default"); + } + // If there are lowered "using" declarations, change this into a "var" if (p.current_scope.parent == null and p.will_wrap_module_in_try_catch_for_using) { try stmts.ensureUnusedCapacity(2); @@ 
-19263,6 +19253,10 @@ fn NewParser_( data.default_name = createDefaultName(p, stmt.loc) catch unreachable; } + if (p.options.features.server_components.wrapsExports()) { + data.value = .{ .expr = p.wrapValueForServerComponentReference(p.newExpr(E.Function{ .func = func.func }, stmt.loc), "default") }; + } + stmts.append(stmt.*) catch unreachable; // if (func.func.name != null and func.func.name.?.ref != null) { @@ -19313,6 +19307,10 @@ fn NewParser_( stmts.append(stmt.*) catch {}; } + if (p.options.features.server_components.wrapsExports()) { + data.value = .{ .expr = p.wrapValueForServerComponentReference(p.newExpr(class.class, stmt.loc), "default") }; + } + return; }, else => {}, @@ -23857,7 +23855,7 @@ pub const ConvertESMExportsForHmr = struct { if (st.kind.isReassignable()) { for (st.decls.slice()) |decl| { - try ctx.visitBindingForKitModuleExports(p, decl.binding, true); + try ctx.visitBindingToExport(p, decl.binding, true); } } else { // TODO: remove this dupe @@ -23881,13 +23879,13 @@ pub const ConvertESMExportsForHmr = struct { }); } else { dupe_decls.appendAssumeCapacity(decl); - try ctx.visitBindingForKitModuleExports(p, decl.binding, false); + try ctx.visitBindingToExport(p, decl.binding, false); } }, else => { dupe_decls.appendAssumeCapacity(decl); - try ctx.visitBindingForKitModuleExports(p, decl.binding, false); + try ctx.visitBindingToExport(p, decl.binding, false); }, } } @@ -23967,24 +23965,40 @@ pub const ConvertESMExportsForHmr = struct { }, .s_export_clause => |st| { for (st.items) |item| { - try ctx.export_props.append(p.allocator, .{ - .key = Expr.init(E.String, .{ - .data = item.alias, - }, stmt.loc), - .value = Expr.initIdentifier(item.name.ref.?, item.name.loc), - }); + const ref = item.name.ref.?; + try ctx.visitRefToExport(p, ref, item.alias, item.name.loc, false); } return; // do not emit a statement here }, + .s_export_from => |st| stmt: { + for (st.items) |item| { + const ref = item.name.ref.?; + const symbol = &p.symbols.items[ref.innerIndex()]; + if (symbol.namespace_alias == null) { + symbol.namespace_alias = .{ + .namespace_ref = st.namespace_ref, + .alias = item.original_name, + .import_record_index = st.import_record_index, + }; + } + try ctx.visitRefToExport(p, ref, item.alias, item.name.loc, true); + } - .s_export_from => { - bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export {{ ... }} from'", .{}); + const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index); + if (gop.found_existing) return; + break :stmt Stmt.alloc(S.Import, .{ + .import_record_index = st.import_record_index, + .is_single_line = true, + .default_name = null, + .items = st.items, + .namespace_ref = st.namespace_ref, + .star_name_loc = null, + }, stmt.loc); }, .s_export_star => { bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export * from'", .{}); }, - // De-duplicate import statements. 
It is okay to disregard // named/default imports here as we always rewrite them as // full qualified property accesses (need to so live-bindings) @@ -23998,7 +24012,7 @@ pub const ConvertESMExportsForHmr = struct { try ctx.stmts.append(p.allocator, new_stmt); } - fn visitBindingForKitModuleExports( + fn visitBindingToExport( ctx: *ConvertESMExportsForHmr, p: anytype, binding: Binding, @@ -24007,33 +24021,41 @@ pub const ConvertESMExportsForHmr = struct { switch (binding.data) { .b_missing => {}, .b_identifier => |id| { - try ctx.visitRefForKitModuleExports(p, id.ref, binding.loc, is_live_binding); + try ctx.visitRefToExport(p, id.ref, null, binding.loc, is_live_binding); }, .b_array => |array| { for (array.items) |item| { - try ctx.visitBindingForKitModuleExports(p, item.binding, is_live_binding); + try ctx.visitBindingToExport(p, item.binding, is_live_binding); } }, .b_object => |object| { for (object.properties) |item| { - try ctx.visitBindingForKitModuleExports(p, item.value, is_live_binding); + try ctx.visitBindingToExport(p, item.value, is_live_binding); } }, } } - fn visitRefForKitModuleExports( + fn visitRefToExport( ctx: *ConvertESMExportsForHmr, p: anytype, ref: Ref, + export_symbol_name: ?[]const u8, loc: logger.Loc, - is_live_binding: bool, + is_live_binding_source: bool, ) !void { const symbol = p.symbols.items[ref.inner_index]; - const id = Expr.initIdentifier(ref, loc); - if (is_live_binding) { + const id = if (symbol.kind == .import) + Expr.init(E.ImportIdentifier, .{ .ref = ref }, loc) + else + Expr.initIdentifier(ref, loc); + if (is_live_binding_source or symbol.kind == .import) { + // TODO: instead of requiring getters for live-bindings, + // a callback propagation system should be considered. + // mostly because here, these might not even be live + // bindings, and re-exports are so, so common. 
const key = Expr.init(E.String, .{ - .data = symbol.original_name, + .data = export_symbol_name orelse symbol.original_name, }, loc); // This is technically incorrect in that we've marked this as a @@ -24058,29 +24080,12 @@ pub const ConvertESMExportsForHmr = struct { }, } }, loc), }); - // 'set abc(abc2) { abc = abc2 }' - try ctx.export_props.append(p.allocator, .{ - .kind = .set, - .key = key, - .value = Expr.init(E.Function, .{ .func = .{ - .args = try p.allocator.dupe(G.Arg, &.{.{ - .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = arg1 }, loc), - }}), - .body = .{ - .stmts = try p.allocator.dupe(Stmt, &.{ - Stmt.alloc(S.SExpr, .{ - .value = Expr.assign(id, Expr.initIdentifier(arg1, loc)), - }, loc), - }), - .loc = loc, - }, - } }, loc), - }); + // no setter is added since live bindings are read-only } else { // 'abc,' try ctx.export_props.append(p.allocator, .{ .key = Expr.init(E.String, .{ - .data = symbol.original_name, + .data = export_symbol_name orelse symbol.original_name, }, loc), .value = id, }); @@ -24089,18 +24094,6 @@ pub const ConvertESMExportsForHmr = struct { pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.Part) ![]js_ast.Part { if (ctx.export_props.items.len > 0) { - // add a marker for the client runtime to tell that this is an ES module - try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{ - .value = Expr.assign( - Expr.init(E.Dot, .{ - .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty), - .name = "__esModule", - .name_loc = logger.Loc.Empty, - }, logger.Loc.Empty), - Expr.init(E.Boolean, .{ .value = true }, logger.Loc.Empty), - ), - }, logger.Loc.Empty)); - try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{ .value = Expr.assign( Expr.init(E.Dot, .{ diff --git a/src/js_printer.zig b/src/js_printer.zig index 128cb2d5bc4c0e..2963b96e65cbca 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -6003,15 +6003,19 @@ pub fn printWithWriterAndPlatform( printer.printFnArgs(func.open_parens_loc, func.args, func.flags.contains(.has_rest_arg), false); printer.printSpace(); printer.print("{\n"); - printer.indent(); - printer.printSymbol(printer.options.commonjs_module_ref); - printer.print(".exports = "); - printer.printExpr(.{ - .data = func.body.stmts[0].data.s_lazy_export, - .loc = func.body.stmts[0].loc, - }, .comma, .{}); - printer.print("; // bun .s_lazy_export\n"); - printer.indent(); + if (func.body.stmts[0].data.s_lazy_export != .e_undefined) { + printer.indent(); + printer.printIndent(); + printer.printSymbol(printer.options.commonjs_module_ref); + printer.print(".exports = "); + printer.printExpr(.{ + .data = func.body.stmts[0].data.s_lazy_export, + .loc = func.body.stmts[0].loc, + }, .comma, .{}); + printer.print("; // bun .s_lazy_export\n"); + printer.unindent(); + } + printer.printIndent(); printer.print("}"); } printer.print(",\n"); diff --git a/src/linker.zig b/src/linker.zig index 96e538b0a464b1..f0136706d5bb69 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -281,10 +281,6 @@ pub const Linker = struct { continue; } - if (strings.hasSuffixComptime(import_record.path.text, ".css")) { - import_record.tag = .css; - } - // Resolve dynamic imports lazily for perf if (import_record.kind == .dynamic) { continue; diff --git a/src/logger.zig b/src/logger.zig index 4bd830541de7ba..77234eda667d78 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -96,13 +96,24 @@ pub const Loc = struct { }; pub const Location = struct { - file: string = "", + file: string, namespace: string = "file", - line: i32 = 1, // 
1-based - column: i32 = 0, // 0-based, in bytes - length: usize = 0, // in bytes + /// 1-based line number. + /// Line <= 0 means there is no line and column information. + // TODO: move to `bun.Ordinal` + line: i32, + // TODO: figure out how this is interpreted, convert to `bun.Ordinal` + // original docs: 0-based, in bytes. + // but there is a place where this is emitted in output, implying one based character offset + column: i32, + /// Number of bytes this location should highlight. + /// 0 to just point at a single character + length: usize = 0, + /// Text on the line, avoiding the need to refetch the source code line_text: ?string = null, + // TODO: remove this unused field suggestion: ?string = null, + // TODO: document or remove offset: usize = 0, pub fn count(this: Location, builder: *StringBuilder) void { @@ -340,7 +351,7 @@ pub const Data = struct { location.file, }); - if (location.line > -1 and location.column > -1) { + if (location.line > 0 and location.column > -1) { try to.print(comptime Output.prettyFmt(":{d}:{d}", enable_ansi_colors), .{ location.line, location.column, @@ -401,6 +412,8 @@ pub const Msg = struct { .text = try zig_exception_holder.zigException().message.toOwnedSlice(allocator), .location = Location{ .file = file, + .line = 0, + .column = 0, }, }, }; @@ -1227,7 +1240,7 @@ pub const Log = struct { ); } - pub fn print(self: *Log, to: anytype) !void { + pub fn print(self: *const Log, to: anytype) !void { return switch (Output.enable_ansi_colors) { inline else => |enable_ansi_colors| self.printWithEnableAnsiColors(to, enable_ansi_colors), }; diff --git a/src/options.zig b/src/options.zig index 654efbd7a3a928..d1f669b8214c54 100644 --- a/src/options.zig +++ b/src/options.zig @@ -1714,6 +1714,11 @@ pub const BundleOptions = struct { opts.conditions = try ESMConditions.init(allocator, opts.target.defaultConditions()); + if (bun.FeatureFlags.breaking_changes_1_2) { + // This is currently done in DevServer by default, but not in Bun.build + @compileError("if (!production) { add \"development\" condition }"); + } + if (transform.conditions.len > 0) { opts.conditions.appendSlice(transform.conditions) catch bun.outOfMemory(); } diff --git a/src/output.zig b/src/output.zig index 54197e72cd3b83..aa25b4530146d2 100644 --- a/src/output.zig +++ b/src/output.zig @@ -1045,7 +1045,7 @@ pub inline fn warn(comptime fmt: []const u8, args: anytype) void { prettyErrorln("warn: " ++ fmt, args); } -const debugWarnScope = Scoped("debug warn", false); +const debugWarnScope = Scoped("debug_warn", false); /// Print a yellow warning message, only in debug mode pub inline fn debugWarn(comptime fmt: []const u8, args: anytype) void { diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 997a41708cbeff..41ddfd67f13d67 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -607,6 +607,9 @@ pub const ParseResult = union(enum) { .location = Logger.Location{ .file = path, .offset = this.loc.toUsize(), + // TODO: populate correct line and column information + .line = -1, + .column = -1, }, .text = this.msg, }; diff --git a/test/bake/dev-server-harness.ts b/test/bake/dev-server-harness.ts index 3a6203f21ac82b..947b37d132b756 100644 --- a/test/bake/dev-server-harness.ts +++ b/test/bake/dev-server-harness.ts @@ -33,6 +33,11 @@ export interface DevServerTest { * Provide this object or `files['bun.app.ts']` for a dynamic one. 
*/ framework?: Bake.Framework | "react"; + /** + * Source code for a TSX file that `export default`s an array of BunPlugin, + * combined with the `framework` option. + */ + pluginFile?: string; /** Starting files */ files: FileObject; test: (dev: Dev) => Promise; @@ -64,6 +69,7 @@ export class Dev { fetch(url: string, init?: RequestInit) { return new DevFetchPromise((resolve, reject) => fetch(new URL(url, this.baseUrl).toString(), init).then(resolve, reject), + this ); } @@ -102,7 +108,14 @@ export class Dev { } async waitForHotReload() { - await this.output.waitForLine(/bundled route|error|reloaded/i); + const err = this.output.waitForLine(/error/i); + const success = this.output.waitForLine(/bundled route|reloaded/i); + await Promise.race([ + // On failure, give a little time in case a partial write caused a + // bundling error, and a success came in. + err.then(() => Bun.sleep(500), () => {}), + success, + ]); } async [Symbol.asyncDispose]() {} @@ -117,14 +130,30 @@ export interface Step { } class DevFetchPromise extends Promise { - expect(result: string) { + dev: Dev; + constructor(executor: (resolve: (value: Response | PromiseLike) => void, reject: (reason?: any) => void) => void, dev: Dev) { + super(executor); + this.dev = dev; + } + + expect(result: any) { + if (typeof result !== "string") { + result = JSON.stringify(result); + } return withAnnotatedStack(snapshotCallerLocation(), async () => { - const res = await this; - if (!res.ok) { - throw new Error(`Expected response to be ok, but got ${res.status} ${res.statusText}`); + try { + const res = await this; + if (!res.ok) { + throw new Error(`Expected response to be ok, but got ${res.status} ${res.statusText}`); + } + const text = (await res.text()).trim(); + expect(text).toBe(result.trim()); + } catch (err) { + if (this.dev.panicked) { + throw new Error("DevServer crashed"); + } + throw err; } - const text = (await res.text()).trim(); - expect(text).toBe(result.trim()); }); } expectNoSpaces(result: string) { @@ -281,7 +310,7 @@ class OutputLineStream extends EventEmitter { } } -export function devTest(description: string, options: DevServerTest) { +export function devTest(description: string, options: T): T { // Capture the caller name as part of the test tempdir const callerLocation = snapshotCallerLocation(); const caller = stackTraceFileName(callerLocation); @@ -293,7 +322,7 @@ export function devTest(description: string, options: DevServerTest) { // TODO: Tests are too flaky on Windows. Cannot reproduce locally. if (isWindows) { jest.test.todo(`DevServer > ${basename}.${count}: ${description}`); - return; + return options; } jest.test(`DevServer > ${basename}.${count}: ${description}`, async () => { @@ -303,16 +332,27 @@ export function devTest(description: string, options: DevServerTest) { if (!options.framework) { throw new Error("Must specify a options.framework or provide a bun.app.ts file"); } + if (options.pluginFile) { + fs.writeFileSync(path.join(root, "pluginFile.ts"), dedent(options.pluginFile)); + } fs.writeFileSync( path.join(root, "bun.app.ts"), dedent` + ${options.pluginFile ? 
+ `import plugins from './pluginFile.ts';` : "let plugins = undefined;" + } export default { app: { framework: ${JSON.stringify(options.framework)}, + plugins, }, }; `, ); + } else { + if (options.pluginFile) { + throw new Error("Cannot provide both bun.app.ts and pluginFile"); + } } fs.writeFileSync( path.join(root, "harness_start.ts"), @@ -327,7 +367,7 @@ export function devTest(description: string, options: DevServerTest) { await using devProcess = Bun.spawn({ cwd: root, - cmd: [process.execPath, "./bun.app.ts"], + cmd: [process.execPath, "./harness_start.ts"], env: mergeWindowEnvs([ bunEnv, { @@ -354,4 +394,5 @@ export function devTest(description: string, options: DevServerTest) { throw err; } }); + return options; } diff --git a/test/bake/dev/bundle.test.ts b/test/bake/dev/bundle.test.ts index eac62f124bd504..fe470080d84c8f 100644 --- a/test/bake/dev/bundle.test.ts +++ b/test/bake/dev/bundle.test.ts @@ -1,5 +1,5 @@ // Bundle tests are tests concerning bundling bugs that only occur in DevServer. -import { devTest, minimalFramework, Step } from "../dev-server-harness"; +import { devTest, minimalFramework } from "../dev-server-harness"; devTest("import identifier doesnt get renamed", { framework: minimalFramework, @@ -49,3 +49,29 @@ devTest("symbol collision with import identifier", { await dev.fetch("/").expect("Hello, 456, 987!"); }, }); +devTest("uses \"development\" condition", { + framework: minimalFramework, + files: { + "node_modules/example/package.json": JSON.stringify({ + name: "example", + version: "1.0.0", + exports: { + ".": { + development: "./development.js", + default: "./production.js", + }, + }, + }), + "node_modules/example/development.js": `export default "development";`, + "node_modules/example/production.js": `export default "production";`, + "routes/index.ts": ` + import environment from 'example'; + export default function (req, meta) { + return new Response('Environment: ' + environment); + } + `, + }, + async test(dev) { + await dev.fetch("/").expect("Environment: development"); + }, +}); diff --git a/test/bake/dev/css.test.ts b/test/bake/dev/css.test.ts index 04eb58d1f44fbe..205f01470939f1 100644 --- a/test/bake/dev/css.test.ts +++ b/test/bake/dev/css.test.ts @@ -1,5 +1,5 @@ // CSS tests concern bundling bugs with CSS files -import { devTest, minimalFramework, Step } from "../dev-server-harness"; +import { devTest, minimalFramework } from "../dev-server-harness"; // devTest("css file with syntax error does not kill old styles", { // framework: minimalFramework, diff --git a/test/bake/dev/dev-plugins.test.ts b/test/bake/dev/dev-plugins.test.ts new file mode 100644 index 00000000000000..c448cf8402e72c --- /dev/null +++ b/test/bake/dev/dev-plugins.test.ts @@ -0,0 +1,149 @@ +// CSS tests concern bundling bugs with CSS files +import { devTest, minimalFramework } from "../dev-server-harness"; + +// Note: more in depth testing of plugins is done in test/bundler/bundler_plugin.test.ts +devTest("onResolve", { + framework: minimalFramework, + pluginFile: ` + import * as path from 'path'; + export default [ + { + name: 'a', + setup(build) { + build.onResolve({ filter: /trigger/ }, (args) => { + return { path: path.join(import.meta.dirname, '/file.ts') }; + }); + }, + } + ]; + `, + files: { + "file.ts": ` + export const value = 1; + `, + "routes/index.ts": ` + import { value } from 'trigger'; + + export default function (req, meta) { + return new Response('value: ' + value); + } + `, + }, + async test(dev) { + await dev.fetch("/").expect('value: 1'); + }, +}); 
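A minimal sketch of the plugin shape these dev-server tests exercise, assuming the `resolveDir` field added to the resolve arguments and the `side` value passed to onLoad callbacks in this patch are reachable from a plugin; the `app:` prefix and every identifier below are illustrative, not part of the patch:

import type { BunPlugin } from "bun";

// Illustrative only: resolve a hypothetical "app:settings" specifier to a
// virtual module, then report which graph requested it via the `side` value
// the dev server passes to onLoad callbacks in this patch.
const sketchPlugin: BunPlugin = {
  name: "sketch",
  setup(build) {
    build.onResolve({ filter: /^app:/ }, args => {
      // `args.resolveDir` (added to the resolve arguments in this patch) is
      // the directory a file-based resolution would start from; it is only
      // meaningful when the importer lives in the "file" namespace.
      console.log("resolving", args.path, "from", args.resolveDir);
      return { path: args.path.slice("app:".length), namespace: "app-virtual" };
    });

    build.onLoad({ filter: /.*/, namespace: "app-virtual" }, (args: any) => {
      // Typed loosely because the public OnLoadArgs type may not declare
      // `side` yet; per the BundlerPlugin.ts change it is "server" or "client".
      const side = args.side ?? "unknown";
      return {
        contents: `export default ${JSON.stringify({ name: args.path, side })};`,
        loader: "ts",
      };
    });
  },
};

export default [sketchPlugin];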
+devTest("onLoad", { + framework: minimalFramework, + pluginFile: ` + import * as path from 'path'; + export default [ + { + name: 'a', + setup(build) { + build.onLoad({ filter: /trigger/ }, (args) => { + return { contents: 'export const value = 1;', loader: 'ts' }; + }); + }, + } + ]; + `, + files: { + "trigger.ts": ` + throw new Error('should not be loaded'); + `, + "routes/index.ts": ` + import { value } from '../trigger.ts'; + + export default function (req, meta) { + return new Response('value: ' + value); + } + `, + }, + async test(dev) { + await dev.fetch("/").expect('value: 1'); + await dev.fetch("/").expect('value: 1'); + await dev.fetch("/").expect('value: 1'); + }, +}); +devTest("onResolve + onLoad virtual file", { + framework: minimalFramework, + pluginFile: ` + import * as path from 'path'; + export default [ + { + name: 'a', + setup(build) { + build.onResolve({ filter: /^trigger$/ }, (args) => { + return { path: "hello.ts", namespace: "virtual" }; + }); + build.onLoad({ filter: /.*/, namespace: "virtual" }, (args) => { + return { contents: 'export default ' + JSON.stringify(args) + ';', loader: 'ts' }; + }); + }, + } + ]; + `, + files: { + // this file must not collide with the virtual file + "hello.ts": ` + export default "file-on-disk"; + `, + "routes/index.ts": ` + import disk from '../hello'; + import virtual from 'trigger'; + + export default function (req, meta) { + return Response.json([virtual, disk]); + } + `, + }, + async test(dev) { + await dev.fetch("/").expect([ + { + path: "hello.ts", + namespace: "virtual", + loader: "ts", + side: "server", + }, + "file-on-disk", + ]); + }, +}); +// devTest("onLoad with watchFile", { +// framework: minimalFramework, +// pluginFile: ` +// import * as path from 'path'; +// export default [ +// { +// name: 'a', +// setup(build) { +// let a = 0; +// build.onLoad({ filter: /trigger/ }, (args) => { +// a += 1; +// return { contents: 'export const value = ' + a + ';', loader: 'ts' }; +// }); +// }, +// } +// ]; +// `, +// files: { +// "trigger.ts": ` +// throw new Error('should not be loaded'); +// `, +// "routes/index.ts": ` +// import { value } from '../trigger.ts'; + +// export default function (req, meta) { +// return new Response('value: ' + value); +// } +// `, +// }, +// async test(dev) { +// await dev.fetch("/").expect('value: 1'); +// await dev.fetch("/").expect('value: 1'); +// await dev.write("trigger.ts", "throw new Error('should not be loaded 2');"); +// await dev.fetch("/").expect('value: 2'); +// await dev.fetch("/").expect('value: 2'); +// }, +// }); diff --git a/test/bake/dev/ecosystem.test.ts b/test/bake/dev/ecosystem.test.ts new file mode 100644 index 00000000000000..068795a8750537 --- /dev/null +++ b/test/bake/dev/ecosystem.test.ts @@ -0,0 +1,11 @@ +// these tests involve ensuring certain libraries are working correctly. it +// should be preferred to write specific tests for the bugs that these libraries +// discovered, but it easy and still a reasonable idea to just test the library +// entirely. +import { devTest } from "../dev-server-harness"; + +// TODO: svelte server component example project +// Bugs discovered thanks to Svelte: +// - Valid circular import use. +// - Re-export `.e_import_identifier`, including live bindings. 
+// TODO: - something related to the wrong push function being called \ No newline at end of file diff --git a/test/bake/dev/esm.test.ts b/test/bake/dev/esm.test.ts index bed852bc4bf086..1864c5e387d086 100644 --- a/test/bake/dev/esm.test.ts +++ b/test/bake/dev/esm.test.ts @@ -1,7 +1,7 @@ // Bundle tests are tests concerning bundling bugs that only occur in DevServer. -import { devTest, minimalFramework, Step } from "../dev-server-harness"; +import { devTest, minimalFramework } from "../dev-server-harness"; -devTest("live bindings with `var`", { +const liveBindingTest = devTest("live bindings with `var`", { framework: minimalFramework, files: { "state.ts": ` @@ -41,3 +41,98 @@ devTest("live bindings with `var`", { await dev.fetch("/").expect("Value: -2"); }, }); +devTest("live bindings through export clause", { + framework: minimalFramework, + files: { + "state.ts": ` + export var value = 0; + export function increment() { + value++; + } + `, + "proxy.ts": ` + import { value } from './state'; + export { value as live }; + `, + "routes/index.ts": ` + import { increment } from '../state'; + import { live } from '../proxy'; + export default function(req, meta) { + increment(); + return new Response('State: ' + live); + } + `, + }, + test: liveBindingTest.test, +}); +devTest("live bindings through export from", { + framework: minimalFramework, + files: { + "state.ts": ` + export var value = 0; + export function increment() { + value++; + } + `, + "proxy.ts": ` + export { value as live } from './state'; + `, + "routes/index.ts": ` + import { increment } from '../state'; + import { live } from '../proxy'; + export default function(req, meta) { + increment(); + return new Response('State: ' + live); + } + `, + }, + test: liveBindingTest.test, +}); +// devTest("live bindings through export star", { +// framework: minimalFramework, +// files: { +// "state.ts": ` +// export var value = 0; +// export function increment() { +// value++; +// } +// `, +// "proxy.ts": ` +// export * from './state'; +// `, +// "routes/index.ts": ` +// import { increment } from '../state'; +// import { live } from '../proxy'; +// export default function(req, meta) { +// increment(); +// return new Response('State: ' + live); +// } +// `, +// }, +// test: liveBindingTest.test, +// }); +devTest("export { x as y }", { + framework: minimalFramework, + files: { + "module.ts": ` + function x(value) { + return value + 1; + } + export { x as y }; + `, + "routes/index.ts": ` + import { y } from '../module'; + export default function(req, meta) { + return new Response('Value: ' + y(1)); + } + `, + }, + async test(dev) { + await dev.fetch("/").expect("Value: 2"); + await dev.patch("module.ts", { + find: "1", + replace: "2", + }); + await dev.fetch("/").expect("Value: 3"); + } +}); \ No newline at end of file
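The live-binding tests above lean on the `s_export_clause` and `s_export_from` handling added in ConvertESMExportsForHmr. A rough conceptual sketch of the resulting module shape, assumed for illustration and not the exact code the bundler emits: a re-export such as `export { value as live } from './state'` is surfaced as a getter over the imported binding, no setter is generated since re-exports are read-only, and the module object carries the `__esModule` marker that the linker now adds for ESM inputs:

// Conceptual stand-in for './state' after it has been evaluated.
const stateExports = {
  value: 0,
  increment() {
    stateExports.value++;
  },
};

// Rough model of what './proxy' exposes under the HMR transform: a getter so
// every read observes the current binding, plus the `__esModule` marker.
// No setter is emitted for the re-exported name.
const proxyExports = {
  __esModule: true,
  get live() {
    return stateExports.value;
  },
};

stateExports.increment();
console.log(proxyExports.live); // 1, the mutation made in state.ts stays visible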