Skip javascriptcore's first parse step for ES Modules (#15758)
pfgithub authored Jan 10, 2025
1 parent 0b9db36 commit ccc7bde
Showing 21 changed files with 1,515 additions and 189 deletions.
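The commit has no description body, so a brief note on what the diff below does: the runtime transpiler cache entry gains a fourth on-disk section, a serialized ES module record ("esm_record"), alongside the transpiled output and the sourcemap, so that JavaScriptCore can be handed the module's import/export information instead of parsing the source a second time to discover it. The sketch below is purely illustrative; the real record format lives in the new src/analyze_transpiled_module.zig, which is not rendered on this page, and the ModuleInfoSketch type and its fields are invented for the example.

```zig
const std = @import("std");

// Hypothetical sketch only: the field names and wire format are invented,
// not the format implemented in src/analyze_transpiled_module.zig.
const ModuleInfoSketch = struct {
    imports: []const []const u8,
    exports: []const []const u8,

    fn encode(self: ModuleInfoSketch, writer: anytype) !void {
        try writeList(writer, self.imports);
        try writeList(writer, self.exports);
    }

    fn writeList(writer: anytype, items: []const []const u8) !void {
        try writer.writeInt(u32, @intCast(items.len), .little);
        for (items) |item| {
            try writer.writeInt(u32, @intCast(item.len), .little);
            try writer.writeAll(item);
        }
    }
};

test "module info can be flattened to bytes for the cache" {
    const info = ModuleInfoSketch{
        .imports = &.{ "node:fs", "./util.ts" },
        .exports = &.{ "default", "helper" },
    };

    var buf: [256]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    try info.encode(stream.writer());

    // In the real cache, the serialized record is hashed and appended to the
    // cache file the same way the output code and sourcemap sections are.
    try std.testing.expect(stream.getWritten().len > 0);
}
```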
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -207,7 +207,7 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# Build bun with the local JSC build
$ bun run build:local
5 changes: 5 additions & 0 deletions build.zig
@@ -470,6 +470,11 @@ pub fn addInstallObjectFile(
name: []const u8,
out_mode: ObjectFormat,
) *Step {
if (@import("builtin").os.tag != .windows and std.posix.getenvZ("COMPILE_ERRORS_ONLY") != null) {
const failstep = b.addSystemCommand(&.{"COMPILE_ERRORS_ONLY set but there were no compile errors"});
failstep.step.dependOn(&compile.step);
return &failstep.step;
}
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
const bin = compile.getEmittedBin();
return &b.addInstallFile(switch (out_mode) {
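One way to read the new build.zig branch: when COMPILE_ERRORS_ONLY is set, the install step is replaced by a system command whose argv[0] is a human-readable message rather than a real executable, so the step always fails, but only after the compile step it depends on has succeeded. The effect is a "compile errors only" mode: you either see the compile errors or that message, and nothing is installed. The following standalone build.zig is a hypothetical, minimal reproduction of the pattern; the project name, paths, and options are invented and this is not Bun's build graph.

```zig
const std = @import("std");

// Hypothetical, minimal build.zig; "demo" and its paths are invented.
pub fn build(b: *std.Build) void {
    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = b.path("src/main.zig"),
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
    });

    if (@import("builtin").os.tag != .windows and std.posix.getenvZ("COMPILE_ERRORS_ONLY") != null) {
        // argv[0] is a message, not a real program, so this step can never
        // succeed; it only runs after `exe` has compiled, which turns a clean
        // compile into a loud, intentional failure.
        const fail = b.addSystemCommand(&.{"COMPILE_ERRORS_ONLY set but there were no compile errors"});
        fail.step.dependOn(&exe.step);
        b.getInstallStep().dependOn(&fail.step);
    } else {
        b.installArtifact(exe);
    }
}
```

Read that way, an invocation along the lines of COMPILE_ERRORS_ONLY=1 zig build (or the corresponding Bun build command) reports compile errors if there are any and otherwise fails with the message above instead of installing artifacts.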
2 changes: 2 additions & 0 deletions cmake/tools/SetupWebKit.cmake
@@ -28,6 +28,8 @@ if(WEBKIT_LOCAL)
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
)
477 changes: 477 additions & 0 deletions src/analyze_transpiled_module.zig

Large diffs are not rendered by default.

85 changes: 64 additions & 21 deletions src/bun.js/RuntimeTranspilerCache.zig
@@ -10,7 +10,8 @@
/// Version 11: Fix \uFFFF printing regression
/// Version 12: "use strict"; makes it CommonJS if we otherwise don't know which one to pick.
/// Version 13: Hoist `import.meta.require` definition, see #15738
const expected_version = 13;
/// Version 14: Include module info with an ES Module, see #15758
const expected_version = 14;

const bun = @import("root").bun;
const std = @import("std");
@@ -33,6 +34,7 @@ pub const RuntimeTranspilerCache = struct {

sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,

const seed = 42;
pub const Metadata = struct {
@@ -53,6 +55,10 @@
sourcemap_byte_length: u64 = 0,
sourcemap_hash: u64 = 0,

esm_record_byte_offset: u64 = 0,
esm_record_byte_length: u64 = 0,
esm_record_hash: u64 = 0,

pub const size = brk: {
var count: usize = 0;
const meta: Metadata = .{};
@@ -79,6 +85,10 @@
try writer.writeInt(u64, this.sourcemap_byte_offset, .little);
try writer.writeInt(u64, this.sourcemap_byte_length, .little);
try writer.writeInt(u64, this.sourcemap_hash, .little);

try writer.writeInt(u64, this.esm_record_byte_offset, .little);
try writer.writeInt(u64, this.esm_record_byte_length, .little);
try writer.writeInt(u64, this.esm_record_hash, .little);
}

pub fn decode(this: *Metadata, reader: anytype) !void {
@@ -103,6 +113,10 @@
this.sourcemap_byte_length = try reader.readInt(u64, .little);
this.sourcemap_hash = try reader.readInt(u64, .little);

this.esm_record_byte_offset = try reader.readInt(u64, .little);
this.esm_record_byte_length = try reader.readInt(u64, .little);
this.esm_record_hash = try reader.readInt(u64, .little);

switch (this.module_type) {
.esm, .cjs => {},
// Invalid module type
@@ -121,7 +135,7 @@
metadata: Metadata,
output_code: OutputCode = .{ .utf8 = "" },
sourcemap: []const u8 = "",

esm_record: []const u8 = "",
pub const OutputCode = union(enum) {
utf8: []const u8,
string: bun.String,
@@ -143,11 +157,14 @@
}
};

pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator) void {
pub fn deinit(this: *Entry, sourcemap_allocator: std.mem.Allocator, output_code_allocator: std.mem.Allocator, esm_record_allocator: std.mem.Allocator) void {
this.output_code.deinit(output_code_allocator);
if (this.sourcemap.len > 0) {
sourcemap_allocator.free(this.sourcemap);
}
if (this.esm_record.len > 0) {
esm_record_allocator.free(this.esm_record);
}
}

pub fn save(
@@ -157,6 +174,7 @@
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
output_code: OutputCode,
exports_kind: bun.JSAst.ExportsKind,
) !void {
@@ -202,6 +220,8 @@
.output_byte_offset = Metadata.size,
.output_byte_length = output_bytes.len,
.sourcemap_byte_offset = Metadata.size + output_bytes.len,
.esm_record_byte_offset = Metadata.size + output_bytes.len + sourcemap.len,
.esm_record_byte_length = esm_record.len,
};

metadata.output_hash = hash(output_bytes);
@@ -220,20 +240,26 @@
break :brk metadata_buf[0..metadata_stream.pos];
};

const vecs: []const bun.PlatformIOVecConst = if (output_bytes.len > 0)
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(output_bytes),
bun.platformIOVecConstCreate(sourcemap),
}
else
&.{
bun.platformIOVecConstCreate(metadata_bytes),
bun.platformIOVecConstCreate(sourcemap),
};
var vecs_buf: [4]bun.PlatformIOVecConst = undefined;
var vecs_i: usize = 0;
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(metadata_bytes);
vecs_i += 1;
if (output_bytes.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(output_bytes);
vecs_i += 1;
}
if (sourcemap.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(sourcemap);
vecs_i += 1;
}
if (esm_record.len > 0) {
vecs_buf[vecs_i] = bun.platformIOVecConstCreate(esm_record);
vecs_i += 1;
}
const vecs: []const bun.PlatformIOVecConst = vecs_buf[0..vecs_i];

var position: isize = 0;
const end_position = Metadata.size + output_bytes.len + sourcemap.len;
const end_position = Metadata.size + output_bytes.len + sourcemap.len + esm_record.len;

if (bun.Environment.allow_assert) {
var total: usize = 0;
@@ -243,7 +269,7 @@
}
bun.assert(end_position == total);
}
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size + esm_record.len)));

bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
while (position < end_position) {
Expand All @@ -264,6 +290,7 @@ pub const RuntimeTranspilerCache = struct {
file: std.fs.File,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !void {
const stat_size = try file.getEndPos();
if (stat_size < Metadata.size + this.metadata.output_byte_length + this.metadata.sourcemap_byte_length) {
@@ -339,6 +366,17 @@

this.sourcemap = sourcemap;
}

if (this.metadata.esm_record_byte_length > 0) {
const esm_record = try esm_record_allocator.alloc(u8, this.metadata.esm_record_byte_length);
errdefer esm_record_allocator.free(esm_record);
const read_bytes = try file.preadAll(esm_record, this.metadata.esm_record_byte_offset);
if (read_bytes != this.metadata.esm_record_byte_length) {
return error.MissingData;
}

this.esm_record = esm_record;
}
}
};

@@ -456,6 +494,7 @@
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var tracer = bun.tracy.traceNamed(@src(), "RuntimeTranspilerCache.fromFile");
defer tracer.end();
@@ -470,6 +509,7 @@
input_stat_size,
sourcemap_allocator,
output_code_allocator,
esm_record_allocator,
);
}

@@ -480,6 +520,7 @@
input_stat_size: u64,
sourcemap_allocator: std.mem.Allocator,
output_code_allocator: std.mem.Allocator,
esm_record_allocator: std.mem.Allocator,
) !Entry {
var metadata_bytes_buf: [Metadata.size * 2]u8 = undefined;
const cache_fd = try bun.sys.open(cache_file_path.sliceAssumeZ(), bun.O.RDONLY, 0).unwrap();
@@ -511,7 +552,7 @@
return error.MismatchedFeatureHash;
}

try entry.load(file, sourcemap_allocator, output_code_allocator);
try entry.load(file, sourcemap_allocator, output_code_allocator, esm_record_allocator);

return entry;
}
@@ -528,6 +569,7 @@
input_hash: u64,
features_hash: u64,
sourcemap: []const u8,
esm_record: []const u8,
source_code: bun.String,
exports_kind: bun.JSAst.ExportsKind,
) !void {
@@ -567,6 +609,7 @@
input_hash,
features_hash,
sourcemap,
esm_record,
output_code,
exports_kind,
);
@@ -600,7 +643,7 @@
parser_options.hashForRuntimeTranspiler(&features_hasher, used_jsx);
this.features_hash = features_hasher.final();

this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator) catch |err| {
this.entry = fromFile(input_hash, this.features_hash.?, source.contents.len, this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator) catch |err| {
debug("get(\"{s}\") = {s}", .{ source.path.text, @errorName(err) });
return false;
};
@@ -616,7 +659,7 @@
if (comptime bun.Environment.isDebug) {
if (!bun_debug_restore_from_cache) {
if (this.entry) |*entry| {
entry.deinit(this.sourcemap_allocator, this.output_code_allocator);
entry.deinit(this.sourcemap_allocator, this.output_code_allocator, this.esm_record_allocator);
this.entry = null;
}
}
@@ -625,7 +668,7 @@
return this.entry != null;
}

pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8) void {
pub fn put(this: *RuntimeTranspilerCache, output_code_bytes: []const u8, sourcemap: []const u8, esm_record: []const u8) void {
if (comptime !bun.FeatureFlags.runtime_transpiler_cache)
@compileError("RuntimeTranspilerCache is disabled");

@@ -636,7 +679,7 @@
const output_code = bun.String.createLatin1(output_code_bytes);
this.output_code = output_code;

toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, output_code, this.exports_kind) catch |err| {
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
debug("put() = {s}", .{@errorName(err)});
return;
};
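For the RuntimeTranspilerCache changes above: each section of the cache file is described in the metadata header by an (offset, length, hash) triple of little-endian u64 values, and the sections are written back to back after the header, with the new ES module record appended last: [metadata | output code | sourcemap | esm_record]. The sketch below restates just that bookkeeping; SectionRef and the sizes in the test are invented for illustration and are not Bun's Metadata struct.

```zig
const std = @import("std");

// Sketch (not the actual Metadata struct): each cache section, including the
// new ES module record, is tracked by an (offset, length, hash) triple,
// serialized as little-endian u64s like the fields added to Metadata above.
const SectionRef = struct {
    byte_offset: u64 = 0,
    byte_length: u64 = 0,
    hash: u64 = 0,

    fn encode(self: SectionRef, writer: anytype) !void {
        try writer.writeInt(u64, self.byte_offset, .little);
        try writer.writeInt(u64, self.byte_length, .little);
        try writer.writeInt(u64, self.hash, .little);
    }

    fn decode(reader: anytype) !SectionRef {
        return .{
            .byte_offset = try reader.readInt(u64, .little),
            .byte_length = try reader.readInt(u64, .little),
            .hash = try reader.readInt(u64, .little),
        };
    }
};

test "esm_record section bookkeeping round-trips" {
    // Layout implied by the save() hunk: [metadata | output | sourcemap | esm_record].
    const metadata_size: u64 = 256; // placeholder; the real value is Metadata.size
    const output_len: u64 = 1024;
    const sourcemap_len: u64 = 512;
    const esm_record_len: u64 = 128;

    const esm_record = SectionRef{
        .byte_offset = metadata_size + output_len + sourcemap_len,
        .byte_length = esm_record_len,
        .hash = 0xdead_beef,
    };

    var buf: [24]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    try esm_record.encode(stream.writer());

    stream.reset();
    const decoded = try SectionRef.decode(stream.reader());
    try std.testing.expectEqual(esm_record, decoded);
}
```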
