Snapshot lockfiles in some tests (#10994)
dylan-conway authored May 13, 2024
1 parent dbdc376 commit 88468a2
Showing 14 changed files with 87,184 additions and 866 deletions.
22 changes: 12 additions & 10 deletions src/install/install.zig
@@ -11203,18 +11203,19 @@ pub const bun_install_js_bindings = struct {
const JSGlobalObject = JSC.JSGlobalObject;

pub fn generate(global: *JSGlobalObject) JSValue {
const obj = JSValue.createEmptyObject(global, 3);
const printLockfileAsJSON = ZigString.static("printLockfileAsJSON");
obj.put(global, printLockfileAsJSON, JSC.createCallback(global, printLockfileAsJSON, 1, jsPrintLockfileAsJSON));
const obj = JSValue.createEmptyObject(global, 2);
const parseLockfile = ZigString.static("parseLockfile");
obj.put(global, parseLockfile, JSC.createCallback(global, parseLockfile, 1, jsParseLockfile));
return obj;
}

pub fn jsPrintLockfileAsJSON(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSValue {
pub fn jsParseLockfile(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSValue {
const allocator = bun.default_allocator;
var log = logger.Log.init(allocator);
defer log.deinit();

const args = callFrame.arguments(1).slice();
const cwd = args[0].toSliceOrNull(globalObject) orelse return .undefined;
const cwd = args[0].toSliceOrNull(globalObject) orelse return .zero;
defer cwd.deinit();

const lockfile_path = Path.joinAbsStringZ(cwd.slice(), &[_]string{"bun.lockb"}, .auto);
@@ -11226,11 +11227,11 @@ pub const bun_install_js_bindings = struct {

switch (load_result) {
.err => |err| {
globalObject.throw("Failed to load lockfile: {s}, \"{s}\"", .{ @errorName(err.value), lockfile_path });
globalObject.throw("failed to load lockfile: {s}, \"{s}\"", .{ @errorName(err.value), lockfile_path });
return .zero;
},
.not_found => {
globalObject.throw("Lockfile not found: \"{s}\"", .{lockfile_path});
globalObject.throw("lockfile not found: \"{s}\"", .{lockfile_path});
return .zero;
},
.ok => {},
@@ -11250,17 +11251,18 @@ pub const bun_install_js_bindings = struct {
},
buffered_writer.writer(),
) catch |err| {
globalObject.throw("Failed to print lockfile as JSON: {s}", .{@errorName(err)});
globalObject.throw("failed to print lockfile as JSON: {s}", .{@errorName(err)});
return .zero;
};

buffered_writer.flush() catch |err| {
globalObject.throw("Failed to print lockfile as JSON: {s}", .{@errorName(err)});
globalObject.throw("failed to print lockfile as JSON: {s}", .{@errorName(err)});
return .zero;
};

var str = bun.String.createUTF8(buffer.list.items);
defer str.deref();
return str.toJS(globalObject);

return str.toJSByParseJSON(globalObject);
}
};
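
For context, a minimal, hypothetical sketch of how a test could use the new binding to snapshot a lockfile. The "bun:internal-for-testing" specifier and the `install` export name are assumptions not shown in this diff; the hunks above only define the native `parseLockfile(cwd)` callback, which loads `bun.lockb` from the given directory, serializes it to JSON, and hands it back to JavaScript as an already-parsed object (`toJSByParseJSON`) rather than a string.

// Hypothetical usage sketch (not part of this commit); the module specifier and
// the `install` export are assumptions -- only the native parseLockfile(cwd)
// binding appears in the diff above.
import { test, expect } from "bun:test";
import { install } from "bun:internal-for-testing";

test("bun.lockb snapshot", () => {
  // parseLockfile returns the lockfile as a plain object
  // (trees, dependencies, packages, ...), not as a JSON string.
  const lockfile = install.parseLockfile(import.meta.dir);
  expect(lockfile).toMatchSnapshot();
});
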
231 changes: 180 additions & 51 deletions src/install/lockfile.zig
@@ -319,6 +319,9 @@ pub const Tree = struct {
depth: usize,
};

// max number of node_modules folders
pub const max_depth = (bun.MAX_PATH_BYTES / "node_modules".len) + 1;

pub const Iterator = struct {
trees: []const Tree,
dependency_ids: []const DependencyID,
@@ -330,8 +333,7 @@ pub const Tree = struct {
last_parent: Id = invalid_id,
string_buf: string,

// max number of node_modules folders
depth_stack: [(bun.MAX_PATH_BYTES / "node_modules".len) + 1]Id = undefined,
depth_stack: [max_depth]Id = undefined,

pub fn init(lockfile: *const Lockfile) Iterator {
var iter = Iterator{
@@ -370,41 +372,18 @@ pub const Tree = struct {
}

const tree = this.trees[this.tree_id];
var depth: usize = 0;

{
var parent_id = tree.id;
var path_written: usize = "node_modules".len;
this.depth_stack[0] = 0;

if (tree.id > 0) {
var depth_buf_len: usize = 1;
while (parent_id > 0 and parent_id < @as(Id, @intCast(this.trees.len))) {
this.depth_stack[depth_buf_len] = parent_id;
parent_id = this.trees[parent_id].parent;
depth_buf_len += 1;
}
depth_buf_len -= 1;
depth = depth_buf_len;
while (depth_buf_len > 0) : (depth_buf_len -= 1) {
this.path_buf[path_written] = std.fs.path.sep;
path_written += 1;

const tree_id = this.depth_stack[depth_buf_len];
const name = this.dependencies[this.trees[tree_id].dependency_id].name.slice(this.string_buf);
@memcpy(this.path_buf[path_written..][0..name.len], name);
path_written += name.len;

@memcpy(this.path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules");
path_written += "/node_modules".len;
}
}
this.path_buf[path_written] = 0;
this.path_buf_len = path_written;
}
const relative_path, const depth = tree.relativePathAndDepth(
this.trees,
this.dependencies,
this.string_buf,
&this.path_buf,
&this.depth_stack,
);

this.tree_id += 1;
const relative_path: [:0]u8 = this.path_buf[0..this.path_buf_len :0];
this.path_buf_len = relative_path.len;

return .{
.relative_path = relative_path,
.dependencies = tree.dependencies.get(this.dependency_ids),
@@ -414,6 +393,50 @@ pub const Tree = struct {
}
};

/// Returns relative path and the depth of the tree
pub fn relativePathAndDepth(
tree: *const Tree,
trees: []const Tree,
dependencies: []const Dependency,
string_buf: string,
path_buf: *bun.PathBuffer,
depth_buf: *[max_depth]Id,
) struct { stringZ, usize } {
var depth: usize = 0;

var parent_id = tree.id;
var path_written: usize = "node_modules".len;

depth_buf[0] = 0;

if (tree.id > 0) {
var depth_buf_len: usize = 1;
while (parent_id > 0 and parent_id < trees.len) {
depth_buf[depth_buf_len] = parent_id;
parent_id = trees[parent_id].parent;
depth_buf_len += 1;
}
depth_buf_len -= 1;
depth = depth_buf_len;
while (depth_buf_len > 0) : (depth_buf_len -= 1) {
path_buf[path_written] = std.fs.path.sep;
path_written += 1;

const id = depth_buf[depth_buf_len];
const name = dependencies[trees[id].dependency_id].name.slice(string_buf);
@memcpy(path_buf[path_written..][0..name.len], name);
path_written += name.len;

@memcpy(path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules");
path_written += "/node_modules".len;
}
}
path_buf[path_written] = 0;
const rel = path_buf[0..path_written :0];

return .{ rel, depth };
}

const Builder = struct {
allocator: Allocator,
name_hashes: []const PackageNameHash,
@@ -517,6 +540,7 @@ pub const Tree = struct {
if (pid >= max_package_id) continue;

const dependency = builder.dependencies[dep_id];

// Do not hoist aliased packages
const destination = if (dependency.name_hash != name_hashes[pid])
next.id
@@ -1732,6 +1756,8 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) void {
.trusted_dependencies = null,
.workspace_paths = .{},
.workspace_versions = .{},
.overrides = .{},
.meta_hash = zero_hash,
};
}

@@ -5937,13 +5963,21 @@ pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool {
return default_trusted_dependencies.has(name);
}

pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependency, res: ?PackageID) !void {
pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: DependencyID, dep: Dependency, res: PackageID) !void {
const sb = this.buffers.string_bytes.items;
var buf: [2048]u8 = undefined;

try w.beginObject();
defer w.endObject() catch {};

try w.objectField("name");
try w.write(dep.name.slice(sb));

if (dep.version.tag == .npm and dep.version.value.npm.is_alias) {
try w.objectField("is_alias");
try w.write(true);
}

try w.objectField("literal");
try w.write(dep.version.literal.slice(sb));

@@ -5960,7 +5994,7 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependenc
try w.write(info.name.slice(sb));

try w.objectField("version");
try w.write(try std.fmt.bufPrint(&buf, "{}", .{info.version}));
try w.write(try std.fmt.bufPrint(&buf, "{}", .{info.version.fmt(sb)}));
},
.dist_tag => {
try w.beginObject();
@@ -6032,12 +6066,25 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependenc
},
}

try w.objectField("resolved_id");
try w.write(if (res) |r| if (r == invalid_package_id) null else r else null);
try w.objectField("package_id");
try w.write(if (res == invalid_package_id) null else res);

const behavior = try std.fmt.bufPrint(&buf, "{}", .{dep.behavior});
try w.objectField("behavior");
try w.write(behavior);
{
try w.beginObject();
defer w.endObject() catch {};

const fields = @typeInfo(Behavior).Struct.fields;
inline for (fields[1 .. fields.len - 1]) |field| {
if (@field(dep.behavior, field.name)) {
try w.objectField(field.name);
try w.write(true);
}
}
}

try w.objectField("id");
try w.write(dep_id);
}

pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
@@ -6077,6 +6124,84 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
}
}
}
{
try w.objectField("trees");
try w.beginArray();
defer w.endArray() catch {};

const trees = this.buffers.trees.items;
const string_buf = this.buffers.string_bytes.items;
const dependencies = this.buffers.dependencies.items;
const hoisted_deps = this.buffers.hoisted_dependencies.items;
const resolutions = this.buffers.resolutions.items;
var depth_buf: [Tree.max_depth]Tree.Id = undefined;
var path_buf: bun.PathBuffer = undefined;
@memcpy(path_buf[0.."node_modules".len], "node_modules");

for (0..this.buffers.trees.items.len) |tree_id| {
try w.beginObject();
defer w.endObject() catch {};

const tree = this.buffers.trees.items[tree_id];

try w.objectField("id");
try w.write(tree_id);

const relative_path, const depth = tree.relativePathAndDepth(
trees,
dependencies,
string_buf,
&path_buf,
&depth_buf,
);

try w.objectField("path");
const formatted = try std.fmt.bufPrint(&buf, "{}", .{bun.fmt.fmtPath(u8, relative_path, .{ .path_sep = .posix })});
try w.write(formatted);

try w.objectField("depth");
try w.write(depth);

try w.objectField("dependencies");
{
try w.beginObject();
defer w.endObject() catch {};

for (tree.dependencies.get(hoisted_deps)) |tree_dep_id| {
const dep = dependencies[tree_dep_id];
const package_id = resolutions[tree_dep_id];

try w.objectField(dep.name.slice(sb));
{
try w.beginObject();
defer w.endObject() catch {};

try w.objectField("id");
try w.write(tree_dep_id);

try w.objectField("package_id");
try w.write(package_id);
}
}
}
}
}

{
try w.objectField("dependencies");
try w.beginArray();
defer w.endArray() catch {};

const dependencies = this.buffers.dependencies.items;
const resolutions = this.buffers.resolutions.items;

for (0..dependencies.len) |dep_id| {
const dep = dependencies[dep_id];
const res = resolutions[dep_id];
try this.jsonStringifyDependency(w, @intCast(dep_id), dep, res);
}
}

{
try w.objectField("packages");
try w.beginArray();
@@ -6097,22 +6222,26 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
try w.write(pkg.name_hash);

try w.objectField("resolution");
if (pkg.resolution.tag == .uninitialized) {
try w.write(null);
} else {
const b = try std.fmt.bufPrint(&buf, "{s} {s}", .{ @tagName(pkg.resolution.tag), pkg.resolution.fmt(sb, .posix) });
try w.write(b);
{
const res = pkg.resolution;
try w.beginObject();
defer w.endObject() catch {};

try w.objectField("tag");
try w.write(@tagName(res.tag));

try w.objectField("value");
const formatted = try std.fmt.bufPrint(&buf, "{s}", .{res.fmt(sb, .posix)});
try w.write(formatted);
}

try w.objectField("dependencies");
{
try w.beginObject();
defer w.endObject() catch {};
try w.beginArray();
defer w.endArray() catch {};

for (pkg.dependencies.get(this.buffers.dependencies.items), pkg.resolutions.get(this.buffers.resolutions.items)) |dep_, res| {
const dep: Dependency = dep_;
try w.objectField(dep.name.slice(sb));
try this.jsonStringifyDependency(w, dep, res);
for (pkg.dependencies.off..pkg.dependencies.off + pkg.dependencies.len) |dep_id| {
try w.write(dep_id);
}
}

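
As a reading aid, here is a rough TypeScript sketch of the JSON shape that the `jsonStringify` path above appears to emit. It is reconstructed only from the visible hunks and is an assumption wherever the diff is truncated: per-version-tag dependency fields, the exact `Behavior` flag names, and any top-level fields written outside the shown hunks are approximated or omitted.

// Hypothetical shape of the lockfile JSON produced by Lockfile.jsonStringify,
// reconstructed from the visible hunks only -- an approximation, not an
// authoritative schema.
type TreeEntry = {
  id: number;
  path: string; // posix-style relative node_modules path
  depth: number;
  dependencies: Record<string, { id: number; package_id: number }>;
};

type DependencyEntry = {
  name: string;
  is_alias?: true; // only written for aliased npm dependencies
  literal: string;
  package_id: number | null; // null when the dependency is unresolved
  behavior: Record<string, true>; // flag names come from the Behavior struct fields
  id: number;
  // per-version-tag fields (npm, dist_tag, folder, ...) are elided in the diff
};

type PackageEntry = {
  name: string;
  name_hash: number;
  resolution: { tag: string; value: string };
  dependencies: number[]; // dependency ids: pkg.dependencies.off .. off + len
};

type LockfileJSON = {
  // fields written before the visible hunks are not shown here
  trees: TreeEntry[];
  dependencies: DependencyEntry[];
  packages: PackageEntry[];
};
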
(The remaining 12 changed files are not shown.)
