Merge pull request #22222 from ianprime0509/git-sha256
zig fetch: add support for SHA-256 Git repositories
andrewrk authored Dec 14, 2024
2 parents 0fac47c + 5217da5 commit 13a2834
Showing 11 changed files with 283 additions and 169 deletions.
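
With this change, zig fetch and build.zig.zon dependencies can reference Git repositories whose object names use SHA-256 rather than SHA-1. A minimal sketch of such a dependency entry (the URL and hash below are placeholders, not values from this commit):

    .dependencies = .{
        .example = .{
            // The fragment may name a branch, a tag, or a full commit id; for a
            // SHA-256 repository a commit id is 64 hex digits (40 for SHA-1).
            .url = "git+https://example.com/user/repo.git#<commit-id-or-ref>",
            // Package hash, e.g. as filled in by `zig fetch --save <url>`.
            .hash = "<package hash>",
        },
    },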
41 changes: 15 additions & 26 deletions build.zig
@@ -381,32 +381,6 @@ pub fn build(b: *std.Build) !void {
const test_target_filters = b.option([]const []const u8, "test-target-filter", "Skip tests whose target triple do not match any filter") orelse &[0][]const u8{};
const test_slow_targets = b.option(bool, "test-slow-targets", "Enable running module tests for targets that have a slow compiler backend") orelse false;

const test_cases_options = b.addOptions();

test_cases_options.addOption(bool, "enable_tracy", false);
test_cases_options.addOption(bool, "enable_debug_extensions", enable_debug_extensions);
test_cases_options.addOption(bool, "enable_logging", enable_logging);
test_cases_options.addOption(bool, "enable_link_snapshots", enable_link_snapshots);
test_cases_options.addOption(bool, "skip_non_native", skip_non_native);
test_cases_options.addOption(bool, "have_llvm", enable_llvm);
test_cases_options.addOption(bool, "llvm_has_m68k", llvm_has_m68k);
test_cases_options.addOption(bool, "llvm_has_csky", llvm_has_csky);
test_cases_options.addOption(bool, "llvm_has_arc", llvm_has_arc);
test_cases_options.addOption(bool, "llvm_has_xtensa", llvm_has_xtensa);
test_cases_options.addOption(bool, "force_gpa", force_gpa);
test_cases_options.addOption(bool, "enable_qemu", b.enable_qemu);
test_cases_options.addOption(bool, "enable_wine", b.enable_wine);
test_cases_options.addOption(bool, "enable_wasmtime", b.enable_wasmtime);
test_cases_options.addOption(bool, "enable_rosetta", b.enable_rosetta);
test_cases_options.addOption(bool, "enable_darling", b.enable_darling);
test_cases_options.addOption(u32, "mem_leak_frames", mem_leak_frames * 2);
test_cases_options.addOption(bool, "value_tracing", value_tracing);
test_cases_options.addOption(?[]const u8, "glibc_runtimes_dir", b.glibc_runtimes_dir);
test_cases_options.addOption([:0]const u8, "version", version);
test_cases_options.addOption(std.SemanticVersion, "semver", semver);
test_cases_options.addOption([]const []const u8, "test_filters", test_filters);
test_cases_options.addOption(DevEnv, "dev", if (only_c) .bootstrap else .core);

var chosen_opt_modes_buf: [4]builtin.OptimizeMode = undefined;
var chosen_mode_index: usize = 0;
if (!skip_debug) {
@@ -533,6 +507,21 @@ pub fn build(b: *std.Build) !void {
.max_rss = 5029889638,
}));

test_modules_step.dependOn(tests.addModuleTests(b, .{
.test_filters = test_filters,
.test_target_filters = test_target_filters,
.test_slow_targets = test_slow_targets,
.root_src = "src/main.zig",
.name = "compiler-internals",
.desc = "Run the compiler internals tests",
.optimize_modes = optimization_modes,
.include_paths = &.{},
.skip_single_threaded = skip_single_threaded,
.skip_non_native = true,
.skip_libc = skip_libc,
.build_options = exe_options,
}));

test_step.dependOn(test_modules_step);

test_step.dependOn(tests.addCompareOutputTests(b, test_filters, optimization_modes));
1 change: 1 addition & 0 deletions src/InternPool.zig
@@ -9822,6 +9822,7 @@ test "basic usage" {
const gpa = std.testing.allocator;

var ip: InternPool = .empty;
try ip.init(gpa, 1);
defer ip.deinit(gpa);

const i32_type = try ip.get(gpa, .main, .{ .int_type = .{
43 changes: 19 additions & 24 deletions src/Package/Fetch.zig
@@ -814,7 +814,7 @@ const Resource = union(enum) {
const Git = struct {
session: git.Session,
fetch_stream: git.Session.FetchStream,
want_oid: [git.oid_length]u8,
want_oid: git.Oid,
};

fn deinit(resource: *Resource) void {
@@ -976,7 +976,7 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
const want_oid = want_oid: {
const want_ref =
if (uri.fragment) |fragment| try fragment.toRawMaybeAlloc(arena) else "HEAD";
if (git.parseOid(want_ref)) |oid| break :want_oid oid else |_| {}
if (git.Oid.parseAny(want_ref)) |oid| break :want_oid oid else |_| {}

const want_ref_head = try std.fmt.allocPrint(arena, "refs/heads/{s}", .{want_ref});
const want_ref_tag = try std.fmt.allocPrint(arena, "refs/tags/{s}", .{want_ref});
@@ -1018,17 +1018,13 @@ fn initResource(f: *Fetch, uri: std.Uri, server_header_buffer: []u8) RunError!Re
});
const notes_start = try eb.reserveNotes(notes_len);
eb.extra.items[notes_start] = @intFromEnum(try eb.addErrorMessage(.{
.msg = try eb.printString("try .url = \"{;+/}#{}\",", .{
uri, std.fmt.fmtSliceHexLower(&want_oid),
}),
.msg = try eb.printString("try .url = \"{;+/}#{}\",", .{ uri, want_oid }),
}));
return error.FetchFailed;
}

var want_oid_buf: [git.fmt_oid_length]u8 = undefined;
_ = std.fmt.bufPrint(&want_oid_buf, "{}", .{
std.fmt.fmtSliceHexLower(&want_oid),
}) catch unreachable;
var want_oid_buf: [git.Oid.max_formatted_length]u8 = undefined;
_ = std.fmt.bufPrint(&want_oid_buf, "{}", .{want_oid}) catch unreachable;
var fetch_stream = session.fetch(&.{&want_oid_buf}, server_header_buffer) catch |err| {
return f.fail(f.location_tok, try eb.printString(
"unable to create fetch stream: {s}",
@@ -1163,7 +1159,7 @@ fn unpackResource(
});
return try unpackTarball(f, tmp_directory.handle, dcp.reader());
},
.git_pack => return unpackGitPack(f, tmp_directory.handle, resource) catch |err| switch (err) {
.git_pack => return unpackGitPack(f, tmp_directory.handle, &resource.git) catch |err| switch (err) {
error.FetchFailed => return error.FetchFailed,
error.OutOfMemory => return error.OutOfMemory,
else => |e| return f.fail(f.location_tok, try eb.printString(
@@ -1298,11 +1294,10 @@ fn unzip(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult {
return res;
}

fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!UnpackResult {
fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!UnpackResult {
const arena = f.arena.allocator();
const gpa = f.arena.child_allocator;
const want_oid = resource.git.want_oid;
const reader = resource.git.fetch_stream.reader();
const object_format: git.Oid.Format = resource.want_oid;

var res: UnpackResult = .{};
// The .git directory is used to store the packfile and associated index, but
@@ -1314,7 +1309,7 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!Unpac
var pack_file = try pack_dir.createFile("pkg.pack", .{ .read = true });
defer pack_file.close();
var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init();
try fifo.pump(reader, pack_file.writer());
try fifo.pump(resource.fetch_stream.reader(), pack_file.writer());
try pack_file.sync();

var index_file = try pack_dir.createFile("pkg.idx", .{ .read = true });
@@ -1323,18 +1318,18 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource) anyerror!Unpac
const index_prog_node = f.prog_node.start("Index pack", 0);
defer index_prog_node.end();
var index_buffered_writer = std.io.bufferedWriter(index_file.writer());
try git.indexPack(gpa, pack_file, index_buffered_writer.writer());
try git.indexPack(gpa, object_format, pack_file, index_buffered_writer.writer());
try index_buffered_writer.flush();
try index_file.sync();
}

{
const checkout_prog_node = f.prog_node.start("Checkout", 0);
defer checkout_prog_node.end();
var repository = try git.Repository.init(gpa, pack_file, index_file);
var repository = try git.Repository.init(gpa, object_format, pack_file, index_file);
defer repository.deinit();
var diagnostics: git.Diagnostics = .{ .allocator = arena };
try repository.checkout(out_dir, want_oid, &diagnostics);
try repository.checkout(out_dir, resource.want_oid, &diagnostics);

if (diagnostics.errors.items.len > 0) {
try res.allocErrors(arena, diagnostics.errors.items.len, "unable to unpack packfile");
@@ -1695,7 +1690,7 @@ const HashedFile = struct {
fn stripRoot(fs_path: []const u8, root_dir: []const u8) []const u8 {
if (root_dir.len == 0 or fs_path.len <= root_dir.len) return fs_path;

if (std.mem.eql(u8, fs_path[0..root_dir.len], root_dir) and fs_path[root_dir.len] == fs.path.sep) {
if (std.mem.eql(u8, fs_path[0..root_dir.len], root_dir) and fs.path.isSep(fs_path[root_dir.len])) {
return fs_path[root_dir.len + 1 ..];
}

@@ -1810,8 +1805,8 @@ const FileHeader = struct {
}

pub fn isExecutable(self: *FileHeader) bool {
return std.mem.eql(u8, self.header[0..shebang.len], shebang) or
std.mem.eql(u8, self.header[0..elf_magic.len], elf_magic);
return std.mem.eql(u8, self.header[0..@min(self.bytes_read, shebang.len)], shebang) or
std.mem.eql(u8, self.header[0..@min(self.bytes_read, elf_magic.len)], elf_magic);
}
};

@@ -2244,7 +2239,6 @@ const TestFetchBuilder = struct {
thread_pool: ThreadPool,
http_client: std.http.Client,
global_cache_directory: Cache.Directory,
progress: std.Progress,
job_queue: Fetch.JobQueue,
fetch: Fetch,

@@ -2260,8 +2254,6 @@
self.http_client = .{ .allocator = allocator };
self.global_cache_directory = .{ .handle = cache_dir, .path = null };

self.progress = .{ .dont_print_on_dumb = true };

self.job_queue = .{
.http_client = &self.http_client,
.thread_pool = &self.thread_pool,
@@ -2281,10 +2273,11 @@
.lazy_status = .eager,
.parent_package_root = Cache.Path{ .root_dir = Cache.Directory{ .handle = cache_dir, .path = null } },
.parent_manifest_ast = null,
.prog_node = self.progress.start("Fetch", 0),
.prog_node = std.Progress.Node.none,
.job_queue = &self.job_queue,
.omit_missing_hash_error = true,
.allow_missing_paths_field = false,
.use_latest_commit = true,

.package_root = undefined,
.error_bundle = undefined,
@@ -2293,6 +2286,8 @@
.actual_hash = undefined,
.has_build_zig = false,
.oom_flag = false,
.latest_commit = null,

.module = null,
};
return &self.fetch;
(Diffs for the remaining 8 changed files are not shown.)
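
The new hash-format plumbing itself lives mainly in src/git.zig, one of the files not expanded above. Going only by the call sites visible in the Fetch.zig diff (git.Oid.parseAny, git.Oid.Format, git.Oid.max_formatted_length, and printing an oid with "{}"), a rough sketch of what such an Oid abstraction could look like follows; the names, payload layout, and error handling here are assumptions, not the commit's actual code:

    const std = @import("std");

    /// Object-name (hash) format of a Git repository. The real code presumably
    /// exposes this as git.Oid.Format; it is a file-scope enum here only to keep
    /// the sketch short.
    pub const Format = enum { sha1, sha256 };

    /// A Git object id in either supported format.
    pub const Oid = union(Format) {
        sha1: [20]u8,
        sha256: [32]u8,

        /// Longest textual form: 64 hex digits for a SHA-256 oid.
        pub const max_formatted_length = 64;

        /// Parse an oid in either format, inferring the format from the length
        /// of the hex string (40 digits for SHA-1, 64 for SHA-256).
        pub fn parseAny(text: []const u8) error{InvalidOid}!Oid {
            return switch (text.len) {
                40 => .{ .sha1 = try parseHex(20, text) },
                64 => .{ .sha256 = try parseHex(32, text) },
                else => error.InvalidOid,
            };
        }

        /// Fetch.zig formats an oid with "{}", so the real type presumably
        /// renders itself as lowercase hex, roughly like this.
        pub fn format(
            oid: Oid,
            comptime fmt: []const u8,
            options: std.fmt.FormatOptions,
            writer: anytype,
        ) !void {
            _ = fmt;
            _ = options;
            switch (oid) {
                inline else => |bytes| try writer.print("{}", .{std.fmt.fmtSliceHexLower(&bytes)}),
            }
        }

        fn parseHex(comptime len: usize, text: []const u8) error{InvalidOid}![len]u8 {
            var bytes: [len]u8 = undefined;
            const decoded = std.fmt.hexToBytes(&bytes, text) catch return error.InvalidOid;
            if (decoded.len != len) return error.InvalidOid;
            return bytes;
        }
    };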
