mirror of
https://codeberg.org/ziglang/zig.git
synced 2026-04-26 13:01:34 +03:00
zig fetch: detect global vs local mode via --save
Makes `zig fetch` fetch globally only, just like it used to. However, if `--save` (or any variant) is used, then it also fetches locally. When fetching by path, the hash is always computed, a recompressed tarball is always created, and any existing global cache entry is always overwritten. Closes #31818. Closes #31866 (a build.zig file is only required when --save is passed).
This commit is contained in:
+70
-68
@@ -104,6 +104,12 @@ pub const LazyStatus = enum {
|
||||
unavailable,
|
||||
};
|
||||
|
||||
pub const LocalStorage = struct {
|
||||
cache_root: Cache.Path,
|
||||
/// Path to "zig-pkg" inside the package in which the user ran `zig build`.
|
||||
pkg_root: Cache.Path,
|
||||
};
|
||||
|
||||
/// Contains shared state among all `Fetch` tasks.
|
||||
pub const JobQueue = struct {
|
||||
io: Io,
|
||||
@@ -122,9 +128,8 @@ pub const JobQueue = struct {
|
||||
/// This tracks `Fetch` tasks as well as recompression tasks.
|
||||
group: Io.Group = .init,
|
||||
global_cache: Cache.Directory,
|
||||
local_cache: Cache.Path,
|
||||
/// Path to "zig-pkg" inside the package in which the user ran `zig build`.
|
||||
root_pkg_path: Cache.Path,
|
||||
/// If `null`, indicates fetch globally only.
|
||||
local_storage: ?*const LocalStorage,
|
||||
/// If true then, no fetching occurs, and:
|
||||
/// * The `global_cache` directory is assumed to be the direct parent
|
||||
/// directory of on-disk packages rather than having the "p/" directory
|
||||
@@ -341,7 +346,7 @@ pub const JobQueue = struct {
|
||||
);
|
||||
}
|
||||
|
||||
fn recompress(jq: *JobQueue, package_hash: Package.Hash) Io.Cancelable!void {
|
||||
fn recompress(jq: *JobQueue, package_hash: Package.Hash, package_root: Cache.Path) Io.Cancelable!void {
|
||||
const pkg_hash_slice = package_hash.toSlice();
|
||||
|
||||
const prog_node = jq.prog_node.startFmt(0, "recompress {s}", .{pkg_hash_slice});
|
||||
@@ -359,7 +364,7 @@ pub const JobQueue = struct {
|
||||
defer arena_instance.deinit();
|
||||
const arena = arena_instance.allocator();
|
||||
|
||||
recompressFallible(jq, arena, dest_path, pkg_hash_slice, prog_node) catch |err| switch (err) {
|
||||
recompressFallible(jq, arena, dest_path, pkg_hash_slice, package_root, prog_node) catch |err| switch (err) {
|
||||
error.Canceled => |e| return e,
|
||||
error.ReadFailed => comptime unreachable,
|
||||
error.WriteFailed => comptime unreachable,
|
||||
@@ -372,6 +377,7 @@ pub const JobQueue = struct {
|
||||
arena: Allocator,
|
||||
dest_path: Cache.Path,
|
||||
pkg_hash_slice: []const u8,
|
||||
package_root: Cache.Path,
|
||||
prog_node: std.Progress.Node,
|
||||
) !void {
|
||||
const gpa = jq.http_client.allocator;
|
||||
@@ -386,7 +392,7 @@ pub const JobQueue = struct {
|
||||
var scanned_files: std.ArrayList(ScannedFile) = .empty;
|
||||
defer scanned_files.deinit(gpa);
|
||||
|
||||
var pkg_dir = try jq.root_pkg_path.openDir(io, pkg_hash_slice, .{ .iterate = true });
|
||||
var pkg_dir = try package_root.root_dir.handle.openDir(io, package_root.sub_path, .{ .iterate = true });
|
||||
defer pkg_dir.close(io);
|
||||
|
||||
{
|
||||
@@ -513,7 +519,6 @@ pub fn run(f: *Fetch) RunError!void {
|
||||
const eb = &f.error_bundle;
|
||||
const arena = f.arena.allocator();
|
||||
const gpa = f.arena.child_allocator;
|
||||
const local_cache_root = job_queue.local_cache;
|
||||
|
||||
try eb.init(gpa);
|
||||
|
||||
@@ -534,32 +539,16 @@ pub fn run(f: *Fetch) RunError!void {
|
||||
);
|
||||
// Packages fetched by URL may not use relative paths to escape outside the
|
||||
// fetched package directory from within the package cache.
|
||||
if (pkg_root.root_dir.eql(local_cache_root.root_dir)) {
|
||||
// `parent_package_root.sub_path` contains a path like this:
|
||||
// "p/$hash", or
|
||||
// "p/$hash/foo", with possibly more directories after "foo".
|
||||
// We want to fail unless the resolved relative path has a
|
||||
// prefix of "p/$hash/".
|
||||
const prefix_len: usize = if (job_queue.read_only) 0 else "p/".len;
|
||||
const parent_sub_path = f.parent_package_root.sub_path;
|
||||
const end = find_end: {
|
||||
if (parent_sub_path.len > prefix_len) {
|
||||
// Use `isSep` instead of `indexOfScalarPos` to account for
|
||||
// Windows accepting both `\` and `/` as path separators.
|
||||
for (parent_sub_path[prefix_len..], prefix_len..) |c, i| {
|
||||
if (std.fs.path.isSep(c)) break :find_end i;
|
||||
}
|
||||
}
|
||||
break :find_end parent_sub_path.len;
|
||||
};
|
||||
const expected_prefix = parent_sub_path[0..end];
|
||||
if (!std.mem.startsWith(u8, pkg_root.sub_path, expected_prefix)) {
|
||||
return f.fail(
|
||||
f.location_tok,
|
||||
try eb.printString("dependency path outside project: '{f}'", .{pkg_root}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// This code path is only reachable recursively and the sub_path
|
||||
// will already have been resolved to no longer have extra ".." or
|
||||
// "." components.
|
||||
assert(job_queue.local_storage != null);
|
||||
assert(pkg_root.root_dir.eql(f.parent_package_root.root_dir));
|
||||
if (!std.mem.startsWith(u8, pkg_root.sub_path, f.parent_package_root.sub_path)) return f.fail(
|
||||
f.location_tok,
|
||||
try eb.printString("dependency path outside project: '{f}'", .{pkg_root}),
|
||||
);
|
||||
f.package_root = pkg_root;
|
||||
try loadManifest(f, pkg_root);
|
||||
if (!f.has_build_zig) try checkBuildFileExistence(f);
|
||||
@@ -610,31 +599,33 @@ pub fn run(f: *Fetch) RunError!void {
|
||||
return queueJobsForDeps(f);
|
||||
}
|
||||
|
||||
const package_root = try job_queue.root_pkg_path.join(arena, expected_hash.toSlice());
|
||||
if (package_root.root_dir.handle.access(io, package_root.sub_path, .{})) |_| {
|
||||
assert(f.lazy_status != .unavailable);
|
||||
f.package_root = package_root;
|
||||
try loadManifest(f, f.package_root);
|
||||
try checkBuildFileExistence(f);
|
||||
if (!job_queue.recursive) return;
|
||||
return queueJobsForDeps(f);
|
||||
} else |err| switch (err) {
|
||||
error.FileNotFound => {
|
||||
log.debug("FileNotFound: {f}", .{package_root});
|
||||
if (job_queue.read_only and f.lazy_status == .eager) return f.fail(
|
||||
f.name_tok,
|
||||
try eb.printString("package not found at '{f}'", .{package_root}),
|
||||
);
|
||||
},
|
||||
error.Canceled => |e| return e,
|
||||
else => |e| {
|
||||
try eb.addRootErrorMessage(.{
|
||||
.msg = try eb.printString("unable to open package cache directory {f}: {t}", .{
|
||||
package_root, e,
|
||||
}),
|
||||
});
|
||||
return error.FetchFailed;
|
||||
},
|
||||
if (job_queue.local_storage) |ls| {
|
||||
const package_root = try ls.pkg_root.join(arena, expected_hash.toSlice());
|
||||
if (package_root.root_dir.handle.access(io, package_root.sub_path, .{})) |_| {
|
||||
assert(f.lazy_status != .unavailable);
|
||||
f.package_root = package_root;
|
||||
try loadManifest(f, f.package_root);
|
||||
try checkBuildFileExistence(f);
|
||||
if (!job_queue.recursive) return;
|
||||
return queueJobsForDeps(f);
|
||||
} else |err| switch (err) {
|
||||
error.FileNotFound => {
|
||||
log.debug("FileNotFound: {f}", .{package_root});
|
||||
if (job_queue.read_only and f.lazy_status == .eager) return f.fail(
|
||||
f.name_tok,
|
||||
try eb.printString("package not found at '{f}'", .{package_root}),
|
||||
);
|
||||
},
|
||||
error.Canceled => |e| return e,
|
||||
else => |e| {
|
||||
try eb.addRootErrorMessage(.{
|
||||
.msg = try eb.printString("unable to open package cache directory {f}: {t}", .{
|
||||
package_root, e,
|
||||
}),
|
||||
});
|
||||
return error.FetchFailed;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Check global cache before remote fetch.
|
||||
@@ -713,7 +704,14 @@ fn runResource(
|
||||
break :r x;
|
||||
};
|
||||
const tmp_dir_sub_path = ".tmp-" ++ std.fmt.hex(rand_int);
|
||||
const tmp_directory_path = try job_queue.root_pkg_path.join(arena, tmp_dir_sub_path);
|
||||
const tmp_tmp_dir_sub_path = "tmp/" ++ tmp_dir_sub_path;
|
||||
const tmp_directory_path: Cache.Path = if (job_queue.local_storage) |ls|
|
||||
try ls.pkg_root.join(arena, tmp_dir_sub_path)
|
||||
else
|
||||
.{
|
||||
.root_dir = job_queue.global_cache,
|
||||
.sub_path = tmp_tmp_dir_sub_path,
|
||||
};
|
||||
|
||||
const package_sub_path = blk: {
|
||||
var tmp_directory: Cache.Directory = .{
|
||||
@@ -772,19 +770,23 @@ fn runResource(
|
||||
// zig package directory untouched as it may be in use. This is done even
|
||||
// if the hash is invalid, in case the package with the different hash is
|
||||
// used in the future.
|
||||
f.package_root = try job_queue.root_pkg_path.join(arena, computed_package_hash.toSlice());
|
||||
renameTmpIntoCache(io, package_sub_path, f.package_root) catch |err| {
|
||||
try eb.addRootErrorMessage(.{ .msg = try eb.printString(
|
||||
"unable to rename temporary directory {f} into package cache directory {f}: {t}",
|
||||
.{ package_sub_path, f.package_root, err },
|
||||
) });
|
||||
return error.FetchFailed;
|
||||
};
|
||||
if (job_queue.local_storage) |ls| {
|
||||
f.package_root = try ls.pkg_root.join(arena, computed_package_hash.toSlice());
|
||||
renameTmpIntoCache(io, package_sub_path, f.package_root) catch |err| {
|
||||
try eb.addRootErrorMessage(.{ .msg = try eb.printString(
|
||||
"unable to rename temporary directory {f} into package cache directory {f}: {t}",
|
||||
.{ package_sub_path, f.package_root, err },
|
||||
) });
|
||||
return error.FetchFailed;
|
||||
};
|
||||
} else {
|
||||
f.package_root = tmp_directory_path;
|
||||
}
|
||||
|
||||
if (!disable_recompress) {
|
||||
// Spin off a task to recompress the tarball, with filtered files deleted, into
|
||||
// the global cache.
|
||||
job_queue.group.async(io, JobQueue.recompress, .{ job_queue, computed_package_hash });
|
||||
job_queue.group.async(io, JobQueue.recompress, .{ job_queue, computed_package_hash, f.package_root });
|
||||
}
|
||||
|
||||
// Remove temporary directory root if not already renamed to global cache.
|
||||
|
||||
+44
-36
@@ -5331,9 +5331,6 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8,
|
||||
.parent = root_mod,
|
||||
});
|
||||
|
||||
var cleanup_build_dir: ?Io.Dir = null;
|
||||
defer if (cleanup_build_dir) |*dir| dir.close(io);
|
||||
|
||||
if (dev.env.supports(.fetch_command)) {
|
||||
const fetch_prog_node = root_prog_node.start("Fetch Packages", 0);
|
||||
defer fetch_prog_node.end();
|
||||
@@ -5345,36 +5342,29 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, io: Io, args: []const []const u8,
|
||||
.io = io,
|
||||
.http_client = &http_client,
|
||||
.global_cache = dirs.global_cache,
|
||||
.local_cache = .{ .root_dir = dirs.local_cache, .sub_path = "" },
|
||||
.root_pkg_path = if (override_pkg_dir) |cwd_rel_path| .initCwd(cwd_rel_path) else .{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = "zig-pkg",
|
||||
.local_storage = &.{
|
||||
.cache_root = .{ .root_dir = dirs.local_cache, .sub_path = "" },
|
||||
.pkg_root = if (override_pkg_dir) |p|
|
||||
.initCwd(p)
|
||||
else if (system_pkg_dir_path) |p|
|
||||
.initCwd(p)
|
||||
else
|
||||
.{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = "zig-pkg",
|
||||
},
|
||||
},
|
||||
.read_only = false,
|
||||
.recursive = true,
|
||||
.debug_hash = false,
|
||||
.unlazy_set = unlazy_set,
|
||||
.fork_set = fork_set,
|
||||
.mode = fetch_mode,
|
||||
.prog_node = fetch_prog_node,
|
||||
.read_only = system_pkg_dir_path != null,
|
||||
};
|
||||
defer job_queue.deinit();
|
||||
|
||||
if (system_pkg_dir_path) |p| {
|
||||
const system_pkg_path: Path = .{
|
||||
.root_dir = .{
|
||||
.path = p,
|
||||
.handle = Io.Dir.cwd().openDir(io, p, .{}) catch |err| {
|
||||
fatal("unable to open system package directory '{s}': {t}", .{ p, err });
|
||||
},
|
||||
},
|
||||
.sub_path = "",
|
||||
};
|
||||
job_queue.global_cache = system_pkg_path.root_dir;
|
||||
job_queue.root_pkg_path = system_pkg_path;
|
||||
job_queue.read_only = true;
|
||||
cleanup_build_dir = job_queue.global_cache.handle;
|
||||
} else {
|
||||
if (system_pkg_dir_path == null) {
|
||||
try http_client.initDefaultProxies(arena, environ_map);
|
||||
}
|
||||
|
||||
@@ -7041,7 +7031,8 @@ const usage_fetch =
|
||||
\\Options:
|
||||
\\ -h, --help Print this help and exit
|
||||
\\ --global-cache-dir [path] Override path to global Zig cache directory
|
||||
\\ --pkg-dir [path] Override path to package directory
|
||||
\\ --cache-dir [path] Override path to local cache directory
|
||||
\\ --pkg-dir [path] Override path to local package directory
|
||||
\\ --debug-hash Print verbose hash information to stdout
|
||||
\\ --save Add the fetched package to build.zig.zon
|
||||
\\ --save=[name] Add the fetched package to build.zig.zon as name
|
||||
@@ -7062,6 +7053,7 @@ fn cmdFetch(
|
||||
const color: Color = .auto;
|
||||
var opt_path_or_url: ?[]const u8 = null;
|
||||
var override_global_cache_dir: ?[]const u8 = EnvVar.ZIG_GLOBAL_CACHE_DIR.get(environ_map);
|
||||
var override_local_cache_dir: ?[]const u8 = EnvVar.ZIG_LOCAL_CACHE_DIR.get(environ_map);
|
||||
var override_pkg_dir: ?[]const u8 = EnvVar.ZIG_LOCAL_PKG_DIR.get(environ_map);
|
||||
var debug_hash: bool = false;
|
||||
var save: union(enum) {
|
||||
@@ -7082,6 +7074,10 @@ fn cmdFetch(
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
override_global_cache_dir = args[i];
|
||||
} else if (mem.eql(u8, arg, "--cache-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
override_local_cache_dir = args[i];
|
||||
} else if (mem.eql(u8, arg, "--pkg-dir")) {
|
||||
if (i + 1 >= args.len) fatal("expected argument after '{s}'", .{arg});
|
||||
i += 1;
|
||||
@@ -7128,27 +7124,39 @@ fn cmdFetch(
|
||||
};
|
||||
defer global_cache_directory.handle.close(io);
|
||||
|
||||
var local_storage: Package.Fetch.LocalStorage = undefined;
|
||||
var build_root: BuildRoot = undefined;
|
||||
var build_root_initialized = false;
|
||||
defer if (build_root_initialized) build_root.deinit(io);
|
||||
|
||||
const cwd_path = try introspect.getResolvedCwd(io, arena);
|
||||
|
||||
var build_root = try findBuildRoot(arena, io, .{
|
||||
.cwd_path = cwd_path,
|
||||
});
|
||||
defer build_root.deinit(io);
|
||||
const local_storage_ptr = switch (save) {
|
||||
.no => null,
|
||||
.yes, .exact => ls: {
|
||||
build_root = try findBuildRoot(arena, io, .{ .cwd_path = cwd_path });
|
||||
build_root_initialized = true;
|
||||
|
||||
const local_cache_path: Path = .{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = ".zig-cache",
|
||||
local_storage = .{
|
||||
.cache_root = if (override_local_cache_dir) |p| .initCwd(p) else .{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = ".zig-cache",
|
||||
},
|
||||
.pkg_root = if (override_pkg_dir) |p| .initCwd(p) else .{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = "zig-pkg",
|
||||
},
|
||||
};
|
||||
|
||||
break :ls &local_storage;
|
||||
},
|
||||
};
|
||||
|
||||
var job_queue: Package.Fetch.JobQueue = .{
|
||||
.io = io,
|
||||
.http_client = &http_client,
|
||||
.global_cache = global_cache_directory,
|
||||
.local_cache = local_cache_path,
|
||||
.root_pkg_path = if (override_pkg_dir) |cwd_rel_path| .initCwd(cwd_rel_path) else .{
|
||||
.root_dir = build_root.directory,
|
||||
.sub_path = "zig-pkg",
|
||||
},
|
||||
.local_storage = local_storage_ptr,
|
||||
.recursive = false,
|
||||
.read_only = false,
|
||||
.debug_hash = debug_hash,
|
||||
|
||||
Reference in New Issue
Block a user