From c2114ddbae7130007accf97f6f3605203a1bbe88 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Fri, 15 Mar 2024 15:14:44 +0000 Subject: [PATCH 01/12] Add functions to export and import packets to/from a zip file. While the ability to push and pull from remote locations that orderly already supports works well for back-and-forth collaboration, it is not as well suited to produce a one-time artefact that may be released and shared publicly, without depending on a server or shared location. One use case for this is for publishing a reproducible set of analyses to accompany a paper. This commit adds a pair of functions, orderly_export_zip and orderly_import_zip. These allow a set of packets (and their transitive dependencies) to be exported as a standalone zip file, containing both the metadata and files. The zip file can then be imported into a different repository. The zip file is formatted as a metadata directory, with a file per packet, and a content-addressed file store. --- DESCRIPTION | 3 +- NAMESPACE | 2 + R/export.R | 121 +++++++++++++++++++++ R/location.R | 4 +- R/outpack_hash.R | 8 ++ R/outpack_insert.R | 1 + R/outpack_misc.R | 4 + man/orderly_export_zip.Rd | 37 +++++++ man/orderly_import_zip.Rd | 29 +++++ tests/testthat/test-export.R | 199 +++++++++++++++++++++++++++++++++++ 10 files changed, 404 insertions(+), 4 deletions(-) create mode 100644 R/export.R create mode 100644 man/orderly_export_zip.Rd create mode 100644 man/orderly_import_zip.Rd create mode 100644 tests/testthat/test-export.R diff --git a/DESCRIPTION b/DESCRIPTION index 74c440e4..eb515715 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -24,7 +24,8 @@ Imports: rlang, rstudioapi, withr, - yaml + yaml, + zip Suggests: DBI, RSQLite, diff --git a/NAMESPACE b/NAMESPACE index 4c96b240..205924ad 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -12,9 +12,11 @@ export(orderly_copy_files) export(orderly_dependency) export(orderly_description) export(orderly_example) +export(orderly_export_zip) export(orderly_gitignore_update) export(orderly_hash_data) export(orderly_hash_file) +export(orderly_import_zip) export(orderly_init) export(orderly_interactive_set_search_options) export(orderly_list_src) diff --git a/R/export.R b/R/export.R new file mode 100644 index 00000000..d19be5d9 --- /dev/null +++ b/R/export.R @@ -0,0 +1,121 @@ +##' Export packets as a zip file. +##' +##' The packets can be imported into a different repository using the +##' [orderly2::orderly_import_zip] function. +##' +##' This is useful as one-time way to publish your results, for example as an +##' artefact accompanying a paper. For back-and-forth collaboration, a shared +##' location should be priviledged. 
+##' +##' @param path the path where the zip file will be created +##' +##' @param packets One or more packets to export +##' +##' @inheritParams orderly_metadata +##' +##' @return Nothing +##' @export +orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { + root <- root_open(root, locate = locate, require_orderly = FALSE, + call = environment()) + + metadata <- root$index$data()$metadata + packets <- find_all_dependencies(packets, metadata) + files <- find_all_files(packets, metadata) + + dest <- withr::local_tempfile() + fs::dir_create(dest) + fs::dir_create(file.path(dest, "metadata")) + store <- file_store$new(file.path(dest, "files")) + + fs::file_copy( + file.path(root$path, ".outpack", "metadata", packets), + file.path(dest, "metadata", packets)) + + for (hash in files) { + store$put(find_file_by_hash(root, hash), hash) + } + + zip::zip(fs::path_abs(path), root = dest, files = c("metadata", "files")) + invisible() +} + +##' Import packets from a zip file. +##' +##' @param path the path to the zip file to be imported. +##' +##' @inheritParams orderly_metadata +##' +##' @return Invisibly, the IDs of the imported packets +##' @export +orderly_import_zip <- function(path, root = NULL, locate = TRUE) { + root <- root_open(root, locate = locate, require_orderly = FALSE, + call = environment()) + index <- root$index$data() + + hash_algorithm <- root$config$core$hash_algorithm + + src <- withr::local_tempfile() + zip::unzip(path, exdir = src) + store <- file_store$new(file.path(src, "files")) + + ids <- dir(file.path(src, "metadata")) + + # TODO: is using the root's hash algorithm correct? What if the origin had + # used a different hash, now there are two hashes for the same packet. We + # don't record the hash algorithm anywhere in the zip files, maybe we should. + metadata_hashes <- hash_metadata_files( + file.path(src, "metadata", ids), hash_algorithm) + + known_packets <- ids %in% names(index$metadata) + missing_packets <- !(ids %in% index$unpacked) + + import_check_hashes(src, ids[known_packets], metadata_hashes[known_packets], + root, call = environment()) + + fs::file_copy( + file.path(src, "metadata", ids[!known_packets]), + file.path(root$path, ".outpack", "metadata", ids[!known_packets])) + + if (root$config$core$use_file_store) { + # The index needs reloading to take into account the new metadata we just + # pulled. + index <- root$index$data() + files <- find_all_files(ids, index$metadata) + files <- files[!root$files$exists(files)] + for (hash in files) { + file_path <- store$get(hash, root$files$tmp(), overwrite = FALSE) + root$files$put(file_path, hash, move = TRUE) + } + } + + for (i in which(missing_packets)) { + if (!is.null(root$config$core$path_archive)) { + location_pull_files_archive(ids[[i]], store, root) + } + mark_packet_known(ids[[i]], local, metadata_hashes[[i]], Sys.time(), root) + } + + invisible(ids) +} + +import_check_hashes <- function(src, ids, hashes, root, call) { + index <- root$index$data() + hash_algorithm <- root$config$core$hash_algorithm + + hash_here <- index$location$hash[match(ids, index$location$packet)] + err <- hashes != hash_here + if (any(err)) { + cli::cli_abort( + c("Imported file has conflicting metadata", + x = paste("This is {.strong really} bad news. The zip file contains", + "packets with a different hash than the metadata already in", + "this repository. 
I'm not going to import this new metadata", + "but there's no guarantee that the older metadata is", + "actually what you want!"), + i = "Conflicts for: {squote(ids[err])}", + i = "We would be interested in this case, please let us know"), + call = call) + } + invisible() +} diff --git a/R/location.R b/R/location.R index d0223dd2..bc32b418 100644 --- a/R/location.R +++ b/R/location.R @@ -695,10 +695,8 @@ location_build_push_plan <- function(packet_id, location_name, root) { files_msg <- character(0) } else { packet_id_msg <- sort(packet_id_msg) - metadata <- metadata ## All files across all missing ids: - files <- unique(unlist( - lapply(packet_id_msg, function(i) metadata[[i]]$files$hash))) + files <- find_all_files(packet_id_msg, metadata) ## Which of these does the server not know about: files_msg <- driver$list_unknown_files(files) diff --git a/R/outpack_hash.R b/R/outpack_hash.R index df1e98ea..f135cae6 100644 --- a/R/outpack_hash.R +++ b/R/outpack_hash.R @@ -112,3 +112,11 @@ rehash <- function(data, hash_function, expected) { algorithm <- hash_parse(expected)$algorithm hash_function(data, algorithm) } + +## metadata files are hashed by ignoring leading and trailing newline +## characters. +hash_metadata_files <- function(path, hash_algorithm) { + vcapply(path, function(p) { + hash_data(read_string(p), hash_algorithm) + }) +} diff --git a/R/outpack_insert.R b/R/outpack_insert.R index e9ab53e6..c13c74d3 100644 --- a/R/outpack_insert.R +++ b/R/outpack_insert.R @@ -39,6 +39,7 @@ outpack_insert_packet <- function(path, json, root = NULL) { ## TODO: once we get more flexible remotes, this will get moved into ## its own thing. hash <- hash_data(json, hash_algorithm) + time <- Sys.time() mark_packet_known(id, local, hash, time, root) } diff --git a/R/outpack_misc.R b/R/outpack_misc.R index eb669a91..e07295b3 100644 --- a/R/outpack_misc.R +++ b/R/outpack_misc.R @@ -63,6 +63,10 @@ find_all_dependencies <- function(id, metadata) { sort(ret) } +## Get all the files for a set of packets, filtering any overlap. +find_all_files <- function(id, metadata) { + unique(unlist(lapply(id, function(i) metadata[[i]]$files$hash))) +} validate_parameters <- function(parameters, call) { if (is.null(parameters) || length(parameters) == 0) { diff --git a/man/orderly_export_zip.Rd b/man/orderly_export_zip.Rd new file mode 100644 index 00000000..a207b1ea --- /dev/null +++ b/man/orderly_export_zip.Rd @@ -0,0 +1,37 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/export.R +\name{orderly_export_zip} +\alias{orderly_export_zip} +\title{Export packets as a zip file.} +\usage{ +orderly_export_zip(path, packets, root = NULL, locate = TRUE) +} +\arguments{ +\item{path}{the path where the zip file will be created} + +\item{packets}{One or more packets to export} + +\item{root}{The path to the root directory, or \code{NULL} (the +default) to search for one from the current working directory if +\code{locate} is \code{TRUE}. This function does not require that the +directory is configured for orderly, and can be any \code{outpack} +root (see \link{orderly_init} for details).} + +\item{locate}{Logical, indicating if the root should be searched +for. 
If \code{TRUE}, then we looks in the directory given for \code{root} +(or the working directory if \code{NULL}) and then up through its +parents until it finds an \code{.outpack} directory or +\code{orderly_config.yml}} +} +\value{ +Nothing +} +\description{ +The packets can be imported into a different repository using the +\link{orderly_import_zip} function. +} +\details{ +This is useful as one-time way to publish your results, for example as an +artefact accompanying a paper. For back-and-forth collaboration, a shared +location should be priviledged. +} diff --git a/man/orderly_import_zip.Rd b/man/orderly_import_zip.Rd new file mode 100644 index 00000000..bd9c66cb --- /dev/null +++ b/man/orderly_import_zip.Rd @@ -0,0 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/export.R +\name{orderly_import_zip} +\alias{orderly_import_zip} +\title{Import packets from a zip file.} +\usage{ +orderly_import_zip(path, root = NULL, locate = TRUE) +} +\arguments{ +\item{path}{the path to the zip file to be imported.} + +\item{root}{The path to the root directory, or \code{NULL} (the +default) to search for one from the current working directory if +\code{locate} is \code{TRUE}. This function does not require that the +directory is configured for orderly, and can be any \code{outpack} +root (see \link{orderly_init} for details).} + +\item{locate}{Logical, indicating if the root should be searched +for. If \code{TRUE}, then we looks in the directory given for \code{root} +(or the working directory if \code{NULL}) and then up through its +parents until it finds an \code{.outpack} directory or +\code{orderly_config.yml}} +} +\value{ +Invisibly, the IDs of the imported packets +} +\description{ +Import packets from a zip file. +} diff --git a/tests/testthat/test-export.R b/tests/testthat/test-export.R new file mode 100644 index 00000000..6c35bce7 --- /dev/null +++ b/tests/testthat/test-export.R @@ -0,0 +1,199 @@ +export_info <- function(path) { + listing <- zip::zip_list(path)$filename + + metadata <- grep("^metadata/.*[^/]$", listing, value = TRUE) + metadata <- sub("^metadata/", "", metadata) + + files <- grep("^files/.*[^/]$", listing, value = TRUE) + files <- sub("^files/", "", files) + + list(metadata = metadata, files = files) +} + +test_that("Exporting a packet includes its transitive dependencies", { + root <- create_temporary_root() + ids <- create_random_packet_chain(root, 3) + other <- create_random_packet(root) + + path <- tempfile() + orderly_export_zip(path, ids[[3]], root = root) + + info <- export_info(path) + expect_setequal(info$metadata, ids) + + # The root packet has one file, and each downstream one has 2 (one source file + # and one data). The downstreams actually have three, but one of them is a + # copy of the parent packet's data, hence is deduplicated and doesn't count. 
+ expect_equal(length(info$files), 5) +}) + +test_that("Can export multiple packets", { + root <- create_temporary_root() + first <- create_random_packet(root) + second <- create_random_packet(root) + ids <- c(first, second) + + path <- tempfile() + orderly_export_zip(path, ids, root = root) + + info <- export_info(path) + expect_setequal(info$metadata, ids) + expect_equal(length(info$files), 2) +}) + +test_that("Can export from a file store", { + root <- create_temporary_root(use_file_store = TRUE) + ids <- create_random_packet_chain(root, 3) + + path <- tempfile() + orderly_export_zip(path, ids[[3]], root = root) + + info <- export_info(path) + expect_setequal(info$metadata, ids) + expect_equal(length(info$files), 5) +}) + +test_that("Packet files are de-duplicated when exported", { + root <- create_temporary_root() + ids <- c(create_deterministic_packet(root), create_deterministic_packet(root)) + + path <- tempfile() + orderly_export_zip(path, ids, root = root) + + info <- export_info(path) + expect_setequal(info$metadata, ids) + expect_equal(length(info$files), 1) +}) + +test_that("Can import a zip file", { + upstream <- create_temporary_root() + downstream <- create_temporary_root() + + id <- create_random_packet(upstream) + + path <- tempfile() + orderly_export_zip(path, id, root = upstream) + + imported <- orderly_import_zip(path, root = downstream) + expect_equal(imported, id) + + index <- downstream$index$data() + expect_setequal(names(index$metadata), id) + expect_mapequal(index$metadata, upstream$index$data()$metadata) + expect_setequal(index$unpacked, id) + + files <- upstream$index$metadata(id)$files + file_paths <- file.path(downstream$path, downstream$config$core$path_archive, + upstream$index$metadata(id)$name, id, files$path) + + for (i in seq_along(file_paths)) { + expect_no_error(hash_validate_file(file_paths[[i]], files$hash[[i]])) + } +}) + +test_that("Can import a zip file to a file store", { + upstream <- create_temporary_root() + downstream <- create_temporary_root(use_file_store = TRUE) + + ids <- create_random_packet_chain(upstream, 3) + + path <- tempfile() + orderly_export_zip(path, ids[[3]], root = upstream) + orderly_import_zip(path, root = downstream) + + index <- downstream$index$data() + expect_setequal(names(index$metadata), ids) + expect_mapequal(index$metadata, upstream$index$data()$metadata) + expect_setequal(index$unpacked, ids) + + for (id in ids) { + files <- upstream$index$metadata(id)$files + expect_true(all(downstream$files$exists(files$hash))) + } +}) + +test_that("Importing a zip file is idempotent", { + upstream <- create_temporary_root() + downstream <- create_temporary_root() + + id <- create_random_packet(upstream) + + path <- tempfile() + orderly_export_zip(path, id, root = upstream) + imported_once <- orderly_import_zip(path, root = downstream) + imported_twice <- orderly_import_zip(path, root = downstream) + + expect_equal(imported_once, id) + expect_equal(imported_twice, id) + + index <- downstream$index$data() + expect_setequal(names(index$metadata), id) + expect_mapequal(index$metadata, upstream$index$data()$metadata) + expect_setequal(index$unpacked, id) +}) + +test_that("New packets are imported", { + upstream <- create_temporary_root() + + first_id <- create_random_packet(upstream) + first_zip <- tempfile() + orderly_export_zip(first_zip, first_id, root = upstream) + + second_id <- create_random_packet(upstream) + second_zip <- tempfile() + orderly_export_zip(second_zip, c(first_id, second_id), root = upstream) + + downstream <- 
create_temporary_root() + + orderly_import_zip(first_zip, root = downstream) + index <- downstream$index$data() + expect_setequal(names(index$metadata), first_id) + expect_setequal(index$unpacked, first_id) + + orderly_import_zip(second_zip, root = downstream) + index <- downstream$index$data() + expect_setequal(names(index$metadata), c(first_id, second_id)) + expect_mapequal(index$metadata, upstream$index$data()$metadata) + expect_setequal(index$unpacked, c(first_id, second_id)) +}) + +test_that("Can import packet with existing metadata", { + upstream <- create_temporary_root(use_file_store = TRUE) + id <- create_random_packet(upstream) + + # We want to bring in the packets metadata into the downstream repository, + # but not copy any of the actual files (yet). We do this by adding a path + # location and pulling the metadata from it. + downstream <- create_temporary_root() + orderly_location_add("upstream", "path", list(path = upstream$path), + root = downstream) + orderly_location_pull_metadata(root = downstream) + + index <- downstream$index$data() + expect_setequal(names(index$metadata), id) + expect_equal(length(index$unpacked), 0) + + path <- tempfile() + orderly_export_zip(path, id, root = upstream) + orderly_import_zip(path, root = downstream) + + index <- downstream$index$data() + expect_setequal(names(index$metadata), id) + expect_setequal(index$unpacked, id) +}) + +test_that("Importing a zip file with mismatching metadata fails", { + upstream <- create_temporary_root() + downstream <- create_temporary_root() + + id <- outpack_id() + create_random_packet(upstream, id = id) + create_random_packet(downstream, id = id) + + path <- tempfile() + orderly_export_zip(path, id, root = upstream) + + expect_error( + orderly_import_zip(path, root = downstream), + "Imported file has conflicting metadata") +}) From 899ca64bf081d297398e28f0b1f6052b71bfc5fc Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 17:34:43 +0000 Subject: [PATCH 02/12] Update pkgdown index. --- _pkgdown.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/_pkgdown.yml b/_pkgdown.yml index 72bbe997..dfe5b298 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -66,6 +66,10 @@ reference: - orderly_location_list - orderly_location_remove - orderly_location_rename + - title: Exporting packets + contents: + - orderly_export_zip + - orderly_import_zip - title: Help for developing contents: - orderly_new From 26004861dbdddfd02f11e4a9396a8233e4937f67 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 19:19:51 +0000 Subject: [PATCH 03/12] Add an outpack.json at the root of the export. 
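
The zip file now carries a top-level outpack.json manifest, described by the
new inst/schema/orderly/export.json schema. It lists each exported packet
together with the hash of its metadata, and the import path uses these hashes
to verify the metadata files rather than re-hashing them with the local
repository's hash algorithm. A sketch of how the manifest is read back on
import (the path below is relative to the unpacked zip; field names follow
the schema):

  contents <- jsonlite::read_json("outpack.json", simplifyVector = TRUE)
  # `packets` simplifies to a data frame with one row per exported packet:
  contents$packets$packet  # packet ids
  contents$packets$hash    # hash of each packet's metadata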
--- R/export.R | 105 ++++++++++++++++++-------------- inst/schema/orderly/export.json | 21 +++++++ 2 files changed, 79 insertions(+), 47 deletions(-) create mode 100644 inst/schema/orderly/export.json diff --git a/R/export.R b/R/export.R index d19be5d9..4872d8ee 100644 --- a/R/export.R +++ b/R/export.R @@ -19,9 +19,9 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, call = environment()) - metadata <- root$index$data()$metadata - packets <- find_all_dependencies(packets, metadata) - files <- find_all_files(packets, metadata) + index <- root$index$data() + packets <- find_all_dependencies(packets, index$metadata) + files <- find_all_files(packets, index$metadata) dest <- withr::local_tempfile() fs::dir_create(dest) @@ -36,7 +36,16 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { store$put(find_file_by_hash(root, hash), hash) } - zip::zip(fs::path_abs(path), root = dest, files = c("metadata", "files")) + packet_list <- index$location[ + match(index$location$packet, packets), c("packet", "hash")] + contents <- list(packets = packet_list) + + writeLines(to_json(contents, "orderly/export.json"), + file.path(dest, "outpack.json")) + + zip::zip(fs::path_abs(path), root = dest, + files = c("outpack.json", "metadata", "files")) + invisible() } @@ -51,60 +60,38 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { orderly_import_zip <- function(path, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, call = environment()) - index <- root$index$data() - - hash_algorithm <- root$config$core$hash_algorithm src <- withr::local_tempfile() zip::unzip(path, exdir = src) - store <- file_store$new(file.path(src, "files")) - ids <- dir(file.path(src, "metadata")) + contents <- jsonlite::read_json(file.path(src, "outpack.json"), + simplifyVector = TRUE) - # TODO: is using the root's hash algorithm correct? What if the origin had - # used a different hash, now there are two hashes for the same packet. We - # don't record the hash algorithm anywhere in the zip files, maybe we should. - metadata_hashes <- hash_metadata_files( - file.path(src, "metadata", ids), hash_algorithm) + import_zip_metadata(root, src, contents$packets, call = environment()) + import_zip_packets(root, src, contents$packets) - known_packets <- ids %in% names(index$metadata) - missing_packets <- !(ids %in% index$unpacked) + invisible(contents$packets$packet) +} - import_check_hashes(src, ids[known_packets], metadata_hashes[known_packets], - root, call = environment()) +import_zip_metadata <- function(root, src, packets, call) { + index <- root$index$data() + new_packets <- !(packets$packet %in% names(index$metadata)) - fs::file_copy( - file.path(src, "metadata", ids[!known_packets]), - file.path(root$path, ".outpack", "metadata", ids[!known_packets])) + ids <- packets$packet[new_packets] + src_paths <- file.path(src, "metadata", ids) + dst_paths <- file.path(root$path, ".outpack", "metadata", ids) + expected_hash <- packets$hash[new_packets] - if (root$config$core$use_file_store) { - # The index needs reloading to take into account the new metadata we just - # pulled. 
- index <- root$index$data() - files <- find_all_files(ids, index$metadata) - files <- files[!root$files$exists(files)] - for (hash in files) { - file_path <- store$get(hash, root$files$tmp(), overwrite = FALSE) - root$files$put(file_path, hash, move = TRUE) - } + for (i in seq_along(src_paths)) { + metadata <- read_string(src_paths[[i]]) + hash_validate_data(metadata, expected_hash[[i]], sprintf("metadata for '%s'", id), call = call) + writeLines(metadata, dst_paths[[i]]) } - for (i in which(missing_packets)) { - if (!is.null(root$config$core$path_archive)) { - location_pull_files_archive(ids[[i]], store, root) - } - mark_packet_known(ids[[i]], local, metadata_hashes[[i]], Sys.time(), root) - } - - invisible(ids) -} - -import_check_hashes <- function(src, ids, hashes, root, call) { - index <- root$index$data() - hash_algorithm <- root$config$core$hash_algorithm - - hash_here <- index$location$hash[match(ids, index$location$packet)] - err <- hashes != hash_here + seen_before <- intersect(packets$packet, index$location$packet) + hash_there <- packets$hash[match(seen_before, packets$packet)] + hash_here <- index$location$hash[match(seen_before, index$location$packet)] + err <- hash_there != hash_here if (any(err)) { cli::cli_abort( c("Imported file has conflicting metadata", @@ -117,5 +104,29 @@ import_check_hashes <- function(src, ids, hashes, root, call) { i = "We would be interested in this case, please let us know"), call = call) } + invisible() } + +import_zip_packets <- function(root, src, packets) { + store <- file_store$new(file.path(src, "files")) + index <- root$index$data() + missing_packets <- packets[!(packets$packet %in% index$unpacked), ] + + if (root$config$core$use_file_store) { + files <- find_all_files(missing_packets$packet, index$metadata) + files <- files[!root$files$exists(files)] + for (hash in files) { + file_path <- store$get(hash, root$files$tmp(), overwrite = FALSE) + root$files$put(file_path, hash, move = TRUE) + } + } + + for (i in seq_along(missing_packets$packet)) { + if (!is.null(root$config$core$path_archive)) { + location_pull_files_archive(missing_packets$packet[[i]], store, root) + } + mark_packet_known(missing_packets$packet[[i]], local, + missing_packets$hash[[i]], Sys.time(), root) + } +} diff --git a/inst/schema/orderly/export.json b/inst/schema/orderly/export.json new file mode 100644 index 00000000..36d78594 --- /dev/null +++ b/inst/schema/orderly/export.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "index of a exported zip file", + "version": "0.0.1", + + "type": "object", + "properties": { + "packets": { + "type": "array", + "items": { + "packet": { + "$ref": "../outpack/packet-id.json" + }, + + "hash": { + "$ref": "../outpack/hash.json" + } + } + } + } +} From a4371f2a63b8c385311bb8dc4dba7ac2a21d0a4f Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 19:35:05 +0000 Subject: [PATCH 04/12] Add test for missing outpack.json --- R/export.R | 7 +++++++ R/outpack_hash.R | 8 -------- tests/testthat/test-export.R | 36 +++++++++++++++++++++++++----------- 3 files changed, 32 insertions(+), 19 deletions(-) diff --git a/R/export.R b/R/export.R index 4872d8ee..37ebdfc4 100644 --- a/R/export.R +++ b/R/export.R @@ -64,6 +64,13 @@ orderly_import_zip <- function(path, root = NULL, locate = TRUE) { src <- withr::local_tempfile() zip::unzip(path, exdir = src) + if (!file.exists(file.path(src, "outpack.json"))) { + cli::cli_abort( + c("Zip file does not contain an 'outpack.json' file at its root", + i = 
"Are you sure this file was produced by orderly2::orderly_export_zip?"), + call = environment()) + } + contents <- jsonlite::read_json(file.path(src, "outpack.json"), simplifyVector = TRUE) diff --git a/R/outpack_hash.R b/R/outpack_hash.R index f135cae6..df1e98ea 100644 --- a/R/outpack_hash.R +++ b/R/outpack_hash.R @@ -112,11 +112,3 @@ rehash <- function(data, hash_function, expected) { algorithm <- hash_parse(expected)$algorithm hash_function(data, algorithm) } - -## metadata files are hashed by ignoring leading and trailing newline -## characters. -hash_metadata_files <- function(path, hash_algorithm) { - vcapply(path, function(p) { - hash_data(read_string(p), hash_algorithm) - }) -} diff --git a/tests/testthat/test-export.R b/tests/testthat/test-export.R index 6c35bce7..d4bfa47b 100644 --- a/tests/testthat/test-export.R +++ b/tests/testthat/test-export.R @@ -15,7 +15,7 @@ test_that("Exporting a packet includes its transitive dependencies", { ids <- create_random_packet_chain(root, 3) other <- create_random_packet(root) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, ids[[3]], root = root) info <- export_info(path) @@ -33,7 +33,7 @@ test_that("Can export multiple packets", { second <- create_random_packet(root) ids <- c(first, second) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, ids, root = root) info <- export_info(path) @@ -45,7 +45,7 @@ test_that("Can export from a file store", { root <- create_temporary_root(use_file_store = TRUE) ids <- create_random_packet_chain(root, 3) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, ids[[3]], root = root) info <- export_info(path) @@ -57,7 +57,7 @@ test_that("Packet files are de-duplicated when exported", { root <- create_temporary_root() ids <- c(create_deterministic_packet(root), create_deterministic_packet(root)) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, ids, root = root) info <- export_info(path) @@ -65,13 +65,27 @@ test_that("Packet files are de-duplicated when exported", { expect_equal(length(info$files), 1) }) +test_that("Importing an invalid zip fails", { + dir <- withr::local_tempfile() + fs::dir_create(dir) + fs::file_create(file.path(dir, "hello.txt")) + + zipfile <- withr::local_tempfile() + zip::zip(zipfile, files=c("hello.txt"), root=dir) + + root <- create_temporary_root() + expect_error( + orderly_import_zip(zipfile, root = root), + "Zip file does not contain an 'outpack.json' file at its root") +}) + test_that("Can import a zip file", { upstream <- create_temporary_root() downstream <- create_temporary_root() id <- create_random_packet(upstream) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, id, root = upstream) imported <- orderly_import_zip(path, root = downstream) @@ -97,7 +111,7 @@ test_that("Can import a zip file to a file store", { ids <- create_random_packet_chain(upstream, 3) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, ids[[3]], root = upstream) orderly_import_zip(path, root = downstream) @@ -118,7 +132,7 @@ test_that("Importing a zip file is idempotent", { id <- create_random_packet(upstream) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, id, root = upstream) imported_once <- orderly_import_zip(path, root = downstream) imported_twice <- orderly_import_zip(path, root = downstream) @@ -136,11 +150,11 @@ test_that("New packets are imported", { upstream <- create_temporary_root() first_id 
<- create_random_packet(upstream) - first_zip <- tempfile() + first_zip <- withr::local_tempfile() orderly_export_zip(first_zip, first_id, root = upstream) second_id <- create_random_packet(upstream) - second_zip <- tempfile() + second_zip <- withr::local_tempfile() orderly_export_zip(second_zip, c(first_id, second_id), root = upstream) downstream <- create_temporary_root() @@ -173,7 +187,7 @@ test_that("Can import packet with existing metadata", { expect_setequal(names(index$metadata), id) expect_equal(length(index$unpacked), 0) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, id, root = upstream) orderly_import_zip(path, root = downstream) @@ -190,7 +204,7 @@ test_that("Importing a zip file with mismatching metadata fails", { create_random_packet(upstream, id = id) create_random_packet(downstream, id = id) - path <- tempfile() + path <- withr::local_tempfile() orderly_export_zip(path, id, root = upstream) expect_error( From 70ebb9b9b2bbc7520beb23bf4c23922d516516ff Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 19:37:09 +0000 Subject: [PATCH 05/12] Fix formatting --- R/export.R | 8 ++++++-- tests/testthat/test-export.R | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/R/export.R b/R/export.R index 37ebdfc4..eabfae03 100644 --- a/R/export.R +++ b/R/export.R @@ -67,7 +67,8 @@ orderly_import_zip <- function(path, root = NULL, locate = TRUE) { if (!file.exists(file.path(src, "outpack.json"))) { cli::cli_abort( c("Zip file does not contain an 'outpack.json' file at its root", - i = "Are you sure this file was produced by orderly2::orderly_export_zip?"), + i = paste("Are you sure this file was produced by", + "orderly2::orderly_export_zip?")), call = environment()) } @@ -91,7 +92,10 @@ import_zip_metadata <- function(root, src, packets, call) { for (i in seq_along(src_paths)) { metadata <- read_string(src_paths[[i]]) - hash_validate_data(metadata, expected_hash[[i]], sprintf("metadata for '%s'", id), call = call) + + hash_validate_data(metadata, expected_hash[[i]], + sprintf("metadata for '%s'", id), call = call) + writeLines(metadata, dst_paths[[i]]) } diff --git a/tests/testthat/test-export.R b/tests/testthat/test-export.R index d4bfa47b..f269d31c 100644 --- a/tests/testthat/test-export.R +++ b/tests/testthat/test-export.R @@ -71,7 +71,7 @@ test_that("Importing an invalid zip fails", { fs::file_create(file.path(dir, "hello.txt")) zipfile <- withr::local_tempfile() - zip::zip(zipfile, files=c("hello.txt"), root=dir) + zip::zip(zipfile, files = c("hello.txt"), root = dir) root <- create_temporary_root() expect_error( From 45f890b1200ad1d1c514e2439d69ef118bbfaa85 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 19:39:42 +0000 Subject: [PATCH 06/12] Remove spurious empty line --- R/outpack_insert.R | 1 - 1 file changed, 1 deletion(-) diff --git a/R/outpack_insert.R b/R/outpack_insert.R index c13c74d3..e9ab53e6 100644 --- a/R/outpack_insert.R +++ b/R/outpack_insert.R @@ -39,7 +39,6 @@ outpack_insert_packet <- function(path, json, root = NULL) { ## TODO: once we get more flexible remotes, this will get moved into ## its own thing. 
hash <- hash_data(json, hash_algorithm) - time <- Sys.time() mark_packet_known(id, local, hash, time, root) } From 8a4647df05ba70c966a14403d46b4af8a7f95827 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Tue, 19 Mar 2024 19:44:11 +0000 Subject: [PATCH 07/12] Fix export from file store --- R/export.R | 10 ++++++++-- tests/testthat/test-export.R | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/R/export.R b/R/export.R index eabfae03..5717bd0a 100644 --- a/R/export.R +++ b/R/export.R @@ -32,8 +32,14 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { file.path(root$path, ".outpack", "metadata", packets), file.path(dest, "metadata", packets)) - for (hash in files) { - store$put(find_file_by_hash(root, hash), hash) + if (root$config$core$use_file_store) { + for (hash in files) { + store$put(root$files$filename(hash), hash) + } + } else { + for (hash in files) { + store$put(find_file_by_hash(root, hash), hash) + } } packet_list <- index$location[ diff --git a/tests/testthat/test-export.R b/tests/testthat/test-export.R index f269d31c..2f2aa175 100644 --- a/tests/testthat/test-export.R +++ b/tests/testthat/test-export.R @@ -42,7 +42,7 @@ test_that("Can export multiple packets", { }) test_that("Can export from a file store", { - root <- create_temporary_root(use_file_store = TRUE) + root <- create_temporary_root(use_file_store = TRUE, path_archive = NULL) ids <- create_random_packet_chain(root, 3) path <- withr::local_tempfile() From 69b46c30d1ce6d0ff61019bcf3113bf3c4bbccc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paul=20Li=C3=A9tar?= Date: Wed, 20 Mar 2024 09:39:35 +0000 Subject: [PATCH 08/12] Update R/export.R Co-authored-by: Rich FitzJohn --- R/export.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/R/export.R b/R/export.R index 5717bd0a..97ee62f0 100644 --- a/R/export.R +++ b/R/export.R @@ -5,7 +5,7 @@ ##' ##' This is useful as one-time way to publish your results, for example as an ##' artefact accompanying a paper. For back-and-forth collaboration, a shared -##' location should be priviledged. +##' location should be preferred, as this offers more flexibility. ##' ##' @param path the path where the zip file will be created ##' From d8f3f3fefe7153359ace77e9d876740fa852e46a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paul=20Li=C3=A9tar?= Date: Wed, 20 Mar 2024 09:39:42 +0000 Subject: [PATCH 09/12] Update R/export.R Co-authored-by: Rich FitzJohn --- R/export.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/R/export.R b/R/export.R index 97ee62f0..5e323605 100644 --- a/R/export.R +++ b/R/export.R @@ -52,7 +52,7 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { zip::zip(fs::path_abs(path), root = dest, files = c("outpack.json", "metadata", "files")) - invisible() + invisible(path) } ##' Import packets from a zip file. 
From b2843062d28f1f678dc5bee58f8fe834e5c8b419 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paul=20Li=C3=A9tar?= Date: Wed, 20 Mar 2024 10:02:23 +0000 Subject: [PATCH 10/12] Update export.R --- R/export.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/R/export.R b/R/export.R index 5e323605..8256b00a 100644 --- a/R/export.R +++ b/R/export.R @@ -13,7 +13,7 @@ ##' ##' @inheritParams orderly_metadata ##' -##' @return Nothing +##' @return Invisibly, the path to the zip file ##' @export orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, From f741fba8b52789bcc6d1ed95876c1a79b6a31b85 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Wed, 20 Mar 2024 10:06:46 +0000 Subject: [PATCH 11/12] Rename functions. --- NAMESPACE | 4 +- R/{export.R => zip.R} | 16 ++++---- _pkgdown.yml | 4 +- ...ly_export_zip.Rd => orderly_zip_export.Rd} | 14 +++---- ...ly_import_zip.Rd => orderly_zip_import.Rd} | 8 ++-- tests/testthat/{test-export.R => test-zip.R} | 40 +++++++++---------- 6 files changed, 43 insertions(+), 43 deletions(-) rename R/{export.R => zip.R} (91%) rename man/{orderly_export_zip.Rd => orderly_zip_export.Rd} (78%) rename man/{orderly_import_zip.Rd => orderly_zip_import.Rd} (85%) rename tests/testthat/{test-export.R => test-zip.R} (84%) diff --git a/NAMESPACE b/NAMESPACE index 205924ad..8694a828 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -12,11 +12,9 @@ export(orderly_copy_files) export(orderly_dependency) export(orderly_description) export(orderly_example) -export(orderly_export_zip) export(orderly_gitignore_update) export(orderly_hash_data) export(orderly_hash_file) -export(orderly_import_zip) export(orderly_init) export(orderly_interactive_set_search_options) export(orderly_list_src) @@ -48,4 +46,6 @@ export(orderly_search_options) export(orderly_shared_resource) export(orderly_strict_mode) export(orderly_validate_archive) +export(orderly_zip_export) +export(orderly_zip_import) importFrom(R6,R6Class) diff --git a/R/export.R b/R/zip.R similarity index 91% rename from R/export.R rename to R/zip.R index 8256b00a..eedd2b2a 100644 --- a/R/export.R +++ b/R/zip.R @@ -1,7 +1,7 @@ ##' Export packets as a zip file. ##' ##' The packets can be imported into a different repository using the -##' [orderly2::orderly_import_zip] function. +##' [orderly2::orderly_zip_import] function. ##' ##' This is useful as one-time way to publish your results, for example as an ##' artefact accompanying a paper. 
For back-and-forth collaboration, a shared @@ -15,7 +15,7 @@ ##' ##' @return Invisibly, the path to the zip file ##' @export -orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { +orderly_zip_export <- function(path, packets, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, call = environment()) @@ -63,7 +63,7 @@ orderly_export_zip <- function(path, packets, root = NULL, locate = TRUE) { ##' ##' @return Invisibly, the IDs of the imported packets ##' @export -orderly_import_zip <- function(path, root = NULL, locate = TRUE) { +orderly_zip_import <- function(path, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, call = environment()) @@ -74,20 +74,20 @@ orderly_import_zip <- function(path, root = NULL, locate = TRUE) { cli::cli_abort( c("Zip file does not contain an 'outpack.json' file at its root", i = paste("Are you sure this file was produced by", - "orderly2::orderly_export_zip?")), + "orderly2::orderly_zip_export?")), call = environment()) } contents <- jsonlite::read_json(file.path(src, "outpack.json"), simplifyVector = TRUE) - import_zip_metadata(root, src, contents$packets, call = environment()) - import_zip_packets(root, src, contents$packets) + zip_import_metadata(root, src, contents$packets, call = environment()) + zip_import_packets(root, src, contents$packets) invisible(contents$packets$packet) } -import_zip_metadata <- function(root, src, packets, call) { +zip_import_metadata <- function(root, src, packets, call) { index <- root$index$data() new_packets <- !(packets$packet %in% names(index$metadata)) @@ -125,7 +125,7 @@ import_zip_metadata <- function(root, src, packets, call) { invisible() } -import_zip_packets <- function(root, src, packets) { +zip_import_packets <- function(root, src, packets) { store <- file_store$new(file.path(src, "files")) index <- root$index$data() missing_packets <- packets[!(packets$packet %in% index$unpacked), ] diff --git a/_pkgdown.yml b/_pkgdown.yml index dfe5b298..42fbe877 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -68,8 +68,8 @@ reference: - orderly_location_rename - title: Exporting packets contents: - - orderly_export_zip - - orderly_import_zip + - orderly_zip_export + - orderly_zip_import - title: Help for developing contents: - orderly_new diff --git a/man/orderly_export_zip.Rd b/man/orderly_zip_export.Rd similarity index 78% rename from man/orderly_export_zip.Rd rename to man/orderly_zip_export.Rd index a207b1ea..a1e418d4 100644 --- a/man/orderly_export_zip.Rd +++ b/man/orderly_zip_export.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/export.R -\name{orderly_export_zip} -\alias{orderly_export_zip} +% Please edit documentation in R/zip.R +\name{orderly_zip_export} +\alias{orderly_zip_export} \title{Export packets as a zip file.} \usage{ -orderly_export_zip(path, packets, root = NULL, locate = TRUE) +orderly_zip_export(path, packets, root = NULL, locate = TRUE) } \arguments{ \item{path}{the path where the zip file will be created} @@ -24,14 +24,14 @@ parents until it finds an \code{.outpack} directory or \code{orderly_config.yml}} } \value{ -Nothing +Invisibly, the path to the zip file } \description{ The packets can be imported into a different repository using the -\link{orderly_import_zip} function. +\link{orderly_zip_import} function. } \details{ This is useful as one-time way to publish your results, for example as an artefact accompanying a paper. 
For back-and-forth collaboration, a shared -location should be priviledged. +location should be preferred, as this offers more flexibility. } diff --git a/man/orderly_import_zip.Rd b/man/orderly_zip_import.Rd similarity index 85% rename from man/orderly_import_zip.Rd rename to man/orderly_zip_import.Rd index bd9c66cb..140a3838 100644 --- a/man/orderly_import_zip.Rd +++ b/man/orderly_zip_import.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/export.R -\name{orderly_import_zip} -\alias{orderly_import_zip} +% Please edit documentation in R/zip.R +\name{orderly_zip_import} +\alias{orderly_zip_import} \title{Import packets from a zip file.} \usage{ -orderly_import_zip(path, root = NULL, locate = TRUE) +orderly_zip_import(path, root = NULL, locate = TRUE) } \arguments{ \item{path}{the path to the zip file to be imported.} diff --git a/tests/testthat/test-export.R b/tests/testthat/test-zip.R similarity index 84% rename from tests/testthat/test-export.R rename to tests/testthat/test-zip.R index 2f2aa175..760c265e 100644 --- a/tests/testthat/test-export.R +++ b/tests/testthat/test-zip.R @@ -16,7 +16,7 @@ test_that("Exporting a packet includes its transitive dependencies", { other <- create_random_packet(root) path <- withr::local_tempfile() - orderly_export_zip(path, ids[[3]], root = root) + orderly_zip_export(path, ids[[3]], root = root) info <- export_info(path) expect_setequal(info$metadata, ids) @@ -34,7 +34,7 @@ test_that("Can export multiple packets", { ids <- c(first, second) path <- withr::local_tempfile() - orderly_export_zip(path, ids, root = root) + orderly_zip_export(path, ids, root = root) info <- export_info(path) expect_setequal(info$metadata, ids) @@ -46,7 +46,7 @@ test_that("Can export from a file store", { ids <- create_random_packet_chain(root, 3) path <- withr::local_tempfile() - orderly_export_zip(path, ids[[3]], root = root) + orderly_zip_export(path, ids[[3]], root = root) info <- export_info(path) expect_setequal(info$metadata, ids) @@ -58,7 +58,7 @@ test_that("Packet files are de-duplicated when exported", { ids <- c(create_deterministic_packet(root), create_deterministic_packet(root)) path <- withr::local_tempfile() - orderly_export_zip(path, ids, root = root) + orderly_zip_export(path, ids, root = root) info <- export_info(path) expect_setequal(info$metadata, ids) @@ -75,7 +75,7 @@ test_that("Importing an invalid zip fails", { root <- create_temporary_root() expect_error( - orderly_import_zip(zipfile, root = root), + orderly_zip_import(zipfile, root = root), "Zip file does not contain an 'outpack.json' file at its root") }) @@ -86,9 +86,9 @@ test_that("Can import a zip file", { id <- create_random_packet(upstream) path <- withr::local_tempfile() - orderly_export_zip(path, id, root = upstream) + orderly_zip_export(path, id, root = upstream) - imported <- orderly_import_zip(path, root = downstream) + imported <- orderly_zip_import(path, root = downstream) expect_equal(imported, id) index <- downstream$index$data() @@ -112,8 +112,8 @@ test_that("Can import a zip file to a file store", { ids <- create_random_packet_chain(upstream, 3) path <- withr::local_tempfile() - orderly_export_zip(path, ids[[3]], root = upstream) - orderly_import_zip(path, root = downstream) + orderly_zip_export(path, ids[[3]], root = upstream) + orderly_zip_import(path, root = downstream) index <- downstream$index$data() expect_setequal(names(index$metadata), ids) @@ -133,9 +133,9 @@ test_that("Importing a zip file is idempotent", { id <- 
create_random_packet(upstream) path <- withr::local_tempfile() - orderly_export_zip(path, id, root = upstream) - imported_once <- orderly_import_zip(path, root = downstream) - imported_twice <- orderly_import_zip(path, root = downstream) + orderly_zip_export(path, id, root = upstream) + imported_once <- orderly_zip_import(path, root = downstream) + imported_twice <- orderly_zip_import(path, root = downstream) expect_equal(imported_once, id) expect_equal(imported_twice, id) @@ -151,20 +151,20 @@ test_that("New packets are imported", { first_id <- create_random_packet(upstream) first_zip <- withr::local_tempfile() - orderly_export_zip(first_zip, first_id, root = upstream) + orderly_zip_export(first_zip, first_id, root = upstream) second_id <- create_random_packet(upstream) second_zip <- withr::local_tempfile() - orderly_export_zip(second_zip, c(first_id, second_id), root = upstream) + orderly_zip_export(second_zip, c(first_id, second_id), root = upstream) downstream <- create_temporary_root() - orderly_import_zip(first_zip, root = downstream) + orderly_zip_import(first_zip, root = downstream) index <- downstream$index$data() expect_setequal(names(index$metadata), first_id) expect_setequal(index$unpacked, first_id) - orderly_import_zip(second_zip, root = downstream) + orderly_zip_import(second_zip, root = downstream) index <- downstream$index$data() expect_setequal(names(index$metadata), c(first_id, second_id)) expect_mapequal(index$metadata, upstream$index$data()$metadata) @@ -188,8 +188,8 @@ test_that("Can import packet with existing metadata", { expect_equal(length(index$unpacked), 0) path <- withr::local_tempfile() - orderly_export_zip(path, id, root = upstream) - orderly_import_zip(path, root = downstream) + orderly_zip_export(path, id, root = upstream) + orderly_zip_import(path, root = downstream) index <- downstream$index$data() expect_setequal(names(index$metadata), id) @@ -205,9 +205,9 @@ test_that("Importing a zip file with mismatching metadata fails", { create_random_packet(downstream, id = id) path <- withr::local_tempfile() - orderly_export_zip(path, id, root = upstream) + orderly_zip_export(path, id, root = upstream) expect_error( - orderly_import_zip(path, root = downstream), + orderly_zip_import(path, root = downstream), "Imported file has conflicting metadata") }) From 751c93c945107ffffd5256522b68e3f676588764 Mon Sep 17 00:00:00 2001 From: Paul Lietar Date: Mon, 25 Mar 2024 14:16:30 +0000 Subject: [PATCH 12/12] Fail faster on missing outpack.json --- R/zip.R | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/R/zip.R b/R/zip.R index eedd2b2a..d1060a9b 100644 --- a/R/zip.R +++ b/R/zip.R @@ -67,17 +67,16 @@ orderly_zip_import <- function(path, root = NULL, locate = TRUE) { root <- root_open(root, locate = locate, require_orderly = FALSE, call = environment()) - src <- withr::local_tempfile() - zip::unzip(path, exdir = src) - - if (!file.exists(file.path(src, "outpack.json"))) { + if (!("outpack.json" %in% zip::zip_list(path)$filename)) { cli::cli_abort( c("Zip file does not contain an 'outpack.json' file at its root", i = paste("Are you sure this file was produced by", - "orderly2::orderly_zip_export?")), - call = environment()) + "orderly2::orderly_zip_export?"))) } + src <- withr::local_tempfile() + zip::unzip(path, exdir = src) + contents <- jsonlite::read_json(file.path(src, "outpack.json"), simplifyVector = TRUE)
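
A minimal end-to-end sketch of the pair of functions as named at the end of
this series (the zip path, root paths and packet id below are illustrative
placeholders, not values taken from the patches):

  # Export a packet (and, implicitly, its transitive dependencies) from an
  # existing outpack/orderly root into a standalone zip file.
  orderly2::orderly_zip_export("results.zip", "20240319-123456-0abc1def",
                               root = "path/to/source/root")

  # Import the metadata and files into a different repository. The call is
  # idempotent; it fails if the zip has no outpack.json at its root, or if
  # the metadata conflicts with metadata already present locally.
  ids <- orderly2::orderly_zip_import("results.zip",
                                      root = "path/to/other/root")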