diff --git a/.gitignore b/.gitignore
index eac934387..bd3a4853c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -64,3 +64,6 @@ metastore_db
# forge test files
out
cache
+
+# any output files from generating public params
+output/
diff --git a/Cargo.toml b/Cargo.toml
index 035636f51..7e6276fc4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -36,6 +36,7 @@ curve25519-dalek = { version = "4", features = ["rand_core"] }
derive_more = { version = "0.99" }
flexbuffers = { version = "2.0.0" }
indexmap = { version = "2.1", default-features = false }
+indicatif = "0.17.8"
itertools = { version = "0.13.0", default-features = false, features = ["use_alloc"] }
lalrpop = { version = "0.22.0" }
lalrpop-util = { version = "0.22.0", default-features = false }
@@ -48,13 +49,16 @@ postcard = { version = "1.0" }
proof-of-sql = { path = "crates/proof-of-sql" } # We automatically update this line during release. So do not modify it!
proof-of-sql-parser = { path = "crates/proof-of-sql-parser" } # We automatically update this line during release. So do not modify it!
rand = { version = "0.8", default-features = false }
+rand_chacha = { version = "0.3.1" }
rand_core = { version = "0.6", default-features = false }
rayon = { version = "1.5" }
serde = { version = "1", default-features = false }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
+sha2 = "0.10.8"
snafu = { version = "0.8.4", default-features = false }
sqlparser = { version = "0.45.0", default-features = false }
tiny-keccak = { version = "2.0.2", features = [ "keccak" ] }
+tempfile = "3.13.0"
tracing = { version = "0.1.36", default-features = false }
tracing-opentelemetry = { version = "0.22.0" }
tracing-subscriber = { version = "0.3.0" }
diff --git a/crates/proof-of-sql/Cargo.toml b/crates/proof-of-sql/Cargo.toml
index 423f704f0..7c75c55bb 100644
--- a/crates/proof-of-sql/Cargo.toml
+++ b/crates/proof-of-sql/Cargo.toml
@@ -30,10 +30,12 @@ blitzar = { workspace = true, optional = true }
bumpalo = { workspace = true, features = ["collections"] }
bytemuck = { workspace = true }
byte-slice-cast = { workspace = true }
+clap = { workspace = true, features = ["derive"] }
curve25519-dalek = { workspace = true, features = ["serde"] }
chrono = { workspace = true, features = ["serde"] }
derive_more = { workspace = true }
indexmap = { workspace = true, features = ["serde"] }
+indicatif = { workspace = true }
itertools = { workspace = true }
merlin = { workspace = true, optional = true }
num-traits = { workspace = true }
@@ -41,9 +43,11 @@ num-bigint = { workspace = true, default-features = false }
postcard = { workspace = true, features = ["alloc"] }
proof-of-sql-parser = { workspace = true }
rand = { workspace = true, default-features = false, optional = true }
+rand_chacha = { workspace = true}
rayon = { workspace = true, optional = true }
serde = { workspace = true, features = ["serde_derive"] }
serde_json = { workspace = true }
+sha2 = { workspace = true }
snafu = { workspace = true }
sqlparser = { workspace = true }
tiny-keccak = { workspace = true }
@@ -62,6 +66,7 @@ opentelemetry-jaeger = { workspace = true }
rand = { workspace = true, default-features = false }
rand_core = { workspace = true, default-features = false }
serde_json = { workspace = true }
+tempfile = { workspace = true }
tracing = { workspace = true }
tracing-opentelemetry = { workspace = true }
tracing-subscriber = { workspace = true }
@@ -83,6 +88,11 @@ std = ["snafu/std"]
[lints]
workspace = true
+[[bin]]
+name = "generate-parameters"
+path = "utils/generate-parameters/main.rs"
+required-features = [ "std", "blitzar"]
+
[[example]]
name = "hello_world"
required-features = ["test"]
diff --git a/crates/proof-of-sql/utils/generate-parameters/README.md b/crates/proof-of-sql/utils/generate-parameters/README.md
new file mode 100644
index 000000000..c98a9efa5
--- /dev/null
+++ b/crates/proof-of-sql/utils/generate-parameters/README.md
@@ -0,0 +1,68 @@
+# Space and Time ParamGen
+
+A simple tool to generate the Space and Time public network parameters.
+
+## 📑 Table of Contents
+
+- [🚀 Quick Start](#quick-start)
+- [📚 Background](#background)
+- [📚 Additional Resources](#additional-resources)
+
+## 🚀 Quick Start
+
+Ensure that you have rust installed. Then, clone this repo and simply run the following:
+
+```bash
+cargo run --release --bin generate-parameters
+```
+
+This generates the setups for both the prover and verifier as two separate sets of parameter files, with a default nu value of 8 (the binary's default). By default, it saves these parameters in the `./output` directory.
+
+| Description | Command |
+| --------------- | --------------- |
+| Run the Prover setup only | ```cargo run --release --bin generate-parameters -- --mode prover``` |
+|Run the Verifier setup only | ```cargo run --release --bin generate-parameters -- --mode verifier``` |
+| Run both Prover and Verifier setups with a custom nu value | ```cargo run --release --bin generate-parameters -- --mode all --nu 4``` |
+| Specify an output directory (with --target argument) | ```cargo run --release --bin generate-parameters -- --mode all --target ./output ``` |
+
+## 📚 Background
+
+### What are public parameters?
+
+There are a wide variety of zero-knowledge proof and argument systems, all offering different performance characteristics. The classic example is the [Groth16](https://eprint.iacr.org/2016/260.pdf) argument, a commonly used proof system which establishes a trusted setup (known formally as a common reference string (CRS) or structured reference string (SRS)) to be shared among participants in the network. This setup is structured in such a way that allows arguments of valid computation to be produced with very small sizes. In the case of Groth16, this can be as low as a few group elements or a couple hundred bytes, which is the perfect size to store on a blockchain.
+
+The Space and Time network makes use of a few different argument systems. The Dory polynomial commitment scheme (PCS) is a SNARK which requires a setup to be established between the proving and verifying parties. The Dory PCS is chosen because it is amenable to forming proofs and arguments over matrices, which is perfect for the Proof-Of-SQL case, since databases and tables are essentially matrices. The Dory setup process is unique in that it is *transparent*, meaning there is no toxic waste or secret values to forget once the setup is complete. The setup is initialized with an arbitrary random string which establishes common parameters. We choose the random string "SpaceAndTime" for our setup. This string is a "[Nothing-up-my-sleeve number](https://en.wikipedia.org/wiki/Nothing-up-my-sleeve_number)", meaning it is easily auditable and has no hidden structure that can be exploited to generate false proofs or compromise the integrity of the system.
+
+The Space and Time implementation of the [Dory PCS](https://eprint.iacr.org/2020/1274) is non-zero knowledge and does not explicitly blind the inputs used in the argument of correct sql execution. This yields a leaner implementation and slightly better performance. We may add zero-knowledge blinding in the future, but for now it is not necessary for Proof-Of-SQL to function correctly.
+
+This tool generates the public setups for either the prover or verifier. Both setups are parameterized over a value *nu*, which helps establish the maximum dimension of the table that can be argued against. The prover and the verifier each possess a slightly different setup. The verifier setup is relatively cheap to compute and scales linearly for large nu/table sizes. The prover setup is larger and has a higher cost to compute. We provide pre-computed setups that can easily be downloaded and used with the SxT network in order to skip the expensive generation process, but this repo contains a tool to generate the parameters at your option.
+
+### Table Sizes
+
+The maximum supported table size for parameters generated by this tool is determined by the value of max nu. For Dynamic Dory, that size is $2^{2 \nu - 1}$. The table below translates this into the number of rows that the parameters can support:
+
+| ν (nu) | Number of rows |
+|--------|-------------------|
+| 8 | 33 thousand |
+| 9 | 131 thousand |
+| 10 | 524 thousand |
+| 11 | 2.10 million |
+| 12 | 8.39 million |
+| 13 | 33.55 million |
+| 14 | 134.22 million |
+| 15 | 536.87 million |
+| 16 | 2.15 billion |
+| 17 | 8.59 billion |
+| 18 | 34.36 billion |
+| 19 | 137.44 billion |
+| 20 | 549.76 billion |
+
+
+NOTE: Setups using the same random string but different nu values remain compatible with each other up to the minimum value of nu shared between them. SxT as of this writing uses a nu value of 16. Setups with smaller values of nu should be compatible with this setup.
+
+
+## 📚 Additional Resources
+
+- [Dory: Efficient, Transparent arguments for Generalised Inner Products and Polynomial Commitments](https://eprint.iacr.org/2020/1274)
+- [Groth16](https://eprint.iacr.org/2016/260.pdf)
+- [Nothing-up-my-sleeve number](https://en.wikipedia.org/wiki/Nothing-up-my-sleeve_number)
diff --git a/crates/proof-of-sql/utils/generate-parameters/main.rs b/crates/proof-of-sql/utils/generate-parameters/main.rs
new file mode 100644
index 000000000..c004ecc8c
--- /dev/null
+++ b/crates/proof-of-sql/utils/generate-parameters/main.rs
@@ -0,0 +1,326 @@
+//! A CLI utility to generate the public parameters for the prover and verifier
+
+#[cfg(test)]
+mod round_trip_test;
+
+use ark_std::rand::SeedableRng;
+use clap::{Parser, ValueEnum};
+use indicatif::{ProgressBar, ProgressStyle};
+use proof_of_sql::proof_primitive::dory::{ProverSetup, PublicParameters, VerifierSetup};
+use rand_chacha::ChaCha20Rng;
+use sha2::{Digest, Sha256};
+use std::{
+ env,
+ fs::{self, File, OpenOptions},
+ io::{self, Write},
+ path::Path,
+ process::Command,
+ time::{Duration, Instant},
+};
+
+/// Transparent public randomness
+const SEED: &str = "SpaceAndTime";
+
+const BLITZAR_PARTITION_WINDOW_WIDTH: &str = "BLITZAR_PARTITION_WINDOW_WIDTH";
+
+#[derive(Parser, Debug)]
+#[command(author, version, about, long_about = None)]
+struct Args {
+    /// The value for `nu`, which determines the maximum supported table size
+ #[arg(short, long, default_value_t = 8)]
+ nu: usize,
+
+    /// Mode for generating parameters: "prover", "verifier", or "all" (default) for both
+ #[arg(short, long, default_value = "all")]
+ mode: Mode,
+
+ /// The initial randomness for the transparent setup
+ #[arg(short, long, default_value = SEED)]
+ seed: String,
+
+ /// The directory to store generated files and archives
+ #[arg(short, long, default_value = "./output")]
+ target: String,
+}
+
+// An enum representing the possible modes of operation:
+// generate only the prover setup, only the verifier
+// setup, or both. Selected on the command line via
+// `--mode prover`, `--mode verifier`, or `--mode all`.
+#[derive(Debug, Clone, ValueEnum)]
+enum Mode {
+ Prover, //Prover
+ Verifier, //verifier
+ All, //Both
+}
+
+fn main() {
+ // Set the BLITZAR_PARTITION_WINDOW_WIDTH environment variable
+ env::set_var(BLITZAR_PARTITION_WINDOW_WIDTH, "14");
+
+ // Confirm that it was set by reading it back
+ match env::var(BLITZAR_PARTITION_WINDOW_WIDTH) {
+ Ok(value) => {
+ println!("Environment variable {BLITZAR_PARTITION_WINDOW_WIDTH} set to {value}");
+ }
+ Err(e) => {
+ eprintln!("Failed to set {BLITZAR_PARTITION_WINDOW_WIDTH}: {e}");
+ }
+ }
+
+ // Parse command-line arguments
+ let args = Args::parse();
+
+ // Ensure the target directory exists
+ if let Ok(()) = fs::create_dir_all(&args.target) {
+ generate_parameters(&args);
+ } else {
+ eprintln!(
+ "Skipping generation, failed to write or create target directory: {}. Check path and try again.",
+ args.target,
+ );
+ std::process::exit(-1)
+ };
+}
+
+fn generate_parameters(args: &Args) {
+ // Clear out the digests.txt file if it already exists
+ let digests_path = format!("{}/digests_nu_{}.txt", args.target, args.nu);
+ if Path::new(&digests_path).exists() {
+ match fs::write(&digests_path, "") {
+ Ok(()) => {}
+ Err(e) => {
+ eprintln!("Failed to clear digests.txt file: {e}");
+ std::process::exit(-1)
+ }
+ }
+ }
+
+ let mut rng = rng_from_seed(args);
+
+ let spinner = spinner(format!(
+ "Generating a random public setup with seed {SEED:?} please wait..."
+ ));
+
+ // Obtain public parameter from nu
+ let public_parameters = PublicParameters::rand(args.nu, &mut rng);
+ spinner.finish_with_message("Public parameter setup complete");
+
+ match args.mode {
+ Mode::All => {
+ println!("Generating parameters for Prover...");
+ generate_prover_setup(&public_parameters, args.nu, &args.target);
+ println!("Generating parameters for Verifier...");
+ generate_verifier_setup(&public_parameters, args.nu, &args.target);
+ }
+ Mode::Prover => {
+ println!("Generating parameters for Prover...");
+ generate_prover_setup(&public_parameters, args.nu, &args.target);
+ }
+ Mode::Verifier => {
+ println!("Generating parameters for Verifier...");
+ generate_verifier_setup(&public_parameters, args.nu, &args.target);
+ }
+ }
+}
+
+/// # Panics
+/// expects that a [u8; 32] always contains 32 elements, guaranteed not to panic
+fn rng_from_seed(args: &Args) -> ChaCha20Rng {
+ // Convert the seed string to bytes and create a seeded RNG
+ let seed_bytes = args
+ .seed
+ .bytes()
+ .chain(std::iter::repeat(0u8))
+ .take(32)
+        .collect::<Vec<u8>>()
+ .try_into()
+ .expect("collection is guaranteed to contain 32 elements");
+ ChaCha20Rng::from_seed(seed_bytes)
+}
+
+/// Generates and writes the ```ProverSetup``` from initial public parameters
+fn generate_prover_setup(public_parameters: &PublicParameters, nu: usize, target: &str) {
+ let spinner = spinner(
+ "Generating parameters for the SxT network. This may take a long time, please wait..."
+ .into(),
+ );
+
+ let start_time = Instant::now();
+
+ // Heavy operation
+ let setup = ProverSetup::from(public_parameters);
+
+ spinner.finish_with_message("Prover setup complete.");
+ let duration = start_time.elapsed();
+ println!("Generated prover setup in {duration:.2?}");
+
+ let public_parameters_path = format!("{target}/public_parameters_nu_{nu}.bin");
+ let param_save_result = public_parameters.save_to_file(Path::new(&public_parameters_path));
+ let file_path = format!("{target}/blitzar_handle_nu_{nu}.bin");
+
+ match param_save_result {
+ Ok(()) => {
+ write_prover_blitzar_handle(setup, &file_path);
+
+ // Compute and save SHA-256
+ let mut digests = Vec::new();
+ let public_parameters_digest = compute_sha256(&public_parameters_path);
+ let blitzar_handle_digest = compute_sha256(&file_path);
+ if let Some(digest) = public_parameters_digest {
+ digests.push((public_parameters_path.clone(), digest));
+ }
+ if let Some(digest) = blitzar_handle_digest {
+ digests.push((file_path.clone(), digest));
+ }
+ save_digests(&digests, target, nu); // Save digests to digests.txt
+ }
+ Err(e) => {
+ eprintln!("Failed to save prover setup: {e}.");
+ std::process::exit(-1)
+ }
+ }
+}
+
+// Generates and writes the VerifierSetup from initial public parameters
+fn generate_verifier_setup(public_parameters: &PublicParameters, nu: usize, target: &str) {
+ let spinner = spinner(
+ "Generating parameters for the SxT network. This may take a long time, please wait..."
+ .into(),
+ );
+
+ let start_time = Instant::now();
+
+ // Heavy operation
+ let setup = VerifierSetup::from(public_parameters);
+
+ spinner.finish_with_message("Verifier setup complete.");
+ let duration = start_time.elapsed();
+ println!("Generated verifier setup in {duration:.2?}");
+
+ let file_path = format!("{target}/verifier_setup_nu_{nu}.bin");
+ let result = write_verifier_setup(&setup, &file_path);
+
+ match result {
+ Ok(()) => {
+ println!("Verifier setup saved successfully.");
+
+ // Compute and save SHA-256
+ let mut digests = Vec::new();
+ if let Some(digest) = compute_sha256(&file_path) {
+ digests.push((file_path.clone(), digest));
+ }
+ save_digests(&digests, target, nu); // Save digests to digests.txt
+ }
+ Err(e) => {
+ eprintln!("Failed to save verifier setup: {e}.");
+ std::process::exit(-1)
+ }
+ }
+}
+
+// Function to compute SHA-256 hash of a file
+fn compute_sha256(file_path: &str) -> Option<String> {
+ let mut file = File::open(file_path).ok()?;
+ let mut hasher = Sha256::new();
+ io::copy(&mut file, &mut hasher).ok()?;
+ Some(format!("{:x}", hasher.finalize()))
+}
+
+/// Function to save digests to a file, or print to console if file saving fails
+fn save_digests(digests: &[(String, String)], target: &str, nu: usize) {
+ let digests_path = format!("{target}/digests_nu_{nu}.txt");
+
+ // Attempt to open file in append mode, creating it if it doesn't exist
+ let mut file = if let Ok(f) = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .open(&digests_path)
+ {
+ Some(f)
+ } else {
+ println!("Failed to open or create file at {digests_path}. Printing digests to console.");
+ None
+ };
+
+ for (file_path, digest) in digests {
+ if let Some(f) = &mut file {
+ // Attempt to write to file, fall back to printing if it fails
+ if writeln!(f, "{digest} {file_path}").is_err() {
+ println!(
+ "Failed to write to {digests_path}. Printing remaining digests to console."
+ );
+ file = None; // Stop trying to write to the file
+ }
+ }
+
+ if file.is_none() {
+ println!("{digest} {file_path}");
+ }
+ }
+
+ if file.is_some() {
+ println!("Digests saved to {digests_path}");
+ }
+}
+
+fn write_prover_blitzar_handle(setup: ProverSetup<'_>, file_path: &str) {
+ let blitzar_handle = setup.blitzar_handle();
+ blitzar_handle.write(file_path);
+
+ // Check the file size to see if it exceeds 2 GB
+ let metadata_res = fs::metadata(file_path);
+ match metadata_res {
+ Ok(m) => {
+ let file_size = m.len();
+
+ if file_size > 2 * 1024 * 1024 * 1024 {
+ // 2 GB in bytes
+ println!("Handle size exceeds 2 GB, splitting into parts...");
+
+ // Run `split` command to divide the file into 2.0 GB parts
+ let split_output = Command::new("split")
+ .arg("-b")
+ .arg("2000M")
+ .arg(file_path)
+ .arg(format!("{file_path}.part."))
+ .output();
+
+ match split_output {
+ Ok(_) => {
+ println!("File successfully split into parts.");
+ fs::remove_file(file_path).unwrap_or_else(|e| {
+ eprintln!("Error clearing large file during split: {e}");
+ std::process::exit(-1)
+ });
+ }
+ Err(e) => {
+ eprintln!("Error during file splitting: {e}");
+ std::process::exit(-1)
+ }
+ }
+ }
+ }
+ Err(e) => {
+ eprintln!("Failed to write blitzar_handle to file: {e}");
+ std::process::exit(-1)
+ }
+ }
+}
+
+fn write_verifier_setup(setup: &VerifierSetup, file_path: &str) -> std::io::Result<()> {
+ setup.save_to_file(Path::new(file_path))
+}
+
+// Get a spinner so we have haptic feedback during param generation
+fn spinner(message: String) -> ProgressBar {
+ let spinner = ProgressBar::new_spinner();
+ spinner.set_style(
+ ProgressStyle::default_spinner()
+ .template("{spinner:.green} {msg}")
+ .unwrap_or_else(|_| ProgressStyle::default_spinner()),
+ );
+ spinner.enable_steady_tick(Duration::from_millis(100));
+ spinner.set_message(message);
+ spinner
+}
diff --git a/crates/proof-of-sql/utils/generate-parameters/round_trip_test.rs b/crates/proof-of-sql/utils/generate-parameters/round_trip_test.rs
new file mode 100644
index 000000000..37aeea7dd
--- /dev/null
+++ b/crates/proof-of-sql/utils/generate-parameters/round_trip_test.rs
@@ -0,0 +1,119 @@
+use proof_of_sql::proof_primitive::dory::{ProverSetup, PublicParameters, VerifierSetup};
+use sha2::{Digest, Sha256};
+use std::{
+ fs::File,
+ io::{self, BufRead},
+ path::Path,
+ process::Command,
+};
+use tempfile::tempdir;
+
+/// # Panics
+/// This test will panic in a number of non-consequential, expected cases.
+#[test]
+fn we_can_generate_save_and_load_public_setups() {
+ // Create a temporary directory for the test
+ let temp_dir = tempdir().expect("Failed to create a temporary directory");
+ let temp_path = temp_dir.path().to_str().unwrap();
+
+ // Run the binary with nu = 4, mode = "pv", and target as the temp directory
+ let output = Command::new("cargo")
+ .arg("run")
+ .arg("--release")
+ .arg("--")
+ .arg("--nu")
+ .arg("4")
+ .arg("--mode")
+ .arg("all")
+ .arg("--target")
+ .arg(temp_path)
+ .output()
+ .expect("Failed to execute command");
+
+ // Check the output to make sure the process ran successfully
+ assert!(output.status.success(), "Process failed to run: {output:?}");
+
+ // Check that both Prover and Verifier files exist in the temp directory
+ let blitzar_handle_path = format!("{temp_path}/blitzar_handle_nu_4.bin");
+ let verifier_setup_path = format!("{temp_path}/verifier_setup_nu_4.bin");
+ let public_parameters_path = format!("{temp_path}/public_parameters_nu_4.bin");
+ let digests_path = format!("{temp_path}/digests_nu_4.txt");
+
+ assert!(
+ Path::new(&blitzar_handle_path).exists(),
+ "Prover setup file is missing"
+ );
+ assert!(
+ Path::new(&verifier_setup_path).exists(),
+ "Verifier setup file is missing"
+ );
+ assert!(
+ Path::new(&public_parameters_path).exists(),
+ "Public parameters file is missing"
+ );
+ assert!(Path::new(&digests_path).exists(), "Digests file is missing");
+
+ // Load the ProverSetup and VerifierSetup from their files
+ let handle = blitzar::compute::MsmHandle::new_from_file(&blitzar_handle_path);
+ let params = PublicParameters::load_from_file(Path::new(&public_parameters_path)).unwrap();
+
+ let _prover_setup = ProverSetup::from_public_parameters_and_blitzar_handle(¶ms, handle);
+ let _verifier_setup = VerifierSetup::load_from_file(Path::new(&verifier_setup_path))
+ .expect("Failed to load VerifierSetup");
+
+ // Verify that the digests.txt file contains the correct hash values
+ let mut expected_digests = Vec::new();
+
+ // Compute SHA-256 digests for each file
+ if let Some(digest) = compute_sha256(&public_parameters_path) {
+ expected_digests.push((public_parameters_path.clone(), digest));
+ }
+ if let Some(digest) = compute_sha256(&blitzar_handle_path) {
+ expected_digests.push((blitzar_handle_path.clone(), digest));
+ }
+ if let Some(digest) = compute_sha256(&verifier_setup_path) {
+ expected_digests.push((verifier_setup_path.clone(), digest));
+ }
+
+ // Read and parse digests from the file
+ let actual_digests = read_digests_from_file(&digests_path);
+
+ // Compare expected digests to those read from digests.txt
+ for (file_path, expected_digest) in &expected_digests {
+ let actual_digest = actual_digests
+ .get(file_path)
+ .unwrap_or_else(|| panic!("Digest for {file_path} not found in digests.txt"));
+ assert_eq!(
+ actual_digest, expected_digest,
+ "Digest mismatch for {file_path}"
+ );
+ }
+}
+
+/// Compute SHA-256 hash of a file and return it as a hex string.
+fn compute_sha256(file_path: &str) -> Option<String> {
+ let mut file = File::open(file_path).ok()?;
+ let mut hasher = Sha256::new();
+ io::copy(&mut file, &mut hasher).ok()?;
+ Some(format!("{:x}", hasher.finalize()))
+}
+
+/// Read digests from the digests file and return them as a `HashMap`.
+/// # Panics
+/// because it is a test and is allowed to panic
+fn read_digests_from_file(digests_path: &str) -> std::collections::HashMap<String, String> {
+ let file = File::open(digests_path).expect("Failed to open digests file");
+ let reader = io::BufReader::new(file);
+ let mut digests = std::collections::HashMap::new();
+
+ for line in reader.lines() {
+ let line = line.expect("Failed to read line from digests file");
+ let parts: Vec<&str> = line.split_whitespace().collect();
+ if parts.len() == 2 {
+ let digest = parts[0].to_string();
+ let file_path = parts[1].to_string();
+ digests.insert(file_path, digest);
+ }
+ }
+ digests
+}