Skip to content

Commit

Permalink
feat: read arguments from input
Browse files Browse the repository at this point in the history
  • Loading branch information
sifnoc committed Dec 12, 2023
1 parent 6f8f639 commit 3871529
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 10 deletions.
4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ name = "mini-tree-server"
path = "bin/mini_tree_server.rs"

[[bin]]
name = "orchestrator-bench"
path = "benches/build_mst_with_workers.rs"
name = "aggregation-bench"
path = "benches/build_aggregation_mst.rs"

[features]
docker = []
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
#![feature(generic_const_exprs)]
use const_env::from_env;
use std::{error::Error, fs};
use std::{error::Error, fs, env};
use summa_aggregation::{executor::CloudSpawner, orchestrator::Orchestrator};
use tokio::time::Instant;
use summa_backend::{
apis::round::{Round, Snapshot},
contracts::signer::{AddressInput, SummaSigner},
tests::initialize_test_env,
};

#[from_env]
const LEVELS: usize = 20;
Expand All @@ -14,13 +19,17 @@ async fn main() -> Result<(), Box<dyn Error>> {
// We assume that custodians, when setting up their distributed environment, will obtain the URLs of worker nodes.
// In this example, we use two worker URLs corresponding to the workers spawned earlier.
// It is important to ensure that the number of URLs matches the number of executors.
let worker_node_urls = vec!["127.0.0.1:4000".to_string()];
let total_workers = worker_node_urls.len();
let worker_node_urls: Vec<String> = env::args().skip(1).collect();

// Ensure that at least one worker node URL is provided
if worker_node_urls.is_empty() {
return Err("No worker node URLs provided. Usage: cargo run <URL1> <URL2> ...".into());
}

const N_CURRENCIES: usize = 1;
const N_BYTES: usize = 14;
    // Read the directory and collect CSV file paths
let csv_directory = format!("benches/csv/level_{}/{}_chunks", LEVELS, CHUNK);
let csv_file_paths: Vec<String> = fs::read_dir(csv_directory)?
.filter_map(|entry| {
Expand All @@ -35,7 +44,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
.collect();

println!(
"LEVELS: {}, N_CURRENCIES: {}, number_of_csv: {}, number_of_workers: {}, ",
"LEVELS: {}, N_CURRENCIES: {}, chunk: {}, number_of_workers: {}, ",
LEVELS,
N_CURRENCIES,
csv_file_paths.len(),
Expand All @@ -46,17 +55,19 @@ async fn main() -> Result<(), Box<dyn Error>> {
let start = Instant::now();

let spawner = CloudSpawner::new(None, worker_node_urls.clone(), 4000);

let orchestrator =
Orchestrator::<N_CURRENCIES, N_BYTES>::new(Box::new(spawner), csv_file_paths);

let _aggregation_merkle_sum_tree = orchestrator
let aggregation_merkle_sum_tree = orchestrator
.create_aggregation_mst(worker_node_urls.len())
.await
.unwrap();

println!(
"Time to create aggregation merkle sum tree: {:?} s",
start.elapsed()
);
println!("aggregation_mst root: {:?}", aggregation_merkle_sum_tree.root());
Ok(())
}

0 comments on commit 3871529

Please sign in to comment.