fix comments from DK
antoniupop committed Oct 16, 2024
1 parent eedb0c6 · commit 14cf032
Showing 4 changed files with 27 additions and 20 deletions.
3 changes: 3 additions & 0 deletions fhevm-engine/executor/src/cli.rs
@@ -9,6 +9,9 @@ pub struct Args {
     #[arg(long, default_value_t = 96)]
     pub fhe_compute_threads: usize,
 
+    #[arg(long, default_value_t = 8)]
+    pub fhe_operation_threads: usize,
+
     #[arg(long, default_value = "127.0.0.1:50051")]
     pub server_addr: String,
 }
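
For reference, a minimal standalone sketch (assumed wiring, not taken from the repository) of how a field declared this way behaves under clap's derive API: fhe_operation_threads becomes a --fhe-operation-threads long flag that defaults to 8 when omitted.

    use clap::Parser;

    /// Hypothetical reduced Args; the real struct has more fields.
    #[derive(Parser)]
    struct Args {
        /// Forwarded to the rayon pool size in server.rs (see below)
        #[arg(long, default_value_t = 8)]
        fhe_operation_threads: usize,
    }

    fn main() {
        // e.g. `<executor binary> --fhe-operation-threads 16`; omitting the flag yields 8.
        let args = Args::parse();
        println!("fhe_operation_threads = {}", args.fhe_operation_threads);
    }
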
34 changes: 17 additions & 17 deletions fhevm-engine/executor/src/dfg/scheduler.rs
@@ -76,19 +76,19 @@ impl<'a> Scheduler<'a> {
     pub async fn schedule(&mut self) -> Result<(), SyncComputeError> {
         let schedule_type = std::env::var("FHEVM_DF_SCHEDULE");
         match schedule_type {
-            Ok(val) if val == "MAX_PARALLELISM" => {
-                self.schedule_coarse_grain(PartitionStrategy::MaxParallelism)
-                    .await
-            }
-            Ok(val) if val == "MAX_LOCALITY" => {
-                self.schedule_coarse_grain(PartitionStrategy::MaxLocality)
-                    .await
-            }
-            Ok(val) if val == "BALANCED" => self.schedule_component_loop().await,
-            //Ok(val) if val == "BALANCED" => panic!("Unimplemented BALANCED scheduling strategy"),
-            Ok(val) if val == "FINE_GRAIN" => self.schedule_fine_grain().await,
-            Ok(unhandled) => panic!("Scheduling strategy {:?} does not exist", unhandled),
-
+            Ok(val) => match val.as_str() {
+                "MAX_PARALLELISM" => {
+                    self.schedule_coarse_grain(PartitionStrategy::MaxParallelism)
+                        .await
+                }
+                "MAX_LOCALITY" => {
+                    self.schedule_coarse_grain(PartitionStrategy::MaxLocality)
+                        .await
+                }
+                "LOOP" => self.schedule_component_loop().await,
+                "FINE_GRAIN" => self.schedule_fine_grain().await,
+                unhandled => panic!("Scheduling strategy {:?} does not exist", unhandled),
+            },
             _ => self.schedule_fine_grain().await,
         }
     }
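
As a standalone illustration (not project code; the println! bodies stand in for the real scheduler calls), the selection logic above reads FHEVM_DF_SCHEDULE once, matches it as a &str, and falls back to fine-grain scheduling when the variable is unset:

    fn main() {
        match std::env::var("FHEVM_DF_SCHEDULE") {
            Ok(val) => match val.as_str() {
                "MAX_PARALLELISM" => println!("coarse grain, max parallelism"),
                "MAX_LOCALITY" => println!("coarse grain, max locality"),
                "LOOP" => println!("component loop"),
                "FINE_GRAIN" => println!("fine grain"),
                unhandled => panic!("Scheduling strategy {:?} does not exist", unhandled),
            },
            // Err covers both an unset variable and a non-UTF-8 value.
            _ => println!("fine grain (default)"),
        }
    }
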
@@ -152,14 +152,14 @@ impl<'a> Scheduler<'a> {
             Result<(Vec<(usize, InMemoryCiphertext)>, NodeIndex), SyncComputeError>,
         > = JoinSet::new();
         let mut execution_graph: Dag<ExecNode, ()> = Dag::default();
-        match strategy {
+        let _ = match strategy {
             PartitionStrategy::MaxLocality => {
-                let _ = partition_components(self.graph, &mut execution_graph);
+                partition_components(self.graph, &mut execution_graph)
             }
             PartitionStrategy::MaxParallelism => {
-                let _ = partition_preserving_parallelism(self.graph, &mut execution_graph);
+                partition_preserving_parallelism(self.graph, &mut execution_graph)
             }
-        }
+        };
         let task_dependences = execution_graph.map(|_, _| (), |_, edge| *edge);
 
         // Prime the scheduler with all nodes without dependences
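
A generic illustration of the change above (stand-in types and functions, not the executor's): turning the match into an expression lets a single `let _ =` discard the Result of whichever partition function runs, instead of discarding it separately inside each arm.

    #[allow(dead_code)]
    enum PartitionStrategy {
        MaxLocality,
        MaxParallelism,
    }

    // Stand-ins for partition_components / partition_preserving_parallelism.
    fn partition_components() -> Result<(), String> {
        Ok(())
    }
    fn partition_preserving_parallelism() -> Result<(), String> {
        Ok(())
    }

    fn main() {
        let strategy = PartitionStrategy::MaxParallelism;
        // The match is an expression; its Result value is discarded once here.
        let _ = match strategy {
            PartitionStrategy::MaxLocality => partition_components(),
            PartitionStrategy::MaxParallelism => partition_preserving_parallelism(),
        };
    }
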
3 changes: 2 additions & 1 deletion fhevm-engine/executor/src/server.rs
@@ -36,6 +36,7 @@ thread_local! {
 pub fn start(args: &crate::cli::Args) -> Result<()> {
     let keys: FhevmKeys = SerializedFhevmKeys::load_from_disk().into();
     let executor = FhevmExecutorService::new();
+    let rayon_threads = args.fhe_operation_threads;
     rayon::broadcast(|_| {
         set_server_key(keys.server_key.clone());
     });
@@ -45,7 +46,7 @@ pub fn start(args: &crate::cli::Args) -> Result<()> {
         .on_thread_start(move || {
             set_server_key(keys.server_key.clone());
             let rayon_pool = rayon::ThreadPoolBuilder::new()
-                .num_threads(8)
+                .num_threads(rayon_threads)
                 .build()
                 .unwrap();
             rayon_pool.broadcast(|_| {
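
A self-contained sketch of the pool setup being parameterized here (assumed wiring; requires the rayon crate; in the executor the count comes from args.fhe_operation_threads and the broadcast installs the FHE server key on every worker):

    fn main() {
        // Stand-in for args.fhe_operation_threads.
        let rayon_threads = 8;

        let rayon_pool = rayon::ThreadPoolBuilder::new()
            .num_threads(rayon_threads)
            .build()
            .expect("failed to build rayon pool");

        // Run a closure once on each worker thread for per-thread initialization
        // (the real code calls set_server_key(keys.server_key.clone()) here).
        rayon_pool.broadcast(|ctx| {
            println!("rayon worker {} of {} ready", ctx.index(), ctx.num_threads());
        });
    }
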
7 changes: 5 additions & 2 deletions fhevm-engine/executor/tests/scheduling_mapping.rs
@@ -14,8 +14,11 @@ use utils::get_test;
 mod utils;
 
 fn get_handle(h: u32) -> Vec<u8> {
-    let tmp = [h; HANDLE_LEN / 4];
-    let res: [u8; HANDLE_LEN] = unsafe { std::mem::transmute(tmp) };
+    let mut res: Vec<u8> = Vec::with_capacity(HANDLE_LEN);
+    let slice: [u8; 4] = h.to_be_bytes();
+    for _i in 0..HANDLE_LEN / 4 {
+        res.extend_from_slice(&slice);
+    }
     res.to_vec()
 }

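
A quick self-contained check of the new helper's behavior (HANDLE_LEN = 32 is an assumption here; the real test imports the constant): the handle is just the 4 big-endian bytes of h repeated HANDLE_LEN / 4 times, with no unsafe code.

    // Assumed value for illustration only.
    const HANDLE_LEN: usize = 32;

    // Copy of the new test helper, inlined so the example compiles on its own.
    fn get_handle(h: u32) -> Vec<u8> {
        let mut res: Vec<u8> = Vec::with_capacity(HANDLE_LEN);
        let slice: [u8; 4] = h.to_be_bytes();
        for _i in 0..HANDLE_LEN / 4 {
            res.extend_from_slice(&slice);
        }
        // res is already a Vec<u8>; to_vec() just clones it (kept as in the diff).
        res.to_vec()
    }

    fn main() {
        let handle = get_handle(0x0102_0304);
        assert_eq!(handle.len(), HANDLE_LEN);
        assert_eq!(handle, [1u8, 2, 3, 4].repeat(HANDLE_LEN / 4));
        println!("{:02x?}", handle);
    }
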
