Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Sized get or insert with replacement #144

Draft
wants to merge 6 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 44 additions & 1 deletion examples/semantic_top_down_experiment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,11 @@ fn compare_sem_and_std(cnf: &Cnf, order: &VarOrder) -> (usize, usize) {
let sem_nodes = sem_dnnf.count_nodes();
let std_nodes = std_dnnf.count_nodes();

println!("===\nRebuilding sem...");
let rebuilt_sem = rebuild_and_compare(&sem_builder, &sem_dnnf);
let rebuilt_sem_nodes: usize = rebuilt_sem.count_nodes();

println!("\n\n===FINAL STATS===");
println!(
"sem/std size ratio: {:.4}x ({} / {})",
(sem_nodes as f64 / std_nodes as f64),
Expand All @@ -91,8 +96,46 @@ fn compare_sem_and_std(cnf: &Cnf, order: &VarOrder) -> (usize, usize) {
sem_time,
std_time
);
println!("===");
println!(
"sem rebuilt/sem size ratio: {:.4}x ({} / {})",
(rebuilt_sem_nodes as f64 / sem_nodes as f64),
rebuilt_sem_nodes,
sem_nodes
);

if diff_by_wmc(cnf.num_vars(), order, std_dnnf, rebuilt_sem) > 0.0001 {
println!(
"error on sem rebuild {}\n std bdd: {}\nsem bdd: {}",
cnf,
std_dnnf.to_string_debug(),
rebuilt_sem.to_string_debug()
);
}

(rebuilt_sem_nodes, std_nodes)
}

/// Repeatedly rebuilds `dnnf` through `builder` until the node count reaches a
/// fixed point, printing the size ratio at every step.
///
/// Each pass re-canonicalizes the DNNF against the builder's unique table; if
/// a pass shrank the structure, another pass may shrink it further, so we
/// recurse until `count_nodes()` stabilizes.
///
/// Returns the fully rebuilt (fixed-point) pointer.
fn rebuild_and_compare<'a, const P: u128>(
    builder: &'a SemanticDecisionNNFBuilder<'a, P>,
    dnnf: &BddPtr<'a>,
) -> BddPtr<'a> {
    println!("\nRebuilding...");
    let og_nodes = dnnf.count_nodes();
    let rebuilt = builder.rebuild(*dnnf);
    let rebuilt_nodes = rebuilt.count_nodes();
    println!(
        "rebuilt/sem size ratio: {:.4}x ({} / {})",
        (rebuilt_nodes as f64 / og_nodes as f64),
        rebuilt_nodes,
        og_nodes
    );

    // Not yet at a fixed point: rebuild again. Recursion terminates because
    // the node count is a non-negative integer and (in practice) shrinks;
    // when it stops changing we return.
    if og_nodes != rebuilt_nodes {
        return rebuild_and_compare(builder, &rebuilt);
    }

    rebuilt
}

struct RandomVarOrders {
Expand Down
106 changes: 105 additions & 1 deletion src/backing_store/bump_table.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
//! A unique table based on a bump allocator and robin-hood hashing
//! this is the primary unique table for storing all nodes

use crate::{backing_store::UniqueTable, util::*};
use crate::{
backing_store::{AbstractlySized, UniqueTable},
util::*,
};
use bumpalo::Bump;
use rustc_hash::FxHasher;
use std::{
Expand Down Expand Up @@ -248,6 +251,107 @@ impl<'a, T: Eq + Hash + Clone> BackedRobinhoodTable<'a, T> {
}
}

/// Size-aware variants of the unique-table operations, available when the
/// stored type can report an abstract size (`AbstractlySized`).
impl<'a, T: Eq + Hash + Clone + AbstractlySized> BackedRobinhoodTable<'a, T> {
    /// Robin-hood lookup-or-insert keyed by a precomputed `hash`.
    ///
    /// Behaves like `get_or_insert_by_hash`, but additionally logs the
    /// `abstract_size` delta between the incoming `elem` and an
    /// already-present match. If `equality_by_hash` is true, a matching hash
    /// alone is treated as equality (the structural `==` check is skipped).
    ///
    /// Returns a reference to the table-resident copy of the element.
    pub fn sized_get_or_insert_by_hash(
        &'a mut self,
        hash: u64,
        elem: T,
        equality_by_hash: bool,
    ) -> &'a T {
        // Grow before probing so the open-addressing loop below is
        // guaranteed to find a free slot.
        if (self.len + 1) as f64 > (self.cap as f64 * LOAD_FACTOR) {
            self.grow();
        }

        // the current index into the array
        let mut pos: usize = (hash as usize) % self.cap;
        // the distance this item is from its desired location
        let mut psl = 0;

        loop {
            if self.is_occupied(pos) {
                let cur_itm = self.tbl[pos].clone();
                // first check the hashes to see if these elements could
                // possibly be equal; if they are, check if the items are
                // equal and return the found pointer if so
                if hash == cur_itm.hash {
                    println!("collision");
                    let found: &T = cur_itm.ptr.unwrap();
                    if equality_by_hash || *found == elem {
                        // Instrumentation: report when the incoming element's
                        // abstract size differs from the stored match's.
                        let diff = (elem.abstract_size() as i64)
                            - (cur_itm.ptr.unwrap().abstract_size() as i64);
                        if diff != 0 {
                            println!("size difference (g/i); delta: {}", diff);
                        }
                        self.hits += 1;
                        return found;
                    }
                }

                // not equal; begin probing
                if cur_itm.psl < psl {
                    // elem is not in the table; insert it at pos and propagate
                    // the item that is currently here
                    self.propagate(cur_itm, pos);
                    let ptr = self.alloc.alloc(elem);
                    let entry = HashTableElement::new(ptr, hash, psl);
                    self.len += 1;
                    self.tbl[pos] = entry;
                    return ptr;
                }
                psl += 1;
                pos = (pos + 1) % self.cap; // wrap to the beginning of the array
            } else {
                // this element is unique, so place it in the current spot
                let ptr = self.alloc.alloc(elem);
                let entry = HashTableElement::new(ptr, hash, psl);
                self.len += 1;
                self.tbl[pos] = entry;
                return ptr;
            }
        }
    }

    /// Lookup by `hash` that never inserts on a miss, but MAY replace a hit.
    ///
    /// If a slot with a matching hash is found and `elem`'s `abstract_size`
    /// is strictly smaller than the stored element's, the slot is rewritten
    /// to point at a freshly allocated copy of `elem` (keeping the smaller
    /// representative). Returns `None` when no slot with this hash exists.
    ///
    /// NOTE(review): a match here is decided by hash alone — distinct
    /// elements that collide are treated as equal; confirm callers rely on
    /// a collision-free (semantic) hash.
    pub fn sized_get_by_hash(&'a mut self, hash: u64, elem: T) -> Option<&'a T> {
        // the current index into the array
        let mut pos: usize = (hash as usize) % self.cap;
        // the distance this item is from its desired location
        let mut psl = 0;

        loop {
            if self.is_occupied(pos) {
                let cur_itm = self.tbl[pos].clone();
                if hash == cur_itm.hash {
                    self.hits += 1;

                    let e = elem.abstract_size() as i64;
                    let c = cur_itm.ptr.unwrap().abstract_size() as i64;

                    let diff = e - c;

                    if diff < 0 {
                        // Incoming element is smaller: swap it into the slot
                        // so future hits see the cheaper representative.
                        // (The old allocation stays in the bump arena.)
                        println!(
                            "replacing table elem (diff: {}; new: {}; old {})",
                            diff, e, c
                        );
                        let ptr = self.alloc.alloc(elem);
                        self.tbl[pos] = HashTableElement::new(ptr, hash, psl);
                        return Some(ptr);
                    }
                    return cur_itm.ptr;
                }

                // Robin-hood invariant: once the stored item's probe length
                // drops below ours, the element cannot appear later in the
                // chain, so the lookup misses.
                if cur_itm.psl < psl {
                    return None;
                }
                psl += 1;
                pos = (pos + 1) % self.cap;
            } else {
                return None;
            }
        }
    }
}

impl<'a, T: Hash + Eq + Clone> Default for BackedRobinhoodTable<'a, T> {
fn default() -> Self {
Self::new()
Expand Down
4 changes: 4 additions & 0 deletions src/backing_store/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,7 @@ pub trait UniqueTable<'a, T: Eq + Hash> {
/// use a hash to allocate space in table, but use strict equality for probing/checking
fn get_or_insert(&'a mut self, item: T) -> &'a T;
}

/// Types that can report an abstract notion of their own size.
///
/// Used by the sized unique-table operations to compare stored and incoming
/// elements on a hash hit and prefer the smaller representative.
pub trait AbstractlySized {
    /// Returns the abstract size of `self`; units are implementation-defined.
    fn abstract_size(&self) -> usize;
}
29 changes: 24 additions & 5 deletions src/builder/decision_nnf/semantic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ impl<'a, const P: u128> DecisionNNFBuilder<'a> for SemanticDecisionNNFBuilder<'a
fn get_or_insert(&'a self, bdd: BddNode<'a>) -> BddPtr<'a> {
let semantic_hash = bdd.semantic_hash(&self.order, &self.map);

if let Some(bdd) = self.check_cached_hash_and_neg(semantic_hash) {
if let Some(bdd) = self.check_cached_hash_and_neg(semantic_hash, bdd.clone()) {
return bdd;
}

Expand All @@ -45,7 +45,7 @@ impl<'a, const P: u128> DecisionNNFBuilder<'a> for SemanticDecisionNNFBuilder<'a

unsafe {
let tbl = &mut *self.compute_table.as_ptr();
BddPtr::Reg(tbl.get_or_insert_by_hash(hash, bdd, true))
BddPtr::Reg(tbl.sized_get_or_insert_by_hash(hash, bdd, true))
}
}

Expand Down Expand Up @@ -80,14 +80,32 @@ impl<'a, const P: u128> SemanticDecisionNNFBuilder<'a, P> {
}
}

fn check_cached_hash_and_neg(&self, semantic_hash: FiniteField<P>) -> Option<BddPtr> {
/// Recursively reconstructs `bdd` bottom-up through this builder's
/// `get_or_insert`, so every interior node is re-canonicalized against the
/// (possibly updated) unique table.
///
/// Terminals pass through unchanged; a complemented pointer is rebuilt via
/// its regular form and then re-negated.
pub fn rebuild(&'a self, bdd: BddPtr<'a>) -> BddPtr<'a> {
    match bdd {
        // Terminals need no reconstruction.
        BddPtr::PtrTrue => BddPtr::PtrTrue,
        BddPtr::PtrFalse => BddPtr::PtrFalse,
        // Rebuild both children first, then re-insert this decision node.
        BddPtr::Reg(node) => {
            let low = self.rebuild(node.low);
            let high = self.rebuild(node.high);
            self.get_or_insert(BddNode::new(node.var, low, high))
        }
        // Rebuild the underlying regular node, then restore the complement.
        BddPtr::Compl(node) => self.rebuild(BddPtr::Reg(node)).neg(),
    }
}

fn check_cached_hash_and_neg(
&'a self,
semantic_hash: FiniteField<P>,
bdd: BddNode<'a>,
) -> Option<BddPtr> {
// check regular hash
let mut hasher = FxHasher::default();
semantic_hash.value().hash(&mut hasher);
let hash = hasher.finish();

unsafe {
let tbl = &mut *self.compute_table.as_ptr();
if let Some(bdd) = tbl.get_by_hash(hash) {
if let Some(bdd) = tbl.sized_get_by_hash(hash, bdd.clone()) {
return Some(BddPtr::Reg(bdd));
}
}
Expand All @@ -97,9 +115,10 @@ impl<'a, const P: u128> SemanticDecisionNNFBuilder<'a, P> {
let mut hasher = FxHasher::default();
semantic_hash.value().hash(&mut hasher);
let hash = hasher.finish();

unsafe {
let tbl = &mut *self.compute_table.as_ptr();
if let Some(bdd) = tbl.get_by_hash(hash) {
if let Some(bdd) = tbl.sized_get_by_hash(hash, bdd) {
return Some(BddPtr::Compl(bdd));
}
}
Expand Down
12 changes: 10 additions & 2 deletions src/repr/bdd.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
//! Binary decision diagram representation

use crate::{
backing_store::AbstractlySized,
repr::ddnnf::{DDNNFPtr, DDNNF},
repr::model::PartialModel,
repr::var_label::{Literal, VarLabel, VarSet},
Expand Down Expand Up @@ -325,14 +326,15 @@ impl<'a> BddPtr<'a> {
}

/// Gets the scratch value stored in `&self`
///
/// Panics if not node.
pub fn scratch<T: ?Sized + Clone + 'static>(&self) -> Option<T> {
match self {
Compl(n) | Reg(n) => {
if self.is_scratch_cleared() {
return None;
}
if n.data.borrow().is_none() {
return None;
}
// println!("dereferencing {:?}", n.data.as_ptr());
n.data
.borrow()
Expand Down Expand Up @@ -1121,3 +1123,9 @@ impl<'a> Ord for BddNode<'a> {
self.partial_cmp(other).unwrap()
}
}

impl<'a> AbstractlySized for BddNode<'a> {
    /// The abstract size of a BDD node is the number of nodes reachable from
    /// it, computed via `BddPtr::count_nodes` on its regular pointer.
    ///
    /// NOTE(review): this traverses the whole sub-DAG on every call — O(size)
    /// per invocation; potentially expensive when invoked inside hash-table
    /// probe loops.
    fn abstract_size(&self) -> usize {
        BddPtr::Reg(self).count_nodes()
    }
}