diff --git a/benches/oeis.rs b/benches/oeis.rs index 4f4870c..afb485f 100644 --- a/benches/oeis.rs +++ b/benches/oeis.rs @@ -1,13 +1,13 @@ use std::io::Read; use std::usize; use pathmap::PathMap; -use pathmap::zipper::{Zipper, ZipperValues, ZipperMoving, ZipperWriting, ZipperCreation}; +use pathmap::zipper::{Zipper, ZipperValues, ZipperMoving, ZipperPath, ZipperWriting, ZipperCreation}; use num::BigInt; use divan::{Divan, Bencher, black_box}; const MAX_OFFSET: u8 = 10; -fn drop_symbol_head_byte + Zipper + ZipperMoving>(loc: &mut Z) { +fn drop_symbol_head_byte + Zipper + ZipperMoving + ZipperPath>(loc: &mut Z) { let mut it = loc.child_mask().iter(); let p = loc.path().to_vec(); @@ -15,7 +15,7 @@ fn drop_symbol_head_byte + Zipper + ZipperMoving>(loc: & if b == 0 { continue } loc.descend_to_existing_byte(b); loc.join_k_path_into(b as usize, true); - assert!(loc.ascend(1)); + assert_eq!(loc.ascend(1), 1); } loc.reset(); loc.descend_to(&p[..]); diff --git a/pathmap-book/src/1.02.00_zippers.md b/pathmap-book/src/1.02.00_zippers.md index 81888fe..f3768dc 100644 --- a/pathmap-book/src/1.02.00_zippers.md +++ b/pathmap-book/src/1.02.00_zippers.md @@ -16,6 +16,7 @@ Zipper capabilities are defined across a number of traits. There are many diffe | [`ZipperValues`](./1.02.02_zipper_values.md#zippervalues) | Access values | | [`ZipperReadOnlyValues`](./1.02.02_zipper_values.md#zipperreadonlyvalues) | Access values with extended lifetime | | [`ZipperReadOnlyConditionalValues`](./1.02.02_zipper_values.md#zipperreadonlyconditionalvalues) | Access values with `witness` pattern | +| [`ZipperPath`](./1.02.03_zipper_paths.md#zipper-relative-path) | Get basic path information | | [`ZipperAbsolutePath`](./1.02.03_zipper_paths.md#zipperabsolutepath) | Get more complete path information | | [`ZipperPathBuffer`](./1.02.03_zipper_paths.md#zipperpathbuffer) | Control zipper's internal buffer allocation | | [`ZipperMoving`](./1.02.04_zipper_moving.md) | Moves the zipper's focus within the trie | diff --git a/pathmap-book/src/1.02.03_zipper_paths.md b/pathmap-book/src/1.02.03_zipper_paths.md index 85af692..8630269 100644 --- a/pathmap-book/src/1.02.03_zipper_paths.md +++ b/pathmap-book/src/1.02.03_zipper_paths.md @@ -1,10 +1,10 @@ # Paths and Absolute Paths -Some zippers maintain can expose their focus position within the trie as a `path`. The following traits expose access to the path buffer in some way. +Some zippers maintain and can expose their focus position within the trie as a contiguous `path` of type `&[u8]`. The following traits expose access to the path buffer in some way. ## Zipper Relative Path -The [`path`] method in [`ZipperMoving`] returns the current path from the zipper's root to its focus as a byte slice. This represents the sequence of bytes traversed to reach the current focus position. +The [`path`] method in the [`ZipperPath`] trait returns the current path from the zipper's root to its focus, as a byte slice. This represents the sequence of bytes traversed to reach the current focus position. -Note that this path is relative to the zipper's root, which may not be the same as the absolute path from the original data structure's root if the zipper was created with a prefix or from a subtrie. +Note that this path is relative to the zipper's root, which may not be the same as the absolute path from the original data structure's root if the zipper was created with a prefix or from within a subtrie.
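+
+For example, the two flavors of path diverge as soon as the zipper is rooted below the map's true root. This is a minimal sketch (it assumes a `u64`-valued `PathMap` and the `read_zipper_at_path` constructor used in the crate's tests; `origin_path` is covered in the next section):
+
+```rust
+use pathmap::PathMap;
+use pathmap::zipper::{ZipperMoving, ZipperPath, ZipperAbsolutePath};
+
+let map = PathMap::from_iter([(b"prefix0000", 0u64)]);
+// Root the zipper part-way down the trie
+let mut z = map.read_zipper_at_path(b"prefix");
+z.descend_to(b"0000");
+assert_eq!(z.path(), b"0000");              // relative to the zipper's root
+assert_eq!(z.origin_path(), b"prefix0000"); // includes the prefix above the root
+```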
## ZipperAbsolutePath The [`ZipperAbsolutePath`] trait provides methods to access a more complete path slice, including the [`origin_path`], and may extend above the zipper's root. diff --git a/pathmap-derive/src/lib.rs b/pathmap-derive/src/lib.rs index b0a8619..530d848 100644 --- a/pathmap-derive/src/lib.rs +++ b/pathmap-derive/src/lib.rs @@ -213,6 +213,24 @@ pub fn derive_poly_zipper(input: TokenStream) -> TokenStream { // } // }; + // Generate ZipperPath trait implementation + let zipper_path_impl = { + let variant_arms = &variant_arms; + quote! { + impl #impl_generics pathmap::zipper::ZipperPath for #enum_name #ty_generics + where + #(#inner_types: pathmap::zipper::ZipperPath,)* + #where_clause + { + fn path(&self) -> &[u8] { + match self { + #(#variant_arms => inner.path(),)* + } + } + } + } + }; + // Generate ZipperMoving trait implementation let zipper_moving_impl = { let variant_arms = &variant_arms; @@ -222,9 +240,22 @@ pub fn derive_poly_zipper(input: TokenStream) -> TokenStream { #(#inner_types: pathmap::zipper::ZipperMoving,)* #where_clause { - fn path(&self) -> &[u8] { + fn at_root(&self) -> bool { match self { - #(#variant_arms => inner.path(),)* + #(#variant_arms => inner.at_root(),)* + } + } + + #[inline] + fn focus_byte(&self) -> Option { + match self { + #(#variant_arms => inner.focus_byte(),)* + } + } + + fn reset(&mut self) { + match self { + #(#variant_arms => inner.reset(),)* } } @@ -240,6 +271,18 @@ pub fn derive_poly_zipper(input: TokenStream) -> TokenStream { } } + fn descend_to_existing>(&mut self, k: K) -> usize { + match self { + #(#variant_arms => inner.descend_to_existing(k),)* + } + } + + fn descend_to_val>(&mut self, k: K) -> usize { + match self { + #(#variant_arms => inner.descend_to_val(k),)* + } + } + fn descend_to_check>(&mut self, k: K) -> bool { match self { #(#variant_arms => inner.descend_to_check(k),)* @@ -260,23 +303,71 @@ pub fn derive_poly_zipper(input: TokenStream) -> TokenStream { } } - fn ascend(&mut self, steps: usize) -> bool { + fn descend_indexed_byte(&mut self, idx: usize) -> Option { + match self { + #(#variant_arms => inner.descend_indexed_byte(idx),)* + } + } + + fn descend_first_byte(&mut self) -> Option { + match self { + #(#variant_arms => inner.descend_first_byte(),)* + } + } + + fn descend_last_byte(&mut self) -> Option { + match self { + #(#variant_arms => inner.descend_last_byte(),)* + } + } + + fn descend_until(&mut self, obs: &mut Obs) -> bool { + match self { + #(#variant_arms => inner.descend_until(obs),)* + } + } + + fn ascend(&mut self, steps: usize) -> usize { match self { #(#variant_arms => inner.ascend(steps),)* } } - fn ascend_until(&mut self) -> bool { + fn ascend_byte(&mut self) -> bool { + match self { + #(#variant_arms => inner.ascend_byte(),)* + } + } + + fn ascend_until(&mut self) -> usize { match self { #(#variant_arms => inner.ascend_until(),)* } } - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { match self { #(#variant_arms => inner.ascend_until_branch(),)* } } + + fn to_next_sibling_byte(&mut self) -> Option { + match self { + #(#variant_arms => inner.to_next_sibling_byte(),)* + } + } + + fn to_prev_sibling_byte(&mut self) -> Option { + match self { + #(#variant_arms => inner.to_prev_sibling_byte(),)* + } + } + + fn to_next_step(&mut self) -> bool { + match self { + #(#variant_arms => inner.to_next_step(),)* + } + } } } }; @@ -447,6 +538,7 @@ pub fn derive_poly_zipper(input: TokenStream) -> TokenStream { #zipper_read_only_values_impl 
#zipper_read_only_conditional_values_impl // #zipper_forking_impl + #zipper_path_impl #zipper_moving_impl #zipper_concrete_impl #zipper_absolute_path_impl diff --git a/src/arena_compact.rs b/src/arena_compact.rs index a545bc9..6959509 100644 --- a/src/arena_compact.rs +++ b/src/arena_compact.rs @@ -76,17 +76,17 @@ //! [if (header&0x3f != 0) first_child: varint64] //! [ line_offset: varint64] //! ``` +use core::cell::Cell; +use core::marker::PhantomData; use std::{io::Write, hash::Hasher}; -use std::cell::Cell; -use std::marker::PhantomData; use fast_slice_utils::starts_with; use crate::{ morphisms::Catamorphism, utils::{BitMask, ByteMask, find_prefix_overlap}, zipper::{ - Zipper, ZipperValues, ZipperForking, ZipperAbsolutePath, ZipperIteration, - ZipperMoving, ZipperPathBuffer, ZipperReadOnlyValues, + PathObserver, Zipper, ZipperValues, ZipperForking, ZipperAbsolutePath, ZipperIteration, + ZipperMoving, ZipperPath, ZipperPathBuffer, ZipperReadOnlyValues, ZipperConcrete, ZipperReadOnlyConditionalValues, }, }; @@ -433,8 +433,8 @@ const SI_PREFIX: &[u8] = b"KMGTPE"; struct SiCount(usize); -impl std::fmt::Display for SiCount { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { +impl core::fmt::Display for SiCount { + fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result { let mut value = self.0 as f64; if value < 1000.0 { return write!(fmt, "{value:3.0}"); @@ -448,8 +448,8 @@ } } -impl std::fmt::Debug for Counters { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { +impl core::fmt::Debug for Counters { + fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result { let total_size = self.nodes_size + self.lines_size + self.line_data_size + 16 + 8; write!(fmt, @@ -828,13 +828,12 @@ impl ArenaCompactTree<Vec<u8>> { /// Construct [ArenaCompactTree] from a read zipper. /// # Examples /// ``` - /// use pathmap::{PathMap, arena_compact::ArenaCompactTree}; + /// use pathmap::{PathMap, zipper::*, arena_compact::ArenaCompactTree}; /// let items = ["ace", "acf", "adg", "adh", "bjk"]; /// let btm = PathMap::from_iter(items.iter().map(|i| (i, ()))); /// let tree1 = ArenaCompactTree::from_zipper(btm.read_zipper(), |_v| 0); /// let mut zipper = tree1.read_zipper(); /// for path in items { - /// use pathmap::zipper::ZipperMoving; /// zipper.reset(); /// assert!(zipper.descend_to_existing(path) == path.len()); /// assert_eq!(zipper.path(), path.as_bytes()); @@ -1487,39 +1486,44 @@ where Storage: AsRef<[u8]> Some(value) } - fn ascend_invalid(&mut self, limit: Option<&mut usize>) -> bool { + /// Ascends any non-existent portion of the path. Returns the number of steps ascended + /// + /// `limit` sets an upper-bound on the number of steps that will be ascended; `None` leaves the + /// ascent unbounded. Returns `0`
if the zipper is already at an + /// existent, aka valid, path. + fn ascend_invalid(&mut self, limit: Option<usize>) -> usize { if self.invalid == 0 { - return true; + return 0; } let len = self.path.len(); let mut invalid_cut = self.invalid.min(len - self.origin_depth); if let Some(limit) = limit { - invalid_cut = invalid_cut.min(*limit); - *limit -= invalid_cut; + invalid_cut = invalid_cut.min(limit); } self.path.truncate(len - invalid_cut); self.invalid = self.invalid - invalid_cut; - self.invalid == 0 + invalid_cut } - fn ascend_to_branch(&mut self, need_value: bool) -> bool { + /// Returns the number of steps ascended + fn ascend_to_branch(&mut self, need_value: bool) -> usize { self.trace_pos(); - let mut moved = false; + let mut ascended = 0; if self.invalid > 0 { - moved = true; - if !self.ascend_invalid(None) { - return false; + ascended += self.ascend_invalid(None); + if self.invalid > 0 { + return ascended; } match &self.cur_node { Node::Line(line) => { if need_value && line.value.is_some() { - return true; + return ascended; } } Node::Branch(node) => { if need_value && node.value.is_some() { - return true; + return ascended; } } } @@ -1529,16 +1533,15 @@ let mut this_steps = top_frame.node_depth .min(self.path.len() - self.origin_depth); top_frame.node_depth = 0; - moved |= this_steps > 0; if self.stack.len() > 1 { self.stack.pop(); let prev = self.stack.last().unwrap(); self.cur_node = self.tree.get_node(prev.node_id).0; nchildren = prev.child_count; - moved = true; this_steps += 1; } self.path.truncate(self.path.len() - this_steps); + ascended += this_steps; // eprintln!("path={:?}", self.path); let brk = match &self.cur_node { Node::Branch(node) => { @@ -1550,7 +1553,7 @@ break; } } - moved + ascended } fn descend_cond(&mut self, path: &[u8], on_value: bool) -> usize { @@ -1615,26 +1618,30 @@ descended } - fn to_sibling(&mut self, next: bool) -> bool { + fn to_sibling(&mut self, next: bool) -> Option<u8> { let top_frame = self.stack.last().unwrap(); if self.stack.len() <= 1 || top_frame.node_depth > 0 { // can't move to sibling at root, or along the path - return false; + return None; } let top2_frame = &self.stack[self.stack.len() - 2]; let sibling_idx = if next { let idx = top2_frame.child_index + 1; if idx >= top2_frame.child_count { - return false; + return None; } idx } else { if top2_frame.child_index == 0 { - return false; + return None; } top2_frame.child_index - 1 }; - self.ascend(1) && self.descend_indexed_byte(sibling_idx) + let ascended = self.ascend_byte(); + debug_assert!(ascended); + let result = self.descend_indexed_byte(sibling_idx); + debug_assert!(result.is_some()); + result } } @@ -1652,7 +1659,7 @@ fn val(&self) -> Option<&u64> { let value = self.get_value()?; if self.tree.value.get() != value { - self.tree.value.set(value); + self.tree.value.set(value); } let ptr = self.tree.value.as_ptr(); // technically if someone borrows the value twice, they will hit UB @@ -1696,7 +1703,7 @@ fn get_val(&self) -> Option<&'tree u64> { let value = self.get_value()?; if self.tree.value.get() != value { - self.tree.value.set(value); + self.tree.value.set(value); } let ptr = self.tree.value.as_ptr(); Some(unsafe { &*ptr }) @@ -1716,6 +1723,13 @@ } } +impl<'tree, Storage, Value> ZipperPath for ACTZipper<'tree, Storage, Value> +where Storage: AsRef<[u8]> +{ + /// Returns the path from the zipper's root to the current focus +
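/// (the returned slice omits the `origin_depth` bytes above the zipper's root, so it is empty while the focus is at the root) +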
fn path(&self) -> &[u8] { &self.path[self.origin_depth..] } +} + /// An interface to enable moving a zipper around the trie and inspecting paths impl<'tree, Storage, Value> ZipperMoving for ACTZipper<'tree, Storage, Value> where Storage: AsRef<[u8]> @@ -1734,8 +1748,14 @@ where Storage: AsRef<[u8]> self.invalid = 0; } - /// Returns the path from the zipper's root to the current focus - fn path(&self) -> &[u8] { &self.path[self.origin_depth..] } + #[inline] + fn focus_byte(&self) -> Option { + if self.path.len() > self.origin_depth { + self.path.last().cloned() + } else { + None + } + } /// Returns the total number of values contained at and below the zipper's focus, including the focus itself /// @@ -1805,11 +1825,12 @@ where Storage: AsRef<[u8]> /// WARNING: The branch represented by a given index is not guaranteed to be stable across modifications /// to the trie. This method should only be used as part of a directed traversal operation, but /// index-based paths may not be stored as locations within the trie. - fn descend_indexed_byte(&mut self, idx: usize) -> bool { + fn descend_indexed_byte(&mut self, idx: usize) -> Option { if self.invalid > 0 { - return false; + return None; } self.trace_pos(); + let byte; let mut child_id: Option = None; match &self.cur_node { Node::Line(line) => { @@ -1817,22 +1838,23 @@ where Storage: AsRef<[u8]> let path = self.tree.get_line(line.path); let rest_path = &path[top_frame.node_depth..]; if idx != 0 || rest_path.is_empty() { - return false; + return None; } self.path.push(rest_path[0]); + byte = Some(rest_path[0]); if let (true, Some(line_child)) = (rest_path.len() == 1, line.child) { child_id = Some(line_child); } else { top_frame.node_depth += 1; - return true; + return byte; } } Node::Branch(node) => { let top_frame = self.stack.last_mut().unwrap(); if idx > top_frame.child_count { - return false; + return None; } - let byte = node.bytemask.indexed_bit::(idx); + byte = node.bytemask.indexed_bit::(idx); if let Some(byte) = byte { if top_frame.next_id.is_some() && top_frame.child_index + 1 == idx { child_id = top_frame.next_id; @@ -1852,20 +1874,18 @@ where Storage: AsRef<[u8]> self.stack.push(StackFrame::from(&node, child_id)); self.cur_node = node; } - child_id.is_some() + byte } /// Descends the zipper's focus one step into the first child branch in a depth-first traversal /// /// NOTE: This method should have identical behavior to passing `0` to [descend_indexed_byte](ZipperMoving::descend_indexed_byte), /// although with less overhead - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { self.descend_indexed_byte(0) } - /// Descends the zipper's focus until a branch or a value is encountered. 
Returns `true` if the focus - /// moved otherwise returns `false` - fn descend_until(&mut self) -> bool { + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.trace_pos(); let mut descended = false; 'descend: while self.child_count() == 1 { @@ -1875,9 +1895,13 @@ where Storage: AsRef<[u8]> let top_frame = self.stack.last_mut().unwrap(); let path = self.tree.get_line(line.path); let rest_path = &path[top_frame.node_depth..]; + if obs.remaining_limit() != usize::MAX && obs.remaining_limit() < rest_path.len() { + break 'descend; + } let line_child_hack = if line.child.is_some() { 1 } else { 0 }; top_frame.node_depth += rest_path.len() - line_child_hack; self.path.extend_from_slice(rest_path); + obs.descend_to(rest_path); child_id = line.child; if line.value.is_some() { descended = true; @@ -1887,7 +1911,11 @@ where Storage: AsRef<[u8]> Node::Branch(node) => { let Some(byte) = node.bytemask.iter().next() else { break 'descend }; + if obs.remaining_limit() < 1 { + break 'descend; + } self.path.push(byte); + obs.descend_to_byte(byte); child_id = node.first_child; } } @@ -1915,56 +1943,52 @@ where Storage: AsRef<[u8]> /// /// If the root is fewer than `n` steps from the zipper's position, then this method will stop at /// the root and return `false` - fn ascend(&mut self, mut steps: usize) -> bool { + fn ascend(&mut self, steps: usize) -> usize { self.trace_pos(); - if !self.ascend_invalid(Some(&mut steps)) { - return false; + let mut remaining = steps; + remaining -= self.ascend_invalid(Some(steps)); + if remaining == 0 { + return steps; } while let Some(top_frame) = self.stack.last_mut() { let rest_path = &self.path[self.origin_depth..]; - let mut this_steps = steps.min(top_frame.node_depth).min(rest_path.len()); + let mut this_steps = remaining.min(top_frame.node_depth).min(rest_path.len()); top_frame.node_depth -= this_steps; - steps -= this_steps; - if top_frame.node_depth == 0 && self.stack.len() > 1 && steps > 0 { + remaining -= this_steps; + if top_frame.node_depth == 0 && self.stack.len() > 1 && remaining > 0 { self.stack.pop(); let prev = self.stack.last().unwrap(); self.cur_node = self.tree.get_node(prev.node_id).0; this_steps += 1; - steps -= 1; + remaining -= 1; } self.path.truncate(self.path.len() - this_steps); - if self.at_root() || steps == 0 { - return steps == 0 && this_steps > 0; + if self.at_root() || remaining == 0 { + return steps - remaining; } } unreachable!(); } - /// Ascends the zipper up a single byte. Equivalent to passing `1` to [ascend](Self::ascend) fn ascend_byte(&mut self) -> bool { - self.ascend(1) + self.ascend(1) == 1 } - /// Ascends the zipper to the nearest upstream branch point or value. Returns `true` if the zipper - /// focus moved upwards, otherwise returns `false` if the zipper was already at the root - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { self.ascend_to_branch(true) } - /// Ascends the zipper to the nearest upstream branch point, skipping over values along the way. 
Returns - /// `true` if the zipper focus moved upwards, otherwise returns `false` if the zipper was already at the - /// root - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { self.ascend_to_branch(false) } #[inline] - fn to_next_sibling_byte(&mut self) -> bool { + fn to_next_sibling_byte(&mut self) -> Option { self.to_sibling(true) } #[inline] - fn to_prev_sibling_byte(&mut self) -> bool { + fn to_prev_sibling_byte(&mut self) -> Option { self.to_sibling(false) } @@ -2000,7 +2024,7 @@ where Storage: AsRef<[u8]> /// See: [to_next_k_path](ZipperIteration::to_next_k_path) fn descend_first_k_path(&mut self, k: usize) -> bool { for ii in 0..k { - if !self.descend_first_byte() { + if self.descend_first_byte().is_none() { self.ascend(ii); return false; } @@ -2023,7 +2047,7 @@ where Storage: AsRef<[u8]> let mut depth = k; 'outer: loop { while depth > 0 && self.child_count() <= 1 { - if !self.ascend(1) { + if !self.ascend_byte() { break 'outer; } depth -= 1; @@ -2031,16 +2055,17 @@ where Storage: AsRef<[u8]> let stack = self.stack.last_mut().unwrap(); let idx = stack.child_index + 1; if idx >= stack.child_count { - if depth == 0 || !self.ascend(1) { + if depth == 0 || !self.ascend_byte() { break 'outer; } depth -= 1; continue 'outer; } - assert!(self.descend_indexed_byte(idx)); + let descended = self.descend_indexed_byte(idx); + debug_assert!(descended.is_some()); depth += 1; for _ii in 0..k - depth { - if !self.descend_first_byte() { + if self.descend_first_byte().is_none() { continue 'outer; } depth += 1; @@ -2056,7 +2081,10 @@ where Storage: AsRef<[u8]> mod tests { use super::{ArenaCompactTree, ACTZipper}; use crate::{ - morphisms::Catamorphism, PathMap, zipper::{zipper_iteration_tests, zipper_moving_tests, ZipperIteration, ZipperMoving, ZipperValues} + morphisms::Catamorphism, PathMap, zipper::{ + zipper_iteration_tests, zipper_moving_tests, + ZipperIteration, ZipperPath, ZipperValues + }, }; zipper_moving_tests::zipper_moving_tests!(arena_compact_zipper, diff --git a/src/counters.rs b/src/counters.rs index 140e1e3..35e7154 100644 --- a/src/counters.rs +++ b/src/counters.rs @@ -4,7 +4,7 @@ use crate::trie_node::{TaggedNodeRef, TrieNode}; /// Example usage of counters /// -/// ``` +/// ```ignore /// pathmap::counters::print_traversal(&map.read_zipper()); /// let counters = pathmap::counters::Counters::count_ocupancy(&map); /// counters.print_histogram_by_depth(); diff --git a/src/dense_byte_node.rs b/src/dense_byte_node.rs index a948704..34bd503 100644 --- a/src/dense_byte_node.rs +++ b/src/dense_byte_node.rs @@ -2,7 +2,7 @@ use core::fmt::{Debug, Formatter}; use core::ptr; use std::collections::HashMap; -use std::hint::unreachable_unchecked; +use core::hint::unreachable_unchecked; use crate::alloc::Allocator; use crate::ring::*; @@ -54,7 +54,7 @@ pub type CellByteNode = ByteNode, A>; #[repr(C)] pub struct ByteNode { #[cfg(feature = "slim_ptrs")] - refcnt: std::sync::atomic::AtomicU32, + refcnt: core::sync::atomic::AtomicU32, pub mask: ByteMask, #[cfg(feature = "nightly")] values: Vec, @@ -100,7 +100,7 @@ impl> Clone for ByteN fn clone(&self) -> Self { Self { #[cfg(feature = "slim_ptrs")] - refcnt: std::sync::atomic::AtomicU32::new(1), + refcnt: core::sync::atomic::AtomicU32::new(1), mask: self.mask, values: self.values.clone(), alloc: self.alloc.clone(), @@ -109,7 +109,7 @@ impl> Clone for ByteN } impl> Debug for ByteNode { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { 
//Recursively printing a whole tree will get pretty unwieldy. Should do something // like serialization for inspection using standard tools. write!(f, "ByteNode {{count={}", self.values.len())?; @@ -134,7 +134,7 @@ impl> ByteNode fn new_with_fields_in(mask: ByteMask, values: ValuesVec, alloc: A) -> Self { Self { #[cfg(feature = "slim_ptrs")] - refcnt: std::sync::atomic::AtomicU32::new(1), + refcnt: core::sync::atomic::AtomicU32::new(1), mask, values: values.v, alloc, diff --git a/src/empty_zipper.rs b/src/empty_zipper.rs index 9528c2e..a6d7b09 100644 --- a/src/empty_zipper.rs +++ b/src/empty_zipper.rs @@ -34,7 +34,14 @@ impl Zipper for EmptyZipper { impl ZipperMoving for EmptyZipper { fn at_root(&self) -> bool { self.path.len() == self.path_start_idx } fn reset(&mut self) { self.path.truncate(self.path_start_idx) } - fn path(&self) -> &[u8] { &self.path[self.path_start_idx..] } + #[inline] + fn focus_byte(&self) -> Option<u8> { + if self.path.len() > self.path_start_idx { + self.path.last().cloned() + } else { + None + } + } fn val_count(&self) -> usize { 0 } fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { self.path.extend_from_slice(k.as_ref()); @@ -42,16 +49,17 @@ fn descend_to_byte(&mut self, k: u8) { self.path.push(k); } - fn descend_indexed_byte(&mut self, _idx: usize) -> bool { false } - fn descend_first_byte(&mut self) -> bool { false } - fn descend_until(&mut self) -> bool { false } - fn ascend(&mut self, steps: usize) -> bool { - if steps > self.path.len() - self.path_start_idx { + fn descend_indexed_byte(&mut self, _idx: usize) -> Option<u8> { None } + fn descend_first_byte(&mut self) -> Option<u8> { None } + fn descend_until<Obs: PathObserver>(&mut self, _obs: &mut Obs) -> bool { false } + fn ascend(&mut self, steps: usize) -> usize { + let old_path_len = self.path.len() - self.path_start_idx; + if steps > old_path_len { self.reset(); - false + old_path_len } else { self.path.truncate(self.path.len() - steps); - true + steps } } fn ascend_byte(&mut self) -> bool { @@ -62,19 +70,24 @@ false } } - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { if self.at_root() { - false + 0 } else { + let old_path_len = self.path.len() - self.path_start_idx; self.reset(); - true + old_path_len } } - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { self.ascend_until() } - fn to_next_sibling_byte(&mut self) -> bool { false } - fn to_prev_sibling_byte(&mut self) -> bool { false } + fn to_next_sibling_byte(&mut self) -> Option<u8> { None } + fn to_prev_sibling_byte(&mut self) -> Option<u8> { None } +} + +impl ZipperPath for EmptyZipper { + fn path(&self) -> &[u8] { &self.path[self.path_start_idx..] } } impl ZipperAbsolutePath for EmptyZipper { diff --git a/src/experimental.rs b/src/experimental.rs index 904c50c..3400072 100644 --- a/src/experimental.rs +++ b/src/experimental.rs @@ -37,7 +37,10 @@ impl ZipperPathBuffer for FullZipper { impl ZipperMoving for FullZipper { fn at_root(&self) -> bool { self.path.len() == 0 } fn reset(&mut self) { self.path.clear() } - fn path(&self) -> &[u8] { &self.path[..] 
} + #[inline] + fn focus_byte(&self) -> Option<u8> { + self.path.last().cloned() + } fn val_count(&self) -> usize { usize::MAX/2 } // usize::MAX is a dangerous default for overflow fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { self.path.extend_from_slice(k.as_ref()); @@ -45,52 +48,56 @@ fn descend_to_byte(&mut self, k: u8) { self.path.push(k); } - fn descend_indexed_byte(&mut self, idx: usize) -> bool { + fn descend_indexed_byte(&mut self, idx: usize) -> Option<u8> { assert!(idx < 256); self.path.push(idx as u8); - true + Some(idx as u8) } - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option<u8> { self.path.push(0); - true + Some(0) } - fn descend_until(&mut self) -> bool { - self.path.push(0); // not sure? - true + fn descend_until<Obs: PathObserver>(&mut self, _obs: &mut Obs) -> bool { + false } - fn ascend(&mut self, steps: usize) -> bool { + fn ascend(&mut self, steps: usize) -> usize { if steps > self.path.len() { + let old_depth = self.path.len(); self.path.clear(); - false + old_depth } else { self.path.truncate(self.path.len() - steps); - true + steps } } fn ascend_byte(&mut self) -> bool { self.path.pop().is_some() } - fn ascend_until(&mut self) -> bool { - self.path.pop().is_some() // not sure? + fn ascend_until(&mut self) -> usize { + self.ascend(1) } - fn ascend_until_branch(&mut self) -> bool { - self.path.pop().is_some() // not sure? What's the difference with the previous? + fn ascend_until_branch(&mut self) -> usize { + self.ascend(1) } - fn to_next_sibling_byte(&mut self) -> bool { self.to_sibling(true) } - fn to_prev_sibling_byte(&mut self) -> bool { self.to_sibling(false) } + fn to_next_sibling_byte(&mut self) -> Option<u8> { self.to_sibling(true) } + fn to_prev_sibling_byte(&mut self) -> Option<u8> { self.to_sibling(false) } +} + +impl ZipperPath for FullZipper { + fn path(&self) -> &[u8] { &self.path[..] } } impl FullZipper { - fn to_sibling(&mut self, next: bool) -> bool { - if self.path.is_empty() { return false } // right? + fn to_sibling(&mut self, next: bool) -> Option<u8> { + if self.path.is_empty() { return None } // right? 
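+ // In the conceptual full trie every byte exists at every level, so the next/prev sibling is simply the numerically adjacent byte value; only the root, byte 0xFF (next), or byte 0x00 (prev) have none. 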
if next { let last = self.path.last_mut().unwrap(); - if *last != 255 { *last = *last + 1; true } - else { false } + if *last != 255 { *last = *last + 1; Some(*last) } + else { None } } else { - let first = self.path.first_mut().unwrap(); - if *first != 0 { *first = *first - 1; true } - else { false } + let last = self.path.last_mut().unwrap(); + if *last != 0 { *last = *last - 1; Some(*last) } + else { None } } } } diff --git a/src/fuzzer.rs b/src/fuzzer.rs index f427967..5f39cda 100644 --- a/src/fuzzer.rs +++ b/src/fuzzer.rs @@ -7,7 +7,7 @@ use std::ptr::null; use rand::distr::{Iter, Uniform}; use crate::TrieValue; use crate::utils::{BitMask, ByteMask}; -use crate::zipper::{ReadZipperUntracked, Zipper, ZipperReadOnlyIteration, ZipperMoving, ZipperReadOnlyValues}; +use crate::zipper::*; use crate::gxhash::{HashMap, HashMapExt}; @@ -577,7 +577,7 @@ mod tests { assert_eq!(rz.get_val(), trie.get_val_at(&path[..])); path_fuzzer.clone().sample_iter(rng_.clone()).take(N_DESCENDS).for_each(|path| { rz.descend_to(&path[..]); - rz.ascend(path.len()); + let _ = rz.ascend(path.len()); }); assert_eq!(rz.path(), &path[..]); assert_eq!(rz.get_val(), trie.get_val_at(&path[..])); diff --git a/src/lib.rs b/src/lib.rs index c047e73..4e7b911 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -89,6 +89,9 @@ pub mod utils; /// Extensions to the API that may or may not become permanent pub mod experimental; +/// Wrapper to allow tracking the path for a blind zipper +pub mod path_tracker; + /// Compact representation of the trie #[cfg(feature = "arena_compact")] pub mod arena_compact; diff --git a/src/morphisms.rs b/src/morphisms.rs index 36838b5..9f27aaf 100644 --- a/src/morphisms.rs +++ b/src/morphisms.rs @@ -433,7 +433,7 @@ fn cata_side_effect_body<'a, Z, V: 'a, W, Err, AlgF, const JUMPING: bool>(mut z: z.prepare_buffers(); //Push a stack frame for the root, and start on the first branch off the root stack.push(StackFrame::from(&z)); - if !z.descend_first_byte() { + if z.descend_first_byte().is_none() { //Empty trie is a special case return alg_f(&ByteMask::EMPTY, &mut [], 0, z.val(), z.origin_path()) } @@ -442,7 +442,7 @@ //Descend to the next forking point, or leaf let mut is_leaf = false; while z.child_count() < 2 { - if !z.descend_until() { + if !z.descend_until(&mut ()) { is_leaf = true; break; } @@ -488,7 +488,7 @@ //Position to descend the next child branch let descended = z.descend_indexed_byte(stack[frame_idx].child_idx as usize); - debug_assert!(descended); + debug_assert!(descended.is_some()); } else { //Push a new stack frame for this branch Stack::push_state_raw(&mut stack, &mut frame_idx, &z); @@ -518,7 +518,7 @@ fn ascend_to_fork<'a, Z, V: 'a, W, Err, AlgF, const JUMPING: bool>(z: &mut Z, let old_path_len = z.origin_path().len(); let old_val = z.get_val_with_witness(&z_witness); let ascended = z.ascend_until(); - debug_assert!(ascended); + debug_assert!(ascended > 0); let origin_path = unsafe{ z.origin_path_assert_len(old_path_len) }; let jump_len = if z.child_count() != 1 || z.is_val() { @@ -655,7 +655,7 @@ where V: 'static + Clone + Send + Sync + Unpin, W: Default, I: IntoIterator, - WZ: ZipperWriting + zipper::ZipperMoving, + WZ: ZipperWriting + zipper::ZipperMoving + zipper::ZipperPath, CoAlgF: Copy + FnMut(W, &[u8]) -> (&'a [u8], ByteMask, I, Option), { let (prefix, bm, ws, mv) = coalg_f(w, wz.path()); @@ -668,7 +668,8 @@ new_map_from_ana_jumping(wz, w, coalg_f); 
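+ // Pop the one-byte descent for this child back off before the loop continues to the next branch (the matching descend happens above this hunk). 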
wz.ascend_byte(); } - wz.ascend(prefix_len); + let ascended = wz.ascend(prefix_len); + debug_assert_eq!(ascended, prefix_len); } /// A trait to dictate if and how the value should be cached. @@ -761,7 +762,7 @@ pub(crate) fn into_cata_cached_body<'a, Z, V: 'a, W, E, AlgF, Cache, const JUMPI // Descend until leaf or branch let mut is_leaf = false; 'descend: while zipper.child_count() < 2 { - if !zipper.descend_until() { + if !zipper.descend_until(&mut ()) { is_leaf = true; break 'descend; } @@ -962,7 +963,8 @@ pub(crate) fn new_map_from_ana_in(w: W, mut alg_f: Alg // Path from a graft, we shouldn't descend WOrNode::Node(node) => { z.core().graft_internal(Some(node)); - z.ascend(child_path_len); + let ascended = z.ascend(child_path_len); + debug_assert_eq!(ascended, child_path_len); } } } else { @@ -970,7 +972,9 @@ pub(crate) fn new_map_from_ana_in(w: W, mut alg_f: Alg if frame_idx == 0 { break } - z.ascend(stack[frame_idx].1); + let to_ascend = stack[frame_idx].1; + let ascended = z.ascend(to_ascend); + debug_assert_eq!(ascended, to_ascend); stack[frame_idx].0.reset(); frame_idx -= 1; } diff --git a/src/overlay_zipper.rs b/src/overlay_zipper.rs index f39e6fe..b3cf6a5 100644 --- a/src/overlay_zipper.rs +++ b/src/overlay_zipper.rs @@ -18,7 +18,7 @@ use fast_slice_utils::find_prefix_overlap; use crate::utils::{BitMask, ByteMask}; -use crate::zipper::{Zipper, ZipperMoving, ZipperIteration, ZipperValues}; +use crate::zipper::{PathObserver, Zipper, ZipperMoving, ZipperPath, ZipperIteration, ZipperValues}; /// Zipper that traverses a virtual trie formed by fusing the tries of two other zippers pub struct OverlayZipper @@ -69,16 +69,16 @@ impl impl OverlayZipper where - AZipper: ZipperMoving + ZipperValues, - BZipper: ZipperMoving + ZipperValues, + AZipper: ZipperMoving + ZipperValues + ZipperPath, + BZipper: ZipperMoving + ZipperValues + ZipperPath, Mapping: for<'a> Fn(Option<&'a AV>, Option<&'a BV>) -> Option<&'a OutV>, { - fn to_sibling(&mut self, next: bool) -> bool { + fn to_sibling(&mut self, next: bool) -> Option { let path = self.path(); let Some(&last) = path.last() else { - return false; + return None; }; - self.ascend(1); + self.ascend_byte(); let child_mask = self.child_mask(); let maybe_child = if next { child_mask.next_bit(last) @@ -87,10 +87,10 @@ impl }; let Some(child) = maybe_child else { self.descend_to_byte(last); - return false; + return None; }; self.descend_to_byte(child); - true + Some(child) } } @@ -132,8 +132,8 @@ impl Zipper impl ZipperMoving for OverlayZipper where - AZipper: ZipperMoving + ZipperValues, - BZipper: ZipperMoving + ZipperValues, + AZipper: ZipperMoving + ZipperValues + ZipperPath, + BZipper: ZipperMoving + ZipperValues + ZipperPath, Mapping: for<'a> Fn(Option<&'a AV>, Option<&'a BV>) -> Option<&'a OutV>, { fn at_root(&self) -> bool { @@ -145,8 +145,11 @@ impl ZipperMoving self.b.reset(); } - fn path(&self) -> &[u8] { - self.a.path() + #[inline] + fn focus_byte(&self) -> Option { + let byte = self.a.focus_byte(); + debug_assert_eq!(byte, self.b.focus_byte()); + byte } fn val_count(&self) -> usize { @@ -180,7 +183,8 @@ impl ZipperMoving let depth_o = self.b.descend_to_val(path); if depth_a < depth_o { if self.a.is_val() { - self.b.ascend(depth_o - depth_a); + let ascended = self.b.ascend(depth_o - depth_a); + debug_assert_eq!(ascended, depth_o - depth_a); depth_a } else { self.a.descend_to(&path[depth_a..depth_o]); @@ -188,7 +192,8 @@ impl ZipperMoving } } else if depth_o < depth_a { if self.b.is_val() { - self.a.ascend(depth_a - depth_o); + let ascended = 
self.a.ascend(depth_a - depth_o); + debug_assert_eq!(ascended, depth_a - depth_o); depth_o } else { self.a.descend_to(&path[depth_o..depth_a]); @@ -204,23 +209,23 @@ impl ZipperMoving self.b.descend_to(&[k]); } - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { self.descend_indexed_byte(0) } - fn descend_indexed_byte(&mut self, idx: usize) -> bool { + fn descend_indexed_byte(&mut self, idx: usize) -> Option { let child_mask = self.child_mask(); - let Some(byte) = child_mask.indexed_bit::(idx) else { - return false; - }; + let byte = child_mask.indexed_bit::(idx)?; self.descend_to_byte(byte); - true + debug_assert!(self.path_exists()); + Some(byte) } - fn descend_until(&mut self) -> bool { + fn descend_until(&mut self, obs: &mut Obs) -> bool { let start_depth = self.a.path().len(); - let desc_a = self.a.descend_until(); - let desc_b = self.b.descend_until(); + //GOAT: TODO. We need to shadow the `obs` with a type that reflects the current obs remaining_limit + let desc_a = self.a.descend_until(&mut ()); + let desc_b = self.b.descend_until(&mut ()); let path_a = &self.a.path()[start_depth..]; let path_b = &self.b.path()[start_depth..]; if !desc_a && !desc_b { @@ -231,7 +236,9 @@ impl ZipperMoving self.a.descend_to(path_b); return true; } else { - self.b.ascend(self.b.path().len() - start_depth); + let to_ascend = self.b.path().len() - start_depth; + let ascended = self.b.ascend(to_ascend); + debug_assert_eq!(ascended, to_ascend); return false; } } @@ -240,29 +247,45 @@ impl ZipperMoving self.b.descend_to(path_a); return true; } else { - self.a.ascend(self.a.path().len() - start_depth); + let to_ascend = self.a.path().len() - start_depth; + let ascended = self.a.ascend(to_ascend); + debug_assert_eq!(ascended, to_ascend); return false; } } let overlap = find_prefix_overlap(path_a, path_b); if path_a.len() > overlap { - self.a.ascend(path_a.len() - overlap); + let to_ascend = path_a.len() - overlap; + let ascended = self.a.ascend(to_ascend); + debug_assert_eq!(ascended, to_ascend); } if path_b.len() > overlap { - self.b.ascend(path_b.len() - overlap); + let to_ascend = path_b.len() - overlap; + let ascended = self.b.ascend(to_ascend); + debug_assert_eq!(ascended, to_ascend); + } + debug_assert_eq!(self.a.path(), self.b.path()); + debug_assert_eq!(start_depth + overlap, self.a.path().len()); + if overlap > 0 { + obs.descend_to(&self.a.path()[start_depth..]); + true + } else { + false } - overlap > 0 } - fn ascend(&mut self, steps: usize) -> bool { - self.a.ascend(steps) | self.b.ascend(steps) + fn ascend(&mut self, steps: usize) -> usize { + let rv_a = self.a.ascend(steps); + let rv_b = self.b.ascend(steps); + debug_assert_eq!(rv_a, rv_b); + rv_a } fn ascend_byte(&mut self) -> bool { - self.ascend(1) + self.ascend(1) == 1 } - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { debug_assert_eq!(self.a.path(), self.b.path()); // eprintln!("asc_until i {:?} {:?}", self.base.path(), self.overlay.path()); let asc_a = self.a.ascend_until(); @@ -271,47 +294,66 @@ impl ZipperMoving let asc_b = self.b.ascend_until(); let path_b = self.b.path(); let depth_b = path_b.len(); - if !(asc_b || asc_a) { - return false; - } + let min = match (asc_a, asc_b) { + (0, 0) => return 0, + (a, 0) | (0, a) => a, + (a, b) => a.min(b), + }; // eprintln!("asc_until {path_a:?} {path_b:?}"); if depth_b > depth_a { self.a.descend_to(&path_b[depth_a..]); } else if depth_a > depth_b { self.b.descend_to(&path_a[depth_b..]); } - true + min } - fn 
ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { let asc_a = self.a.ascend_until_branch(); let path_a = self.a.path(); let depth_a = path_a.len(); let asc_b = self.b.ascend_until_branch(); let path_b = self.b.path(); let depth_b = path_b.len(); + let min = match (asc_a, asc_b) { + (0, 0) => return 0, + (a, 0) | (0, a) => a, + (a, b) => a.min(b), + }; if depth_b > depth_a { self.a.descend_to(&path_b[depth_a..]); } else if depth_a > depth_b { self.b.descend_to(&path_a[depth_b..]); } - asc_a || asc_b + min } - fn to_next_sibling_byte(&mut self) -> bool { + fn to_next_sibling_byte(&mut self) -> Option<u8> { self.to_sibling(true) } - fn to_prev_sibling_byte(&mut self) -> bool { + fn to_prev_sibling_byte(&mut self) -> Option<u8> { self.to_sibling(false) } } +impl ZipperPath + for OverlayZipper + where + AZipper: ZipperMoving + ZipperValues + ZipperPath, + BZipper: ZipperMoving + ZipperValues + ZipperPath, + Mapping: for<'a> Fn(Option<&'a AV>, Option<&'a BV>) -> Option<&'a OutV>, +{ + fn path(&self) -> &[u8] { + self.a.path() + } +} + impl ZipperIteration for OverlayZipper where - AZipper: ZipperMoving + ZipperValues, - BZipper: ZipperMoving + ZipperValues, + AZipper: ZipperMoving + ZipperValues + ZipperPath, + BZipper: ZipperMoving + ZipperValues + ZipperPath, Mapping: for<'a> Fn(Option<&'a AV>, Option<&'a BV>) -> Option<&'a OutV>, { } @@ -354,7 +396,7 @@ use super::{OverlayZipper}; |keys: &[&[u8]]| { let cutoff = keys.len() / 3 * 2; // eprintln!("keys={:?}", &keys); - eprintln!("a_keys={:?}\nb_keys={:?}", &keys[..cutoff], &keys[cutoff..]); + // eprintln!("a_keys={:?}\nb_keys={:?}", &keys[..cutoff], &keys[cutoff..]); let a = keys[..cutoff].into_iter().map(|k| (k, ())).collect::<PathMap<()>>(); let b = keys[cutoff..].into_iter().map(|k| (k, ())).collect::<PathMap<()>>(); (a, b) diff --git a/src/path_tracker.rs b/src/path_tracker.rs new file mode 100644 index 0000000..b850dfb --- /dev/null +++ b/src/path_tracker.rs @@ -0,0 +1,226 @@ +use crate::{ + utils::ByteMask, + zipper::{ + PathObserver, Zipper, ZipperAbsolutePath, ZipperMoving, ZipperIteration, + ZipperPath, ZipperPathBuffer, ZipperValues, + ZipperReadOnlyValues, ZipperReadOnlyConditionalValues, + }, +}; + +/// Wrapper to implement [`ZipperPath`] for zipper types that implement `ZipperMoving`. +/// This is useful for tracking the path of "blind" zipper types. +/// +/// The "blind" zipper pattern enables nested virtual zippers to efficiently compose, +/// without repeating the work of copying paths. +/// +/// Example: +/// ```rust +/// use pathmap::zipper::{ZipperPath, ZipperMoving}; +/// // the example uses `PathMap`, but this works with any zipper. 
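+/// // (`PathTracker` keeps its own copy of the path up to date as the wrapped zipper moves.) 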
+/// let btm = pathmap::PathMap::from_iter([(b"hello", ())]); +/// let zipper = btm.read_zipper(); +/// let mut with_path = pathmap::zipper::PathTracker::new(zipper); +/// assert_eq!(with_path.descend_to_existing("hello"), 5); +/// println!("current path: {:?}", with_path.path()); +/// assert_eq!(with_path.path(), b"hello"); +/// ``` +pub struct PathTracker<Z> { + zipper: Z, + path: Vec<u8>, + origin_len: usize, +} + +impl<Z: ZipperMoving> PathTracker<Z> { + pub fn new(mut zipper: Z) -> Self { + zipper.reset(); + Self { + zipper, + path: Vec::new(), + origin_len: 0, + } + } + pub fn with_origin(mut zipper: Z, origin: &[u8]) -> Self { + zipper.reset(); + Self { + zipper, + path: origin.to_vec(), + origin_len: origin.len(), + } + } +} + +impl<Z: Zipper> Zipper for PathTracker<Z> { + #[inline] fn path_exists(&self) -> bool { self.zipper.path_exists() } + #[inline] fn is_val(&self) -> bool { self.zipper.is_val() } + #[inline] fn child_count(&self) -> usize { self.zipper.child_count() } + #[inline] fn child_mask(&self) -> ByteMask { self.zipper.child_mask() } +} +impl<Z: ZipperMoving> ZipperMoving for PathTracker<Z> { + #[inline] fn at_root(&self) -> bool { self.zipper.at_root() } + fn reset(&mut self) { + self.zipper.reset(); + self.path.truncate(self.origin_len); + } + #[inline] + fn focus_byte(&self) -> Option<u8> { + if self.path.len() > self.origin_len { + self.path.last().cloned() + } else { + None + } + } + fn val_count(&self) -> usize { todo!() } + fn descend_to<K: AsRef<[u8]>>(&mut self, path: K) { + let path = path.as_ref(); + self.path.extend_from_slice(path); + self.zipper.descend_to(path) + } + fn descend_to_existing<K: AsRef<[u8]>>(&mut self, path: K) -> usize { + let path = path.as_ref(); + let descended = self.zipper.descend_to_existing(path); + self.path.extend_from_slice(&path[..descended]); + descended + } + fn descend_to_existing_byte(&mut self, k: u8) -> bool { + if self.zipper.descend_to_existing_byte(k) { + self.path.push(k); + true + } else { + false + } + } + fn descend_to_val<K: AsRef<[u8]>>(&mut self, path: K) -> usize { + let path = path.as_ref(); + let descended = self.zipper.descend_to_val(path); + self.path.extend_from_slice(&path[..descended]); + descended + } + fn descend_to_byte(&mut self, k: u8) { + self.path.push(k); + self.zipper.descend_to_byte(k) + } + fn ascend(&mut self, steps: usize) -> usize { + let ascended = self.zipper.ascend(steps); + let orig_len = self.path.len(); + self.path.truncate(orig_len - ascended); + ascended + } + fn ascend_byte(&mut self) -> bool { + if !self.zipper.ascend_byte() { + return false; + } + self.path.pop(); + true + } + fn ascend_until(&mut self) -> usize { + let ascended = self.zipper.ascend_until(); + if ascended == 0 { return 0; } + let orig_len = self.path.len(); + self.path.truncate(orig_len - ascended); + ascended + } + fn ascend_until_branch(&mut self) -> usize { + let ascended = self.zipper.ascend_until_branch(); + if ascended == 0 { return 0; } + let orig_len = self.path.len(); + self.path.truncate(orig_len - ascended); + ascended + } + fn to_next_sibling_byte(&mut self) -> Option<u8> { + let byte = self.zipper.to_next_sibling_byte()?; + let last = self.path.last_mut().expect("path must not be empty"); + *last = byte; + Some(byte) + } + fn to_prev_sibling_byte(&mut self) -> Option<u8> { + let byte = self.zipper.to_prev_sibling_byte()?; + let last = self.path.last_mut().expect("path must not be empty"); + *last = byte; + Some(byte) + } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option<u8> { + let byte = self.zipper.descend_indexed_byte(child_idx)?; + self.path.push(byte); + Some(byte) + } + fn descend_first_byte(&mut 
self) -> Option<u8> { + let byte = self.zipper.descend_first_byte()?; + self.path.push(byte); + Some(byte) + } + fn descend_until<Obs: PathObserver>(&mut self, obs: &mut Obs) -> bool { + let orig_len = self.path.len(); + //GOAT, we need to create a wrapper around path that captures the `obs` remaining_limit, so the wrapped zipper doesn't descend. + let descended = self.zipper.descend_until(&mut self.path); + obs.descend_to(&self.path[orig_len..]); + descended + } + // TODO: using default impl. re-using zipper's own `to_next_step` implementation + // would require changing the API such that path can be updated. + // fn to_next_step(&mut self) -> bool; +} + +impl<Z: ZipperIteration> ZipperIteration for PathTracker<Z> { } + +impl<Z: ZipperMoving> ZipperPath for PathTracker<Z> { + fn path(&self) -> &[u8] { &self.path[self.origin_len..] } +} + +impl<Z: ZipperMoving> ZipperAbsolutePath for PathTracker<Z> { + fn origin_path(&self) -> &[u8] { &self.path } + fn root_prefix_path(&self) -> &[u8] { &self.path[..self.origin_len] } +} + +impl<Z: ZipperValues<V>, V> ZipperValues<V> for PathTracker<Z> { + fn val(&self) -> Option<&V> { self.zipper.val() } +} + +impl<'a, Z: ZipperReadOnlyValues<'a, V>, V> ZipperReadOnlyValues<'a, V> for PathTracker<Z> { + fn get_val(&self) -> Option<&'a V> { self.zipper.get_val() } +} + +impl<'a, Z: ZipperReadOnlyConditionalValues<'a, V>, V> ZipperReadOnlyConditionalValues<'a, V> for PathTracker<Z> { + type WitnessT = Z::WitnessT; + fn witness<'w>(&self) -> Self::WitnessT { self.zipper.witness() } + fn get_val_with_witness<'w>(&self, witness: &'w Self::WitnessT) -> Option<&'w V> where 'a: 'w { + self.zipper.get_val_with_witness(witness) + } +} + +impl<Z: ZipperMoving> ZipperPathBuffer for PathTracker<Z> { + unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { + let ptr = self.path.as_ptr(); + unsafe { core::slice::from_raw_parts(ptr, len) } + } + fn prepare_buffers(&mut self) { } + fn reserve_buffers(&mut self, path_len: usize, _stack: usize) { + self.path.reserve(path_len); + } +} + +#[cfg(test)] +mod tests { + use super::{PathTracker}; + use crate::{ + PathMap, + zipper::{zipper_iteration_tests, zipper_moving_tests}, + }; + + zipper_moving_tests::zipper_moving_tests!(track_path, + |keys: &[&[u8]]| { + keys.into_iter().map(|k| (k, ())).collect::<PathMap<()>>() + }, + |trie: &mut PathMap<()>, path: &[u8]| { + PathTracker::with_origin(trie.read_zipper_at_path(path), path) + } + ); + + zipper_iteration_tests::zipper_iteration_tests!(track_path, + |keys: &[&[u8]]| { + keys.into_iter().map(|k| (k, ())).collect::<PathMap<()>>() + }, + |trie: &mut PathMap<()>, path: &[u8]| { + PathTracker::with_origin(trie.read_zipper_at_path(path), path) + } + ); +} diff --git a/src/paths_serialization.rs b/src/paths_serialization.rs index 088fc50..0b0f423 100644 --- a/src/paths_serialization.rs +++ b/src/paths_serialization.rs @@ -279,7 +279,7 @@ pub fn for_each_deserialized_path st #[cfg(test)] mod test { - use crate::zipper::{ZipperIteration, ZipperValues, ZipperMoving}; + use crate::zipper::{ZipperIteration, ZipperValues, ZipperPath}; use super::*; #[cfg(not(miri))] // miri really hates the zlib-ng-sys C API diff --git a/src/poly_zipper.rs b/src/poly_zipper.rs index d6b9255..491eefe 100644 --- a/src/poly_zipper.rs +++ b/src/poly_zipper.rs @@ -9,6 +9,7 @@ use crate::zipper::*; /// /// The `PolyZipper` macro implements the following traits, provided they are implemented on each of the enum variants: /// * [`Zipper`] +/// * [`ZipperPath`] +/// * [`ZipperAbsolutePath`] /// * [`ZipperConcrete`] /// * [`ZipperIteration`] diff --git a/src/prefix_zipper.rs b/src/prefix_zipper.rs index e2ba155..7942e01 100644 --- a/src/prefix_zipper.rs +++ 
b/src/prefix_zipper.rs @@ -61,7 +61,7 @@ pub struct PrefixZipper<'prefix, Z> { impl<'prefix, Z> PrefixZipper<'prefix, Z> where - Z: ZipperMoving + Z: ZipperMoving + ZipperPath { /// Creates a new `PrefixZipper` wrapping the supplied `source` zipper and prepending the /// supplied `prefix` @@ -115,47 +115,46 @@ impl<'prefix, Z> PrefixZipper<'prefix, Z> }; } - fn ascend_n(&mut self, mut steps: usize) -> Result<(), usize> { + fn ascend_n(&mut self, steps: usize) -> usize { + let mut remaining = steps; if let PrefixPos::PrefixOff { valid, mut invalid } = self.position { - if invalid > steps { - invalid -= steps; + if invalid > remaining { + invalid -= remaining; self.position = PrefixPos::PrefixOff { valid, invalid }; - return Ok(()); + return steps; } - steps -= invalid; - self.set_valid(valid.saturating_sub(steps)); - return if let Some(remaining) = steps.checked_sub(valid) { - Err(remaining) + remaining -= invalid; + self.set_valid(valid.saturating_sub(remaining)); + return if let Some(remaining) = remaining.checked_sub(valid) { + steps - remaining } else { - Ok(()) + steps }; } + if self.position.is_source() { - // let Err(remaining) = self.source.ascend(steps) else { - // return Ok(()); - // }; let len_before = self.source.path().len(); - if self.source.ascend(steps) { - return Ok(()) + if self.source.ascend(remaining) == remaining { + return steps } let len_after = self.source.path().len(); - steps -= len_before - len_after; + remaining -= len_before - len_after; self.position = PrefixPos::Prefix { valid: self.prefix.len() - self.origin_depth }; // Intermediate state: self.position points one off } if let PrefixPos::Prefix { valid } = self.position { - self.set_valid(valid.saturating_sub(steps)); - return if let Some(remaining) = steps.checked_sub(valid) { - Err(remaining) + self.set_valid(valid.saturating_sub(remaining)); + return if let Some(remaining) = remaining.checked_sub(valid) { + steps - remaining } else { - Ok(()) + steps }; } - Err(steps) + steps - remaining } - fn ascend_until_n(&mut self) -> Option { + fn ascend_until_n(&mut self) -> usize { if self.at_root() { - return None; + return 0; } let mut ascended = 0; if self.position.is_source() { @@ -164,13 +163,13 @@ impl<'prefix, Z> PrefixZipper<'prefix, Z> // } let len_before = self.source.path().len(); let was_good = if VAL { - self.source.ascend_until() + self.source.ascend_until() > 0 } else { - self.source.ascend_until_branch() + self.source.ascend_until_branch() > 0 }; if was_good && ((VAL && self.source.is_val()) || self.source.child_count() > 1) { let len_after = self.source.path().len(); - return Some(len_before - len_after); + return len_before - len_after; } ascended += len_before; let valid = self.prefix.len() - self.origin_depth; @@ -179,7 +178,7 @@ impl<'prefix, Z> PrefixZipper<'prefix, Z> ascended += self.position.prefixed_depth() .expect("we should no longer pointe at source at this point"); self.set_valid(0); - Some(ascended) + ascended } } @@ -242,7 +241,7 @@ impl<'prefix, 'source, Z, V> ZipperReadOnlyConditionalValues<'source, V> } impl<'prefix, Z> ZipperPathBuffer for PrefixZipper<'prefix, Z> - where Z: ZipperMoving + where Z: ZipperMoving + ZipperPath { unsafe fn origin_path_assert_len(&self, len: usize) -> &[u8] { assert!(self.path.capacity() >= len); @@ -305,7 +304,7 @@ impl<'prefix, Z> Zipper for PrefixZipper<'prefix, Z> impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> where - Z: ZipperMoving + Z: ZipperMoving + ZipperPath { fn at_root(&self) -> bool { match self.position { @@ -314,7 +313,10 @@ 
impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> PrefixPos::Source => self.prefix.len() <= self.origin_depth && self.source.at_root(), } } - + #[inline] + fn focus_byte(&self) -> Option { + self.path.last().cloned() + } fn reset(&mut self) { self.prepare_buffers(); self.path.truncate(self.origin_depth); @@ -323,11 +325,6 @@ impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> self.set_valid(0); } - #[inline] - fn path(&self) -> &[u8] { - &self.path[self.origin_depth..] - } - fn val_count(&self) -> usize { unimplemented!("method will probably get removed") } @@ -378,22 +375,20 @@ impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> self.descend_to([k]) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { let mask = self.child_mask(); - let Some(byte) = mask.indexed_bit::(child_idx) else { - return false; - }; + let byte = mask.indexed_bit::(child_idx)?; self.descend_to_byte(byte); debug_assert!(self.path_exists()); - true + Some(byte) } #[inline] - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { self.descend_indexed_byte(0) } - fn descend_until(&mut self) -> bool { + fn descend_until(&mut self, obs: &mut Obs) -> bool { if self.position.is_invalid() { return false; } @@ -402,7 +397,7 @@ impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> self.position = PrefixPos::Source; } let len_before = self.source.path().len(); - if !self.source.descend_until() { + if !self.source.descend_until(obs) { return false; } let path = self.source.path(); @@ -411,57 +406,54 @@ impl<'prefix, Z> ZipperMoving for PrefixZipper<'prefix, Z> } #[inline] - fn to_next_sibling_byte(&mut self) -> bool { + fn to_next_sibling_byte(&mut self) -> Option { if !self.position.is_source() { - return false; - } - if !self.source.to_next_sibling_byte() { - return false; + return None; } - let byte = *self.source.path().last().unwrap(); + let byte = self.source.to_next_sibling_byte()?; *self.path.last_mut().unwrap() = byte; - true + Some(byte) } #[inline] - fn to_prev_sibling_byte(&mut self) -> bool { + fn to_prev_sibling_byte(&mut self) -> Option { if !self.position.is_source() { - return false; - } - if !self.source.to_prev_sibling_byte() { - return false; + return None; } - let byte = *self.source.path().last().unwrap(); + let byte = self.source.to_prev_sibling_byte()?; *self.path.last_mut().unwrap() = byte; - true + Some(byte) } - fn ascend(&mut self, steps: usize) -> bool { - let ascended = match self.ascend_n(steps) { - Err(remaining) => steps - remaining, - Ok(()) => steps, - }; + fn ascend(&mut self, steps: usize) -> usize { + let ascended = self.ascend_n(steps); self.path.truncate(self.path.len() - ascended); - ascended == steps + ascended } #[inline] fn ascend_byte(&mut self) -> bool { - self.ascend(1) + self.ascend(1) == 1 } #[inline] - fn ascend_until(&mut self) -> bool { - let Some(ascended) = self.ascend_until_n::() else { - return false; - }; + fn ascend_until(&mut self) -> usize { + let ascended = self.ascend_until_n::(); self.path.truncate(self.path.len() - ascended); - true + ascended } #[inline] - fn ascend_until_branch(&mut self) -> bool { - let Some(ascended) = self.ascend_until_n::() else { - return false; - }; + fn ascend_until_branch(&mut self) -> usize { + let ascended = self.ascend_until_n::(); self.path.truncate(self.path.len() - ascended); - true + ascended + } +} + +impl<'prefix, Z> ZipperPath for PrefixZipper<'prefix, Z> + where + Z: ZipperMoving + 
ZipperPath +{ + #[inline] + fn path(&self) -> &[u8] { + &self.path[self.origin_depth..] } } @@ -540,8 +532,7 @@ impl<'prefix, V: Clone + Send + Sync, Z, A: Allocator> zipper_priv::ZipperPriv f mod tests { use super::PrefixZipper; use crate::trie_map::PathMap; - use crate::zipper::ZipperMoving; - use crate::zipper::ZipperAbsolutePath; + use crate::zipper::{ZipperMoving, ZipperPath, ZipperAbsolutePath}; const PATHS1: &[(&[u8], u64)] = &[ (b"0000", 0), (b"00000", 1), @@ -561,18 +552,18 @@ mod tests { let mut rz = PrefixZipper::new(b"prefix", map.read_zipper()); rz.set_root_prefix_path(b"pre").unwrap(); assert_eq!(rz.descend_to_existing(b"fix00000"), 8); - assert_eq!(rz.ascend_until(), true); + assert_eq!(rz.ascend_until(), 1); assert_eq!(rz.path(), b"fix0000"); assert_eq!(rz.origin_path(), b"prefix0000"); assert_eq!(rz.descend_to_existing(b"0"), 1); - assert_eq!(rz.ascend_until_branch(), true); + assert_eq!(rz.ascend_until_branch(), 2); assert_eq!(rz.path(), b"fix000"); - assert_eq!(rz.ascend_until_branch(), true); + assert_eq!(rz.ascend_until_branch(), 3); assert_eq!(rz.path(), b"fix"); - assert_eq!(rz.ascend_until_branch(), true); + assert_eq!(rz.ascend_until_branch(), 3); assert_eq!(rz.path(), b""); assert_eq!(rz.origin_path(), b"pre"); - assert_eq!(rz.ascend_until_branch(), false); + assert_eq!(rz.ascend_until_branch(), 0); } #[test] @@ -581,20 +572,20 @@ mod tests { let mut rz = PrefixZipper::new(b"prefix", map.read_zipper()); rz.set_root_prefix_path(b"pre").unwrap(); assert_eq!(rz.descend_to_existing(b"fix00000"), 8); - assert_eq!(rz.ascend_until(), true); + assert_eq!(rz.ascend_until(), 2); assert_eq!(rz.path(), b"fix000"); assert_eq!(rz.origin_path(), b"prefix000"); - assert_eq!(rz.ascend_until(), true); + assert_eq!(rz.ascend_until(), 1); assert_eq!(rz.path(), b"fix00"); - assert_eq!(rz.ascend_until(), true); + assert_eq!(rz.ascend_until(), 5); assert_eq!(rz.path(), b""); - assert_eq!(rz.ascend_until(), false); + assert_eq!(rz.ascend_until(), 0); assert_eq!(rz.descend_to_existing(b"fix00000"), 8); - assert_eq!(rz.ascend_until_branch(), true); + assert_eq!(rz.ascend_until_branch(), 3); assert_eq!(rz.path(), b"fix00"); - assert_eq!(rz.ascend_until_branch(), true); + assert_eq!(rz.ascend_until_branch(), 5); assert_eq!(rz.path(), b""); assert_eq!(rz.origin_path(), b"pre"); - assert_eq!(rz.ascend_until_branch(), false); + assert_eq!(rz.ascend_until_branch(), 0); } } \ No newline at end of file diff --git a/src/product_zipper.rs b/src/product_zipper.rs index 9ae1f05..33d5d7d 100644 --- a/src/product_zipper.rs +++ b/src/product_zipper.rs @@ -192,14 +192,14 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper fn at_root(&self) -> bool { self.path().len() == 0 } + #[inline] + fn focus_byte(&self) -> Option { + self.z.focus_byte() + } fn reset(&mut self) { self.factor_paths.clear(); self.z.reset() } - #[inline] - fn path(&self) -> &[u8] { - self.z.path() - } fn val_count(&self) -> usize { assert!(self.focus_factor() == self.factor_count() - 1); self.z.val_count() @@ -270,20 +270,20 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper } descended } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { let result = self.z.descend_indexed_byte(child_idx); self.ensure_descend_next_factor(); result } - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { let result = self.z.descend_first_byte(); 
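 // `ensure_descend_next_factor` appears to splice the next factor's root in whenever the inner zipper's focus reaches the end of the current factor. 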
         self.ensure_descend_next_factor();
         result
     }
-    fn descend_until(&mut self) -> bool {
+    fn descend_until<Obs: PathObserver>(&mut self, obs: &mut Obs) -> bool {
         let mut moved = false;
         while self.z.child_count() == 1 {
-            moved |= self.z.descend_until();
+            moved |= self.z.descend_until(obs);
             self.ensure_descend_next_factor();
             if self.z.is_val() {
                 break;
@@ -291,7 +291,7 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper
         }
         moved
     }
-    fn to_next_sibling_byte(&mut self) -> bool {
+    fn to_next_sibling_byte(&mut self) -> Option<u8> {
         if self.factor_paths.last().cloned().unwrap_or(0) == self.path().len() {
             self.factor_paths.pop();
         }
@@ -299,7 +299,7 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper
         self.ensure_descend_next_factor();
         moved
     }
-    fn to_prev_sibling_byte(&mut self) -> bool {
+    fn to_prev_sibling_byte(&mut self) -> Option<u8> {
         if self.factor_paths.last().cloned().unwrap_or(0) == self.path().len() {
             self.factor_paths.pop();
         }
@@ -307,7 +307,7 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper
         self.ensure_descend_next_factor();
         moved
     }
-    fn ascend(&mut self, steps: usize) -> bool {
+    fn ascend(&mut self, steps: usize) -> usize {
         let ascended = self.z.ascend(steps);
         self.fix_after_ascend();
         ascended
@@ -317,18 +317,25 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper
         self.fix_after_ascend();
         ascended
     }
-    fn ascend_until(&mut self) -> bool {
+    fn ascend_until(&mut self) -> usize {
         let ascended = self.z.ascend_until();
         self.fix_after_ascend();
         ascended
     }
-    fn ascend_until_branch(&mut self) -> bool {
+    fn ascend_until_branch(&mut self) -> usize {
         let ascended = self.z.ascend_until_branch();
         self.fix_after_ascend();
         ascended
     }
 }
 
+impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPath for ProductZipper<'_, 'trie, V, A> {
+    #[inline]
+    fn path(&self) -> &[u8] {
+        self.z.path()
+    }
+}
+
 impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperIteration for ProductZipper<'_, 'trie, V, A> { } //Use the default impl for all methods
 
 impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperValues for ProductZipper<'_, 'trie, V, A> {
@@ -419,8 +426,8 @@ pub struct ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
 impl<'trie, PrimaryZ, SecondaryZ, V> ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving,
-    SecondaryZ: ZipperMoving,
+    PrimaryZ: ZipperMoving + ZipperPath,
+    SecondaryZ: ZipperMoving + ZipperPath,
 {
     /// Creates a new `ProductZipper` from the provided zippers
     pub fn new(primary: PrimaryZ, other_zippers: ZipperList) -> Self
@@ -511,8 +518,9 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ProductZipperG<'trie, PrimaryZ, SecondaryZ,
     /// A combination between `ascend_until` and `ascend_until_branch`.
     /// If `allow_stop_on_val` is `true`, behaves as `ascend_until`
-    fn ascend_cond(&mut self, allow_stop_on_val: bool) -> bool {
+    fn ascend_cond(&mut self, allow_stop_on_val: bool) -> usize {
         let mut plen = self.path().len();
+        let mut total_ascended = 0;
         loop {
             while self.factor_paths.last() == Some(&plen) {
                 self.factor_paths.pop();
@@ -527,26 +535,31 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ProductZipperG<'trie, PrimaryZ, SecondaryZ,
                 };
                 let delta = before - zipper.path().len();
                 plen -= delta;
-                self.primary.ascend(delta);
-                if rv && (self.child_count() != 1 || (allow_stop_on_val && self.is_val())) {
-                    return true;
+                let ascended = self.primary.ascend(delta);
+                debug_assert_eq!(ascended, delta);
+                total_ascended += ascended;
+                if rv > 0 && (self.child_count() != 1 || (allow_stop_on_val && self.is_val())) {
+                    return total_ascended;
                 }
             } else {
-                return if allow_stop_on_val {
+                let ascended = if allow_stop_on_val {
                     self.primary.ascend_until()
                 } else {
                     self.primary.ascend_until_branch()
                 };
-            }
+                total_ascended += ascended;
+                return total_ascended
+            };
         }
     }
 
     /// a combination between `to_next_sibling` and `to_prev_sibling`
-    fn to_sibling_byte(&mut self, next: bool) -> bool {
+    fn to_sibling_byte(&mut self, next: bool) -> Option<u8> {
         let Some(&byte) = self.path().last() else {
-            return false;
+            return None;
         };
-        assert!(self.ascend(1), "must ascend");
+        let ascended = self.ascend(1);
+        debug_assert_eq!(ascended, 1, "must ascend");
         let child_mask = self.child_mask();
         let Some(sibling_byte) = (if next {
             child_mask.next_bit(byte)
@@ -554,10 +567,10 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ProductZipperG<'trie, PrimaryZ, SecondaryZ,
             child_mask.prev_bit(byte)
         }) else {
             self.descend_to_byte(byte);
-            return false;
+            return None;
         };
         self.descend_to_byte(sibling_byte);
-        true
+        Some(sibling_byte)
     }
 }
 
@@ -565,8 +578,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperAbsolutePath
 for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperAbsolutePath,
-    SecondaryZ: ZipperMoving,
+    PrimaryZ: ZipperAbsolutePath + ZipperPath,
+    SecondaryZ: ZipperMoving + ZipperPath,
 {
     fn origin_path(&self) -> &[u8] { self.primary.origin_path() }
     fn root_prefix_path(&self) -> &[u8] { self.primary.root_prefix_path() }
@@ -576,8 +589,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperConcrete
 for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving + ZipperConcrete,
-    SecondaryZ: ZipperMoving + ZipperConcrete,
+    PrimaryZ: ZipperMoving + ZipperPath + ZipperConcrete,
+    SecondaryZ: ZipperMoving + ZipperPath + ZipperConcrete,
 {
     fn shared_node_id(&self) -> Option {
         if let Some(idx) = self.factor_idx(true) {
@@ -611,8 +624,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperValues
 for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving + ZipperValues,
-    SecondaryZ: ZipperMoving + ZipperValues,
+    PrimaryZ: ZipperMoving + ZipperPath + ZipperValues,
+    SecondaryZ: ZipperMoving + ZipperPath + ZipperValues,
 {
     fn val(&self) -> Option<&V> {
         if let Some(idx) = self.factor_idx(true) {
@@ -627,8 +640,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperReadOnlyValues<'trie, V>
 for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving + ZipperReadOnlyValues<'trie, V>,
-    SecondaryZ: ZipperMoving + ZipperReadOnlyValues<'trie, V>,
+    PrimaryZ: ZipperMoving + ZipperPath + ZipperReadOnlyValues<'trie, V>,
+    SecondaryZ: ZipperMoving + ZipperPath + ZipperReadOnlyValues<'trie, V>,
 {
     fn get_val(&self) -> Option<&'trie V> {
         if let Some(idx) = self.factor_idx(true) {
@@ -643,8 +656,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperReadOnlyConditionalValues<'trie, V>
 for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving + ZipperReadOnlyConditionalValues<'trie, V>,
-    SecondaryZ: ZipperMoving + ZipperReadOnlyConditionalValues<'trie, V>,
+    PrimaryZ: ZipperMoving + ZipperPath + ZipperReadOnlyConditionalValues<'trie, V>,
+    SecondaryZ: ZipperMoving + ZipperPath + ZipperReadOnlyConditionalValues<'trie, V>,
 {
     type WitnessT = (PrimaryZ::WitnessT, Vec);
     fn witness<'w>(&self) -> Self::WitnessT {
@@ -664,8 +677,8 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperReadOnlyConditionalValues<'trie, V>
 impl<'trie, PrimaryZ, SecondaryZ, V> Zipper for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving + Zipper,
-    SecondaryZ: ZipperMoving + Zipper,
+    PrimaryZ: ZipperMoving + ZipperPath + Zipper,
+    SecondaryZ: ZipperMoving + ZipperPath + Zipper,
 {
     fn path_exists(&self) -> bool {
         if let Some(idx) = self.factor_idx(true) {
@@ -700,12 +713,16 @@ impl<'trie, PrimaryZ, SecondaryZ, V> Zipper for ProductZipperG<'trie, PrimaryZ,
 impl<'trie, PrimaryZ, SecondaryZ, V> ZipperMoving for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
     V: Clone + Send + Sync,
-    PrimaryZ: ZipperMoving,
-    SecondaryZ: ZipperMoving,
+    PrimaryZ: ZipperMoving + ZipperPath,
+    SecondaryZ: ZipperMoving + ZipperPath,
 {
     fn at_root(&self) -> bool {
         self.path().is_empty()
     }
+    #[inline]
+    fn focus_byte(&self) -> Option<u8> {
+        self.primary.path().last().cloned()
+    }
     fn reset(&mut self) {
         self.factor_paths.clear();
         for secondary in &mut self.secondary {
@@ -713,10 +730,6 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperMoving for ProductZipperG<'trie, Prim
         }
         self.primary.reset();
     }
-    #[inline]
-    fn path(&self) -> &[u8] {
-        self.primary.path()
-    }
     fn val_count(&self) -> usize {
         unimplemented!("method will probably get removed")
     }
@@ -758,33 +771,34 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperMoving for ProductZipperG<'trie, Prim
     fn descend_to_byte(&mut self, k: u8) {
         self.descend_to([k])
     }
-    fn descend_indexed_byte(&mut self, child_idx: usize) -> bool {
+    fn descend_indexed_byte(&mut self, child_idx: usize) -> Option<u8> {
         let mask = self.child_mask();
         let Some(byte) = mask.indexed_bit::(child_idx) else {
-            return false;
+            return None;
         };
         self.descend_to_byte(byte);
-        true
+        debug_assert!(self.path_exists());
+        Some(byte)
     }
     #[inline]
-    fn descend_first_byte(&mut self) -> bool {
+    fn descend_first_byte(&mut self) -> Option<u8> {
         self.descend_indexed_byte(0)
     }
-    fn descend_until(&mut self) -> bool {
+    fn descend_until<Obs: PathObserver>(&mut self, obs: &mut Obs) -> bool {
         let mut moved = false;
         self.enter_factors();
         while self.child_count() == 1 {
             moved |= if let Some(idx) = self.factor_idx(false) {
                 let zipper = &mut self.secondary[idx];
                 let before = zipper.path().len();
-                let rv = zipper.descend_until();
+                let rv = zipper.descend_until(obs);
                 let path = zipper.path();
                 if path.len() > before {
                     self.primary.descend_to(&path[before..]);
                 }
                 rv
             } else {
-                self.primary.descend_until()
+                self.primary.descend_until(obs)
             };
             self.enter_factors();
             if self.is_val() {
@@ -794,42 +808,58 @@ impl<'trie, PrimaryZ, SecondaryZ, V> ZipperMoving for ProductZipperG<'trie, Prim
         }
         moved
     }
     #[inline]
-    fn to_next_sibling_byte(&mut self) -> bool {
+    fn to_next_sibling_byte(&mut self) -> Option<u8> {
         self.to_sibling_byte(true)
     }
     #[inline]
-    fn to_prev_sibling_byte(&mut self) -> bool {
+    fn to_prev_sibling_byte(&mut self) -> Option<u8> {
         self.to_sibling_byte(false)
     }
-    fn ascend(&mut self, mut steps: usize) -> bool {
-        while steps > 0 {
+    fn ascend(&mut self, steps: usize) -> usize {
+        let mut remaining = steps;
+        while remaining > 0 {
             self.exit_factors();
             if let Some(idx) = self.factor_idx(false) {
                 let len = self.path().len() - self.factor_paths[idx];
-                let delta = len.min(steps);
-                self.secondary[idx].ascend(delta);
-                self.primary.ascend(delta);
-                steps -= delta;
+                let delta = len.min(remaining);
+                let ascended = self.secondary[idx].ascend(delta);
+                debug_assert_eq!(ascended, delta);
+                let ascended = self.primary.ascend(delta);
+                debug_assert_eq!(ascended, delta);
+                remaining -= delta;
             } else {
-                return self.primary.ascend(steps);
+                let ascended = steps - remaining;
+                return ascended + self.primary.ascend(remaining);
             }
         }
-        true
+        steps
     }
     #[inline]
     fn ascend_byte(&mut self) -> bool {
-        self.ascend(1)
+        self.ascend(1) == 1
     }
     #[inline]
-    fn ascend_until(&mut self) -> bool {
+    fn ascend_until(&mut self) -> usize {
         self.ascend_cond(true)
     }
     #[inline]
-    fn ascend_until_branch(&mut self) -> bool {
+    fn ascend_until_branch(&mut self) -> usize {
         self.ascend_cond(false)
     }
 }
 
+impl<'trie, PrimaryZ, SecondaryZ, V> ZipperPath for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
+    where
+    V: Clone + Send + Sync,
+    PrimaryZ: ZipperMoving + ZipperPath,
+    SecondaryZ: ZipperMoving + ZipperPath,
+{
+    #[inline]
+    fn path(&self) -> &[u8] {
+        self.primary.path()
+    }
+}
+
 impl<'trie, PrimaryZ, SecondaryZ, V> ZipperIteration for ProductZipperG<'trie, PrimaryZ, SecondaryZ, V>
     where
@@ -932,19 +962,19 @@ mod tests {
         assert_eq!(pz.child_count(), 0);
 
         //Make sure we can ascend out of a secondary factor; in this sub-test we'll hit the path middles
-        assert!(pz.ascend(1));
+        assert_eq!(pz.ascend(1), 1);
         assert_eq!(pz.val(), None);
         assert_eq!(pz.path(), b"AAaDDdG");
         assert_eq!(pz.child_count(), 0);
-        assert!(pz.ascend(3));
+        assert_eq!(pz.ascend(3), 3);
         assert_eq!(pz.path(), b"AAaD");
         assert_eq!(pz.val(), None);
         assert_eq!(pz.child_count(), 1);
-        assert!(pz.ascend(2));
+        assert_eq!(pz.ascend(2), 2);
         assert_eq!(pz.path(), b"AA");
         assert_eq!(pz.val(), None);
         assert_eq!(pz.child_count(), 3);
-        assert!(!pz.ascend(3));
+        assert_eq!(pz.ascend(3), 2);
         assert_eq!(pz.path(), b"");
         assert_eq!(pz.val(), None);
         assert_eq!(pz.child_count(), 1);
@@ -955,17 +985,16 @@ mod tests {
         assert_eq!(pz.path(), b"AAaDDdGG");
         assert_eq!(pz.val(), None);
         assert_eq!(pz.child_count(), 0);
-
         //Now try to hit the path transition points
-        assert!(pz.ascend(2));
+        assert_eq!(pz.ascend(2), 2);
         assert_eq!(pz.path(), b"AAaDDd");
         assert_eq!(pz.val(), Some(&1000));
         assert_eq!(pz.child_count(), 0);
-        assert!(pz.ascend(3));
+        assert_eq!(pz.ascend(3), 3);
         assert_eq!(pz.path(), b"AAa");
         assert_eq!(pz.val(), Some(&0));
         assert_eq!(pz.child_count(), 3);
-        assert!(pz.ascend(3));
+        assert_eq!(pz.ascend(3), 3);
         assert_eq!(pz.path(), b"");
         assert_eq!(pz.val(), None);
         assert_eq!(pz.child_count(), 1);
@@ -1099,7 +1128,7 @@ mod tests {
         p.descend_to("abcdefghijklmnopqrstuvwxyzbowfo");
         assert!(p.path_exists());
         assert_eq!(p.path(), b"abcdefghijklmnopqrstuvwxyzbowfo");
-        assert!(p.descend_first_byte());
+        assert_eq!(p.descend_first_byte(), Some(b'o'));
         assert_eq!(p.path(), b"abcdefghijklmnopqrstuvwxyzbowfoo");
     }
     {
@@ -1129,9 +1158,9 @@ mod tests {
         assert!(p.path_exists());
         assert_eq!(p.path(), b"abcdefghijklmnopqrstuvwxyzbowpho");
         assert!(p.is_val());
-        assert!(p.ascend_until());
+        assert_eq!(p.ascend_until(), 3);
         assert_eq!(p.path(),
b"abcdefghijklmnopqrstuvwxyzbow"); - assert!(p.ascend(3)); + assert_eq!(p.ascend(3), 3); assert_eq!(vec![b'A', b'a', b'b'], p.child_mask().iter().collect::>()); p.descend_to("ABCDEFGHIJKLMNOPQRSTUVWXYZ"); assert!(p.path_exists()); @@ -1156,7 +1185,7 @@ mod tests { p.descend_to("ABCDEFGHIJKLMNOPQRSTUVWXYZ"); assert!(!p.path_exists()); // println!("p {}", std::str::from_utf8(p.path()).unwrap()); - assert!(!p.ascend(27)); + assert_eq!(p.ascend(27), 26); } } @@ -1192,7 +1221,7 @@ mod tests { // Validate that I can back up, and re-descend { - p2.ascend(20); + assert_eq!(p2.ascend(20), 20); assert_eq!(p2.path(), b"arr"); assert_eq!(p2.path_exists(), true); assert!(p2.is_val()); @@ -1217,7 +1246,7 @@ mod tests { assert_eq!(p2.path(), b"arrowbowclub"); assert_eq!(p2.path_exists(), false); - p2.ascend(9); + assert_eq!(p2.ascend(9), 9); assert_eq!(p2.path(), b"arr"); assert_eq!(p2.path_exists(), true); assert!(p2.is_val()); @@ -1237,7 +1266,7 @@ mod tests { assert_eq!(p2.path(), b"arrowheadbowclub"); assert_eq!(p2.path_exists(), false); - p2.ascend(5); + assert_eq!(p2.ascend(5), 5); assert_eq!(p2.path(), b"arrowheadbo"); assert_eq!(p2.path_exists(), true); assert!(p2.is_val()); @@ -1269,7 +1298,9 @@ mod tests { // println!("{}", String::from_utf8_lossy(path)); let overlap = find_prefix_overlap(path, moving_pz.path()); if overlap < moving_pz.path().len() { - moving_pz.ascend(moving_pz.path().len() - overlap); + let to_ascend = moving_pz.path().len() - overlap; + let ascended = moving_pz.ascend(to_ascend); + assert_eq!(ascended, to_ascend); } if moving_pz.path().len() < path.len() { moving_pz.descend_to(&path[moving_pz.path().len()..]); @@ -1483,7 +1514,7 @@ mod tests { assert_eq!(pz.is_val(), false); // test ascend - assert_eq!(pz.ascend(snip.len() * (repeats-1)), true); + assert_eq!(pz.ascend(snip.len() * (repeats-1)), snip.len() * (repeats-1)); assert_eq!(pz.path(), snip); assert_eq!(pz.path_exists(), true); assert_eq!(pz.child_count(), 1); @@ -1492,7 +1523,7 @@ mod tests { // test ascend_until pz.reset(); pz.descend_to(&full_path); - assert_eq!(pz.ascend_until(), true); + assert_eq!(pz.ascend_until(), full_path.len()); assert_eq!(pz.path(), []); assert_eq!(pz.path_exists(), true); assert_eq!(pz.child_count(), 1); @@ -1500,14 +1531,14 @@ mod tests { // test ascend_until_branch pz.descend_to(&full_path); - assert_eq!(pz.ascend_until_branch(), true); + assert_eq!(pz.ascend_until_branch(), full_path.len()); assert_eq!(pz.path(), []); assert_eq!(pz.path_exists(), true); assert_eq!(pz.child_count(), 1); assert_eq!(pz.is_val(), false); // test descend_until - assert_eq!(pz.descend_until(), true); + assert_eq!(pz.descend_until(&mut ()), true); assert_eq!(pz.path(), full_path); assert_eq!(pz.path_exists(), true); assert_eq!(pz.child_count(), 0); @@ -1558,6 +1589,7 @@ mod tests { assert_eq!(pz.path_indices()[1], 16); assert_eq!(pz.path(), b"nopqrstuvwxyzbowph"); } + } // --- END OF MACRO GENERATED MOD --- }; diff --git a/src/tree_serialization.rs b/src/tree_serialization.rs index 4705e34..eb91896 100644 --- a/src/tree_serialization.rs +++ b/src/tree_serialization.rs @@ -34,7 +34,13 @@ pub fn serialize_fork, F: FnMut(usize, &[u8] } /// WIP -pub fn deserialize_fork + zipper::ZipperMoving, F: Fn(usize, &[u8]) -> V>(node: usize, wz: &mut WZ, source: &[u8], fv: F) -> std::io::Result { +pub fn deserialize_fork(node: usize, wz: &mut WZ, source: &[u8], fv: F) -> std::io::Result +where + V: TrieValue, + A: Allocator, + WZ: ZipperWriting + zipper::ZipperMoving + zipper::ZipperPath, + F: Fn(usize, &[u8]) -> V, +{ 
unsafe { // let mut recovered = 0; new_map_from_ana_jumping(wz, node, |n: usize, path: &[u8]| { diff --git a/src/utils/debug/diff_zipper.rs b/src/utils/debug/diff_zipper.rs index a55f3ec..060b708 100644 --- a/src/utils/debug/diff_zipper.rs +++ b/src/utils/debug/diff_zipper.rs @@ -60,9 +60,9 @@ impl ZipperMoving for DiffZi println!("DiffZipper: reset") } } - fn path(&self) -> &[u8] { - let a = self.a.path(); - let b = self.b.path(); + fn focus_byte(&self) -> Option { + let a = self.a.focus_byte(); + let b = self.b.focus_byte(); assert_eq!(a, b); a } @@ -109,7 +109,7 @@ impl ZipperMoving for DiffZi } assert_eq!(self.a.path_exists(), self.b.path_exists()); } - fn descend_indexed_byte(&mut self, idx: usize) -> bool { + fn descend_indexed_byte(&mut self, idx: usize) -> Option { let a = self.a.descend_indexed_byte(idx); let b = self.b.descend_indexed_byte(idx); if self.log_moves { @@ -118,7 +118,7 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { let a = self.a.descend_first_byte(); let b = self.b.descend_first_byte(); if self.log_moves { @@ -127,16 +127,20 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn descend_until(&mut self) -> bool { - let a = self.a.descend_until(); - let b = self.b.descend_until(); + fn descend_until(&mut self, obs: &mut Obs) -> bool { + let mut a_bytes: Vec = vec![]; + let mut b_bytes: Vec = vec![]; + let a = self.a.descend_until(&mut a_bytes); + let b = self.b.descend_until(&mut b_bytes); if self.log_moves { println!("DiffZipper: descend_until") } + assert_eq!(a_bytes, b_bytes); assert_eq!(a, b); + obs.descend_to(&a_bytes); a } - fn ascend(&mut self, steps: usize) -> bool { + fn ascend(&mut self, steps: usize) -> usize { let a = self.a.ascend(steps); let b = self.b.ascend(steps); if self.log_moves { @@ -154,7 +158,7 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { let a = self.a.ascend_until(); let b = self.b.ascend_until(); if self.log_moves { @@ -163,7 +167,7 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { let a = self.a.ascend_until_branch(); let b = self.b.ascend_until_branch(); if self.log_moves { @@ -172,7 +176,7 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn to_next_sibling_byte(&mut self) -> bool { + fn to_next_sibling_byte(&mut self) -> Option { let a = self.a.to_next_sibling_byte(); let b = self.b.to_next_sibling_byte(); if self.log_moves { @@ -181,7 +185,7 @@ impl ZipperMoving for DiffZi assert_eq!(a, b); a } - fn to_prev_sibling_byte(&mut self) -> bool { + fn to_prev_sibling_byte(&mut self) -> Option { let a = self.a.to_prev_sibling_byte(); let b = self.b.to_prev_sibling_byte(); if self.log_moves { @@ -192,6 +196,15 @@ impl ZipperMoving for DiffZi } } +impl ZipperPath for DiffZipper { + fn path(&self) -> &[u8] { + let a = self.a.path(); + let b = self.b.path(); + assert_eq!(a, b); + a + } +} + impl ZipperAbsolutePath for DiffZipper { fn origin_path(&self) -> &[u8] { diff --git a/src/utils/ints.rs b/src/utils/ints.rs index eb16fb7..052b614 100644 --- a/src/utils/ints.rs +++ b/src/utils/ints.rs @@ -302,7 +302,7 @@ fn int_range_generator_5() { drop(buildz); let mut z = zh.read_zipper_at_path(&[0]).unwrap(); - z.descend_until(); + z.descend_until(&mut ()); z.descend_first_byte(); let _z2 = zh.read_zipper_at_path(z.origin_path()).unwrap(); diff --git a/src/viz.rs 
b/src/viz.rs index 015f162..798514c 100644 --- a/src/viz.rs +++ b/src/viz.rs @@ -225,7 +225,7 @@ fn viz_zipper_logical ZipperSubtries impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving for WriteZipperTracked<'a, 'path, V, A> { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { self.z.descend_to(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } +} +impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperPath for WriteZipperTracked<'a, 'path, V, A> { + fn path(&self) -> &[u8] { self.z.path() } } - impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> zipper_priv::ZipperPriv for WriteZipperTracked<'a, 'path, V, A> { type V = V; type A = A; @@ -501,20 +503,23 @@ impl<'a, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperSubtries impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving for WriteZipperUntracked<'a, 'path, V, A> { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { self.z.descend_to(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> 
bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } +} +impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperPath for WriteZipperUntracked<'a, 'path, V, A> { + fn path(&self) -> &[u8] { self.z.path() } } impl<'a, 'k, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> zipper_priv::ZipperPriv for WriteZipperUntracked<'a, 'k, V, A> { @@ -663,20 +668,23 @@ impl ZipperSubtries for Writ impl ZipperMoving for WriteZipperOwned { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { self.z.descend_to(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } +} +impl ZipperPath for WriteZipperOwned { + fn path(&self) -> &[u8] { self.z.path() } } impl zipper_priv::ZipperPriv for WriteZipperOwned { @@ -939,20 +947,17 @@ impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving self.key.prefix_buf.len() <= self.key.origin_path.len() } + #[inline] + fn focus_byte(&self) -> Option { + self.key.prefix_buf.last().cloned() + } + fn reset(&mut self) { self.focus_stack.to_root(); self.key.prefix_buf.truncate(self.key.origin_path.len()); self.key.prefix_idx.clear(); } - fn path(&self) -> &[u8] { - if self.key.prefix_buf.len() > 0 { - &self.key.prefix_buf[self.key.origin_path.len()..] 
- } else { - &[] - } - } - fn val_count(&self) -> usize { let root_val = self.is_val() as usize; let focus = self.get_focus(); @@ -969,32 +974,34 @@ impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving self.descend_to_internal(); } - fn ascend(&mut self, mut steps: usize) -> bool { + fn ascend(&mut self, steps: usize) -> usize { + let mut remaining = steps; loop { if self.key.node_key().len() == 0 { self.ascend_across_nodes(); } - if steps == 0 { - return true + if remaining == 0 { + return steps; } if self.at_root() { - return false + return steps - remaining; } debug_assert!(self.key.node_key().len() > 0); - let cur_jump = steps.min(self.key.excess_key_len()); + let cur_jump = remaining.min(self.key.excess_key_len()); self.key.prefix_buf.truncate(self.key.prefix_buf.len() - cur_jump); - steps -= cur_jump; + remaining -= cur_jump; } } - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { if self.at_root() { - return false; + return 0; } + let mut ascended = 0; loop { - self.ascend_within_node(); + ascended += self.ascend_within_node(); if self.at_root() { - return true; + return ascended; } if self.key.node_key().len() == 0 { self.ascend_across_nodes(); @@ -1004,17 +1011,18 @@ impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving } } debug_assert!(self.key.node_key().len() > 0); //We should never finish with a zero-length node-key - true + ascended } - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { if self.at_root() { - return false; + return 0; } + let mut ascended = 0; loop { - self.ascend_within_node(); + ascended += self.ascend_within_node(); if self.at_root() { - return true; + return ascended; } if self.key.node_key().len() == 0 { self.ascend_across_nodes(); @@ -1024,7 +1032,59 @@ impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperMoving } } debug_assert!(self.key.node_key().len() > 0); //We should never finish with a zero-length node-key - true + ascended + } + fn to_next_sibling_byte(&mut self) -> Option { + let cur_byte = match self.path().last() { + Some(byte) => *byte, + None => return None, + }; + if !self.ascend_byte() { + return None; + } + let mask = self.child_mask(); + match mask.next_bit(cur_byte) { + Some(byte) => { + self.descend_to_byte(byte); + debug_assert!(self.path_exists()); + Some(byte) + }, + None => { + self.descend_to_byte(cur_byte); + None + } + } + } + fn to_prev_sibling_byte(&mut self) -> Option { + let cur_byte = match self.path().last() { + Some(byte) => *byte, + None => return None, + }; + if !self.ascend_byte() { + return None; + } + let mask = self.child_mask(); + match mask.prev_bit(cur_byte) { + Some(byte) => { + self.descend_to_byte(byte); + debug_assert!(self.path_exists()); + Some(byte) + }, + None => { + self.descend_to_byte(cur_byte); + debug_assert!(self.path_exists()); + None + } + } + } +} +impl<'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> ZipperPath for WriteZipperCore<'a, 'path, V, A> { + fn path(&self) -> &[u8] { + if self.key.prefix_buf.len() > 0 { + &self.key.prefix_buf[self.key.origin_path.len()..] 
+ } else { + &[] + } } } @@ -1561,18 +1621,18 @@ impl <'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> WriteZipperC fn k_path_internal(&mut self, k: usize, base_idx: usize) -> bool { loop { if self.path().len() < base_idx + k { - while self.descend_first_byte() { + while self.descend_first_byte().is_some() { if self.path().len() == base_idx + k { return true } } } - if self.to_next_sibling_byte() { + if self.to_next_sibling_byte().is_some() { if self.path().len() == base_idx + k { return true } continue } while self.path().len() > base_idx { self.ascend_byte(); if self.path().len() == base_idx { return false } - if self.to_next_sibling_byte() { break } + if self.to_next_sibling_byte().is_some() { break } } } } @@ -1594,7 +1654,7 @@ impl <'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> WriteZipperC let downstream_node = self.get_focus().into_option(); - let fully_ascended = self.ascend(n); + let fully_ascended = self.ascend(n) == n; self.graft_internal(downstream_node); fully_ascended @@ -1922,7 +1982,8 @@ impl <'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> WriteZipperC /// See [ZipperWriting::prune_ascend] fn prune_ascend(&mut self) -> usize { let bytes = self.prune_path(); - self.ascend(bytes); + let ascended = self.ascend(bytes); + debug_assert_eq!(ascended, bytes); bytes } @@ -2186,10 +2247,12 @@ impl <'a, 'path, V: Clone + Send + Sync + Unpin, A: Allocator + 'a> WriteZipperC } /// Internal method used to impement `ascend_until` when ascending within a node #[inline] - fn ascend_within_node(&mut self) { + fn ascend_within_node(&mut self) -> usize { let branch_key = self.focus_stack.top().unwrap().prior_branch_key(self.key.node_key()); let new_len = self.key.origin_path.len().max(self.key.node_key_start() + branch_key.len()); + let old_len = self.key.prefix_buf.len(); self.key.prefix_buf.truncate(new_len); + old_len - new_len } } @@ -2909,22 +2972,22 @@ mod tests { // and then clean them up wz.descend_to([0, 0, 0, 0]); wz.set_val(()); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.descend_to([1, 0, 0, 1]); wz.set_val(()); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.descend_to([2, 0, 0, 2]); wz.set_val(()); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.descend_to([0, 0, 0, 0]); wz.remove_val(true); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.descend_to([1, 0, 0, 1]); wz.remove_val(true); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.descend_to([2, 0, 0, 2]); wz.remove_val(true); - wz.ascend(4); + assert_eq!(wz.ascend(4), 4); wz.meet_into(&rz, true); @@ -2932,7 +2995,7 @@ mod tests { wz.descend_to([194, 7, 162]); assert!(wz.path_exists()); assert!(wz.val().is_some()); - assert!(wz.ascend(3)); + assert_eq!(wz.ascend(3), 3); wz.descend_to([194, 7, 163]); assert!(wz.path_exists()); assert!(wz.val().is_some()); @@ -3042,26 +3105,26 @@ mod tests { assert!(wz.path_exists()); assert_eq!(wz.path(), b"mulus"); assert_eq!(wz.child_count(), 0); - assert!(wz.ascend_until()); + assert_eq!(wz.ascend_until(), 4); assert_eq!(wz.path(), b"m"); assert_eq!(wz.child_count(), 3); //Make sure we can't ascend above the zipper's root with ascend_until - assert!(wz.ascend_until()); + assert_eq!(wz.ascend_until(), 1); assert_eq!(wz.path(), b""); - assert!(!wz.ascend_until()); + assert_eq!(wz.ascend_until(), 0); //Test step-wise `ascend` wz.descend_to(b"manus"); assert_eq!(wz.path(), b"manus"); - assert_eq!(wz.ascend(1), true); + assert_eq!(wz.ascend(1), 1); assert_eq!(wz.path(), b"manu"); - assert_eq!(wz.ascend(5), false); + assert_eq!(wz.ascend(5), 
4); assert_eq!(wz.path(), b""); assert_eq!(wz.at_root(), true); wz.descend_to(b"mane"); assert_eq!(wz.path(), b"mane"); - assert_eq!(wz.ascend(3), true); + assert_eq!(wz.ascend(3), 3); assert_eq!(wz.path(), b"m"); assert_eq!(wz.child_count(), 3); } @@ -3766,7 +3829,7 @@ mod tests { assert_eq!(wz.origin_path(), b"This path can take you anywhere. Just close your eyes... and open your heart."); // Test forking a zipper from a WriteZipper and make sure it inherits the origin_path - wz.ascend(6); + assert_eq!(wz.ascend(6), 6); assert_eq!(wz.is_val(), false); let mut rz = wz.fork_read_zipper(); assert_eq!(rz.path(), b""); @@ -3785,7 +3848,7 @@ mod tests { assert_eq!(rz.is_val(), true); assert_eq!(rz.path(), b" and open your heart."); assert_eq!(rz.origin_path(), b"This path can take you anywhere. Just close your eyes... and open your heart."); - rz.ascend(6); + assert_eq!(rz.ascend(6), 6); assert_eq!(rz.path(), b" and open your "); assert_eq!(rz.origin_path(), b"This path can take you anywhere. Just close your eyes... and open your "); assert_eq!(rz.is_val(), false); @@ -3884,9 +3947,9 @@ mod tests { assert_eq!(wz.path(), &[0, 0, 1, 0, 0, 2, 3, 4]); assert_eq!(wz.prune_path(), 6); //Prune back to the value at [0, 0, 0] assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(4), true); + assert_eq!(wz.ascend(4), 4); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(2), true); + assert_eq!(wz.ascend(2), 2); assert_eq!(wz.path_exists(), true); assert_eq!(wz.path(), &[0, 0]); @@ -3946,16 +4009,16 @@ mod tests { assert_eq!(wz.prune_path(), 0); //And try pruning above the end - assert_eq!(wz.ascend(2), true); + assert_eq!(wz.ascend(2), 2); assert_eq!(wz.prune_path(), 0); - assert_eq!(wz.descend_first_byte(), true); + assert_eq!(wz.descend_first_byte(), Some(4)); //Now validate that prune goes all the way to the root assert_eq!(wz.path(), &[0, 0, 0, 1, 2, 3, 4]); assert_eq!(wz.path_exists(), true); assert_eq!(wz.prune_path(), 7); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(7), true); + assert_eq!(wz.ascend(7), 7); assert_eq!(wz.path(), &[]); assert_eq!(wz.path_exists(), true); assert_eq!(wz.child_count(), 0); @@ -4007,9 +4070,9 @@ mod tests { assert_eq!(wz.path(), &[1, 0, 3, 4, 5, 6]); assert_eq!(wz.prune_path(), 4); //Prune back to the value at [1, 0, 0] assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(3), true); + assert_eq!(wz.ascend(3), 3); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(1), true); + assert_eq!(wz.ascend(1), 1); assert_eq!(wz.path_exists(), true); assert_eq!(wz.path(), &[1, 0]); assert_eq!(wz.child_count(), 3); @@ -4040,7 +4103,7 @@ mod tests { assert_eq!(wz.path_exists(), true); assert_eq!(wz.prune_path(), 50); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(49), true); + assert_eq!(wz.ascend(49), 49); assert_eq!(wz.path_exists(), false); assert_eq!(wz.ascend_byte(), true); assert_eq!(wz.path_exists(), true); @@ -4062,7 +4125,7 @@ mod tests { assert_eq!(wz.path_exists(), true); assert_eq!(wz.prune_path(), 3); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(3), true); + assert_eq!(wz.ascend(3), 3); assert_eq!(wz.child_count(), 2); } @@ -4089,18 +4152,18 @@ mod tests { assert_eq!(wz.prune_path(), 3); assert_eq!(wz.path(), &[0, 0, 1, 0, 0]); assert_eq!(wz.path_exists(), false); - assert_eq!(wz.ascend(3), true); + assert_eq!(wz.ascend(3), 3); assert_eq!(wz.path_exists(), true); //Recreate some new paths, remove one and try re-extending it wz.descend_to([0, 0, 0, 0]); assert_eq!(wz.path_exists(), false); 
assert_eq!(wz.set_val(()), None); - assert_eq!(wz.ascend(4), true); + assert_eq!(wz.ascend(4), 4); wz.descend_to([0, 0, 1, 0]); assert_eq!(wz.path_exists(), false); assert_eq!(wz.set_val(()), None); - assert_eq!(wz.ascend(2), true); + assert_eq!(wz.ascend(2), 2); wz.descend_to_byte(0); assert_eq!(wz.remove_branches(false), true); assert_eq!(wz.path_exists(), true); @@ -4158,7 +4221,7 @@ mod tests { assert_eq!(src_z.prune_path(), 3); assert_eq!(src_z.path(), &[0, 0, 1, 0, 0]); assert_eq!(src_z.path_exists(), false); - assert_eq!(src_z.ascend(3), true); + assert_eq!(src_z.ascend(3), 3); assert_eq!(src_z.path_exists(), true); //Test removing from a node boundary @@ -4232,11 +4295,11 @@ mod tests { assert_eq!(wz.path_exists(), true); assert_eq!(wz.create_path(), false); - assert_eq!(wz.descend_first_byte(), true); + assert_eq!(wz.descend_first_byte(), Some(0)); assert_eq!(wz.path_exists(), true); assert_eq!(wz.create_path(), false); assert_eq!(wz.val(), Some(&())); - assert_eq!(wz.descend_first_byte(), false); + assert_eq!(wz.descend_first_byte(), None); wz.descend_to_byte(0); assert_eq!(wz.path_exists(), false); assert_eq!(wz.create_path(), true); @@ -4265,7 +4328,7 @@ mod tests { wz.descend_to([3, 0, 0, 0]); assert_eq!(wz.path_exists(), false); assert_eq!(wz.create_path(), true); - assert_eq!(wz.ascend(4), true); + assert_eq!(wz.ascend(4), 4); assert_eq!(wz.child_count(), 4); } @@ -4322,7 +4385,7 @@ mod tests { assert_eq!(zipper.path(), b""); assert_eq!(zipper.val_count(), 2); - assert_eq!(zipper.descend_until(), true); + assert_eq!(zipper.descend_until(&mut ()), true); assert_eq!(zipper.path(), b"arrow"); assert_eq!(zipper.val_count(), 1); } diff --git a/src/zipper.rs b/src/zipper.rs index 45cb6c3..9ceab8b 100644 --- a/src/zipper.rs +++ b/src/zipper.rs @@ -20,32 +20,11 @@ pub use crate::zipper_head::*; pub use crate::product_zipper::{ProductZipper, ProductZipperG}; pub use crate::overlay_zipper::{OverlayZipper}; pub use crate::prefix_zipper::{PrefixZipper}; +pub use crate::path_tracker::{PathTracker}; pub use crate::empty_zipper::{EmptyZipper}; pub use crate::poly_zipper::PolyZipper; use crate::zipper_tracking::*; - - -// mod goat { -// use crate as pathmap; -// use pathmap::*; -// use pathmap::zipper::*; - -// #[derive(PolyZipper)] -// enum MyPolyZipper<'trie, 'path, V: Clone + Send + Sync + Unpin> { -// Tracked(ReadZipperTracked<'trie, 'path, V>), -// Untracked(ReadZipperUntracked<'trie, 'path, V>), -// } - -// #[test] -// pub fn goat_f() { -// let map = PathMap::<()>::new(); - -// let x = MyPolyZipper::from(map.read_zipper()); -// } -// } - - /// The most fundamantal interface for a zipper, compatible with all zipper types pub trait Zipper { /// Returns `true` if the zipper's focus is on a path within the trie, otherwise `false` @@ -136,9 +115,16 @@ pub trait ZipperSubtries: Zi pub trait ZipperMoving: Zipper { /// Returns `true` if the zipper's focus is at its root, and it cannot ascend further, otherwise returns `false` fn at_root(&self) -> bool { - self.path().len() == 0 + self.focus_byte().is_none() } + /// Returns the path byte at the zipper's focus, or `None` if the zipper is at the root + /// + /// NOTE: This method is not aware of the zipper's position within a larger trie, and + /// therefore it will always return `None` when the zipper is at the root, even if + /// [`root_prefix_path`](ZipperAbsolutePath::root_prefix_path) returns a non-empty slice. 
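+    ///
+    /// # Example
+    ///
+    /// A minimal sketch of the intended contract (illustrative only; the `PathMap::insert`
+    /// call and map contents here are assumed, not taken from the crate's docs):
+    ///
+    /// ```ignore
+    /// let mut map = PathMap::<u64>::new();
+    /// map.insert(b"ab", 1);
+    /// let mut z = map.read_zipper();
+    /// assert_eq!(z.focus_byte(), None);        // at the root there is no focus byte
+    /// z.descend_to(b"ab");
+    /// assert_eq!(z.focus_byte(), Some(b'b'));  // the last byte of the focus path
+    /// ```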
+    fn focus_byte(&self) -> Option<u8>;
+
     /// Resets the zipper's focus back to its root
     fn reset(&mut self) {
         while !self.at_root() {
@@ -146,34 +132,12 @@ pub trait ZipperMoving: Zipper {
         }
     }
 
-    /// Returns the path from the zipper's root to the current focus
-    fn path(&self) -> &[u8];
-
     /// Returns the total number of values contained at and below the zipper's focus, including the focus itself
     ///
     /// WARNING: This is not a cheap method. It may have an order-N cost
     //GOAT! This doesn't belong here. Should be a function that uses a non-side-effect catamorphism
     fn val_count(&self) -> usize;
 
-    /// Moves the zipper's focus to a specific location specified by `path`, relative to the zipper's root
-    ///
-    /// Returns the number of bytes shared between the old and new location
-    fn move_to_path<K: AsRef<[u8]>>(&mut self, path: K) -> usize {
-        let path = path.as_ref();
-        let p = self.path();
-        let overlap = find_prefix_overlap(path, p);
-        let to_ascend = p.len() - overlap;
-        if overlap == 0 { // This heuristic can be fine-tuned for performance; the behavior of the two branches is equivalent
-            self.reset();
-            self.descend_to(path);
-            overlap
-        } else {
-            self.ascend(to_ascend);
-            self.descend_to(&path[overlap..]);
-            overlap
-        }
-    }
-
     /// Moves the zipper deeper into the trie, to the `key` specified relative to the current zipper focus
     fn descend_to<K: AsRef<[u8]>>(&mut self, k: K);
 
@@ -261,22 +225,22 @@ pub trait ZipperMoving: Zipper {
     /// WARNING: The branch represented by a given index is not guaranteed to be stable across modifications
     /// to the trie. This method should only be used as part of a directed traversal operation, but
     /// index-based paths may not be stored as locations within the trie.
-    fn descend_indexed_byte(&mut self, idx: usize) -> bool {
+    fn descend_indexed_byte(&mut self, idx: usize) -> Option<u8> {
         let mask = self.child_mask();
         let child_byte = match mask.indexed_bit::(idx) {
             Some(byte) => byte,
             None => {
-                return false
+                return None;
             }
         };
         self.descend_to_byte(child_byte);
         debug_assert!(self.path_exists());
-        true
+        Some(child_byte)
     }
 
     /// A deprecated alias for [ZipperMoving::descend_indexed_byte]
     #[deprecated] //GOAT-old-names
-    fn descend_indexed_branch(&mut self, idx: usize) -> bool {
+    fn descend_indexed_branch(&mut self, idx: usize) -> Option<u8> {
         self.descend_indexed_byte(idx)
     }
 
@@ -284,7 +248,7 @@ pub trait ZipperMoving: Zipper {
     ///
     /// NOTE: This method should have identical behavior to passing `0` to [descend_indexed_byte](ZipperMoving::descend_indexed_byte),
     /// although with less overhead
-    fn descend_first_byte(&mut self) -> bool {
+    fn descend_first_byte(&mut self) -> Option<u8> {
         self.descend_indexed_byte(0)
     }
 
@@ -292,24 +256,24 @@ pub trait ZipperMoving: Zipper {
     ///
     /// NOTE: This method should have identical behavior to passing `child_count() - 1` to [descend_indexed_byte](ZipperMoving::descend_indexed_byte),
     /// although with less overhead
-    fn descend_last_byte(&mut self) -> bool {
+    fn descend_last_byte(&mut self) -> Option<u8> {
         let cc = self.child_count();
-        if cc == 0 { false }
+        if cc == 0 { None }
         else { self.descend_indexed_byte( cc- 1) }
     }
 
-    /// Descends the zipper's focus until a branch or a value is encountered. Returns `true` if the focus
-    /// moved otherwise returns `false`
+    /// Descends the zipper's focus until a branch, a value, or the path end is encountered. Returns `true`
+    /// if the focus moved, otherwise returns `false`
     ///
     /// If there is a value at the focus, the zipper will descend to the next value or branch, however the
     /// zipper will not descend further if this method is called with the focus already on a branch.
-    ///
-    /// Does nothing and returns `false` if the zipper's focus is on a non-existent path.
-    fn descend_until(&mut self) -> bool {
+    fn descend_until<Obs: PathObserver>(&mut self, obs: &mut Obs) -> bool {
         let mut descended = false;
-        while self.child_count() == 1 {
+        while self.child_count() == 1 && obs.remaining_limit() > 0 {
             descended = true;
-            self.descend_first_byte();
+            if let Some(byte) = self.descend_first_byte() {
+                let _ = obs.descend_to_byte(byte);
+            }
             if self.is_val() {
                 break;
@@ -317,29 +281,28 @@ pub trait ZipperMoving: Zipper {
         descended
     }
 
-    /// Ascends the zipper `steps` steps. Returns `true` if the zipper sucessfully moved `steps`
+    /// Ascends the zipper `steps` steps. Returns the number of bytes ascended
     ///
-    /// If the root is fewer than `n` steps from the zipper's position, then this method will stop at
-    /// the root and return `false`
-    fn ascend(&mut self, steps: usize) -> bool;
+    /// If the zipper's focus is fewer than `steps` bytes from the root, then this method will stop at
+    /// the root and return the number of bytes ascended, which may be smaller than `steps`.
+    fn ascend(&mut self, steps: usize) -> usize;
 
     /// Ascends the zipper up a single byte. Equivalent to passing `1` to [ascend](Self::ascend)
     fn ascend_byte(&mut self) -> bool {
-        self.ascend(1)
+        self.ascend(1) == 1
     }
 
-    /// Ascends the zipper to the nearest upstream branch point or value. Returns `true` if the zipper
-    /// focus moved upwards, otherwise returns `false` if the zipper was already at the root
-    ///
-    /// NOTE: A default implementation could be provided, but all current zippers have more optimal native implementations.
-    fn ascend_until(&mut self) -> bool;
+    /// Ascends the zipper to the nearest upstream branch point or value. Returns the number of bytes
+    /// ascended. Returns `0` if the zipper was already at the root
+    //
+    // NOTE: A default implementation could be provided, but all current zippers have more optimal native implementations.
+    fn ascend_until(&mut self) -> usize;
 
     /// Ascends the zipper to the nearest upstream branch point, skipping over values along the way. Returns
-    /// `true` if the zipper focus moved upwards, otherwise returns `false` if the zipper was already at the
-    /// root
-    ///
-    /// NOTE: A default implementation could be provided, but all current zippers have more optimal native implementations.
-    fn ascend_until_branch(&mut self) -> bool;
+    /// the number of bytes ascended. Returns `0` if the zipper was already at the root
+    //
+    // NOTE: A default implementation could be provided, but all current zippers have more optimal native implementations.
+    fn ascend_until_branch(&mut self) -> usize;
 
     /// Moves the zipper's focus to the next sibling byte with the same parent
     ///
     /// This method is equivalent to calling [ZipperMoving::ascend] with `1`, followed by [ZipperMoving::descend_indexed_byte]
     /// where the index passed is 1 more than the index of the current focus position.
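+    ///
+    /// # Example
+    ///
+    /// A hedged sketch of the new `Option<u8>` return contract (the trie contents and the
+    /// zipper `z` are assumed here for illustration):
+    ///
+    /// ```ignore
+    /// // suppose the paths b"aa" and b"ab" both exist, and the focus is at b"aa"
+    /// assert_eq!(z.to_next_sibling_byte(), Some(b'b')); // focus is now b"ab"
+    /// assert_eq!(z.to_next_sibling_byte(), None);       // no further sibling; focus restored
+    /// ```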
-    fn to_next_sibling_byte(&mut self) -> bool {
-        let cur_byte = match self.path().last() {
-            Some(byte) => *byte,
-            None => return false
-        };
+    fn to_next_sibling_byte(&mut self) -> Option<u8> {
+        let cur_byte = self.focus_byte()?;
         if !self.ascend_byte() {
-            return false
+            return None
         }
         let mask = self.child_mask();
         match mask.next_bit(cur_byte) {
             Some(byte) => {
                 self.descend_to_byte(byte);
                 debug_assert!(self.path_exists());
-                true
+                Some(byte)
             },
             None => {
                 self.descend_to_byte(cur_byte);
-                false
+                None
             }
         }
     }
 
@@ -377,25 +337,22 @@ pub trait ZipperMoving: Zipper {
     ///
     /// This method is equivalent to calling [Self::ascend] with `1`, followed by [Self::descend_indexed_byte]
     /// where the index passed is 1 less than the index of the current focus position.
-    fn to_prev_sibling_byte(&mut self) -> bool {
-        let cur_byte = match self.path().last() {
-            Some(byte) => *byte,
-            None => return false
-        };
+    fn to_prev_sibling_byte(&mut self) -> Option<u8> {
+        let cur_byte = self.focus_byte()?;
         if !self.ascend_byte() {
-            return false
+            return None
         }
         let mask = self.child_mask();
         match mask.prev_bit(cur_byte) {
             Some(byte) => {
                 self.descend_to_byte(byte);
                 debug_assert!(self.path_exists());
-                true
+                Some(byte)
             },
             None => {
                 self.descend_to_byte(cur_byte);
                 debug_assert!(self.path_exists());
-                false
+                None
             }
         }
     }
 
@@ -409,18 +366,68 @@
         //If we're at a leaf ascend until we're not and jump to the next sibling
         if self.child_count() == 0 {
             //We can stop ascending when we succeed in moving to a sibling
-            while !self.to_next_sibling_byte() {
+            while self.to_next_sibling_byte().is_none() {
                 if !self.ascend_byte() {
                     return false;
                 }
             }
         } else {
-            return self.descend_first_byte()
+            return self.descend_first_byte().is_some()
         }
         true
     }
 }
 
+/// Implemented on types used to observe the effects of methods from [`ZipperMoving`], [`ZipperIteration`],
+/// and other methods that affect a zipper's focus position
+pub trait PathObserver {
+    /// Informs the `PathObserver` that the zipper is descending `path` bytes, relative
+    /// to the zipper's current focus
+    fn descend_to(&mut self, path: &[u8]);
+
+    /// Equivalent to `self.descend_to(&[byte])` but with slightly less overhead
+    fn descend_to_byte(&mut self, byte: u8) {
+        self.descend_to(&[byte]);
+    }
+
+    /// Informs the `PathObserver` that the zipper is ascending `steps` bytes, relative
+    /// to the zipper's current focus
+    fn ascend(&mut self, steps: usize);
+
+    /// Returns the number of bytes remaining in the observer's buffer. Return [`usize::MAX`] to
+    /// indicate that the buffer may grow dynamically or that a limit does not apply
+    //
+    //GOAT, after implementing (and in some cases failing to implement) graceful exits from
+    // methods when the limit is insufficient, I am coming to believe exposing this limit here
+    // is the wrong design. It puts tons of branches into the zipper methods we are hoping to
+    // streamline, so it'll come with a perf and maintainability cost. Also, really long paths
+    // are already considered an anti-pattern, so we shouldn't bump up against a need for really
+    // big buffers very often. It seems to me that a `PathObserver` that wants to absolutely
+    // avoid a realloc in zipper movement should take care to preallocate a sufficient buffer
+    // for the common case, and spill the unwritten bytes somehow inside its implementation,
+    // setting whatever retry flags are needed for the caller.
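+    //
+    // For reference, a hedged usage sketch of the observer hook (illustrative only): a
+    // `Vec<u8>` records the bytes descended, while `()` is the no-op observer for callers
+    // that don't need them.
+    //
+    //     let mut recorded: Vec<u8> = vec![];
+    //     z.descend_until(&mut recorded); // bytes walked over are appended to `recorded`
+    //     z.descend_until(&mut ());       // observe nothing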
+    fn remaining_limit(&self) -> usize {
+        usize::MAX
+    }
+}
+
+impl PathObserver for Vec<u8> {
+    fn descend_to(&mut self, path: &[u8]) {
+        self.extend_from_slice(path);
+    }
+    fn descend_to_byte(&mut self, byte: u8) {
+        self.push(byte)
+    }
+    fn ascend(&mut self, steps: usize) {
+        self.truncate(self.len() - steps)
+    }
+}
+
+impl PathObserver for () {
+    fn descend_to(&mut self, _path: &[u8]) { }
+    fn ascend(&mut self, _steps: usize) { }
+}
+
 /// An interface to access values through a [Zipper] that cannot modify the trie. Allows
 /// references with lifetimes that may outlive the zipper
 ///
@@ -513,7 +520,7 @@ pub trait ZipperReadOnlySubtries<'a, V: Clone + Send + Sync, A: Allocator = Glob
 /// An interface for advanced [Zipper] movements used for various types of iteration; such as iterating
 /// every value, or iterating all paths descending from a common root at a certain depth
-pub trait ZipperIteration: ZipperMoving {
+pub trait ZipperIteration: ZipperMoving + ZipperPath {
     /// Systematically advances to the next value accessible from the zipper, traversing in a depth-first
     /// order
     ///
     /// ... encountered the root.
     fn to_next_val(&mut self) -> bool {
         loop {
-            if self.descend_first_byte() {
+            if self.descend_first_byte().is_some() {
                 if self.is_val() {
                     return true
                 }
-                if self.descend_until() {
+                if self.descend_until(&mut ()) {
                     if self.is_val() {
                         return true
                     }
                 }
             } else {
                 'ascending: loop {
-                    if self.to_next_sibling_byte() {
+                    if self.to_next_sibling_byte().is_some() {
                         if self.is_val() {
                             return true
                         }
@@ -558,9 +565,9 @@ pub trait ZipperIteration: ZipperMoving {
     /// position.
     fn descend_last_path(&mut self) -> bool {
         let mut any = false;
-        while self.descend_last_byte() {
+        while self.descend_last_byte().is_some() {
             any = true;
-            self.descend_until();
+            self.descend_until(&mut ());
         }
         any
     }
@@ -602,27 +609,27 @@ pub trait ZipperIteration: ZipperMoving {
 
 /// The default implementation of both [ZipperIteration::to_next_k_path] and [ZipperIteration::descend_first_k_path]
 #[inline]
-fn k_path_default_internal<Z: ZipperMoving>(z: &mut Z, k: usize, base_idx: usize) -> bool {
+fn k_path_default_internal<Z: ZipperMoving + ZipperPath>(z: &mut Z, k: usize, base_idx: usize) -> bool {
     loop {
         if z.path().len() < base_idx + k {
-            while z.descend_first_byte() {
+            while z.descend_first_byte().is_some() {
                 if z.path().len() == base_idx + k { return true }
             }
         }
-        if z.to_next_sibling_byte() {
+        if z.to_next_sibling_byte().is_some() {
             if z.path().len() == base_idx + k { return true }
             continue
         }
         while z.path().len() > base_idx {
             z.ascend_byte();
             if z.path().len() == base_idx { return false }
-            if z.to_next_sibling_byte() { break }
+            if z.to_next_sibling_byte().is_some() { break }
         }
     }
 }
 
 /// An interface for a [Zipper] to support accessing the full path buffer used to create the zipper
-pub trait ZipperAbsolutePath: ZipperMoving {
+pub trait ZipperAbsolutePath: ZipperPath {
     /// Returns the entire path from the zipper's origin to its current focus
     ///
     /// The zipper's origin depends on how the zipper was created. For zippers created directly from a
@@ -676,8 +683,41 @@ pub trait ZipperConcrete {
     fn is_shared(&self) -> bool;
 }
 
+/// Provides the path to the current focus.
+///
+/// If a zipper doesn't implement this trait, it is a "blind" zipper,
+/// which doesn't track its own path in the trie.
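+///
+/// # Example
+///
+/// A sketch of the relative-path bookkeeping (`PathMap::insert` and the map contents are
+/// assumed here; illustrative only):
+///
+/// ```ignore
+/// let mut map = PathMap::<()>::new();
+/// map.insert(b"abc", ());
+/// let mut z = map.read_zipper();
+/// z.descend_to(b"ab");
+/// assert_eq!(z.path(), b"ab");                          // relative to the zipper's root
+/// assert_eq!(z.path().last().copied(), z.focus_byte()); // agrees with focus_byte()
+/// ```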
+pub trait ZipperPath: ZipperMoving {
+    /// Returns the path from the zipper's root to the current focus
+    ///
+    /// This method will always return a zero-length slice when [`at_root`](ZipperMoving::at_root) returns `true`,
+    /// and a non-zero-length slice when it returns `false`.
+    ///
+    /// `z.path().last()` is guaranteed to equal the return value of [`focus_byte`](ZipperMoving::focus_byte).
+    fn path(&self) -> &[u8];
+
+    /// Moves the zipper's focus to a specific location specified by `path`, relative to the zipper's root
+    ///
+    /// Returns the number of bytes shared between the old and new location
+    fn move_to_path<K: AsRef<[u8]>>(&mut self, path: K) -> usize {
+        let path = path.as_ref();
+        let p = self.path();
+        let overlap = find_prefix_overlap(path, p);
+        let to_ascend = p.len() - overlap;
+        if overlap == 0 { // This heuristic can be fine-tuned for performance; the behavior of the two branches is equivalent
+            self.reset();
+            self.descend_to(path);
+            overlap
+        } else {
+            self.ascend(to_ascend);
+            self.descend_to(&path[overlap..]);
+            overlap
+        }
+    }
+}
+
 /// Provides more direct control over a [ZipperMoving] zipper's path buffer
-pub trait ZipperPathBuffer: ZipperMoving {
+pub trait ZipperPathBuffer: ZipperPath {
     /// Internal method to get the path, beyond its length. Panics if `len` > the path's capacity, or
     /// if the zipper is relative and doesn't have an `origin_path`
     ///
@@ -775,7 +815,7 @@ impl Zipper for &mut Z where Z: Zipper {
 impl ZipperMoving for &mut Z where Z: ZipperMoving + Zipper {
     fn at_root(&self) -> bool { (**self).at_root() }
     fn reset(&mut self) { (**self).reset() }
-    fn path(&self) -> &[u8] { (**self).path() }
+    fn focus_byte(&self) -> Option<u8> { (**self).focus_byte() }
     fn val_count(&self) -> usize { (**self).val_count() }
     fn descend_to<K: AsRef<[u8]>>(&mut self, k: K) { (**self).descend_to(k) }
     fn descend_to_check<K: AsRef<[u8]>>(&mut self, k: K) -> bool { (**self).descend_to_check(k) }
@@ -783,18 +823,22 @@ impl ZipperMoving for &mut Z where Z: ZipperMoving + Zipper {
     fn descend_to_val<K: AsRef<[u8]>>(&mut self, k: K) -> usize { (**self).descend_to_val(k) }
     fn descend_to_byte(&mut self, k: u8) { (**self).descend_to_byte(k) }
     fn descend_to_existing_byte(&mut self, k: u8) -> bool { (**self).descend_to_existing_byte(k) }
-    fn descend_indexed_byte(&mut self, idx: usize) -> bool { (**self).descend_indexed_byte(idx) }
-    fn descend_first_byte(&mut self) -> bool { (**self).descend_first_byte() }
-    fn descend_until(&mut self) -> bool { (**self).descend_until() }
-    fn ascend(&mut self, steps: usize) -> bool { (**self).ascend(steps) }
+    fn descend_indexed_byte(&mut self, idx: usize) -> Option<u8> { (**self).descend_indexed_byte(idx) }
+    fn descend_first_byte(&mut self) -> Option<u8> { (**self).descend_first_byte() }
+    fn descend_until<Obs: PathObserver>(&mut self, obs: &mut Obs) -> bool { (**self).descend_until(obs) }
+    fn ascend(&mut self, steps: usize) -> usize { (**self).ascend(steps) }
     fn ascend_byte(&mut self) -> bool { (**self).ascend_byte() }
-    fn ascend_until(&mut self) -> bool { (**self).ascend_until() }
-    fn ascend_until_branch(&mut self) -> bool { (**self).ascend_until_branch() }
-    fn to_next_sibling_byte(&mut self) -> bool { (**self).to_next_sibling_byte() }
-    fn to_prev_sibling_byte(&mut self) -> bool { (**self).to_prev_sibling_byte() }
+    fn ascend_until(&mut self) -> usize { (**self).ascend_until() }
+    fn ascend_until_branch(&mut self) -> usize { (**self).ascend_until_branch() }
+    fn to_next_sibling_byte(&mut self) -> Option<u8> { (**self).to_next_sibling_byte() }
+    fn to_prev_sibling_byte(&mut self) -> Option<u8> { (**self).to_prev_sibling_byte() }
to_next_step(&mut self) -> bool { (**self).to_next_step() } } +impl ZipperPath for &mut Z where Z: ZipperPath { + fn path(&self) -> &[u8] { (**self).path() } +} + impl ZipperAbsolutePath for &mut Z where Z: ZipperAbsolutePath { fn origin_path(&self) -> &[u8] { (**self).origin_path() } fn root_prefix_path(&self) -> &[u8] { (**self).root_prefix_path() } @@ -910,9 +954,8 @@ impl ZipperSubtries for Read impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperMoving for ReadZipperTracked<'trie, '_, V, A> { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - #[inline] - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { self.z.descend_to(k) } fn descend_to_check>(&mut self, k: K) -> bool { self.z.descend_to_check(k) } @@ -920,18 +963,23 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper fn descend_to_val>(&mut self, k: K) -> usize { self.z.descend_to_val(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } fn to_next_step(&mut self) -> bool { self.z.to_next_step() } } +impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPath for ReadZipperTracked<'trie, '_, V, A> { + #[inline] + fn path(&self) -> &[u8] { self.z.path() } +} + impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperTracked<'trie, '_, V, A> { type WitnessT = ReadZipperWitness; fn witness<'w>(&self) -> ReadZipperWitness { self.z.witness() } @@ -1059,9 +1107,8 @@ impl ZipperSubtries for Read impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperMoving for ReadZipperUntracked<'trie, '_, V, A> { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - #[inline] - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { 
self.z.descend_to(k) } fn descend_to_check>(&mut self, k: K) -> bool { self.z.descend_to_check(k) } @@ -1069,18 +1116,23 @@ impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> Zipper fn descend_to_val>(&mut self, k: K) -> usize { self.z.descend_to_val(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } fn to_next_step(&mut self) -> bool { self.z.to_next_step() } } +impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPath for ReadZipperUntracked<'trie, '_, V, A> { + #[inline] + fn path(&self) -> &[u8] { self.z.path() } +} + impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyValues<'trie, V> for ReadZipperUntracked<'trie, '_, V, A> { fn get_val(&self) -> Option<&'trie V> { unsafe{ self.z.get_val() } } } @@ -1258,9 +1310,8 @@ impl ZipperSubtries for Read impl ZipperMoving for ReadZipperOwned { fn at_root(&self) -> bool { self.z.at_root() } + fn focus_byte(&self) -> Option { self.z.focus_byte() } fn reset(&mut self) { self.z.reset() } - #[inline] - fn path(&self) -> &[u8] { self.z.path() } fn val_count(&self) -> usize { self.z.val_count() } fn descend_to>(&mut self, k: K) { self.z.descend_to(k) } fn descend_to_check>(&mut self, k: K) -> bool { self.z.descend_to_check(k) } @@ -1268,18 +1319,23 @@ impl ZipperMoving for ReadZipperOw fn descend_to_val>(&mut self, k: K) -> usize { self.z.descend_to_val(k) } fn descend_to_byte(&mut self, k: u8) { self.z.descend_to_byte(k) } fn descend_to_existing_byte(&mut self, k: u8) -> bool { self.z.descend_to_existing_byte(k) } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { self.z.descend_indexed_byte(child_idx) } - fn descend_first_byte(&mut self) -> bool { self.z.descend_first_byte() } - fn descend_until(&mut self) -> bool { self.z.descend_until() } - fn to_next_sibling_byte(&mut self) -> bool { self.z.to_next_sibling_byte() } - fn to_prev_sibling_byte(&mut self) -> bool { self.z.to_prev_sibling_byte() } - fn ascend(&mut self, steps: usize) -> bool { self.z.ascend(steps) } + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { 
self.z.descend_indexed_byte(child_idx) } + fn descend_first_byte(&mut self) -> Option { self.z.descend_first_byte() } + fn descend_until(&mut self, obs: &mut Obs) -> bool { self.z.descend_until(obs) } + fn to_next_sibling_byte(&mut self) -> Option { self.z.to_next_sibling_byte() } + fn to_prev_sibling_byte(&mut self) -> Option { self.z.to_prev_sibling_byte() } + fn ascend(&mut self, steps: usize) -> usize { self.z.ascend(steps) } fn ascend_byte(&mut self) -> bool { self.z.ascend_byte() } - fn ascend_until(&mut self) -> bool { self.z.ascend_until() } - fn ascend_until_branch(&mut self) -> bool { self.z.ascend_until_branch() } + fn ascend_until(&mut self) -> usize { self.z.ascend_until() } + fn ascend_until_branch(&mut self) -> usize { self.z.ascend_until_branch() } fn to_next_step(&mut self) -> bool { self.z.to_next_step() } } +impl ZipperPath for ReadZipperOwned { + #[inline] + fn path(&self) -> &[u8] { self.z.path() } +} + impl<'trie, V: Clone + Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperReadOnlyConditionalValues<'trie, V> for ReadZipperOwned { type WitnessT = ReadZipperWitness; fn witness<'w>(&self) -> ReadZipperWitness { self.z.witness() } @@ -1568,6 +1624,11 @@ pub(crate) mod read_zipper_core { self.prefix_buf.len() <= self.origin_path.len() } + #[inline] + fn focus_byte(&self) -> Option { + self.prefix_buf.last().cloned() + } + fn reset(&mut self) { self.ancestors.truncate(1); match self.ancestors.pop() { @@ -1580,15 +1641,6 @@ pub(crate) mod read_zipper_core { self.prefix_buf.truncate(self.origin_path.len()); } - #[inline] - fn path(&self) -> &[u8] { - if self.prefix_buf.len() > 0 { - &self.prefix_buf[self.origin_path.len()..] - } else { - &[] - } - } - fn val_count(&self) -> usize { let root_val = self.is_val() as usize; if self.node_key().len() == 0 { @@ -1668,7 +1720,7 @@ pub(crate) mod read_zipper_core { } } - fn descend_indexed_byte(&mut self, child_idx: usize) -> bool { + fn descend_indexed_byte(&mut self, child_idx: usize) -> Option { self.prepare_buffers(); debug_assert!(self.is_regularized()); @@ -1678,18 +1730,18 @@ pub(crate) mod read_zipper_core { self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len())); *self.focus_node = child_node; self.focus_iter_token = NODE_ITER_INVALID; - true + Some(prefix) }, (Some(prefix), None) => { self.prefix_buf.push(prefix); self.focus_iter_token = NODE_ITER_INVALID; - true + Some(prefix) }, - (None, _) => false + (None, _) => None } } - fn descend_first_byte(&mut self) -> bool { + fn descend_first_byte(&mut self) -> Option { self.prepare_buffers(); debug_assert!(self.is_regularized()); let cur_tok = self.focus_node.iter_token_for_path(self.node_key()); @@ -1701,7 +1753,7 @@ pub(crate) mod read_zipper_core { let byte_idx = self.node_key().len(); if byte_idx >= key_bytes.len() { debug_assert!(self.is_regularized()); - return false; //We can't go any deeper down this path + return None; //We can't go any deeper down this path } self.focus_iter_token = new_tok; self.prefix_buf.push(key_bytes[byte_idx]); @@ -1717,20 +1769,20 @@ pub(crate) mod read_zipper_core { } } debug_assert!(self.is_regularized()); - true + Some(key_bytes[byte_idx]) } else { self.focus_iter_token = new_tok; debug_assert!(self.is_regularized()); - false + None } } - fn descend_until(&mut self) -> bool { + fn descend_until(&mut self, obs: &mut Obs) -> bool { debug_assert!(self.is_regularized()); let mut moved = false; while self.child_count() == 1 { moved = true; - self.descend_first(); + self.descend_first(obs); if 
self.is_val_internal() { break; } @@ -1803,10 +1855,10 @@ pub(crate) mod read_zipper_core { // self.focus_node.node_first_val_depth_along_key(); // } - fn to_next_sibling_byte(&mut self) -> bool { + fn to_next_sibling_byte(&mut self) -> Option { self.prepare_buffers(); if self.prefix_buf.len() == 0 { - return false + return None; } debug_assert!(self.is_regularized()); self.deregularize(); @@ -1817,7 +1869,7 @@ pub(crate) mod read_zipper_core { if self.focus_iter_token == NODE_ITER_FINISHED { self.regularize(); - return false + return None; } let (mut new_tok, mut key_bytes, mut child_node, mut _value) = self.focus_node.next_items(self.focus_iter_token); @@ -1826,16 +1878,17 @@ pub(crate) mod read_zipper_core { let node_key = self.node_key(); if node_key.len() == 0 { self.focus_iter_token = NODE_ITER_INVALID; - return false; + return None; } let fixed_len = node_key.len() - 1; if fixed_len >= key_bytes.len() || key_bytes[..fixed_len] != node_key[..fixed_len] { self.regularize(); - return false; + return None; } if key_bytes[fixed_len] > node_key[fixed_len] { - *self.prefix_buf.last_mut().unwrap() = key_bytes[node_key.len()-1]; + let byte = key_bytes[node_key.len()-1]; + *self.prefix_buf.last_mut().unwrap() = byte; self.focus_iter_token = new_tok; //If this operation landed us at the end of the path within the node, then we @@ -1852,7 +1905,7 @@ pub(crate) mod read_zipper_core { } debug_assert!(self.is_regularized()); - return true + return Some(byte); } (new_tok, key_bytes, child_node, _value) = self.focus_node.next_items(new_tok); @@ -1860,16 +1913,17 @@ pub(crate) mod read_zipper_core { self.focus_iter_token = NODE_ITER_FINISHED; self.regularize(); - false + None } - fn to_prev_sibling_byte(&mut self) -> bool { + fn to_prev_sibling_byte(&mut self) -> Option { self.to_sibling(false) } - fn ascend(&mut self, mut steps: usize) -> bool { + fn ascend(&mut self, steps: usize) -> usize { + let mut remaining = steps; debug_assert!(self.is_regularized()); - while steps > 0 { + while remaining > 0 { if self.excess_key_len() == 0 { match self.ancestors.pop() { Some((node, iter_tok, _prefix_offset)) => { @@ -1878,16 +1932,16 @@ pub(crate) mod read_zipper_core { }, None => { debug_assert!(self.is_regularized()); - return false + return steps - remaining; } }; } - let cur_jump = steps.min(self.excess_key_len()); + let cur_jump = remaining.min(self.excess_key_len()); self.prefix_buf.truncate(self.prefix_buf.len() - cur_jump); - steps -= cur_jump; + remaining -= cur_jump; } debug_assert!(self.is_regularized()); - true + steps } fn ascend_byte(&mut self) -> bool { @@ -1909,39 +1963,52 @@ pub(crate) mod read_zipper_core { true } - fn ascend_until(&mut self) -> bool { + fn ascend_until(&mut self) -> usize { debug_assert!(self.is_regularized()); if self.at_root() { - return false; + return 0; } + let mut ascended = 0; loop { if self.node_key().len() == 0 { self.ascend_across_nodes(); } - self.ascend_within_node(); + ascended += self.ascend_within_node(); if self.child_count() > 1 || self.is_val() || self.at_root() { - return true; + return ascended; } } } - fn ascend_until_branch(&mut self) -> bool { + fn ascend_until_branch(&mut self) -> usize { debug_assert!(self.is_regularized()); if self.at_root() { - return false; + return 0; } + let mut ascended = 0; loop { if self.node_key().len() == 0 { self.ascend_across_nodes(); } - self.ascend_within_node(); + ascended += self.ascend_within_node(); if self.child_count() > 1 || self.at_root() { - return true; + return ascended; } } } } + impl<'trie, V: Clone + 
Send + Sync + Unpin + 'trie, A: Allocator + 'trie> ZipperPath for ReadZipperCore<'trie, '_, V, A> {
+        #[inline]
+        fn path(&self) -> &[u8] {
+            if self.prefix_buf.len() > 0 {
+                &self.prefix_buf[self.origin_path.len()..]
+            } else {
+                &[]
+            }
+        }
+    }
     impl zipper_priv::ZipperPriv for ReadZipperCore<'_, '_, V, A> {
         type V = V;
         type A = A;
@@ -2473,7 +2540,7 @@ pub(crate) mod read_zipper_core {
         /// performs about as well as the `to_next_sibling_byte` that is there, but doesn't
         /// update the zipper's iter tokens
         #[inline]
-        fn to_sibling(&mut self, next: bool) -> bool {
+        fn to_sibling(&mut self, next: bool) -> Option<u8> {
             self.prepare_buffers();
             debug_assert!(self.is_regularized());
             if self.node_key().len() != 0 {
@@ -2483,33 +2550,33 @@ pub(crate) mod read_zipper_core {
                         self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
                         *self.focus_node = child_node;
                         self.focus_iter_token = NODE_ITER_INVALID;
-                        true
+                        Some(prefix)
                     },
                     (Some(prefix), None) => {
                         *self.prefix_buf.last_mut().unwrap() = prefix;
-                        true
+                        Some(prefix)
                     },
-                    (None, _) => false
+                    (None, _) => None
                 }
             } else {
                 let mut should_pop = false;
                 let result = match self.ancestors.last() {
-                    None => { false }
+                    None => { None }
                     Some((parent, _iter_tok, _prefix_offset)) => {
                         match parent.get_sibling_of_child(self.parent_key(), next) {
                             (Some(prefix), Some(child_node)) => {
                                 *self.prefix_buf.last_mut().unwrap() = prefix;
                                 *self.focus_node = child_node;
                                 self.focus_iter_token = NODE_ITER_INVALID;
-                                true
+                                Some(prefix)
                             },
                             (Some(prefix), None) => {
                                 *self.prefix_buf.last_mut().unwrap() = prefix;
                                 should_pop = true;
-                                true
+                                Some(prefix)
                            },
                            (None, _) => {
-                                false
+                                None
                            }
                        }
                    }
                }
@@ -2669,24 +2736,34 @@ pub(crate) mod read_zipper_core {
         /// Internal method implementing part of [Self::descend_until], but doesn't pay attention to [Self::child_count]
         #[inline]
-        fn descend_first(&mut self) {
+        fn descend_first(&mut self, obs: &mut Obs) {
             self.prepare_buffers();
             match self.focus_node.first_child_from_key(self.node_key()) {
                 (Some(prefix), Some(child_node)) => {
-                    //Step to a new node
-                    self.prefix_buf.extend(prefix);
-                    self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
-                    *self.focus_node = child_node;
-                    self.focus_iter_token = NODE_ITER_INVALID;
+                    if obs.remaining_limit() == usize::MAX || obs.remaining_limit() <= prefix.len() {
+                        //Step to a new node
+                        self.prefix_buf.extend(prefix);
+                        obs.descend_to(prefix);
+                        self.ancestors.push((*self.focus_node.clone(), self.focus_iter_token, self.prefix_buf.len()));
+                        *self.focus_node = child_node;
+                        self.focus_iter_token = NODE_ITER_INVALID;
-                    //If we're at the root of the new node, descend to the first child
-                    if prefix.len() == 0 {
-                        self.descend_first()
+                        //If we're at the root of the new node, descend to the first child
+                        if prefix.len() == 0 {
+                            self.descend_first(obs)
+                        }
+                    } else {
+                        return
                    }
                },
                (Some(prefix), None) => {
-                    //Stay within the same node
-                    self.prefix_buf.extend(prefix);
+                    if obs.remaining_limit() == usize::MAX || obs.remaining_limit() <= prefix.len() {
+                        //Stay within the same node
+                        self.prefix_buf.extend(prefix);
+                        obs.descend_to(prefix);
+                    } else {
+                        return
+                    }
                },
                (None, _) => unreachable!()
            }
@@ -2774,10 +2851,12 @@ }
         /// Internal method used to implement `ascend_until` when ascending within a node
         #[inline]
-        fn ascend_within_node(&mut self) {
+        fn ascend_within_node(&mut self) -> usize {
             let branch_key = self.focus_node.prior_branch_key(self.node_key());
             let new_len =
self.origin_path.len().max(self.node_key_start() + branch_key.len()); + let old_len = self.prefix_buf.len(); self.prefix_buf.truncate(new_len); + old_len - new_len } /// Push a new node-path pair onto the zipper. This is used in the internal implementation of /// the [crate::zipper::ProductZipper] @@ -3144,7 +3223,7 @@ pub(crate) mod zipper_moving_tests { /// from https://en.wikipedia.org/wiki/Radix_tree#/media/File:Patricia_trie.svg pub const ZIPPER_MOVING_BASIC_TEST_KEYS: &[&[u8]] = &[b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"]; - pub fn zipper_moving_basic_test(mut zipper: Z) { + pub fn zipper_moving_basic_test(mut zipper: Z) { fn assert_in_list(val: &[u8], list: &[&[u8]]) { for test_val in list { if *test_val == val { @@ -3157,32 +3236,32 @@ pub(crate) mod zipper_moving_tests { zipper.descend_to(&[b'r']); zipper.descend_to(&[b'o']); zipper.descend_to(&[b'm']); // focus = rom zipper.descend_to(&[b'\'']); assert!(zipper.path_exists()); // focus = rom' (' is the lowest byte) - assert!(zipper.to_next_sibling_byte()); // focus = roma (a is the second byte), but we can't actually guarantee whether we land on 'a' or 'u' + assert!(zipper.to_next_sibling_byte().is_some()); // focus = roma (a is the second byte), but we can't actually guarantee whether we land on 'a' or 'u' assert_in_list(zipper.path(), &[b"roma", b"romu"]); assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'n']); // both follow-ups romane and romanus have n following a - assert!(zipper.to_next_sibling_byte()); // focus = romu (u is the third byte) + assert!(zipper.to_next_sibling_byte().is_some()); // focus = romu (u is the third byte) assert_in_list(zipper.path(), &[b"roma", b"romu"]); assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'l']); // and romu is followed by lus - assert!(!zipper.to_next_sibling_byte()); // fails because there were only 3 children ['\'', 'a', 'u'] - assert!(zipper.to_prev_sibling_byte()); // focus = roma or romu (we stepped back) + assert_eq!(zipper.to_next_sibling_byte(), None); // fails because there were only 3 children ['\'', 'a', 'u'] + assert!(zipper.to_prev_sibling_byte().is_some()); // focus = roma or romu (we stepped back) assert_in_list(zipper.path(), &[b"roma", b"romu"]); - assert!(zipper.to_prev_sibling_byte()); // focus = rom' (we stepped back to where we began) + assert_eq!(zipper.to_prev_sibling_byte(), Some(39)); // focus = rom' (we stepped back to where we began) assert_eq!(zipper.path(), b"rom'"); assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'i']); - assert!(zipper.ascend(1)); // focus = rom + assert_eq!(zipper.ascend(1), 1); // focus = rom assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'\'', b'a', b'u']); // all three options we visited - assert!(zipper.descend_indexed_byte(0)); // focus = rom' + assert_eq!(zipper.descend_indexed_byte(0), Some(39)); // focus = rom' assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'i']); - assert!(zipper.ascend(1)); // focus = rom - assert!(zipper.descend_indexed_byte(1)); // focus = roma + assert_eq!(zipper.ascend(1), 1); // focus = rom + assert_eq!(zipper.descend_indexed_byte(1), Some(b'a')); // focus = roma assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'n']); - assert!(zipper.ascend(1)); - assert!(zipper.descend_indexed_byte(2)); // focus = romu + assert_eq!(zipper.ascend(1), 1); + assert_eq!(zipper.descend_indexed_byte(2), Some(b'u')); // focus = romu assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'l']); - 
assert!(zipper.ascend(1)); - assert!(zipper.descend_indexed_byte(1)); // focus = roma + assert_eq!(zipper.ascend(1), 1); + assert_eq!(zipper.descend_indexed_byte(1), Some(b'a')); // focus = roma assert_eq!(zipper.child_mask().iter().collect::>(), vec![b'n']); - assert!(zipper.ascend(1)); + assert_eq!(zipper.ascend(1), 1); // ' < a < u // 39 105 117 } @@ -3191,7 +3270,7 @@ pub(crate) mod zipper_moving_tests { pub const ZIPPER_WITH_ROOT_PATH_PATH: &[u8] = b"ro"; /// Tests creating a zipper at a specific key within a map - pub fn zipper_with_root_path(mut zipper: Z) { + pub fn zipper_with_root_path(mut zipper: Z) { //Test `descend_to` and `ascend_until` assert_eq!(zipper.path(), b""); @@ -3205,57 +3284,58 @@ pub(crate) mod zipper_moving_tests { zipper.descend_to(b"e"); assert_eq!(zipper.path(), b"mane"); assert_eq!(zipper.child_count(), 0); - assert_eq!(zipper.ascend_until(), true); + assert_eq!(zipper.ascend_until(), 1); zipper.descend_to(b"us"); assert_eq!(zipper.path(), b"manus"); assert_eq!(zipper.child_count(), 0); - assert_eq!(zipper.ascend_until(), true); + assert_eq!(zipper.ascend_until(), 2); assert_eq!(zipper.path(), b"man"); assert_eq!(zipper.child_count(), 2); - assert_eq!(zipper.ascend_until(), true); + assert_eq!(zipper.ascend_until(), 2); assert_eq!(zipper.path(), b"m"); assert_eq!(zipper.child_count(), 3); - assert_eq!(zipper.ascend_until(), true); + assert_eq!(zipper.ascend_until(), 1); assert_eq!(zipper.path(), b""); assert_eq!(zipper.child_count(), 1); assert_eq!(zipper.at_root(), true); - assert_eq!(zipper.ascend_until(), false); + assert_eq!(zipper.ascend_until(), 0); //Test `ascend` zipper.descend_to(b"manus"); assert_eq!(zipper.path(), b"manus"); - assert_eq!(zipper.ascend(1), true); + assert_eq!(zipper.ascend(1), 1); assert_eq!(zipper.path(), b"manu"); - assert_eq!(zipper.ascend(5), false); + assert_eq!(zipper.ascend(5), 4); assert_eq!(zipper.path(), b""); assert_eq!(zipper.at_root(), true); zipper.descend_to(b"mane"); assert_eq!(zipper.path(), b"mane"); - assert_eq!(zipper.ascend(3), true); + assert_eq!(zipper.ascend(3), 3); assert_eq!(zipper.path(), b"m"); + eprintln!("child_mask = {:?}", zipper.child_mask()); assert_eq!(zipper.child_count(), 3); } // A wide shallow trie pub const ZIPPER_INDEXED_BYTE_TEST1_KEYS: &[&[u8]] = &[b"0", b"1", b"2", b"3", b"4", b"5", b"6"]; - pub fn zipper_indexed_bytes_test1(mut zip: Z) { + pub fn zipper_indexed_bytes_test1(mut zip: Z) { zip.descend_to("2"); assert_eq!(zip.is_val(), true); assert_eq!(zip.child_count(), 0); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.path(), b"2"); zip.reset(); - assert!(zip.descend_indexed_byte(2)); + assert_eq!(zip.descend_indexed_byte(2), Some(b'2')); assert_eq!(zip.is_val(), true); assert_eq!(zip.child_count(), 0); assert_eq!(zip.path(), b"2"); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.path(), b"2"); zip.reset(); - assert!(!zip.descend_indexed_byte(7)); + assert_eq!(zip.descend_indexed_byte(7), None); assert_eq!(zip.is_val(), false); assert_eq!(zip.child_count(), 7); assert_eq!(zip.path(), b""); @@ -3269,26 +3349,26 @@ pub(crate) mod zipper_moving_tests { assert_eq!(zip.val(), Some(&())); assert_eq!(zip.path(), b"000"); assert_eq!(zip.child_count(), 0); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.path(), b"000"); zip.reset(); - assert!(!zip.descend_indexed_byte(2)); + assert_eq!(zip.descend_indexed_byte(2), None); 
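+        // Index 2 is out of range here: this root has only the two children
+        // b'0' and b'1', and `descend_indexed_byte` now reports that by
+        // returning `None` while leaving the focus where it was.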
assert_eq!(zip.child_count(), 2); - assert!(zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), Some(b'1')); assert_eq!(zip.path(), b"1"); assert_eq!(zip.val(), None); assert_eq!(zip.child_count(), 1); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.val(), None); assert_eq!(zip.path(), b"1"); zip.reset(); - assert!(zip.descend_indexed_byte(0)); + assert_eq!(zip.descend_indexed_byte(0), Some(b'0')); assert_eq!(zip.path(), b"0"); assert_eq!(zip.val(), None); assert_eq!(zip.child_count(), 1); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.val(), None); assert_eq!(zip.path(), b"0"); } @@ -3296,31 +3376,31 @@ pub(crate) mod zipper_moving_tests { // A narrow deeper trie pub const ZIPPER_INDEXED_BYTE_TEST2_KEYS: &[&[u8]] = &[b"000", b"1Z", b"00AAA", b"00AA000", b"00AA00AAA"]; - pub fn zipper_indexed_bytes_test2(mut zip: Z) { + pub fn zipper_indexed_bytes_test2(mut zip: Z) { zip.descend_to("000"); assert_eq!(zip.is_val(), true); assert_eq!(zip.path(), b"000"); assert_eq!(zip.child_count(), 0); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.path(), b"000"); zip.reset(); - assert!(!zip.descend_indexed_byte(2)); + assert_eq!(zip.descend_indexed_byte(2), None); assert_eq!(zip.child_count(), 2); - assert!(zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), Some(b'1')); assert_eq!(zip.path(), b"1"); assert_eq!(zip.is_val(), false); assert_eq!(zip.child_count(), 1); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.is_val(), false); assert_eq!(zip.path(), b"1"); zip.reset(); - assert!(zip.descend_indexed_byte(0)); + assert_eq!(zip.descend_indexed_byte(0), Some(b'0')); assert_eq!(zip.path(), b"0"); assert_eq!(zip.is_val(), false); assert_eq!(zip.child_count(), 1); - assert!(!zip.descend_indexed_byte(1)); + assert_eq!(zip.descend_indexed_byte(1), None); assert_eq!(zip.is_val(), false); assert_eq!(zip.path(), b"0"); } @@ -3328,9 +3408,9 @@ pub(crate) mod zipper_moving_tests { // Tests how descend_until treats values along paths pub const ZIPPER_DESCEND_UNTIL_TEST1_KEYS: &[&[u8]] = &[b"a", b"ab", b"abCDEf", b"abCDEfGHi"]; - pub fn zipper_descend_until_test1(mut zip: Z) { + pub fn zipper_descend_until_test1(mut zip: Z) { for key in ZIPPER_DESCEND_UNTIL_TEST1_KEYS { - assert!(zip.descend_until()); + assert!(zip.descend_until(&mut ())); assert_eq!(zip.path(), *key); } } @@ -3338,65 +3418,65 @@ pub(crate) mod zipper_moving_tests { // Test a 3-way branch, so we definitely don't have a pair node pub const ZIPPER_ASCEND_UNTIL_TEST1_KEYS: &[&[u8]] = &[b"AAa", b"AAb", b"AAc"]; - pub fn zipper_ascend_until_test1(mut zip: Z) { + pub fn zipper_ascend_until_test1(mut zip: Z) { zip.descend_to(b"AAaDDd"); assert!(!zip.path_exists()); assert_eq!(zip.path(), b"AAaDDd"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 3); assert_eq!(zip.path(), b"AAa"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"AA"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 2); assert_eq!(zip.path(), b""); - assert!(!zip.ascend_until()); + assert_eq!(zip.ascend_until(), 0); } // Test what's likely to be represented as a pair node pub const ZIPPER_ASCEND_UNTIL_TEST2_KEYS: &[&[u8]] = &[b"AAa", b"AAb"]; - pub fn zipper_ascend_until_test2(mut zip: Z) { + pub fn zipper_ascend_until_test2(mut zip: Z) { 
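+        // `ascend_until` now returns the number of bytes ascended (0 when the
+        // focus is already at the root) rather than a bare success flag, so
+        // the asserts below check exact climb distances.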
zip.descend_to(b"AAaDDd"); assert!(!zip.path_exists()); assert_eq!(zip.path(), b"AAaDDd"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 3); assert_eq!(zip.path(), b"AAa"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"AA"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 2); assert_eq!(zip.path(), b""); - assert!(!zip.ascend_until()); + assert_eq!(zip.ascend_until(), 0); } /// Test a straight-line trie pub const ZIPPER_ASCEND_UNTIL_TEST3_KEYS: &[&[u8]] = &[b"1", b"12", b"123", b"1234", b"12345"]; - pub fn zipper_ascend_until_test3(mut zip: Z) { + pub fn zipper_ascend_until_test3(mut zip: Z) { //First test that ascend_until stops when transitioning from non-existent path zip.descend_to(b"123456"); assert_eq!(zip.path_exists(), false); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"12345"); //Test that ascend_until stops at each value - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"1234"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"123"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"12"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"1"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b""); - assert!(!zip.ascend_until()); + assert_eq!(zip.ascend_until(), 0); assert!(zip.at_root()); //Test that ascend_until_branch skips over all the values zip.descend_to(b"12345"); assert!(zip.path_exists()); assert_eq!(zip.path(), b"12345"); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 5); assert_eq!(zip.path(), b""); assert!(zip.at_root()); @@ -3409,34 +3489,34 @@ pub(crate) mod zipper_moving_tests { zip.descend_to(b"12345"); assert!(zip.path_exists()); assert_eq!(zip.path(), b"12345"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"1234"); // "1234" is a branch only assert_eq!(zip.is_val(), false); assert_eq!(zip.child_count(), 2); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"123"); // "123" is a value only assert_eq!(zip.child_count(), 1); assert_eq!(zip.is_val(), true); - assert!(zip.ascend_until()); // Jump over "12" because it's neither a branch nor a value + assert_eq!(zip.ascend_until(), 2); // Jump over "12" because it's neither a branch nor a value assert_eq!(zip.path(), b"1"); // "1" is both a branch and a value assert_eq!(zip.is_val(), true); assert_eq!(zip.child_count(), 2); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b""); assert_eq!(zip.child_count(), 1); - assert!(!zip.ascend_until()); + assert_eq!(zip.ascend_until(), 0); assert!(zip.at_root()); //Test that ascend_until_branch skips over all the values zip.descend_to(b"12345"); assert!(zip.path_exists()); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 1); assert_eq!(zip.path(), b"1234"); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 3); assert_eq!(zip.path(), b"1"); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 1); assert_eq!(zip.path(), b""); - assert!(!zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 0); assert!(zip.at_root()); } @@ -3444,72 +3524,73 @@ pub(crate) mod zipper_moving_tests { /// Some 
paths encountered will be values only, some will be branches only, and some will be both pub const ZIPPER_ASCEND_UNTIL_TEST4_KEYS: &[&[u8]] = &[b"1", b"123", b"12345", b"1abc", b"1234abc"]; - pub fn zipper_ascend_until_test4(mut zip: Z) { + pub fn zipper_ascend_until_test4(mut zip: Z) { zip.descend_to(b"12345"); assert!(zip.path_exists()); assert_eq!(zip.path(), b"12345"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"1234"); // "1234" is a branch only assert_eq!(zip.is_val(), false); assert_eq!(zip.child_count(), 2); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"123"); // "123" is a value only assert_eq!(zip.child_count(), 1); assert_eq!(zip.is_val(), true); - assert!(zip.ascend_until()); // Jump over "12" because it's neither a branch nor a value + assert_eq!(zip.ascend_until(), 2); // Jump over "12" because it's neither a branch nor a value assert_eq!(zip.path(), b"1"); // "1" is both a branch and a value assert_eq!(zip.is_val(), true); assert_eq!(zip.child_count(), 2); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b""); assert_eq!(zip.child_count(), 1); - assert!(!zip.ascend_until()); + assert_eq!(zip.ascend_until(), 0); assert!(zip.at_root()); //Test that ascend_until_branch skips over all the values zip.descend_to(b"12345"); assert!(zip.path_exists()); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 1); assert_eq!(zip.path(), b"1234"); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 3); assert_eq!(zip.path(), b"1"); - assert!(zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 1); assert_eq!(zip.path(), b""); - assert!(!zip.ascend_until_branch()); + assert_eq!(zip.ascend_until_branch(), 0); assert!(zip.at_root()); } /// Test ascending over a long key that spans multiple nodes pub const ZIPPER_ASCEND_UNTIL_TEST5_KEYS: &[&[u8]] = &[b"A", b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"]; - pub fn zipper_ascend_until_test5(mut zip: Z) { + pub fn zipper_ascend_until_test5(mut zip: Z) { //Test that ascend_until stops when transitioning from non-existent path zip.descend_to(b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB"); assert_eq!(zip.path_exists(), false); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"); //Test that jump all the way back to where we want to be - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 126); assert_eq!(zip.path(), b"A"); - assert!(zip.ascend_until()); + assert_eq!(zip.ascend_until(), 1); assert_eq!(zip.path(), b""); - assert_eq!(zip.ascend_until(), false); + assert_eq!(zip.ascend_until(), 0); } pub const ZIPPER_INDEXED_MOVEMENT_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"]; - pub fn indexed_zipper_movement1(mut zipper: Z) { - //descends a single specific byte using `descend_indexed_byte`. Just for testing. 
A real user would use `descend_towards` - fn descend_byte(zipper: &mut Z, byte: u8) { + pub fn indexed_zipper_movement1(mut zipper: Z) { + //descends a single specific byte using `descend_indexed_byte`. Just for testing. A real user would use `descend_to` or `descend_to_byte` + fn descend_byte(zipper: &mut Z, byte: u8) { for i in 0..zipper.child_count() { - assert_eq!(zipper.descend_indexed_byte(i), true); + let descended = zipper.descend_indexed_byte(i); + assert!(descended.is_some()); if *zipper.path().last().unwrap() == byte { break } else { - assert_eq!(zipper.ascend(1), true); + assert_eq!(zipper.ascend(1), 1); } } } @@ -3519,24 +3600,24 @@ pub(crate) mod zipper_moving_tests { descend_byte(&mut zipper, b'r'); assert_eq!(zipper.path(), b"r"); assert_eq!(zipper.child_count(), 2); - assert_eq!(zipper.descend_until(), false); + assert_eq!(zipper.descend_until(&mut ()), false); descend_byte(&mut zipper, b'o'); assert_eq!(zipper.path(), b"ro"); assert_eq!(zipper.child_count(), 1); - assert_eq!(zipper.descend_until(), true); + assert_eq!(zipper.descend_until(&mut ()), true); assert_eq!(zipper.path(), b"rom"); assert_eq!(zipper.child_count(), 3); zipper.reset(); - assert_eq!(zipper.descend_until(), false); + assert_eq!(zipper.descend_until(&mut ()), false); descend_byte(&mut zipper, b'a'); assert_eq!(zipper.path(), b"a"); assert_eq!(zipper.child_count(), 1); - assert_eq!(zipper.descend_until(), true); + assert_eq!(zipper.descend_until(&mut ()), true); assert_eq!(zipper.path(), b"arrow"); assert_eq!(zipper.child_count(), 0); - assert_eq!(zipper.ascend(3), true); + assert_eq!(zipper.ascend(3), 3); assert_eq!(zipper.path(), b"ar"); assert_eq!(zipper.child_count(), 1); } @@ -3557,10 +3638,10 @@ pub(crate) mod zipper_moving_tests { assert_eq!(zipper.is_val(), true); zipper.descend_to(b"e"); assert_eq!(zipper.is_val(), true); - assert_eq!(zipper.ascend(1), true); + assert_eq!(zipper.ascend(1), 1); zipper.descend_to(b"u"); assert_eq!(zipper.is_val(), false); - zipper.descend_until(); + zipper.descend_until(&mut ()); assert_eq!(zipper.is_val(), true); } @@ -3610,7 +3691,7 @@ pub(crate) mod zipper_moving_tests { pub const ZIPPER_DESCEND_TO_EXISTING_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"]; - pub fn descend_to_existing_test1(mut zipper: Z) { + pub fn descend_to_existing_test1(mut zipper: Z) { assert_eq!(3, zipper.descend_to_existing("bowling")); assert_eq!("bow".as_bytes(), zipper.path()); @@ -3628,7 +3709,7 @@ pub(crate) mod zipper_moving_tests { pub const ZIPPER_DESCEND_TO_EXISTING_TEST2_KEYS: &[&[u8]] = &[b"arrow"]; /// Tests a really long path that doesn't exist, to exercise the chunk-descending code - pub fn descend_to_existing_test2(mut zipper: Z) { + pub fn descend_to_existing_test2(mut zipper: Z) { assert_eq!(5, zipper.descend_to_existing("arrow0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")); assert_eq!(zipper.path(), &b"arrow"[..]); @@ -3641,7 +3722,7 @@ pub(crate) mod zipper_moving_tests { pub const ZIPPER_DESCEND_TO_EXISTING_TEST3_KEYS: &[&[u8]] = &[b"arrow"]; /// Tests calling the method when the focus is already on a non-existent path - pub fn descend_to_existing_test3(mut zipper: Z) { + pub fn descend_to_existing_test3(mut zipper: Z) { zipper.descend_to("arrow00000"); assert_eq!(false, zipper.path_exists()); @@ -3653,7 +3734,7 @@ pub(crate) mod 
zipper_moving_tests { pub const ZIPPER_TO_NEXT_STEP_TEST1_KEYS: &[&[u8]] = &[b"arrow", b"bow", b"cannon", b"roman", b"romane", b"romanus", b"romulus", b"rubens", b"ruber", b"rubicon", b"rubicundus", b"rom'i"]; - pub fn to_next_step_test1(mut zipper: Z) { + pub fn to_next_step_test1(mut zipper: Z) { let mut i = 0; while zipper.to_next_step() { match i { @@ -3685,45 +3766,45 @@ pub(crate) mod zipper_moving_tests { pub const ZIPPER_BYTES_ITER_TEST1_KEYS: &[&[u8]] = &[b"ABCDEFGHIJKLMNOPQRSTUVWXYZ", b"ab",]; - pub fn zipper_byte_iter_test1(mut zipper: Z) { + pub fn zipper_byte_iter_test1(mut zipper: Z) { zipper.descend_to_byte(b'A'); assert_eq!(zipper.path_exists(), true); - assert_eq!(zipper.descend_first_byte(), true); + assert_eq!(zipper.descend_first_byte(), Some(b'B')); assert_eq!(zipper.path(), b"AB"); - assert_eq!(zipper.to_next_sibling_byte(), false); + assert_eq!(zipper.to_next_sibling_byte(), None); assert_eq!(zipper.path(), b"AB"); } pub const ZIPPER_BYTES_ITER_TEST2_KEYS: &[&[u8]] = &[&[2, 194, 1, 1, 193, 5], &[3, 194, 1, 0, 193, 6, 193, 5], &[3, 193, 4, 193]]; pub const ZIPPER_BYTES_ITER_TEST2_PATH: &[u8] = &[2, 194]; - pub fn zipper_byte_iter_test2(mut zipper: Z) { - assert_eq!(zipper.descend_first_byte(), true); + pub fn zipper_byte_iter_test2(mut zipper: Z) { + assert_eq!(zipper.descend_first_byte(), Some(1)); assert_eq!(zipper.path(), &[1]); - assert_eq!(zipper.to_next_sibling_byte(), false); + assert_eq!(zipper.to_next_sibling_byte(), None); assert_eq!(zipper.path(), &[1]); } pub const ZIPPER_BYTES_ITER_TEST3_KEYS: &[&[u8]] = &[&[3, 193, 4, 193, 5, 2, 193, 6, 193, 7], &[3, 193, 4, 193, 5, 2, 193, 6, 255]]; pub const ZIPPER_BYTES_ITER_TEST3_PATH: &[u8] = &[3, 193, 4, 193, 5, 2, 193]; - pub fn zipper_byte_iter_test3(mut zipper: Z) { + pub fn zipper_byte_iter_test3(mut zipper: Z) { assert_eq!(zipper.path(), &[]); - assert_eq!(zipper.descend_first_byte(), true); + assert_eq!(zipper.descend_first_byte(), Some(6)); assert_eq!(zipper.path(), &[6]); - assert_eq!(zipper.descend_first_byte(), true); + assert_eq!(zipper.descend_first_byte(), Some(193)); assert_eq!(zipper.path(), &[6, 193]); - assert_eq!(zipper.descend_first_byte(), true); + assert_eq!(zipper.descend_first_byte(), Some(7)); assert_eq!(zipper.path(), &[6, 193, 7]); } pub const ZIPPER_BYTES_ITER_TEST4_KEYS: &[&[u8]] = &[b"ABC", b"ABCDEF", b"ABCdef"]; - pub fn zipper_byte_iter_test4(mut zipper: Z) { + pub fn zipper_byte_iter_test4(mut zipper: Z) { //Check that we end up at the first leaf by depth-first search - while zipper.descend_first_byte() {} + while zipper.descend_first_byte().is_some() {} assert_eq!(zipper.path(), b"ABCDEF"); //Try taking a different branch @@ -3731,13 +3812,13 @@ pub(crate) mod zipper_moving_tests { zipper.descend_to(b"ABC"); assert!(zipper.path_exists()); assert_eq!(zipper.path(), b"ABC"); - assert!(zipper.descend_indexed_byte(1)); + assert_eq!(zipper.descend_indexed_byte(1), Some(b'd')); assert_eq!(zipper.path(), b"ABCd"); - assert!(zipper.descend_first_byte()); + assert_eq!(zipper.descend_first_byte(), Some(b'e')); assert_eq!(zipper.path(), b"ABCde"); - assert!(zipper.descend_first_byte()); + assert_eq!(zipper.descend_first_byte(), Some(b'f')); assert_eq!(zipper.path(), b"ABCdef"); - assert!(!zipper.descend_first_byte()); + assert_eq!(zipper.descend_first_byte(), None); } pub const ZIPPER_BYTES_ITER_TEST5_KEYS: &[&[u8]] = &[ @@ -3753,16 +3834,16 @@ pub(crate) mod zipper_moving_tests { zipper.reset(); zipper.descend_to(&keys[0][..i]); if i != 18 && i != 5 { - 
assert_eq!(zipper.to_next_sibling_byte(), false); + assert_eq!(zipper.to_next_sibling_byte(), None); } } zipper.reset(); zipper.descend_to([2, 197, 97, 120, 105, 111, 109, 3, 193, 61, 4, 193, 97, 192, 192, 3, 193, 75]); - assert_eq!(zipper.to_next_sibling_byte(), true); + assert_eq!(zipper.to_next_sibling_byte(), Some(84)); zipper.reset(); zipper.descend_to([2, 197, 97, 120, 105]); - assert_eq!(zipper.to_next_sibling_byte(), true); + assert_eq!(zipper.to_next_sibling_byte(), Some(255)); } } @@ -3915,12 +3996,12 @@ pub(crate) mod zipper_iteration_tests { pub fn k_path_test1<'a, Z: ZipperIteration>(mut zipper: Z) { //This is a cheesy way to encode lengths, but it's is more readable than unprintable chars - assert!(zipper.descend_indexed_byte(0)); + assert_eq!(zipper.descend_indexed_byte(0), Some(b'5')); let sym_len = usize::from_str_radix(std::str::from_utf8(&[zipper.path()[0]]).unwrap(), 10).unwrap(); assert_eq!(sym_len, 5); //Step over the ':' character - assert!(zipper.descend_indexed_byte(0)); + assert_eq!(zipper.descend_indexed_byte(0), Some(b':')); assert_eq!(zipper.child_count(), 6); //Start iterating over all the symbols of length=sym_len @@ -3994,6 +4075,7 @@ pub(crate) mod zipper_iteration_tests { zipper.reset(); zipper.descend_to(b"1a1"); assert!(zipper.path_exists()); + assert_eq!(zipper.path(), b"1a1"); assert_eq!(zipper.descend_first_k_path(1), true); assert_eq!(zipper.path(), b"1a1A"); assert_eq!(zipper.to_next_k_path(1), true); @@ -4030,7 +4112,7 @@ pub(crate) mod zipper_iteration_tests { assert!(zipper.path_exists()); assert_eq!(zipper.descend_first_k_path(1), true); assert_eq!(zipper.path(), b"1a"); - assert_eq!(zipper.descend_indexed_byte(0), true); + assert_eq!(zipper.descend_indexed_byte(0), Some(b'1')); assert_eq!(zipper.path(), b"1a1"); assert_eq!(zipper.descend_first_k_path(1), true); assert_eq!(zipper.path(), b"1a1A"); @@ -4040,7 +4122,7 @@ pub(crate) mod zipper_iteration_tests { assert_eq!(zipper.path(), b"1a1C"); assert_eq!(zipper.to_next_k_path(1), false); assert_eq!(zipper.path(), b"1a1"); - assert_eq!(zipper.ascend(1), true); + assert_eq!(zipper.ascend(1), 1); assert_eq!(zipper.path(), b"1a"); assert_eq!(zipper.to_next_k_path(1), true); assert_eq!(zipper.path(), b"1b"); @@ -4184,7 +4266,7 @@ pub(crate) mod zipper_iteration_tests { zipper.descend_to(path); assert!(zipper.path_exists()); }, - |zipper, steps| assert!(zipper.ascend(steps)), + |zipper, steps| assert_eq!(zipper.ascend(steps), steps), ); //Try with a `descend_to_byte` & `ascend_byte` @@ -4205,8 +4287,8 @@ pub(crate) mod zipper_iteration_tests { //Try with a `descend_first_byte` & `ascend_byte` test_loop(&mut zipper, |zipper, path| { - for _ in 0..path.len() { - assert!(zipper.descend_first_byte()); + for ii in 0..path.len() { + assert_eq!(zipper.descend_first_byte(), Some(path[ii])); } }, |zipper, steps| { @@ -4338,11 +4420,11 @@ mod tests { zipper.descend_to(b"e"); assert_eq!(zipper.is_val(), true); assert_eq!(zipper.val(), Some(&"romane")); - assert_eq!(zipper.ascend(1), true); + assert_eq!(zipper.ascend(1), 1); zipper.descend_to(b"u"); assert_eq!(zipper.is_val(), false); assert_eq!(zipper.val(), None); - zipper.descend_until(); + zipper.descend_until(&mut ()); assert_eq!(zipper.is_val(), true); assert_eq!(zipper.val(), Some(&"romanus")); } @@ -4477,16 +4559,16 @@ mod tests { r0.descend_to_byte(0); assert_eq!(r0.path_exists(), true); let mut r1 = r0.fork_read_zipper(); - assert_eq!(r1.to_next_sibling_byte(), false); + assert_eq!(r1.to_next_sibling_byte(), None); assert_eq!(r1.child_mask().0[0], 
(1<<3) | (1<<4) | (1<<5)); r1.descend_to_byte(3); assert_eq!(r1.path_exists(), true); assert_eq!(r1.child_mask().0[0], 0); - assert_eq!(r1.to_next_sibling_byte(), true); + assert_eq!(r1.to_next_sibling_byte(), Some(4)); assert_eq!(r1.origin_path(), &[0, 4]); assert_eq!(r1.path(), &[4]); - assert_eq!(r1.to_next_sibling_byte(), true); - assert_eq!(r1.to_next_sibling_byte(), false); + assert_eq!(r1.to_next_sibling_byte(), Some(5)); + assert_eq!(r1.to_next_sibling_byte(), None); } #[test] @@ -4679,7 +4761,7 @@ mod tests { assert_eq!(zipper.path(), b""); assert_eq!(zipper.val_count(), 2); - assert_eq!(zipper.descend_until(), true); + assert_eq!(zipper.descend_until(&mut ()), true); assert_eq!(zipper.path(), b"arrow"); assert_eq!(zipper.val_count(), 1); } diff --git a/src/zipper_head.rs b/src/zipper_head.rs index b18be9b..14a55b0 100644 --- a/src/zipper_head.rs +++ b/src/zipper_head.rs @@ -826,7 +826,7 @@ mod tests { let mut wz = zh.write_zipper_at_exclusive_path(&[3, 194, 22]).unwrap(); assert_eq!(rz.val(), None); - assert!(rz.descend_first_byte()); + assert_eq!(rz.descend_first_byte(), Some(133)); assert_eq!(rz.val(), Some(&())); assert_eq!(wz.val(), None); @@ -834,7 +834,7 @@ mod tests { rz.reset(); assert_eq!(rz.val(), None); - assert!(rz.descend_first_byte()); + assert_eq!(rz.descend_first_byte(), Some(133)); assert_eq!(rz.val(), Some(&())); drop(wz); @@ -856,7 +856,7 @@ mod tests { let mut wz = zh.write_zipper_at_exclusive_path(&[3, 194, 22]).unwrap(); assert_eq!(rz.val(), None); - assert!(rz.descend_first_byte()); + assert_eq!(rz.descend_first_byte(), Some(133)); assert_eq!(rz.val(), Some(&1004)); assert_eq!(rz2.val(), Some(&1003)); @@ -1018,7 +1018,7 @@ mod tests { assert_eq!(z.child_count(), 0); assert_eq!(z.child_mask(), ByteMask::EMPTY); assert_eq!(z.path_exists(), true); - assert_eq!(z.to_next_sibling_byte(), false); + assert_eq!(z.to_next_sibling_byte(), None); assert_eq!(z.ascend_byte(), false); // Test creating a zipper at a path that doesn't exist @@ -1026,7 +1026,7 @@ mod tests { assert_eq!(z2.val(), None); assert_eq!(z2.child_count(), 0); assert_eq!(z2.child_mask(), ByteMask::EMPTY); - assert_eq!(z2.to_next_sibling_byte(), false); + assert_eq!(z2.to_next_sibling_byte(), None); assert_eq!(z2.ascend_byte(), false); //Conceptually this should be `false`, but the act of creating the ReadZipper currently creates @@ -1046,28 +1046,28 @@ mod tests { // Create a zipper and test to make sure it behaves properly let mut z = zh.read_zipper_at_path(b"A").unwrap(); assert_eq!(z.val(), Some(&24)); - assert_eq!(z.to_next_sibling_byte(), false); - z.descend_until(); + assert_eq!(z.to_next_sibling_byte(), None); + z.descend_until(&mut ()); assert_eq!(z.path(), b"BCDEFG"); assert_eq!(z.origin_path(), b"ABCDEFG"); assert_eq!(z.val(), Some(&42)); - assert_eq!(z.to_next_sibling_byte(), false); + assert_eq!(z.to_next_sibling_byte(), None); // Create a second zipper and ensure it's valid let mut z2 = zh.read_zipper_at_path(z.origin_path()).unwrap(); assert_eq!(z2.path(), b""); assert_eq!(z2.origin_path(), b"ABCDEFG"); assert_eq!(z2.val(), Some(&42)); - assert_eq!(z2.to_next_sibling_byte(), false); + assert_eq!(z2.to_next_sibling_byte(), None); // Test the original zipper assert_eq!(z.val(), Some(&42)); - assert_eq!(z.to_next_sibling_byte(), false); - assert_eq!(z.ascend_until(), true); + assert_eq!(z.to_next_sibling_byte(), None); + assert_eq!(z.ascend_until(), 6); assert_eq!(z.path(), b""); assert_eq!(z.origin_path(), b"A"); assert_eq!(z.val(), Some(&24)); - assert_eq!(z.to_next_sibling_byte(), 
false); + assert_eq!(z.to_next_sibling_byte(), None); } /// Similar test to zipper_headc, but here we are testing to make sure there are no issues when @@ -1144,7 +1144,7 @@ mod tests { //Try pre-creating trie in the parent that will be visited by the child zipper b_zipper.descend_to(b"-children-0+metadata"); b_zipper.set_val(-3); - b_zipper.ascend(10); + assert_eq!(b_zipper.ascend(10), 10); //Make a ZipperHead on the WriteZipper, and make two more parallel zippers let b_head = b_zipper.zipper_head(); @@ -1415,7 +1415,7 @@ mod tests { //Make sure we cleaned up the dangling path, but nothing else let mut rz = zh.read_zipper_at_borrowed_path(b"a_path_").unwrap(); - assert!(rz.descend_until()); + assert!(rz.descend_until(&mut ())); assert_eq!(rz.path(), b"to_somewhere"); drop(rz); @@ -1424,7 +1424,7 @@ mod tests { zh.cleanup_write_zipper(wz); let mut rz = zh.read_zipper_at_borrowed_path(b"a_path_").unwrap(); assert_eq!(rz.path(), b""); - assert!(rz.descend_until()); + assert!(rz.descend_until(&mut ())); assert_eq!(rz.path(), b"to_somewhere"); drop(rz); diff --git a/src/zipper_tracking.rs b/src/zipper_tracking.rs index 8fa2cd0..0638abe 100644 --- a/src/zipper_tracking.rs +++ b/src/zipper_tracking.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use std::sync::RwLock; use crate::PathMap; -use crate::zipper::{ReadZipperUntracked, Zipper, ZipperAbsolutePath, ZipperForking, ZipperMoving, ZipperReadOnlyValues, ZipperWriting, ZipperIteration, ZipperReadOnlyIteration, }; +use crate::zipper::{ReadZipperUntracked, Zipper, ZipperPath, ZipperAbsolutePath, ZipperForking, ZipperMoving, ZipperReadOnlyValues, ZipperWriting, ZipperIteration, ZipperReadOnlyIteration, }; /// Marker to track an outstanding read zipper pub struct TrackingRead;
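Taken together, the hunks above migrate the movement API from `bool` results to `usize` step counts and `Option<u8>` bytes. Below is a minimal sketch of calling code under the new convention; `probe_depth` is a hypothetical helper written against only the trait methods visible in this patch, and the zipper type is left generic so no constructor is assumed.

```rust
use pathmap::zipper::{ZipperMoving, ZipperPath};

/// Hypothetical helper: walk first-child edges down to a leaf, then climb
/// back. `descend_first_byte` now yields the byte it took (`Option<u8>`),
/// and `ascend` reports how many steps it actually climbed.
fn probe_depth<Z: ZipperMoving + ZipperPath>(z: &mut Z) -> usize {
    let start = z.path().len();
    // Keep taking the first child until there are no children left.
    while z.descend_first_byte().is_some() {}
    let depth = z.path().len() - start;
    // A partial climb (hitting the zipper root early) would now show up as a
    // return value smaller than `depth`; here the full climb must succeed
    // because we just descended exactly `depth` bytes.
    assert_eq!(z.ascend(depth), depth);
    depth
}
```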
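The default `move_to_path` body added above ascends only past the bytes that differ between the old and new focus. A sketch of its return contract follows; `relocate` is a hypothetical caller, and it relies on the fact, demonstrated by the `path_exists` tests above, that `descend_to` moves the focus whether or not the destination path exists.

```rust
use pathmap::zipper::ZipperPath;

/// Hypothetical caller illustrating `move_to_path`'s return value: the
/// number of bytes the old and new focus paths share.
fn relocate<Z: ZipperPath>(z: &mut Z) {
    z.reset();
    z.descend_to(b"romane");
    // "romane" and "romulus" share the 3-byte prefix "rom", so only the
    // divergent tails are ascended and re-descended.
    let shared = z.move_to_path(b"romulus");
    assert_eq!(shared, 3);
    assert_eq!(z.path(), b"romulus");
}
```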
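`descend_until` now threads an observer through the descent. The observer's trait bound is elided by the rendering above, but the updated tests consistently pass `&mut ()` as a no-op observer, so that is what this hypothetical wrapper assumes.

```rust
use pathmap::zipper::ZipperMoving;

/// Hypothetical wrapper: advance through single-child runs below the focus,
/// stopping at the first value or branch point, and report whether the
/// focus moved at all. `&mut ()` is the no-op observer used throughout the
/// updated tests.
fn skip_straight_run<Z: ZipperMoving>(z: &mut Z) -> bool {
    z.descend_until(&mut ())
}
```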