@@ -1,4 +1,4 @@
use alloc::{borrow::Cow, vec::Vec};
use alloc::vec::Vec;
use core::{
    iter::{self, FusedIterator},
    num::NonZero,
@@ -14,10 +14,10 @@ use super::{
/// with empty nodes.
///
/// Empty nodes in the path are stored only as their position, represented with a bitmask. A
/// maximum of 64 nodes (`SMT_MAX_DEPTH`) can be stored (empty and non-empty). The more nodes in a
/// path are empty, the less memory this struct will use. This type calculates empty nodes on-demand
/// when iterated through, converted to a [MerklePath], or an empty node is retrieved with
/// [`SparseMerklePath::at_depth()`], which will incur overhead.
/// maximum of 64 nodes in the path can be empty. The more nodes in a path are empty, the less
/// memory this struct will use. This type calculates empty nodes on-demand when iterated through,
/// converted to a [MerklePath], or an empty node is retrieved with [`SparseMerklePath::at_idx()`]
/// or [`SparseMerklePath::at_depth()`], which will incur overhead.
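/// For instance, a path of depth 64 in which only a handful of nodes are non-empty stores just
/// those few digests plus the 64-bit mask, rather than 64 full digests.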
///
/// NOTE: This type assumes that Merkle paths always span from the root of the tree to a leaf.
/// Partial paths are not supported.
@@ -25,8 +25,6 @@ use super::{
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct SparseMerklePath {
    /// A bitmask representing empty nodes. A set bit corresponds to the depth of an empty node.
    /// The least significant bit (bit 0) describes the node at depth 1 (the root's children).
    /// A bit's index plus 1 is equal to the node's depth.
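    /// For example, a mask of `0b101` marks the nodes at depths 1 and 3 as empty.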
    empty_nodes_mask: u64,
    /// The non-empty nodes, stored from deepest to shallowest; depths whose node is empty are
    /// simply skipped.
    nodes: Vec<RpoDigest>,
@@ -38,7 +36,8 @@ impl SparseMerklePath {
    /// of deepest to shallowest.
    ///
    /// Knowing the size is necessary to calculate the depth of the tree, which is needed to detect
    /// which nodes are empty nodes.
    /// which nodes are empty nodes. If you know the size but your iterator type is not
    /// [ExactSizeIterator], use [`SparseMerklePath::from_iter_with_depth()`].
    ///
    /// # Errors
    /// Returns [MerkleError::DepthTooBig] if `tree_depth` is greater than [SMT_MAX_DEPTH].
@@ -47,27 +46,38 @@ impl SparseMerklePath {
        I: IntoIterator<IntoIter: ExactSizeIterator, Item = RpoDigest>,
    {
        let iterator = iterator.into_iter();
        let tree_depth = iterator.len() as u8;
        // `iterator.len() as u8` will truncate, but not below `SMT_MAX_DEPTH`, which
        // `from_iter_with_depth` checks for.
        Self::from_iter_with_depth(iterator.len() as u8, iterator)
    }

    /// Constructs a sparse Merkle path from a manually specified tree depth, and an iterator over
    /// Merkle nodes from deepest to shallowest.
    ///
    /// Knowing the size is necessary to calculate the depth of the tree, which is needed to detect
    /// which nodes are empty nodes.
    ///
    /// # Errors
    /// Returns [MerkleError::DepthTooBig] if `tree_depth` is greater than [SMT_MAX_DEPTH].
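    ///
    /// A minimal usage sketch with an iterator that is not an [ExactSizeIterator]:
    ///
    /// ```
    /// # use miden_crypto::{ONE, hash::rpo::RpoDigest, merkle::SparseMerklePath};
    /// # let node = RpoDigest::new([ONE; 4]);
    /// // `repeat(..).take(4)` does not know its exact size, so the depth is passed explicitly.
    /// let nodes = core::iter::repeat(node).take(4);
    /// let path = SparseMerklePath::from_iter_with_depth(4, nodes).unwrap();
    /// assert_eq!(path.depth(), 4);
    /// ```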
    pub fn from_iter_with_depth(
        tree_depth: u8,
        iter: impl IntoIterator<Item = RpoDigest>,
    ) -> Result<Self, MerkleError> {
        if tree_depth > SMT_MAX_DEPTH {
            return Err(MerkleError::DepthTooBig(tree_depth as u64));
        }

        let mut empty_nodes_mask: u64 = 0;
        let mut nodes: Vec<RpoDigest> = Default::default();
        let path: Self = iter::zip(path_depth_iter(tree_depth), iter)
            .map(|(depth, node)| {
                let &equivalent_empty_node = EmptySubtreeRoots::entry(tree_depth, depth.get());
                let is_empty = node == equivalent_empty_node;
                let node = if is_empty { None } else { Some(node) };

        for (depth, node) in iter::zip(path_depth_iter(tree_depth), iterator) {
            let &equivalent_empty_node = EmptySubtreeRoots::entry(tree_depth, depth.get());
            let is_empty = node == equivalent_empty_node;
            let node = if is_empty { None } else { Some(node) };
                (depth, node)
            })
            .collect();

            match node {
                Some(node) => nodes.push(node),
                None => empty_nodes_mask |= Self::bitmask_for_depth(depth),
            }
        }

        Ok(SparseMerklePath { nodes, empty_nodes_mask })
        Ok(path)
    }

    /// Returns the total depth of this path, i.e., the number of nodes this path represents.
@@ -85,24 +95,63 @@ impl SparseMerklePath {
    /// Returns [MerkleError::DepthTooBig] if `node_depth` is greater than the total depth of this
    /// path.
    pub fn at_depth(&self, node_depth: NonZero<u8>) -> Result<RpoDigest, MerkleError> {
        let node = self
            .at_depth_nonempty(node_depth)?
            .unwrap_or_else(|| *EmptySubtreeRoots::entry(self.depth(), node_depth.get()));

        Ok(node)
    }

    /// Get a specific non-empty node in this path at a given depth, or `None` if the specified
    /// node is an empty node.
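    /// Unlike [`SparseMerklePath::at_depth()`], an empty node is reported as `None` rather than
    /// being materialized as the corresponding empty-subtree hash.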
    ///
    /// # Errors
    /// Returns [MerkleError::DepthTooBig] if `node_depth` is greater than the total depth of this
    /// path.
    pub fn at_depth_nonempty(
        &self,
        node_depth: NonZero<u8>,
    ) -> Result<Option<RpoDigest>, MerkleError> {
        if node_depth.get() > self.depth() {
            return Err(MerkleError::DepthTooBig(node_depth.get().into()));
        }

        let node = if let Some(nonempty_index) = self.get_nonempty_index(node_depth) {
            self.nodes[nonempty_index]
        } else {
            *EmptySubtreeRoots::entry(self.depth(), node_depth.get())
        };
        if self.is_depth_empty(node_depth) {
            return Ok(None);
        }

        Ok(node)
        // Our index needs to account for all the empty nodes that aren't in `self.nodes`.
        let nonempty_index = self.get_nonempty_index(node_depth);

        Ok(Some(self.nodes[nonempty_index]))
    }

    /// Returns the path node at the specified index, or [None] if the index is out of bounds.
    ///
    /// The node at index 0 is the deepest part of the path.
    ///
    /// ```
    /// # use core::num::NonZero;
    /// # use miden_crypto::{ZERO, ONE, hash::rpo::RpoDigest, merkle::SparseMerklePath};
    /// # let zero = RpoDigest::new([ZERO; 4]);
    /// # let one = RpoDigest::new([ONE; 4]);
    /// # let sparse_path = SparseMerklePath::from_sized_iter(vec![zero, one, one, zero]).unwrap();
    /// let depth = NonZero::new(sparse_path.depth()).unwrap();
    /// assert_eq!(
    ///     sparse_path.at_idx(0).unwrap(),
    ///     sparse_path.at_depth(depth).unwrap(),
    /// );
    /// ```
    pub fn at_idx(&self, index: usize) -> Option<RpoDigest> {
        // If this overflows *or* if the depth is zero then the index was out of bounds.
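        // E.g. for a path of depth 8, index 0 maps to depth 8 (the deepest node) and index 7
        // maps to depth 1.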
        let depth = NonZero::new(u8::checked_sub(self.depth(), index as u8)?)?;
        self.at_depth(depth).ok()
    }

    // PROVIDERS
    // ============================================================================================

    /// Constructs a borrowing iterator over the nodes in this path.
    /// Starts from the leaf and iterates toward the root (excluding the root).
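    /// Empty nodes are materialized on the fly as the corresponding empty-subtree hashes, and the
    /// iterator yields owned [RpoDigest] values.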
    pub fn iter(&self) -> impl ExactSizeIterator<Item = RpoDigest> {
        self.into_iter()
    }
@@ -119,20 +168,14 @@ impl SparseMerklePath {
        (self.empty_nodes_mask & Self::bitmask_for_depth(node_depth)) != 0
    }

    /// Returns the index of the non-empty node in the `self.nodes` vector. If the specified depth
    /// is empty, `None` is returned.
    fn get_nonempty_index(&self, node_depth: NonZero<u8>) -> Option<usize> {
        if self.is_depth_empty(node_depth) {
            return None;
        }

    fn get_nonempty_index(&self, node_depth: NonZero<u8>) -> usize {
        let bit_index = node_depth.get() - 1;
        let without_shallower = self.empty_nodes_mask >> bit_index;
        let empty_deeper = without_shallower.count_ones() as usize;
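        // For example: with `depth() == 8` and empty nodes at depths 1, 2, 3, 6 and 7
        // (mask `0b0110_0111`), a query at depth 5 gives `normal_index == 3` and
        // `empty_deeper == 2` (depths 6 and 7), so the node lives at `self.nodes[1]`.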
        // The vec index we would use if we didn't have any empty nodes to account for...
        let normal_index = (self.depth() - node_depth.get()) as usize;
        // subtracted by the number of empty nodes that are deeper than us.
        Some(normal_index - empty_deeper)
        normal_index - empty_deeper
    }
}

@@ -152,21 +195,8 @@ impl Deserializable for SparseMerklePath {
        source: &mut R,
    ) -> Result<Self, DeserializationError> {
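        // The layout read here: the path depth as a `u8`, the empty-node bitmask as a `u64`,
        // and then only the non-empty node digests.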
        let depth = source.read_u8()?;
        if depth > SMT_MAX_DEPTH {
            return Err(DeserializationError::InvalidValue(format!(
                "SparseMerklePath max depth exceeded ({} > {})",
                depth, SMT_MAX_DEPTH
            )));
        }
        let empty_nodes_mask = source.read_u64()?;
        let empty_nodes_count = empty_nodes_mask.count_ones();
        if empty_nodes_count > depth as u32 {
            return Err(DeserializationError::InvalidValue(format!(
                "SparseMerklePath has more empty nodes ({}) than its full length ({})",
                empty_nodes_count, depth
            )));
        }
        let count = depth as u32 - empty_nodes_count;
        let count = depth as u32 - empty_nodes_mask.count_ones();
        let nodes = source.read_many::<RpoDigest>(count as usize)?;
        Ok(Self { empty_nodes_mask, nodes })
    }
@@ -181,13 +211,13 @@ impl From<SparseMerklePath> for MerklePath {
    }
}

/// # Errors
///
/// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
/// [`SMT_MAX_DEPTH`].
impl TryFrom<MerklePath> for SparseMerklePath {
    type Error = MerkleError;

    /// # Errors
    ///
    /// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
    /// [`SMT_MAX_DEPTH`].
    fn try_from(path: MerklePath) -> Result<Self, MerkleError> {
        SparseMerklePath::from_sized_iter(path)
    }
@@ -202,11 +232,41 @@ impl From<SparseMerklePath> for Vec<RpoDigest> {
// ITERATORS
// ================================================================================================

/// Iterator for [`SparseMerklePath`]. Starts from the leaf and iterates toward the root (excluding
/// the root).
/// Constructs a [SparseMerklePath] out of an iterator of optional nodes, where `None` indicates an
/// empty node.
impl FromIterator<(NonZero<u8>, Option<RpoDigest>)> for SparseMerklePath {
    fn from_iter<I>(iter: I) -> SparseMerklePath
    where
        I: IntoIterator<Item = (NonZero<u8>, Option<RpoDigest>)>,
    {
        let mut empty_nodes_mask: u64 = 0;
        let mut nodes: Vec<RpoDigest> = Default::default();

        for (depth, node) in iter {
            match node {
                Some(node) => nodes.push(node),
                None => empty_nodes_mask |= Self::bitmask_for_depth(depth),
            }
        }

        SparseMerklePath { nodes, empty_nodes_mask }
    }
}

impl<'p> IntoIterator for &'p SparseMerklePath {
    type Item = <SparseMerklePathIter<'p> as Iterator>::Item;
    type IntoIter = SparseMerklePathIter<'p>;

    fn into_iter(self) -> SparseMerklePathIter<'p> {
        let tree_depth = self.depth();
        SparseMerklePathIter { path: self, next_depth: tree_depth }
    }
}

/// Borrowing iterator for [`SparseMerklePath`].
pub struct SparseMerklePathIter<'p> {
    /// The "inner" value we're iterating over.
    path: Cow<'p, SparseMerklePath>,
    path: &'p SparseMerklePath,

    /// The depth a `next()` call will get. `next_depth == 0` indicates that the iterator has been
    /// exhausted.
@@ -223,10 +283,7 @@ impl Iterator for SparseMerklePathIter<'_> {
        self.next_depth = this_depth.get() - 1;

        // `this_depth` is only ever decreasing, so it can't ever exceed `self.path.depth()`.
        let node = self
            .path
            .at_depth(this_depth)
            .expect("current depth should never exceed the path depth");
        let node = self.path.at_depth(this_depth).unwrap();
        Some(node)
    }

@@ -247,32 +304,57 @@ impl FusedIterator for SparseMerklePathIter<'_> {}

// TODO: impl DoubleEndedIterator.

/// Owning iterator for [SparseMerklePath].
pub struct IntoIter {
    /// The "inner" value we're iterating over.
    path: SparseMerklePath,

    /// The depth a `next()` call will get. `next_depth == 0` indicates that the iterator has been
    /// exhausted.
    next_depth: u8,
}

impl IntoIterator for SparseMerklePath {
    type IntoIter = SparseMerklePathIter<'static>;
    type IntoIter = IntoIter;
    type Item = <Self::IntoIter as Iterator>::Item;

    fn into_iter(self) -> SparseMerklePathIter<'static> {
    fn into_iter(self) -> IntoIter {
        let tree_depth = self.depth();
        SparseMerklePathIter {
            path: Cow::Owned(self),
            next_depth: tree_depth,
        }
        IntoIter { path: self, next_depth: tree_depth }
    }
}

impl<'p> IntoIterator for &'p SparseMerklePath {
    type Item = <SparseMerklePathIter<'p> as Iterator>::Item;
    type IntoIter = SparseMerklePathIter<'p>;
impl Iterator for IntoIter {
    type Item = RpoDigest;

    fn into_iter(self) -> SparseMerklePathIter<'p> {
        let tree_depth = self.depth();
        SparseMerklePathIter {
            path: Cow::Borrowed(self),
            next_depth: tree_depth,
        }
    fn next(&mut self) -> Option<RpoDigest> {
        let this_depth = self.next_depth;
        // Paths don't include the root, so if `this_depth` is 0 then we keep returning `None`.
        let this_depth = NonZero::new(this_depth)?;
        self.next_depth = this_depth.get() - 1;

        // `this_depth` is only ever decreasing, so it can't ever exceed `self.path.depth()`.
        let node = self.path.at_depth(this_depth).unwrap();
        Some(node)
    }

    // IntoIter always knows its exact size.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = ExactSizeIterator::len(self);
        (remaining, Some(remaining))
    }
}

impl ExactSizeIterator for IntoIter {
    fn len(&self) -> usize {
        self.next_depth as usize
    }
}

impl FusedIterator for IntoIter {}

// TODO: impl DoubleEndedIterator.

// COMPARISONS
// ================================================================================================
impl PartialEq<MerklePath> for SparseMerklePath {
@@ -324,13 +406,13 @@ impl From<(SparseMerklePath, Word)> for SparseValuePath {
    }
}

/// # Errors
///
/// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
/// [`SMT_MAX_DEPTH`].
impl TryFrom<ValuePath> for SparseValuePath {
    type Error = MerkleError;

    /// # Errors
    ///
    /// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
    /// [`SMT_MAX_DEPTH`].
    fn try_from(other: ValuePath) -> Result<Self, MerkleError> {
        let ValuePath { value, path } = other;
        let path = SparseMerklePath::try_from(path)?;
@@ -374,12 +456,10 @@ fn path_depth_iter(tree_depth: u8) -> impl ExactSizeIterator<Item = NonZero<u8>>
    top_down_iter.rev()
}

// TESTS
// ================================================================================================
#[cfg(test)]
mod tests {
    use alloc::vec::Vec;
    use core::num::NonZero;
    use core::{iter, num::NonZero};

    use assert_matches::assert_matches;

@@ -477,34 +557,145 @@ mod tests {

        assert_eq!(sparse_path.empty_nodes_mask, EMPTY_BITS);
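        // With the layout exercised below, depths 1, 2, 3, 6 and 7 are empty, so `EMPTY_BITS`
        // is `0b0110_0111`.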

        // Keep track of how many non-empty nodes we have seen
        let mut nonempty_idx = 0;
        // Depth 8.
        {
            let depth: u8 = 8;

        // Test starting from the deepest nodes (depth 8)
        for depth in (1..=8).rev() {
            // Check that the way we calculate these indices is correct.
            let idx = (sparse_path.depth() - depth) as usize;
            let bit = 1 << (depth - 1);
            assert_eq!(idx, 0);

            // Check that the depth bit is set correctly...
            // Check that the way we calculate these bitmasks is correct.
            let bit = 0b1000_0000;
            assert_eq!(bit, 1 << (depth - 1));

            // Check that the depth-8 bit is not set...
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert!(!is_set);
            // ...which should match the status of the `sparse_nodes` element being `None`.
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());

            if is_set {
                // Check that we don't return digests for empty nodes
                let &test_node = sparse_nodes.get(idx).unwrap();
                assert_eq!(test_node, None);
            } else {
                // Check that we can calculate non-empty indices correctly.
                let control_node = raw_nodes.get(idx).unwrap();
                assert_eq!(
                    sparse_path.get_nonempty_index(NonZero::new(depth).unwrap()).unwrap(),
                    nonempty_idx
                );
                let test_node = sparse_path.nodes.get(nonempty_idx).unwrap();
                assert_eq!(test_node, control_node);
                // And finally, check that we can calculate non-empty indices correctly.
                let control_node = raw_nodes.get(idx).unwrap();
                let nonempty_idx: usize = 0;
                assert_eq!(sparse_path.get_nonempty_index(NonZero::new(depth).unwrap()), nonempty_idx);
                let test_node = sparse_path.nodes.get(nonempty_idx).unwrap();
                assert_eq!(test_node, control_node);
            }

            nonempty_idx += 1;
        }
        // Rinse and repeat for each remaining depth.

        // Depth 7.
        {
            let depth: u8 = 7;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 1);
            let bit = 0b0100_0000;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert!(is_set);
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());

            let &test_node = sparse_nodes.get(idx).unwrap();
            assert_eq!(test_node, None);
        }

        // Depth 6.
        {
            let depth: u8 = 6;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 2);
            let bit = 0b0010_0000;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());
            assert!(is_set);

            let &test_node = sparse_nodes.get(idx).unwrap();
            assert_eq!(test_node, None);
        }

        // Depth 5.
        {
            let depth: u8 = 5;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 3);
            let bit = 0b0001_0000;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());
            assert!(!is_set);

            let control_node = raw_nodes.get(idx).unwrap();
            let nonempty_idx: usize = 1;
            assert_eq!(sparse_path.nodes.get(nonempty_idx).unwrap(), control_node);
            assert_eq!(sparse_path.get_nonempty_index(NonZero::new(depth).unwrap()), nonempty_idx);
            let test_node = sparse_path.nodes.get(nonempty_idx).unwrap();
            assert_eq!(test_node, control_node);
        }

        // Depth 4.
        {
            let depth: u8 = 4;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 4);
            let bit = 0b0000_1000;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());
            assert!(!is_set);

            let control_node = raw_nodes.get(idx).unwrap();
            let nonempty_idx: usize = 2;
            assert_eq!(sparse_path.nodes.get(nonempty_idx).unwrap(), control_node);
            assert_eq!(sparse_path.get_nonempty_index(NonZero::new(depth).unwrap()), nonempty_idx);
            let test_node = sparse_path.nodes.get(nonempty_idx).unwrap();
            assert_eq!(test_node, control_node);
        }

        // Depth 3.
        {
            let depth: u8 = 3;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 5);
            let bit = 0b0000_0100;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert!(is_set);
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());

            let &test_node = sparse_nodes.get(idx).unwrap();
            assert_eq!(test_node, None);
        }

        // Depth 2.
        {
            let depth: u8 = 2;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 6);
            let bit = 0b0000_0010;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert!(is_set);
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());

            let &test_node = sparse_nodes.get(idx).unwrap();
            assert_eq!(test_node, None);
        }

        // Depth 1.
        {
            let depth: u8 = 1;
            let idx = (sparse_path.depth() - depth) as usize;
            assert_eq!(idx, 7);
            let bit = 0b0000_0001;
            assert_eq!(bit, 1 << (depth - 1));
            let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
            assert!(is_set);
            assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());

            let &test_node = sparse_nodes.get(idx).unwrap();
            assert_eq!(test_node, None);
        }
    }
@@ -516,25 +707,21 @@ mod tests {
            let index = NodeIndex::from(Smt::key_to_leaf_index(key));

            let control_path = tree.get_path(key);
            for (&control_node, proof_index) in
                itertools::zip_eq(&*control_path, index.proof_indices())
            {
                let proof_node = tree.get_node_hash(proof_index);
                assert_eq!(control_node, proof_node);
            for (&control_node, proof_index) in iter::zip(&*control_path, index.proof_indices()) {
                let proof_node = tree.get_hash(proof_index);
                assert_eq!(control_node, proof_node, "WHat");
            }

            let sparse_path =
                SparseMerklePath::from_sized_iter(control_path.clone().into_iter()).unwrap();
            for (sparse_node, proof_idx) in
                itertools::zip_eq(sparse_path.clone(), index.proof_indices())
            {
                let proof_node = tree.get_node_hash(proof_idx);
                assert_eq!(sparse_node, proof_node);
            for (sparse_node, proof_idx) in iter::zip(sparse_path.clone(), index.proof_indices()) {
                let proof_node = tree.get_hash(proof_idx);
                assert_eq!(sparse_node, proof_node, "WHat");
            }

            assert_eq!(control_path.depth(), sparse_path.depth());
            for (control, sparse) in itertools::zip_eq(control_path, sparse_path) {
                assert_eq!(control, sparse);
            for (i, (control, sparse)) in iter::zip(control_path, sparse_path).enumerate() {
                assert_eq!(control, sparse, "on iteration {i}");
            }
        }
    }
@@ -551,15 +738,24 @@ mod tests {

            // Test random access by depth.
            for depth in path_depth_iter(control_path.depth()) {
                let control_node = control_path.at_depth(depth).unwrap();
                let &control_node = control_path.at_depth(depth).unwrap();
                let sparse_node = sparse_path.at_depth(depth).unwrap();
                assert_eq!(control_node, sparse_node, "at depth {depth} for entry {i}");
            }

            // Test random access by index.
            // Letting index get to `control_path.len()` will test that both sides correctly return
            // `None` for out of bounds access.
            for index in 0..=(control_path.len()) {
                let control_node = control_path.at_idx(index).copied();
                let sparse_node = sparse_path.at_idx(index);
                assert_eq!(control_node, sparse_node);
            }
        }
    }

    #[test]
    fn test_borrowing_iterator() {
    fn test_owning_iterator() {
        let tree = make_smt(8192);

        for (key, _value) in tree.entries() {
@@ -570,9 +766,7 @@ mod tests {

            // Test that both iterators yield the same amount of the same values.
            let mut count: u64 = 0;
            for (&control_node, sparse_node) in
                itertools::zip_eq(control_path.iter(), sparse_path.iter())
            {
            for (&control_node, sparse_node) in iter::zip(control_path.iter(), sparse_path.iter()) {
                count += 1;
                assert_eq!(control_node, sparse_node);
            }
@@ -581,7 +775,7 @@ mod tests {
        }

    #[test]
    fn test_owning_iterator() {
    fn test_borrowing_iterator() {
        let tree = make_smt(8192);

        for (key, _value) in tree.entries() {
@@ -593,7 +787,7 @@ mod tests {

            // Test that both iterators yield the same amount of the same values.
            let mut count: u64 = 0;
            for (control_node, sparse_node) in itertools::zip_eq(control_path, sparse_path) {
            for (control_node, sparse_node) in iter::zip(control_path, sparse_path) {
                count += 1;
                assert_eq!(control_node, sparse_node);
            }
@@ -612,6 +806,7 @@ mod tests {
            sparse_path.at_depth(NonZero::new(1).unwrap()),
            Err(MerkleError::DepthTooBig(1))
        );
        assert_eq!(sparse_path.at_idx(0), None);
        assert_eq!(sparse_path.iter().next(), None);
        assert_eq!(sparse_path.into_iter().next(), None);
    }