feat: add SparseMerklePath (#389)

Qyriad 2025-05-04 03:27:13 +02:00 committed by GitHub
parent 78672585f1
commit e070fc19ce
12 changed files with 759 additions and 47 deletions


@@ -16,6 +16,7 @@
- Optimized duplicate key detection in `Smt::with_entries_concurrent` (#395).
- [BREAKING] Moved `rand` to version `0.9` removing the `try_fill_bytes` method (#398).
- [BREAKING] Increment minimum supported Rust version to 1.85 (#399).
- Added `SparseMerklePath`, a memory-efficient representation of `MerklePath` that stores empty nodes in a bitmask (#389).
## 0.13.3 (2025-02-18)

Cargo.lock generated

@@ -254,7 +254,7 @@ dependencies = [
"clap",
"criterion-plot",
"is-terminal",
"itertools",
"itertools 0.10.5",
"num-traits",
"once_cell",
"oorandom",
@@ -275,7 +275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
"itertools",
"itertools 0.10.5",
]
[[package]]
@@ -453,6 +453,15 @@ dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.15"
@@ -524,6 +533,7 @@ dependencies = [
"glob",
"hashbrown",
"hex",
"itertools 0.14.0",
"num",
"num-complex",
"proptest",


@@ -83,6 +83,7 @@ assert_matches = { version = "1.5", default-features = false }
criterion = { version = "0.5", features = ["html_reports"] }
getrandom = { version = "0.3", default-features = false }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
itertools = { version = "0.14" }
proptest = { version = "1.6", default-features = false, features = ["alloc"]}
rand_chacha = { version = "0.9", default-features = false }
rand-utils = { version = "0.12", package = "winter-rand-utils" }


@@ -164,6 +164,18 @@ impl NodeIndex {
self.depth = self.depth.saturating_sub(delta);
self.value >>= delta as u32;
}
// ITERATORS
// --------------------------------------------------------------------------------------------
/// Returns an iterator over the indices required for a Merkle proof of inclusion of the node at
/// `self`.
///
/// This is *exclusive* on both ends: neither `self` nor the root index is included in the
/// returned iterator.
pub fn proof_indices(&self) -> impl ExactSizeIterator<Item = NodeIndex> + use<> {
ProofIter { next_index: self.sibling() }
}
}
impl Display for NodeIndex {
@@ -188,6 +200,39 @@ impl Deserializable for NodeIndex {
}
}
/// Implementation for [`NodeIndex::proof_indices()`].
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Hash)]
struct ProofIter {
next_index: NodeIndex,
}
impl Iterator for ProofIter {
type Item = NodeIndex;
fn next(&mut self) -> Option<NodeIndex> {
if self.next_index.is_root() {
return None;
}
let index = self.next_index;
self.next_index = index.parent().sibling();
Some(index)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let remaining = ExactSizeIterator::len(self);
(remaining, Some(remaining))
}
}
impl ExactSizeIterator for ProofIter {
fn len(&self) -> usize {
self.next_index.depth() as usize
}
}
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
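As a rough sketch of what the new `proof_indices()` iterator yields (written as if inside the crate's own tests, using the existing `NodeIndex::make` constructor; illustrative only, not part of the diff):

use crate::merkle::NodeIndex;

// For a node at depth 3 with value 0b101, the proof indices are the sibling at each level,
// ordered from deepest to shallowest and excluding the root.
let index = NodeIndex::make(3, 0b101);
let indices: Vec<NodeIndex> = index.proof_indices().collect();
assert_eq!(indices, vec![
    NodeIndex::make(3, 0b100), // sibling of the node itself
    NodeIndex::make(2, 0b11),  // sibling of its parent
    NodeIndex::make(1, 0b0),   // sibling of its grandparent, i.e. a child of the root
]);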


@@ -95,26 +95,16 @@ impl MerkleTree {
/// Returns an error if:
/// * The specified depth is greater than the depth of the tree.
/// * The specified value is not valid for the specified depth.
pub fn get_path(&self, mut index: NodeIndex) -> Result<MerklePath, MerkleError> {
pub fn get_path(&self, index: NodeIndex) -> Result<MerklePath, MerkleError> {
if index.is_root() {
return Err(MerkleError::DepthTooSmall(index.depth()));
} else if index.depth() > self.depth() {
return Err(MerkleError::DepthTooBig(index.depth() as u64));
}
// TODO should we create a helper in `NodeIndex` that will encapsulate traversal to root so
// we always use inlined `for` instead of `while`? the reason to use `for` is because its
// easier for the compiler to vectorize.
let mut path = Vec::with_capacity(index.depth() as usize);
for _ in 0..index.depth() {
let sibling = index.sibling().to_scalar_index() as usize;
path.push(self.nodes[sibling]);
index.move_up();
}
debug_assert!(index.is_root(), "the path walk must go all the way to the root");
Ok(path.into())
Ok(MerklePath::from(Vec::from_iter(
index.proof_indices().map(|index| self.get_node(index).unwrap()),
)))
}
// ITERATORS


@@ -20,6 +20,9 @@ pub use merkle_tree::{MerkleTree, path_to_text, tree_to_text};
mod path;
pub use path::{MerklePath, RootPath, ValuePath};
mod sparse_path;
pub use sparse_path::{SparseMerklePath, SparseValuePath};
mod smt;
pub use smt::{
InnerNode, LeafIndex, MutationSet, NodeMutation, PartialSmt, SMT_DEPTH, SMT_MAX_DEPTH,


@@ -1,5 +1,8 @@
use alloc::vec::Vec;
use core::ops::{Deref, DerefMut};
use core::{
num::NonZero,
ops::{Deref, DerefMut},
};
use super::{InnerNodeInfo, MerkleError, NodeIndex, Rpo256, RpoDigest};
use crate::{
@@ -11,6 +14,9 @@ use crate::{
// ================================================================================================
/// A merkle path container, composed of a sequence of nodes of a Merkle tree.
///
/// Indexing into this type starts at the deepest part of the path and gets shallower. That is,
/// the node at index `0` is deeper than the node at index `self.len() - 1`.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct MerklePath {
@@ -22,6 +28,8 @@ impl MerklePath {
// --------------------------------------------------------------------------------------------
/// Creates a new Merkle path from a list of nodes.
///
/// The list must be in order of deepest to shallowest.
pub fn new(nodes: Vec<RpoDigest>) -> Self {
assert!(nodes.len() <= u8::MAX.into(), "MerklePath may have at most 256 items");
Self { nodes }
@@ -30,12 +38,22 @@ impl MerklePath {
// PROVIDERS
// --------------------------------------------------------------------------------------------
/// Returns a reference to the path node at the specified depth.
///
/// The `depth` parameter is defined in terms of `self.depth()`. Merkle paths conventionally do
/// not include the root, so the shallowest depth is `1`, and the deepest depth is
/// `self.depth()`.
pub fn at_depth(&self, depth: NonZero<u8>) -> Option<RpoDigest> {
let index = u8::checked_sub(self.depth(), depth.get())?;
self.nodes.get(index as usize).copied()
}
/// Returns the depth at which this Merkle path proof is valid.
pub fn depth(&self) -> u8 {
self.nodes.len() as u8
}
/// Returns a reference to the [MerklePath]'s nodes.
/// Returns a reference to the [MerklePath]'s nodes, in order of deepest to shallowest.
pub fn nodes(&self) -> &[RpoDigest] {
&self.nodes
}
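As a rough sketch of the deepest-first ordering and the new `at_depth()` accessor (written as if inside the crate, using arbitrary illustrative digest values):

use core::num::NonZero;
use crate::{Felt, ONE, hash::rpo::RpoDigest, merkle::MerklePath};

let node = |n: u64| RpoDigest::new([ONE, ONE, ONE, Felt::new(n)]);
// Nodes are listed from deepest to shallowest: depth 3, then depth 2, then depth 1.
let path = MerklePath::new(vec![node(3), node(2), node(1)]);
assert_eq!(path.depth(), 3);
assert_eq!(path.at_depth(NonZero::new(3).unwrap()), Some(node(3))); // deepest node, index 0
assert_eq!(path.at_depth(NonZero::new(1).unwrap()), Some(node(1))); // shallowest node, index 2
assert_eq!(path.at_depth(NonZero::new(4).unwrap()), None); // beyond the path's depth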


@@ -79,28 +79,34 @@ pub(crate) trait SparseMerkleTree<const DEPTH: u8> {
// PROVIDED METHODS
// ---------------------------------------------------------------------------------------------
/// Returns a [MerklePath] to the specified key.
///
/// This is mostly an implementation detail of [`Self::open()`].
fn get_path(&self, key: &Self::Key) -> MerklePath {
let index = NodeIndex::from(Self::key_to_leaf_index(key));
index.proof_indices().map(|index| self.get_node_hash(index)).collect()
}
/// Get the hash of a node at an arbitrary index, including the root or leaf hashes.
///
/// For the root index, this simply returns [`Self::root()`]. Other hashes are retrieved by
/// calling [`Self::get_inner_node()`] on the parent and returning the corresponding child hash.
fn get_node_hash(&self, index: NodeIndex) -> RpoDigest {
if index.is_root() {
return self.root();
}
let InnerNode { left, right } = self.get_inner_node(index.parent());
let index_is_right = index.is_value_odd();
if index_is_right { right } else { left }
}
/// Returns an opening of the leaf associated with `key`. Conceptually, an opening is a Merkle
/// path to the leaf, as well as the leaf itself.
fn open(&self, key: &Self::Key) -> Self::Opening {
let leaf = self.get_leaf(key);
let mut index: NodeIndex = {
let leaf_index: LeafIndex<DEPTH> = Self::key_to_leaf_index(key);
leaf_index.into()
};
let merkle_path = {
let mut path = Vec::with_capacity(index.depth() as usize);
for _ in 0..index.depth() {
let is_right = index.is_value_odd();
index.move_up();
let InnerNode { left, right } = self.get_inner_node(index);
let value = if is_right { left } else { right };
path.push(value);
}
MerklePath::new(path)
};
let merkle_path = self.get_path(key);
Self::path_and_leaf_to_opening(merkle_path, leaf)
}


@@ -46,8 +46,8 @@ impl PartialSmt {
{
let mut partial_smt = Self::new();
for (leaf, path) in paths.into_iter().map(SmtProof::into_parts) {
partial_smt.add_path(path, leaf)?;
for (path, leaf) in paths.into_iter().map(SmtProof::into_parts) {
partial_smt.add_path(leaf, path)?;
}
Ok(partial_smt)


@@ -5,6 +5,7 @@ use super::{
LeafIndex, MerkleError, MerklePath, MutationSet, NodeIndex, RpoDigest, SMT_MAX_DEPTH,
SMT_MIN_DEPTH, SparseMerkleTree, Word,
};
use crate::merkle::{SparseMerklePath, SparseValuePath};
#[cfg(test)]
mod tests;
@@ -169,8 +170,15 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
/// Returns an opening of the leaf associated with `key`. Conceptually, an opening is a Merkle
/// path to the leaf, as well as the leaf itself.
pub fn open(&self, key: &LeafIndex<DEPTH>) -> ValuePath {
<Self as SparseMerkleTree<DEPTH>>::open(self, key)
pub fn open(&self, key: &LeafIndex<DEPTH>) -> SparseValuePath {
let value = RpoDigest::new(self.get_value(key));
let nodes = key.index.proof_indices().map(|index| self.get_node_hash(index));
// `from_sized_iter()` returns an error if there are more nodes than `SMT_MAX_DEPTH`, but
// this could only happen if we have more levels than `SMT_MAX_DEPTH` ourselves, which is
// guarded against in `SimpleSmt::new()`.
let path = SparseMerklePath::from_sized_iter(nodes).unwrap();
SparseValuePath { value, path }
}
/// Returns a boolean value indicating whether the SMT is empty.
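A rough usage sketch of the new return type (written as if inside the crate, assuming `SimpleSmt::insert` behaves as in the existing API; the sparse path converts back into a full `MerklePath` on demand):

use crate::{ONE, merkle::{LeafIndex, MerklePath, SimpleSmt}};

let mut tree = SimpleSmt::<8>::new().unwrap();
tree.insert(LeafIndex::<8>::new(3).unwrap(), [ONE, ONE, ONE, ONE]);
let opening = tree.open(&LeafIndex::<8>::new(3).unwrap());
// The opening spans the full tree depth, but most siblings in a nearly-empty tree are
// empty subtree roots, so only a few digests are actually stored.
assert_eq!(opening.path.depth(), 8);
let full: MerklePath = opening.path.into();
assert_eq!(full.depth(), 8);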


@@ -10,8 +10,8 @@ use crate::{
EMPTY_WORD, Word,
hash::rpo::Rpo256,
merkle::{
EmptySubtreeRoots, InnerNodeInfo, LeafIndex, MerkleTree, digests_to_words, int_to_leaf,
int_to_node, smt::SparseMerkleTree,
EmptySubtreeRoots, InnerNodeInfo, LeafIndex, MerklePath, MerkleTree, digests_to_words,
int_to_leaf, int_to_node, smt::SparseMerkleTree,
},
};
@@ -115,10 +115,22 @@ fn test_depth2_tree() {
assert_eq!(VALUES4[3], tree.get_node(NodeIndex::make(2, 3)).unwrap());
// check get_path(): depth 2
assert_eq!(vec![VALUES4[1], node3], *tree.open(&LeafIndex::<2>::new(0).unwrap()).path);
assert_eq!(vec![VALUES4[0], node3], *tree.open(&LeafIndex::<2>::new(1).unwrap()).path);
assert_eq!(vec![VALUES4[3], node2], *tree.open(&LeafIndex::<2>::new(2).unwrap()).path);
assert_eq!(vec![VALUES4[2], node2], *tree.open(&LeafIndex::<2>::new(3).unwrap()).path);
assert_eq!(
MerklePath::from(vec![VALUES4[1], node3]),
tree.open(&LeafIndex::<2>::new(0).unwrap()).path,
);
assert_eq!(
MerklePath::from(vec![VALUES4[0], node3]),
tree.open(&LeafIndex::<2>::new(1).unwrap()).path,
);
assert_eq!(
MerklePath::from(vec![VALUES4[3], node2]),
tree.open(&LeafIndex::<2>::new(2).unwrap()).path,
);
assert_eq!(
MerklePath::from(vec![VALUES4[2], node2]),
tree.open(&LeafIndex::<2>::new(3).unwrap()).path,
);
}
#[test]
@@ -239,7 +251,7 @@ fn small_tree_opening_is_consistent() {
for (key, path) in cases {
let opening = tree.open(&LeafIndex::<3>::new(key).unwrap());
assert_eq!(path, *opening.path);
assert_eq!(MerklePath::from(path), opening.path);
}
}


@@ -0,0 +1,618 @@
use alloc::{borrow::Cow, vec::Vec};
use core::{
iter::{self, FusedIterator},
num::NonZero,
};
use winter_utils::{Deserializable, DeserializationError, Serializable};
use super::{
EmptySubtreeRoots, MerkleError, MerklePath, RpoDigest, SMT_MAX_DEPTH, ValuePath, Word,
};
/// An alternative representation of [`MerklePath`] designed for memory efficiency when a Merkle
/// path contains empty nodes.
///
/// Empty nodes in the path are stored only by their position, represented with a bitmask. A
/// maximum of 64 nodes (`SMT_MAX_DEPTH`) can be stored (empty and non-empty combined). The more
/// empty nodes a path contains, the less memory this struct uses. Empty nodes are recomputed on
/// demand when the path is iterated, converted to a [MerklePath], or queried with
/// [`SparseMerklePath::at_depth()`], which incurs some overhead.
///
/// NOTE: This type assumes that Merkle paths always span from the root of the tree to a leaf.
/// Partial paths are not supported.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct SparseMerklePath {
/// A bitmask representing the empty nodes. Each set bit marks an empty node at the corresponding
/// depth: the least significant bit (bit 0) corresponds to the node at depth 1 (a child of the
/// root), and in general `bit index + 1` equals the node's depth.
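///
/// For example, a mask of `0b101` marks the nodes at depths 1 and 3 as empty.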
empty_nodes_mask: u64,
/// The non-empty nodes, stored from deepest to shallowest; empty nodes are omitted, so entries
/// are not contiguous across depths.
nodes: Vec<RpoDigest>,
}
impl SparseMerklePath {
/// Constructs a sparse Merkle path from an iterator over Merkle nodes that also knows its
/// exact size (such as iterators created with [Vec::into_iter]). The iterator must be in order
/// of deepest to shallowest.
///
/// Knowing the size is necessary to calculate the depth of the tree, which is needed to detect
/// which nodes are empty nodes.
///
/// # Errors
/// Returns [MerkleError::DepthTooBig] if the iterator yields more than [SMT_MAX_DEPTH] nodes.
pub fn from_sized_iter<I>(iterator: I) -> Result<Self, MerkleError>
where
I: IntoIterator<IntoIter: ExactSizeIterator, Item = RpoDigest>,
{
let iterator = iterator.into_iter();
let tree_depth = iterator.len() as u8;
if tree_depth > SMT_MAX_DEPTH {
return Err(MerkleError::DepthTooBig(tree_depth as u64));
}
let mut empty_nodes_mask: u64 = 0;
let mut nodes: Vec<RpoDigest> = Default::default();
for (depth, node) in iter::zip(path_depth_iter(tree_depth), iterator) {
let &equivalent_empty_node = EmptySubtreeRoots::entry(tree_depth, depth.get());
let is_empty = node == equivalent_empty_node;
let node = if is_empty { None } else { Some(node) };
match node {
Some(node) => nodes.push(node),
None => empty_nodes_mask |= Self::bitmask_for_depth(depth),
}
}
Ok(SparseMerklePath { nodes, empty_nodes_mask })
}
/// Returns the total depth of this path, i.e., the number of nodes this path represents.
pub fn depth(&self) -> u8 {
(self.nodes.len() + self.empty_nodes_mask.count_ones() as usize) as u8
}
/// Get a specific node in this path at a given depth.
///
/// The `depth` parameter is defined in terms of `self.depth()`. Merkle paths conventionally do
/// not include the root, so the shallowest depth is `1`, and the deepest depth is
/// `self.depth()`.
///
/// # Errors
/// Returns [MerkleError::DepthTooBig] if `node_depth` is greater than the total depth of this
/// path.
pub fn at_depth(&self, node_depth: NonZero<u8>) -> Result<RpoDigest, MerkleError> {
if node_depth.get() > self.depth() {
return Err(MerkleError::DepthTooBig(node_depth.get().into()));
}
let node = if let Some(nonempty_index) = self.get_nonempty_index(node_depth) {
self.nodes[nonempty_index]
} else {
*EmptySubtreeRoots::entry(self.depth(), node_depth.get())
};
Ok(node)
}
// PROVIDERS
// ============================================================================================
/// Constructs a borrowing iterator over the nodes in this path.
/// Starts from the leaf and iterates toward the root (excluding the root).
pub fn iter(&self) -> impl ExactSizeIterator<Item = RpoDigest> {
self.into_iter()
}
// PRIVATE HELPERS
// ============================================================================================
const fn bitmask_for_depth(node_depth: NonZero<u8>) -> u64 {
// - 1 because paths do not include the root.
1 << (node_depth.get() - 1)
}
const fn is_depth_empty(&self, node_depth: NonZero<u8>) -> bool {
(self.empty_nodes_mask & Self::bitmask_for_depth(node_depth)) != 0
}
/// Returns the index of the non-empty node in the `self.nodes` vector, or `None` if the node at
/// the specified depth is empty.
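///
/// For example, with `empty_nodes_mask = 0b0110_0111` (total depth 8; depths 1, 2, 3, 6, and 7
/// empty) and `node_depth = 4`, two of the deeper nodes (depths 6 and 7) are empty, so the
/// non-empty node at depth 4 lives at index `(8 - 4) - 2 = 2` of `self.nodes`.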
fn get_nonempty_index(&self, node_depth: NonZero<u8>) -> Option<usize> {
if self.is_depth_empty(node_depth) {
return None;
}
let bit_index = node_depth.get() - 1;
let without_shallower = self.empty_nodes_mask >> bit_index;
let empty_deeper = without_shallower.count_ones() as usize;
// The vec index we would use if we didn't have any empty nodes to account for...
let normal_index = (self.depth() - node_depth.get()) as usize;
// ...minus the number of empty nodes that are deeper than us.
Some(normal_index - empty_deeper)
}
}
// SERIALIZATION
// ================================================================================================
impl Serializable for SparseMerklePath {
fn write_into<W: winter_utils::ByteWriter>(&self, target: &mut W) {
target.write_u8(self.depth());
target.write_u64(self.empty_nodes_mask);
target.write_many(&self.nodes);
}
}
impl Deserializable for SparseMerklePath {
fn read_from<R: winter_utils::ByteReader>(
source: &mut R,
) -> Result<Self, DeserializationError> {
let depth = source.read_u8()?;
if depth > SMT_MAX_DEPTH {
return Err(DeserializationError::InvalidValue(format!(
"SparseMerklePath max depth exceeded ({} > {})",
depth, SMT_MAX_DEPTH
)));
}
let empty_nodes_mask = source.read_u64()?;
let empty_nodes_count = empty_nodes_mask.count_ones();
if empty_nodes_count > depth as u32 {
return Err(DeserializationError::InvalidValue(format!(
"SparseMerklePath has more empty nodes ({}) than its full length ({})",
empty_nodes_count, depth
)));
}
let count = depth as u32 - empty_nodes_count;
let nodes = source.read_many::<RpoDigest>(count as usize)?;
Ok(Self { empty_nodes_mask, nodes })
}
}
// CONVERSIONS
// ================================================================================================
impl From<SparseMerklePath> for MerklePath {
fn from(sparse_path: SparseMerklePath) -> Self {
MerklePath::from_iter(sparse_path)
}
}
impl TryFrom<MerklePath> for SparseMerklePath {
type Error = MerkleError;
/// # Errors
///
/// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
/// [`SMT_MAX_DEPTH`].
fn try_from(path: MerklePath) -> Result<Self, MerkleError> {
SparseMerklePath::from_sized_iter(path)
}
}
impl From<SparseMerklePath> for Vec<RpoDigest> {
fn from(path: SparseMerklePath) -> Self {
Vec::from_iter(path)
}
}
// ITERATORS
// ================================================================================================
/// Iterator for [`SparseMerklePath`]. Starts from the leaf and iterates toward the root (excluding
/// the root).
pub struct SparseMerklePathIter<'p> {
/// The "inner" value we're iterating over.
path: Cow<'p, SparseMerklePath>,
/// The depth that the next call to `next()` will return. `next_depth == 0` indicates that the
/// iterator has been exhausted.
next_depth: u8,
}
impl Iterator for SparseMerklePathIter<'_> {
type Item = RpoDigest;
fn next(&mut self) -> Option<RpoDigest> {
let this_depth = self.next_depth;
// Paths don't include the root, so if `this_depth` is 0 then we keep returning `None`.
let this_depth = NonZero::new(this_depth)?;
self.next_depth = this_depth.get() - 1;
// `this_depth` is only ever decreasing, so it can't ever exceed `self.path.depth()`.
let node = self
.path
.at_depth(this_depth)
.expect("current depth should never exceed the path depth");
Some(node)
}
// SparseMerklePathIter always knows its exact size.
fn size_hint(&self) -> (usize, Option<usize>) {
let remaining = ExactSizeIterator::len(self);
(remaining, Some(remaining))
}
}
impl ExactSizeIterator for SparseMerklePathIter<'_> {
fn len(&self) -> usize {
self.next_depth as usize
}
}
impl FusedIterator for SparseMerklePathIter<'_> {}
// TODO: impl DoubleEndedIterator.
impl IntoIterator for SparseMerklePath {
type IntoIter = SparseMerklePathIter<'static>;
type Item = <Self::IntoIter as Iterator>::Item;
fn into_iter(self) -> SparseMerklePathIter<'static> {
let tree_depth = self.depth();
SparseMerklePathIter {
path: Cow::Owned(self),
next_depth: tree_depth,
}
}
}
impl<'p> IntoIterator for &'p SparseMerklePath {
type Item = <SparseMerklePathIter<'p> as Iterator>::Item;
type IntoIter = SparseMerklePathIter<'p>;
fn into_iter(self) -> SparseMerklePathIter<'p> {
let tree_depth = self.depth();
SparseMerklePathIter {
path: Cow::Borrowed(self),
next_depth: tree_depth,
}
}
}
// COMPARISONS
// ================================================================================================
impl PartialEq<MerklePath> for SparseMerklePath {
fn eq(&self, rhs: &MerklePath) -> bool {
if self.depth() != rhs.depth() {
return false;
}
for (node, &rhs_node) in iter::zip(self, rhs.iter()) {
if node != rhs_node {
return false;
}
}
true
}
}
impl PartialEq<SparseMerklePath> for MerklePath {
fn eq(&self, rhs: &SparseMerklePath) -> bool {
rhs == self
}
}
// SPARSE VALUE PATH
// ================================================================================================
/// A container for a [crate::Word] value and its [SparseMerklePath] opening.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SparseValuePath {
/// The node value opening for `path`.
pub value: RpoDigest,
/// The path from `value` to `root` (exclusive), using an efficient memory representation for
/// empty nodes.
pub path: SparseMerklePath,
}
impl SparseValuePath {
/// Convenience function to construct a [SparseValuePath].
///
/// `value` is the value in the tree that `path` opens to.
pub fn new(value: RpoDigest, path: SparseMerklePath) -> Self {
Self { value, path }
}
}
impl From<(SparseMerklePath, Word)> for SparseValuePath {
fn from((path, value): (SparseMerklePath, Word)) -> Self {
SparseValuePath::new(value.into(), path)
}
}
impl TryFrom<ValuePath> for SparseValuePath {
type Error = MerkleError;
/// # Errors
///
/// This conversion returns [MerkleError::DepthTooBig] if the path length is greater than
/// [`SMT_MAX_DEPTH`].
fn try_from(other: ValuePath) -> Result<Self, MerkleError> {
let ValuePath { value, path } = other;
let path = SparseMerklePath::try_from(path)?;
Ok(SparseValuePath { value, path })
}
}
impl From<SparseValuePath> for ValuePath {
fn from(other: SparseValuePath) -> Self {
let SparseValuePath { value, path } = other;
ValuePath { value, path: path.into() }
}
}
impl PartialEq<ValuePath> for SparseValuePath {
fn eq(&self, rhs: &ValuePath) -> bool {
self.value == rhs.value && self.path == rhs.path
}
}
impl PartialEq<SparseValuePath> for ValuePath {
fn eq(&self, rhs: &SparseValuePath) -> bool {
rhs == self
}
}
// HELPERS
// ================================================================================================
/// Iterator over path depths, starting at the deepest part of the tree and going up to the
/// shallowest depth just below the root (depth 1).
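///
/// For example, `path_depth_iter(3)` yields depths `3`, `2`, and `1`, in that order.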
fn path_depth_iter(tree_depth: u8) -> impl ExactSizeIterator<Item = NonZero<u8>> {
let top_down_iter = (1..=tree_depth).map(|depth| {
// SAFETY: the range `1..=tree_depth` can never yield 0. Even if `tree_depth` is 0, the
// resulting range `1..=0` simply yields no values, so this closure is never even called.
unsafe { NonZero::new_unchecked(depth) }
});
// Reverse the top-down iterator to get a bottom-up iterator.
top_down_iter.rev()
}
// TESTS
// ================================================================================================
#[cfg(test)]
mod tests {
use alloc::vec::Vec;
use core::num::NonZero;
use assert_matches::assert_matches;
use super::SparseMerklePath;
use crate::{
Felt, ONE, Word,
hash::rpo::RpoDigest,
merkle::{
EmptySubtreeRoots, MerkleError, MerklePath, NodeIndex, SMT_DEPTH, Smt,
smt::SparseMerkleTree, sparse_path::path_depth_iter,
},
};
fn make_smt(pair_count: u64) -> Smt {
let entries: Vec<(RpoDigest, Word)> = (0..pair_count)
.map(|n| {
let leaf_index = ((n as f64 / pair_count as f64) * 255.0) as u64;
let key = RpoDigest::new([ONE, ONE, Felt::new(n), Felt::new(leaf_index)]);
let value = [ONE, ONE, ONE, ONE];
(key, value)
})
.collect();
Smt::with_entries(entries).unwrap()
}
#[test]
fn test_roundtrip() {
let tree = make_smt(8192);
for (key, _value) in tree.entries() {
let (control_path, _) = tree.open(key).into_parts();
assert_eq!(control_path.len(), tree.depth() as usize);
let sparse_path = SparseMerklePath::try_from(control_path.clone()).unwrap();
assert_eq!(control_path.depth(), sparse_path.depth());
assert_eq!(sparse_path.depth(), SMT_DEPTH);
let test_path = MerklePath::from_iter(sparse_path.clone().into_iter());
assert_eq!(control_path, test_path);
}
}
/// Manually test the exact bit patterns for a sample path of 8 nodes, including both empty and
/// non-empty nodes.
///
/// This also offers an overview of what each part of the bit-math involved means and
/// represents.
#[test]
fn test_sparse_bits() {
const DEPTH: u8 = 8;
let raw_nodes: [RpoDigest; DEPTH as usize] = [
// Depth 8.
([8u8, 8, 8, 8].into()),
// Depth 7.
*EmptySubtreeRoots::entry(DEPTH, 7),
// Depth 6.
*EmptySubtreeRoots::entry(DEPTH, 6),
// Depth 5.
[5u8, 5, 5, 5].into(),
// Depth 4.
[4u8, 4, 4, 4].into(),
// Depth 3.
*EmptySubtreeRoots::entry(DEPTH, 3),
// Depth 2.
*EmptySubtreeRoots::entry(DEPTH, 2),
// Depth 1.
*EmptySubtreeRoots::entry(DEPTH, 1),
// Root is not included.
];
let sparse_nodes: [Option<RpoDigest>; DEPTH as usize] = [
// Depth 8.
Some([8u8, 8, 8, 8].into()),
// Depth 7.
None,
// Depth 6.
None,
// Depth 5.
Some([5u8, 5, 5, 5].into()),
// Depth 4.
Some([4u8, 4, 4, 4].into()),
// Depth 3.
None,
// Depth 2.
None,
// Depth 1.
None,
// Root is not included.
];
const EMPTY_BITS: u64 = 0b0110_0111;
let sparse_path = SparseMerklePath::from_sized_iter(raw_nodes).unwrap();
assert_eq!(sparse_path.empty_nodes_mask, EMPTY_BITS);
// Keep track of how many non-empty nodes we have seen
let mut nonempty_idx = 0;
// Test starting from the deepest nodes (depth 8)
for depth in (1..=8).rev() {
let idx = (sparse_path.depth() - depth) as usize;
let bit = 1 << (depth - 1);
// Check that the depth bit is set correctly...
let is_set = (sparse_path.empty_nodes_mask & bit) != 0;
assert_eq!(is_set, sparse_nodes.get(idx).unwrap().is_none());
if is_set {
// Check that we don't return digests for empty nodes
let &test_node = sparse_nodes.get(idx).unwrap();
assert_eq!(test_node, None);
} else {
// Check that we can calculate non-empty indices correctly.
let control_node = raw_nodes.get(idx).unwrap();
assert_eq!(
sparse_path.get_nonempty_index(NonZero::new(depth).unwrap()).unwrap(),
nonempty_idx
);
let test_node = sparse_path.nodes.get(nonempty_idx).unwrap();
assert_eq!(test_node, control_node);
nonempty_idx += 1;
}
}
}
#[test]
fn from_sized_iter() {
let tree = make_smt(8192);
for (key, _value) in tree.entries() {
let index = NodeIndex::from(Smt::key_to_leaf_index(key));
let control_path = tree.get_path(key);
for (&control_node, proof_index) in
itertools::zip_eq(&*control_path, index.proof_indices())
{
let proof_node = tree.get_node_hash(proof_index);
assert_eq!(control_node, proof_node);
}
let sparse_path =
SparseMerklePath::from_sized_iter(control_path.clone().into_iter()).unwrap();
for (sparse_node, proof_idx) in
itertools::zip_eq(sparse_path.clone(), index.proof_indices())
{
let proof_node = tree.get_node_hash(proof_idx);
assert_eq!(sparse_node, proof_node);
}
assert_eq!(control_path.depth(), sparse_path.depth());
for (control, sparse) in itertools::zip_eq(control_path, sparse_path) {
assert_eq!(control, sparse);
}
}
}
#[test]
fn test_random_access() {
let tree = make_smt(8192);
for (i, (key, _value)) in tree.entries().enumerate() {
let control_path = tree.get_path(key);
let sparse_path = SparseMerklePath::try_from(control_path.clone()).unwrap();
assert_eq!(control_path.depth(), sparse_path.depth());
assert_eq!(sparse_path.depth(), SMT_DEPTH);
// Test random access by depth.
for depth in path_depth_iter(control_path.depth()) {
let control_node = control_path.at_depth(depth).unwrap();
let sparse_node = sparse_path.at_depth(depth).unwrap();
assert_eq!(control_node, sparse_node, "at depth {depth} for entry {i}");
}
}
}
#[test]
fn test_borrowing_iterator() {
let tree = make_smt(8192);
for (key, _value) in tree.entries() {
let control_path = tree.get_path(key);
let sparse_path = SparseMerklePath::try_from(control_path.clone()).unwrap();
assert_eq!(control_path.depth(), sparse_path.depth());
assert_eq!(sparse_path.depth(), SMT_DEPTH);
// Test that both iterators yield the same amount of the same values.
let mut count: u64 = 0;
for (&control_node, sparse_node) in
itertools::zip_eq(control_path.iter(), sparse_path.iter())
{
count += 1;
assert_eq!(control_node, sparse_node);
}
assert_eq!(count, control_path.depth() as u64);
}
}
#[test]
fn test_owning_iterator() {
let tree = make_smt(8192);
for (key, _value) in tree.entries() {
let control_path = tree.get_path(key);
let path_depth = control_path.depth();
let sparse_path = SparseMerklePath::try_from(control_path.clone()).unwrap();
assert_eq!(control_path.depth(), sparse_path.depth());
assert_eq!(sparse_path.depth(), SMT_DEPTH);
// Test that both iterators yield the same amount of the same values.
let mut count: u64 = 0;
for (control_node, sparse_node) in itertools::zip_eq(control_path, sparse_path) {
count += 1;
assert_eq!(control_node, sparse_node);
}
assert_eq!(count, path_depth as u64);
}
}
#[test]
fn test_zero_sized() {
let nodes: Vec<RpoDigest> = Default::default();
// Sparse paths that don't actually contain any nodes should still be well behaved.
let sparse_path = SparseMerklePath::from_sized_iter(nodes).unwrap();
assert_eq!(sparse_path.depth(), 0);
assert_matches!(
sparse_path.at_depth(NonZero::new(1).unwrap()),
Err(MerkleError::DepthTooBig(1))
);
assert_eq!(sparse_path.iter().next(), None);
assert_eq!(sparse_path.into_iter().next(), None);
}
}