Merge pull request #162 from 0xPolygonMiden/frisitano-tx-executor
Introduce data access recording capabilities
Commit f52ac29a02
10 changed files with 532 additions and 93 deletions
@@ -1,5 +1,5 @@
 use criterion::{black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion};
-use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
+use miden_crypto::merkle::{DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
 use miden_crypto::Word;
 use miden_crypto::{hash::rpo::RpoDigest, Felt};
 use rand_utils::{rand_array, rand_value};
@@ -28,6 +28,7 @@ use std::error::Error;
 ///
 /// Since this is a full representation of the MMR, elements are never removed and the MMR will
 /// grow roughly `O(2n)` in number of leaf elements.
+#[derive(Debug, Clone)]
 pub struct Mmr {
     /// Refer to the `forest` method documentation for details of the semantics of this value.
     pub(super) forest: usize,
@@ -1,6 +1,6 @@
 use super::{
     hash::rpo::{Rpo256, RpoDigest},
-    utils::collections::{vec, BTreeMap, BTreeSet, Vec},
+    utils::collections::{vec, BTreeMap, BTreeSet, KvMap, RecordingMap, Vec},
     Felt, StarkField, Word, WORD_SIZE, ZERO,
 };
 use core::fmt;
@@ -33,7 +33,7 @@ mod mmr;
 pub use mmr::{Mmr, MmrPeaks, MmrProof};

 mod store;
-pub use store::MerkleStore;
+pub use store::{DefaultMerkleStore, MerkleStore, RecordingMerkleStore};

 mod node;
 pub use node::InnerNodeInfo;
@@ -154,7 +154,8 @@ impl PartialMerkleTree {
         self.leaves.iter().map(|&leaf| {
             (
                 leaf,
-                self.get_node(leaf).expect(&format!("Leaf with {leaf} is not in the nodes map")),
+                self.get_node(leaf)
+                    .unwrap_or_else(|_| panic!("Leaf with {leaf} is not in the nodes map")),
             )
         })
     }
@@ -1,5 +1,8 @@
 use super::{
-    super::{digests_to_words, int_to_node, MerkleStore, MerkleTree, NodeIndex, PartialMerkleTree},
+    super::{
+        digests_to_words, int_to_node, DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex,
+        PartialMerkleTree,
+    },
     RpoDigest, ValuePath, Vec,
 };

@@ -68,6 +68,12 @@ impl MerklePath {
     }
 }

+impl From<MerklePath> for Vec<RpoDigest> {
+    fn from(path: MerklePath) -> Self {
+        path.nodes
+    }
+}
+
 impl From<Vec<RpoDigest>> for MerklePath {
     fn from(path: Vec<RpoDigest>) -> Self {
         Self::new(path)
@@ -1,6 +1,7 @@
 use super::{
-    mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, MerkleError, MerklePath, MerklePathSet,
-    MerkleTree, NodeIndex, RootPath, Rpo256, RpoDigest, SimpleSmt, TieredSmt, ValuePath, Vec,
+    mmr::Mmr, BTreeMap, EmptySubtreeRoots, InnerNodeInfo, KvMap, MerkleError, MerklePath,
+    MerklePathSet, MerkleTree, NodeIndex, RecordingMap, RootPath, Rpo256, RpoDigest, SimpleSmt,
+    TieredSmt, ValuePath, Vec,
 };
 use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
 use core::borrow::Borrow;
@@ -8,6 +9,15 @@ use core::borrow::Borrow;
 #[cfg(test)]
 mod tests;

+// MERKLE STORE
+// ================================================================================================
+
+/// A default [MerkleStore] which uses a simple [BTreeMap] as the backing storage.
+pub type DefaultMerkleStore = MerkleStore<BTreeMap<RpoDigest, Node>>;
+
+/// A [MerkleStore] with recording capabilities which uses [RecordingMap] as the backing storage.
+pub type RecordingMerkleStore = MerkleStore<RecordingMap<RpoDigest, Node>>;
+
 #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
 pub struct Node {
     left: RpoDigest,
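Taken together with the `into_inner` accessor and the `RecordingMap::into_proof` finalizer added later in this diff, the two aliases support a record-then-replay workflow. The sketch below is illustrative only: it mirrors the `test_recorder` test further down; the `record_one_leaf` function and its `leaves` argument are hypothetical, and the import paths are assumed from the re-exports shown in this PR.

```rust
use miden_crypto::merkle::{
    DefaultMerkleStore, MerkleError, MerkleTree, NodeIndex, RecordingMerkleStore,
};
use miden_crypto::Word;

// Hypothetical helper; `leaves` must be a valid input for MerkleTree::new.
fn record_one_leaf(leaves: Vec<Word>) -> Result<(), MerkleError> {
    let tree = MerkleTree::new(leaves)?;

    // Collect the tree's inner nodes into a store backed by a RecordingMap.
    let recorder: RecordingMerkleStore = tree.inner_nodes().collect();

    // Reads go through &self and are recorded by the backing map.
    let index = NodeIndex::new(tree.depth(), 0)?;
    let _node = recorder.get_node(tree.root(), index)?;

    // The recorded accesses become a minimal store that can answer the same reads.
    let proof = recorder.into_inner().into_proof();
    let partial: DefaultMerkleStore = proof.into();
    assert!(partial.get_node(tree.root(), index).is_ok());
    Ok(())
}
```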
@@ -42,7 +52,7 @@ pub struct Node {
 /// # let T1 = MerkleTree::new([A, B, C, D, E, F, G, H1].to_vec()).expect("even number of leaves provided");
 /// # let ROOT0 = T0.root();
 /// # let ROOT1 = T1.root();
-/// let mut store = MerkleStore::new();
+/// let mut store: MerkleStore = MerkleStore::new();
 ///
 /// // the store is initialized with the SMT empty nodes
 /// assert_eq!(store.num_internal_nodes(), 255);
@@ -51,9 +61,8 @@ pub struct Node {
 /// let tree2 = MerkleTree::new(vec![A, B, C, D, E, F, G, H1]).unwrap();
 ///
 /// // populates the store with two merkle trees, common nodes are shared
-/// store
-///     .extend(tree1.inner_nodes())
-///     .extend(tree2.inner_nodes());
+/// store.extend(tree1.inner_nodes());
+/// store.extend(tree2.inner_nodes());
 ///
 /// // every leaf except the last are the same
 /// for i in 0..7 {
@@ -78,40 +87,24 @@ pub struct Node {
 /// assert_eq!(store.num_internal_nodes() - 255, 10);
 /// ```
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub struct MerkleStore {
-    nodes: BTreeMap<RpoDigest, Node>,
+pub struct MerkleStore<T: KvMap<RpoDigest, Node> = BTreeMap<RpoDigest, Node>> {
+    nodes: T,
 }

-impl Default for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> Default for MerkleStore<T> {
     fn default() -> Self {
         Self::new()
     }
 }

-impl MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> MerkleStore<T> {
     // CONSTRUCTORS
     // --------------------------------------------------------------------------------------------

     /// Creates an empty `MerkleStore` instance.
-    pub fn new() -> MerkleStore {
+    pub fn new() -> MerkleStore<T> {
         // pre-populate the store with the empty hashes
-        let subtrees = EmptySubtreeRoots::empty_hashes(255);
-        let nodes = subtrees
-            .iter()
-            .rev()
-            .copied()
-            .zip(subtrees.iter().rev().skip(1).copied())
-            .map(|(child, parent)| {
-                (
-                    parent,
-                    Node {
-                        left: child,
-                        right: child,
-                    },
-                )
-            })
-            .collect();
-
+        let nodes = empty_hashes().into_iter().collect();
         MerkleStore { nodes }
     }

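The key change above is that `MerkleStore` becomes generic over its backing `KvMap` while defaulting to `BTreeMap`, so existing call sites that name `MerkleStore` without a type argument keep compiling. A minimal, self-contained sketch of that pattern (using a toy trait and `u64` keys, not the crate's actual types):

```rust
use std::collections::BTreeMap;

// A much-reduced stand-in for the KvMap bound used in this diff.
trait TinyKvMap<K: Ord, V> {
    fn insert(&mut self, key: K, value: V) -> Option<V>;
    fn get(&self, key: &K) -> Option<&V>;
}

impl<K: Ord, V> TinyKvMap<K, V> for BTreeMap<K, V> {
    fn insert(&mut self, key: K, value: V) -> Option<V> {
        BTreeMap::insert(self, key, value)
    }
    fn get(&self, key: &K) -> Option<&V> {
        BTreeMap::get(self, key)
    }
}

// Like MerkleStore<T>, the store is generic over its backing map, and the default
// type parameter keeps plain `Store` (i.e. `Store<BTreeMap<_, _>>`) usable as before.
struct Store<T: TinyKvMap<u64, u64> = BTreeMap<u64, u64>> {
    nodes: T,
}

impl<T: TinyKvMap<u64, u64> + Default> Store<T> {
    fn new() -> Self {
        Store { nodes: T::default() }
    }
}

fn main() {
    let mut store: Store = Store::new(); // uses the BTreeMap default
    store.nodes.insert(1, 2);
    assert_eq!(store.nodes.get(&1), Some(&2));
}
```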
@@ -126,10 +119,10 @@ impl MerkleStore {
     /// Returns the node at `index` rooted on the tree `root`.
     ///
     /// # Errors
-    ///
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn get_node(&self, root: RpoDigest, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
         let mut hash = root;

@@ -153,7 +146,8 @@ impl MerkleStore {
     /// # Errors
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn get_path(&self, root: RpoDigest, index: NodeIndex) -> Result<ValuePath, MerkleError> {
         let mut hash = root;
         let mut path = Vec::with_capacity(index.depth().into());
@@ -197,7 +191,7 @@ impl MerkleStore {
     /// - The path from the root continues to a depth greater than `tree_depth`.
     /// - The provided `tree_depth` is greater than `64`.
     /// - The provided `index` is not valid for a depth equivalent to `tree_depth`. For more
-    /// information, check [NodeIndex::new].
+    ///   information, check [NodeIndex::new].
     pub fn get_leaf_depth(
         &self,
         root: RpoDigest,
@@ -261,7 +255,7 @@ impl MerkleStore {
     /// nodes which are descendants of the specified roots.
     ///
     /// The roots for which no descendants exist in this Merkle store are ignored.
-    pub fn subset<I, R>(&self, roots: I) -> MerkleStore
+    pub fn subset<I, R>(&self, roots: I) -> MerkleStore<T>
     where
         I: Iterator<Item = R>,
         R: Borrow<RpoDigest>,
@@ -286,23 +280,6 @@ impl MerkleStore {
     // STATE MUTATORS
     // --------------------------------------------------------------------------------------------

-    /// Adds a sequence of nodes yielded by the provided iterator into the store.
-    pub fn extend<I>(&mut self, iter: I) -> &mut MerkleStore
-    where
-        I: Iterator<Item = InnerNodeInfo>,
-    {
-        for node in iter {
-            let value: RpoDigest = node.value;
-            let left: RpoDigest = node.left;
-            let right: RpoDigest = node.right;
-
-            debug_assert_eq!(Rpo256::merge(&[left, right]), value);
-            self.nodes.insert(value, Node { left, right });
-        }
-
-        self
-    }
-
     /// Adds all the nodes of a Merkle path represented by `path`, opening to `node`. Returns the
     /// new root.
     ///
@@ -360,10 +337,10 @@ impl MerkleStore {
     /// Sets a node to `value`.
     ///
     /// # Errors
-    ///
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn set_node(
         &mut self,
         mut root: RpoDigest,
@@ -401,6 +378,14 @@ impl MerkleStore {
         Ok(parent)
     }

+    // DESTRUCTURING
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns the inner storage of this MerkleStore while consuming `self`.
+    pub fn into_inner(self) -> T {
+        self.nodes
+    }
+
     // HELPER METHODS
     // --------------------------------------------------------------------------------------------

@@ -423,52 +408,69 @@ impl MerkleStore {
 // CONVERSIONS
 // ================================================================================================

-impl From<&MerkleTree> for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> From<&MerkleTree> for MerkleStore<T> {
     fn from(value: &MerkleTree) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
     }
 }

-impl From<&SimpleSmt> for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> From<&SimpleSmt> for MerkleStore<T> {
     fn from(value: &SimpleSmt) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
     }
 }

-impl From<&Mmr> for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> From<&Mmr> for MerkleStore<T> {
     fn from(value: &Mmr) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
     }
 }

-impl From<&TieredSmt> for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> From<&TieredSmt> for MerkleStore<T> {
     fn from(value: &TieredSmt) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(value.inner_nodes());
-        store
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
     }
 }

-impl FromIterator<InnerNodeInfo> for MerkleStore {
-    fn from_iter<T: IntoIterator<Item = InnerNodeInfo>>(iter: T) -> Self {
-        let mut store = MerkleStore::new();
-        store.extend(iter.into_iter());
-        store
+impl<T: KvMap<RpoDigest, Node>> From<T> for MerkleStore<T> {
+    fn from(values: T) -> Self {
+        let nodes = values.into_iter().chain(empty_hashes().into_iter()).collect();
+        Self { nodes }
     }
 }

+impl<T: KvMap<RpoDigest, Node>> FromIterator<InnerNodeInfo> for MerkleStore<T> {
+    fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(iter.into_iter()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> FromIterator<(RpoDigest, Node)> for MerkleStore<T> {
+    fn from_iter<I: IntoIterator<Item = (RpoDigest, Node)>>(iter: I) -> Self {
+        let nodes = iter.into_iter().chain(empty_hashes().into_iter()).collect();
+        Self { nodes }
+    }
+}
+
 // ITERATORS
 // ================================================================================================

-impl Extend<InnerNodeInfo> for MerkleStore {
-    fn extend<T: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: T) {
-        self.extend(iter.into_iter());
+impl<T: KvMap<RpoDigest, Node>> Extend<InnerNodeInfo> for MerkleStore<T> {
+    fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
+        self.nodes.extend(iter.into_iter().map(|info| {
+            (
+                info.value,
+                Node {
+                    left: info.left,
+                    right: info.right,
+                },
+            )
+        }));
     }
 }

@@ -490,7 +492,7 @@ impl Deserializable for Node {
     }
 }

-impl Serializable for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> Serializable for MerkleStore<T> {
     fn write_into<W: ByteWriter>(&self, target: &mut W) {
         target.write_u64(self.nodes.len() as u64);

@@ -501,17 +503,55 @@ impl Serializable for MerkleStore {
     }
 }

-impl Deserializable for MerkleStore {
+impl<T: KvMap<RpoDigest, Node>> Deserializable for MerkleStore<T> {
     fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
         let len = source.read_u64()?;
-        let mut nodes: BTreeMap<RpoDigest, Node> = BTreeMap::new();
+        let mut nodes: Vec<(RpoDigest, Node)> = Vec::with_capacity(len as usize);

         for _ in 0..len {
             let key = RpoDigest::read_from(source)?;
             let value = Node::read_from(source)?;
-            nodes.insert(key, value);
+            nodes.push((key, value));
         }

-        Ok(MerkleStore { nodes })
+        Ok(nodes.into_iter().collect())
     }
 }

+// HELPER FUNCTIONS
+// ================================================================================================
+
+/// Creates empty hashes for all the subtrees of a tree with a max depth of 255.
+fn empty_hashes() -> impl IntoIterator<Item = (RpoDigest, Node)> {
+    let subtrees = EmptySubtreeRoots::empty_hashes(255);
+    subtrees.iter().rev().copied().zip(subtrees.iter().rev().skip(1).copied()).map(
+        |(child, parent)| {
+            (
+                parent,
+                Node {
+                    left: child,
+                    right: child,
+                },
+            )
+        },
+    )
+}
+
+/// Consumes an iterator of [InnerNodeInfo] and returns an iterator of `(value, node)` tuples
+/// which includes the nodes associated with roots of empty subtrees up to a depth of 255.
+fn combine_nodes_with_empty_hashes(
+    nodes: impl IntoIterator<Item = InnerNodeInfo>,
+) -> impl Iterator<Item = (RpoDigest, Node)> {
+    nodes
+        .into_iter()
+        .map(|info| {
+            (
+                info.value,
+                Node {
+                    left: info.left,
+                    right: info.right,
+                },
+            )
+        })
+        .chain(empty_hashes().into_iter())
+}
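Both the `From`/`FromIterator` conversions above and the new `Deserializable` impl rely on the same idiom: take the caller's `(key, value)` pairs, chain the empty-subtree entries onto the end, and collect into the backing map. A small, self-contained illustration of that idiom with plain `std` types (keys and values here are arbitrary stand-ins):

```rust
use std::collections::BTreeMap;

// Stand-in for empty_hashes(): a fixed set of default entries.
fn default_entries() -> impl Iterator<Item = (u32, &'static str)> {
    [(100, "default"), (101, "default")].into_iter()
}

fn main() {
    // Stand-in for the decoded or user-provided nodes.
    let decoded = vec![(1, "node"), (2, "node")];

    // Mirrors `iter.into_iter().chain(empty_hashes().into_iter()).collect()`.
    // BTreeMap::from_iter inserts entries in order, so on a key collision the
    // entry that appears later in the chain wins.
    let store: BTreeMap<u32, &str> = decoded.into_iter().chain(default_entries()).collect();

    assert_eq!(store.len(), 4);
    assert_eq!(store.get(&100), Some(&"default"));
}
```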
@@ -1,13 +1,16 @@
 use super::{
-    Deserializable, EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RpoDigest,
-    Serializable,
+    DefaultMerkleStore as MerkleStore, EmptySubtreeRoots, MerkleError, MerklePath, NodeIndex,
+    RecordingMerkleStore, RpoDigest,
 };
 use crate::{
     hash::rpo::Rpo256,
     merkle::{digests_to_words, int_to_leaf, int_to_node, MerklePathSet, MerkleTree, SimpleSmt},
-    Felt, Word, WORD_SIZE,
+    Felt, Word, ONE, WORD_SIZE, ZERO,
 };

+#[cfg(feature = "std")]
+use super::{Deserializable, Serializable};
+
 #[cfg(feature = "std")]
 use std::error::Error;

@@ -17,6 +20,7 @@ use std::error::Error;
 const KEYS4: [u64; 4] = [0, 1, 2, 3];
 const VALUES4: [RpoDigest; 4] = [int_to_node(1), int_to_node(2), int_to_node(3), int_to_node(4)];

+const KEYS8: [u64; 8] = [0, 1, 2, 3, 4, 5, 6, 7];
 const VALUES8: [RpoDigest; 8] = [
     int_to_node(1),
     int_to_node(2),
@@ -810,3 +814,54 @@ fn test_serialization() -> Result<(), Box<dyn Error>> {
     assert_eq!(store, decoded);
     Ok(())
 }
+
+// MERKLE RECORDER
+// ================================================================================================
+#[test]
+fn test_recorder() {
+    // instantiate recorder from MerkleTree and SimpleSmt
+    let mtree = MerkleTree::new(digests_to_words(&VALUES4)).unwrap();
+    let smtree = SimpleSmt::with_leaves(
+        64,
+        KEYS8.into_iter().zip(VALUES8.into_iter().map(|x| x.into()).rev()),
+    )
+    .unwrap();
+
+    let mut recorder: RecordingMerkleStore =
+        mtree.inner_nodes().chain(smtree.inner_nodes()).collect();
+
+    // get nodes from both trees and make sure they are correct
+    let index_0 = NodeIndex::new(mtree.depth(), 0).unwrap();
+    let node = recorder.get_node(mtree.root(), index_0).unwrap();
+    assert_eq!(node, mtree.get_node(index_0).unwrap());
+
+    let index_1 = NodeIndex::new(smtree.depth(), 1).unwrap();
+    let node = recorder.get_node(smtree.root(), index_1).unwrap();
+    assert_eq!(node, smtree.get_node(index_1).unwrap());
+
+    // insert a value and assert that when we request it next time it is accurate
+    let new_value = [ZERO, ZERO, ONE, ONE].into();
+    let index_2 = NodeIndex::new(smtree.depth(), 2).unwrap();
+    let root = recorder.set_node(smtree.root(), index_2, new_value).unwrap().root;
+    assert_eq!(recorder.get_node(root, index_2).unwrap(), new_value);
+
+    // construct the proof
+    let rec_map = recorder.into_inner();
+    let proof = rec_map.into_proof();
+    let merkle_store: MerkleStore = proof.into();
+
+    // make sure the proof contains all nodes from both trees
+    let node = merkle_store.get_node(mtree.root(), index_0).unwrap();
+    assert_eq!(node, mtree.get_node(index_0).unwrap());
+
+    let node = merkle_store.get_node(smtree.root(), index_1).unwrap();
+    assert_eq!(node, smtree.get_node(index_1).unwrap());
+
+    let node = merkle_store.get_node(smtree.root(), index_2).unwrap();
+    assert_eq!(node, smtree.get_leaf(index_2.value()).unwrap().into());
+
+    // assert that it doesn't contain nodes that were not recorded
+    let not_recorded_index = NodeIndex::new(smtree.depth(), 4).unwrap();
+    assert!(merkle_store.get_node(smtree.root(), not_recorded_index).is_err());
+    assert!(smtree.get_node(not_recorded_index).is_ok());
+}
src/utils/kv_map.rs (new file, 324 lines)
@@ -0,0 +1,324 @@
use core::cell::RefCell;
use winter_utils::{
    collections::{btree_map::IntoIter, BTreeMap, BTreeSet},
    Box,
};

// KEY-VALUE MAP TRAIT
// ================================================================================================

/// A trait that defines the interface for a key-value map.
pub trait KvMap<K: Ord + Clone, V: Clone>:
    Extend<(K, V)> + FromIterator<(K, V)> + IntoIterator<Item = (K, V)>
{
    fn get(&self, key: &K) -> Option<&V>;
    fn contains_key(&self, key: &K) -> bool;
    fn len(&self) -> usize;
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    fn insert(&mut self, key: K, value: V) -> Option<V>;

    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
}

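Because `MerkleStore` and other consumers only require the `KvMap` bound, code can be written against the trait and then run unchanged over either a plain `BTreeMap` or a `RecordingMap`. A sketch of such a caller; the external import path is an assumption based on the `utils::collections` re-exports added later in this diff:

```rust
use miden_crypto::utils::collections::{BTreeMap, KvMap, RecordingMap};

// Works for any KvMap implementation, recording or not.
fn count_present<M: KvMap<u64, u64>>(map: &M, keys: &[u64]) -> usize {
    keys.iter().filter(|&k| map.contains_key(k)).count()
}

fn main() {
    let plain: BTreeMap<u64, u64> = [(1, 1), (2, 2)].into_iter().collect();
    let recording = RecordingMap::new([(1u64, 1u64), (2, 2)]);

    assert_eq!(count_present(&plain, &[1, 3]), 1);
    assert_eq!(count_present(&recording, &[1, 3]), 1);
}
```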
// BTREE MAP `KvMap` IMPLEMENTATION
// ================================================================================================

impl<K: Ord + Clone, V: Clone> KvMap<K, V> for BTreeMap<K, V> {
    fn get(&self, key: &K) -> Option<&V> {
        self.get(key)
    }

    fn contains_key(&self, key: &K) -> bool {
        self.contains_key(key)
    }

    fn len(&self) -> usize {
        self.len()
    }

    fn insert(&mut self, key: K, value: V) -> Option<V> {
        self.insert(key, value)
    }

    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
        Box::new(self.iter())
    }
}

// RECORDING MAP
// ================================================================================================

/// A [RecordingMap] that records read requests to the underlying key-value map.
///
/// The data recorder is used to generate a proof for read requests.
///
/// The [RecordingMap] is composed of three parts:
/// - `data`: which contains the current set of key-value pairs in the map.
/// - `updates`: which tracks keys for which values have been changed since the map was
///   instantiated. Updates include both insertions and updates of values under existing keys.
/// - `trace`: which contains the key-value pairs from the original data which have been accessed
///   since the map was instantiated.
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct RecordingMap<K, V> {
    data: BTreeMap<K, V>,
    updates: BTreeSet<K>,
    trace: RefCell<BTreeMap<K, V>>,
}

impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Returns a new [RecordingMap] instance initialized with the provided key-value pairs
    /// ([BTreeMap]).
    pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
        RecordingMap {
            data: init.into_iter().collect(),
            updates: BTreeSet::new(),
            trace: RefCell::new(BTreeMap::new()),
        }
    }

    // FINALIZER
    // --------------------------------------------------------------------------------------------

    /// Consumes the [RecordingMap] and returns a [BTreeMap] containing the key-value pairs from
    /// the initial data set that were read during recording.
    pub fn into_proof(self) -> BTreeMap<K, V> {
        self.trace.take()
    }

    // TEST HELPERS
    // --------------------------------------------------------------------------------------------

    #[cfg(test)]
    pub fn trace_len(&self) -> usize {
        self.trace.borrow().len()
    }

    #[cfg(test)]
    pub fn updates_len(&self) -> usize {
        self.updates.len()
    }
}

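A short usage sketch of the constructor/finalizer pair above (the `KvMap` trait must be in scope for `get`/`contains_key`; import paths are assumed as in the earlier sketch):

```rust
use miden_crypto::utils::collections::{KvMap, RecordingMap};

fn main() {
    // Seed the recorder with an initial data set.
    let map = RecordingMap::new([(1u64, 10u64), (2, 20), (3, 30)]);

    // Reads take &self; accesses to the initial data are copied into the trace.
    assert_eq!(map.get(&1), Some(&10));
    assert!(map.contains_key(&2));

    // Consuming the recorder yields only the entries that were actually read.
    let proof = map.into_proof();
    assert_eq!(proof.len(), 2);
    assert!(proof.get(&3).is_none());
}
```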
impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns a reference to the value associated with the given key if the value exists.
    ///
    /// If the key is part of the initial data set, the key access is recorded.
    fn get(&self, key: &K) -> Option<&V> {
        self.data.get(key).map(|value| {
            if !self.updates.contains(key) {
                self.trace.borrow_mut().insert(key.clone(), value.clone());
            }
            value
        })
    }

    /// Returns a boolean to indicate whether the given key exists in the data set.
    ///
    /// If the key is part of the initial data set, the key access is recorded.
    fn contains_key(&self, key: &K) -> bool {
        self.get(key).is_some()
    }

    /// Returns the number of key-value pairs in the data set.
    fn len(&self) -> usize {
        self.data.len()
    }

    // MUTATORS
    // --------------------------------------------------------------------------------------------

    /// Inserts a key-value pair into the data set.
    ///
    /// If the key already exists in the data set, the value is updated and the old value is
    /// returned.
    fn insert(&mut self, key: K, value: V) -> Option<V> {
        let new_update = self.updates.insert(key.clone());
        self.data.insert(key.clone(), value).map(|old_value| {
            if new_update {
                self.trace.borrow_mut().insert(key, old_value.clone());
            }
            old_value
        })
    }

    // ITERATION
    // --------------------------------------------------------------------------------------------

    /// Returns an iterator over the key-value pairs in the data set.
    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
        Box::new(self.data.iter())
    }
}

impl<K: Clone + Ord, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        iter.into_iter().for_each(move |(k, v)| {
            self.insert(k, v);
        });
    }
}

impl<K: Clone + Ord, V: Clone> FromIterator<(K, V)> for RecordingMap<K, V> {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        Self::new(iter)
    }
}

impl<K: Clone + Ord, V: Clone> IntoIterator for RecordingMap<K, V> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.data.into_iter()
    }
}

// TESTS
// ================================================================================================

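One design choice worth noting in the impl above: `get` and `contains_key` take `&self` (as the `KvMap` trait requires), yet they must add entries to the trace, so the trace lives behind a `RefCell` and is mutated through a shared reference. A stripped-down, self-contained illustration of that interior-mutability pattern:

```rust
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet};

// Minimal stand-in for the pattern used by RecordingMap: reads go through &self,
// but the set of observed keys still gets updated via a RefCell.
struct ReadLogger {
    data: BTreeMap<u64, u64>,
    reads: RefCell<BTreeSet<u64>>,
}

impl ReadLogger {
    fn get(&self, key: &u64) -> Option<&u64> {
        self.data.get(key).map(|value| {
            self.reads.borrow_mut().insert(*key); // mutate through &self
            value
        })
    }
}

fn main() {
    let logger = ReadLogger {
        data: [(1, 10), (2, 20)].into_iter().collect(),
        reads: RefCell::new(BTreeSet::new()),
    };
    let _ = logger.get(&1);
    assert_eq!(logger.reads.borrow().len(), 1);
}
```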
#[cfg(test)]
mod tests {
    use super::*;

    const ITEMS: [(u64, u64); 5] = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)];

    #[test]
    fn test_get_item() {
        // instantiate a recording map
        let map = RecordingMap::new(ITEMS.to_vec());

        // get a few items
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.get(key);
        }

        // convert the map into a proof
        let proof = map.into_proof();

        // check that the proof contains the expected values
        for (key, value) in ITEMS.iter() {
            match get_items.contains(key) {
                true => assert_eq!(proof.get(key), Some(value)),
                false => assert_eq!(proof.get(key), None),
            }
        }
    }

    #[test]
    fn test_contains_key() {
        // instantiate a recording map
        let map = RecordingMap::new(ITEMS.to_vec());

        // check if the map contains a few items
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }

        // convert the map into a proof
        let proof = map.into_proof();

        // check that the proof contains the expected values
        for (key, _) in ITEMS.iter() {
            match get_items.contains(key) {
                true => assert_eq!(proof.contains_key(key), true),
                false => assert_eq!(proof.contains_key(key), false),
            }
        }
    }

    #[test]
    fn test_len() {
        // instantiate a recording map
        let mut map = RecordingMap::new(ITEMS.to_vec());
        // length of the map should be equal to the number of items
        assert_eq!(map.len(), ITEMS.len());

        // inserting an entry with a key that already exists should not change the length, but it
        // does add entries to the trace and update sets
        map.insert(4, 5);
        assert_eq!(map.len(), ITEMS.len());
        assert_eq!(map.trace_len(), 1);
        assert_eq!(map.updates_len(), 1);

        // inserting an entry with a new key should increase the length; it should also record the
        // key as an updated key, but the trace length does not change since old values were not
        // touched
        map.insert(5, 5);
        assert_eq!(map.len(), ITEMS.len() + 1);
        assert_eq!(map.trace_len(), 1);
        assert_eq!(map.updates_len(), 2);

        // get some items so that they are saved in the trace; this should record original items
        // in the trace, but should not affect the set of updates
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // read the same items again, this should not have any effect on either length, trace, or
        // the set of updates
        let get_items = [0, 1, 2];
        for key in get_items.iter() {
            map.contains_key(key);
        }
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // read a newly inserted item; this should not affect either length, trace, or the set of
        // updates
        let _val = map.get(&5).unwrap();
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // update a newly inserted item; this should not affect either length, trace, or the set
        // of updates
        map.insert(5, 11);
        assert_eq!(map.trace_len(), 4);
        assert_eq!(map.updates_len(), 2);

        // Note: the length reported by the proof will be different from the length originally
        // reported by the map.
        let proof = map.into_proof();

        // length of the proof should be equal to get_items + 1. The extra item is the original
        // value at key = 4u64
        assert_eq!(proof.len(), get_items.len() + 1);
    }

    #[test]
    fn test_iter() {
        let mut map = RecordingMap::new(ITEMS.to_vec());
        assert!(map.iter().all(|(x, y)| ITEMS.contains(&(*x, *y))));

        // when inserting an entry with a key that already exists, the iterator should return the
        // new value
        let new_value = 5;
        map.insert(4, new_value);
        assert_eq!(map.iter().count(), ITEMS.len());
        assert!(map.iter().all(|(x, y)| if x == &4 {
            y == &new_value
        } else {
            ITEMS.contains(&(*x, *y))
        }));
    }

    #[test]
    fn test_is_empty() {
        // instantiate an empty recording map
        let empty_map: RecordingMap<u64, u64> = RecordingMap::default();
        assert!(empty_map.is_empty());

        // instantiate a non-empty recording map
        let map = RecordingMap::new(ITEMS.to_vec());
        assert!(!map.is_empty());
    }
}
@@ -1,5 +1,4 @@
-use super::Word;
-use crate::utils::string::String;
+use super::{utils::string::String, Word};
 use core::fmt::{self, Write};

 #[cfg(not(feature = "std"))]
@@ -8,13 +7,23 @@ pub use alloc::format;
 #[cfg(feature = "std")]
 pub use std::format;

+mod kv_map;
+
+// RE-EXPORTS
+// ================================================================================================
 pub use winter_utils::{
-    collections, string, uninit_vector, ByteReader, ByteWriter, Deserializable,
-    DeserializationError, Serializable, SliceReader,
+    string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable, DeserializationError,
+    Serializable, SliceReader,
 };

+pub mod collections {
+    pub use super::kv_map::*;
+    pub use winter_utils::collections::*;
+}
+
 // UTILITY FUNCTIONS
 // ================================================================================================

 /// Converts a [Word] into hex.
 pub fn word_to_hex(w: &Word) -> Result<String, fmt::Error> {
     let mut s = String::new();
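With this change, downstream code can pull the winter-utils collection types and the new `KvMap`/`RecordingMap` from a single module. A tiny sketch of what that import surface looks like from outside the crate (the `miden_crypto::utils::collections` path is an assumption based on the `pub mod collections` re-export above):

```rust
use miden_crypto::utils::collections::{BTreeMap, KvMap, RecordingMap, Vec};

fn demo() -> usize {
    let plain: BTreeMap<u8, u8> = BTreeMap::new();
    let recording: RecordingMap<u8, u8> = RecordingMap::default();
    let buffer: Vec<u8> = Vec::new();

    // Both maps expose the same KvMap surface (len comes from the trait for RecordingMap).
    plain.len() + recording.len() + buffer.len()
}
```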