refactor: simplify recording MerkleStore structure

parent 679a30e02e
commit f08644e4df

8 changed files with 348 additions and 364 deletions
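The change collapses the old `GenericMerkleStore` / `MerkleMapT` / `MerkleMap` machinery into a single generic `MerkleStore<T: KvMap<RpoDigest, Node>>` with two aliases, `DefaultMerkleStore` and `RecordingMerkleStore`, and moves the recording logic into a reusable `RecordingMap` under `utils`. The sketch below pieces the intended usage together from the bench and test hunks further down; it is illustrative only, and `leaves` stands in for whatever `Word` values the tree is built from:

```rust
use miden_crypto::merkle::{DefaultMerkleStore as MerkleStore, MerkleTree, RecordingMerkleStore};

// `leaves` is a placeholder for the Word values the tree is built from
let mtree = MerkleTree::new(leaves).unwrap();

// a recording store is just a MerkleStore backed by a RecordingMap
let recorder: RecordingMerkleStore = mtree.inner_nodes().collect();

// ... read / update nodes through the usual MerkleStore API ...

// the proof is extracted from the backing map and can seed a regular store
let rec_map = recorder.into_inner();
let proof = rec_map.into_proof();
let store: MerkleStore = proof.into();
```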
@@ -1,5 +1,5 @@
 use criterion::{black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion};
-use miden_crypto::merkle::{MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
+use miden_crypto::merkle::{DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex, SimpleSmt};
 use miden_crypto::Word;
 use miden_crypto::{hash::rpo::RpoDigest, Felt};
 use rand_utils::{rand_array, rand_value};
@@ -4,7 +4,6 @@
 #[cfg_attr(test, macro_use)]
 extern crate alloc;
 
-pub mod data;
 pub mod hash;
 pub mod merkle;
 pub mod utils;
@@ -1,7 +1,6 @@
 use super::{
-    data::{KvMap, RecordingMap},
     hash::rpo::{Rpo256, RpoDigest},
-    utils::collections::{vec, BTreeMap, BTreeSet, Vec},
+    utils::collections::{vec, BTreeMap, BTreeSet, KvMap, RecordingMap, Vec},
     Felt, StarkField, Word, WORD_SIZE, ZERO,
 };
 use core::fmt;
@@ -34,10 +33,7 @@ mod mmr;
 pub use mmr::{Mmr, MmrPeaks, MmrProof};
 
 mod store;
-pub use store::{
-    GenericMerkleStore, MerkleMap, MerkleMapT, MerkleStore, RecordingMerkleMap,
-    RecordingMerkleStore,
-};
+pub use store::{DefaultMerkleStore, MerkleStore, RecordingMerkleStore};
 
 mod node;
 pub use node::InnerNodeInfo;
@@ -1,5 +1,8 @@
 use super::{
-    super::{digests_to_words, int_to_node, MerkleStore, MerkleTree, NodeIndex, PartialMerkleTree},
+    super::{
+        digests_to_words, int_to_node, DefaultMerkleStore as MerkleStore, MerkleTree, NodeIndex,
+        PartialMerkleTree,
+    },
     RpoDigest, ValuePath, Vec,
 };
 
@@ -9,56 +9,21 @@ use core::borrow::Borrow;
 #[cfg(test)]
 mod tests;
 
-// TRAIT / TYPE DECLARATIONS
-// ================================================================================================
-/// A supertrait that defines the required traits for a type to be used as a data map backend for
-/// the [GenericMerkleStore]
-pub trait MerkleMapT:
-    KvMap<RpoDigest, Node>
-    + Extend<(RpoDigest, Node)>
-    + FromIterator<(RpoDigest, Node)>
-    + IntoIterator<Item = (RpoDigest, Node)>
-{
-}
-
 // MERKLE STORE
-// ------------------------------------------------------------------------------------------------
-
-/// Type that represents a standard MerkleStore.
-pub type MerkleStore = GenericMerkleStore<MerkleMap>;
-
-/// Declaration of a BTreeMap that uses a [RpoDigest] as a key and a [Node] as the value. This type
-/// is used as a data backend for the standard [GenericMerkleStore].
-pub type MerkleMap = BTreeMap<RpoDigest, Node>;
-
-/// Implementation of [MerkleMapT] trait on [MerkleMap].
-impl MerkleMapT for MerkleMap {}
-
-// RECORDING MERKLE STORE
-// ------------------------------------------------------------------------------------------------
-
-/// Type that represents a MerkleStore with recording capabilities.
-pub type RecordingMerkleStore = GenericMerkleStore<RecordingMerkleMap>;
-
-/// Declaration of a [RecordingMap] that uses a [RpoDigest] as a key and a [Node] as the value.
-/// This type is used as a data backend for the recording [GenericMerkleStore].
-pub type RecordingMerkleMap = RecordingMap<RpoDigest, Node>;
-
-/// Implementation of [MerkleMapT] on [RecordingMerkleMap].
-impl MerkleMapT for RecordingMerkleMap {}
-
-// NODE DEFINITION
 // ================================================================================================
 
+/// A default [MerkleStore] which uses a simple [BTreeMap] as the backing storage.
+pub type DefaultMerkleStore = MerkleStore<BTreeMap<RpoDigest, Node>>;
+
+/// A [MerkleStore] with recording capabilities which uses [RecordingMap] as the backing storage.
+pub type RecordingMerkleStore = MerkleStore<RecordingMap<RpoDigest, Node>>;
+
 #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
 pub struct Node {
     left: RpoDigest,
     right: RpoDigest,
 }
 
-// MERKLE STORE IMPLEMENTATION
-// ================================================================================================
-
 /// An in-memory data store for Merkelized data.
 ///
 /// This is a in memory data store for Merkle trees, this store allows all the nodes of multiple
@@ -87,7 +52,7 @@ pub struct Node {
 /// # let T1 = MerkleTree::new([A, B, C, D, E, F, G, H1].to_vec()).expect("even number of leaves provided");
 /// # let ROOT0 = T0.root();
 /// # let ROOT1 = T1.root();
-/// let mut store = MerkleStore::new();
+/// let mut store: MerkleStore = MerkleStore::new();
 ///
 /// // the store is initialized with the SMT empty nodes
 /// assert_eq!(store.num_internal_nodes(), 255);
@@ -122,25 +87,25 @@ pub struct Node {
 /// assert_eq!(store.num_internal_nodes() - 255, 10);
 /// ```
 #[derive(Debug, Clone, Eq, PartialEq)]
-pub struct GenericMerkleStore<T: MerkleMapT> {
+pub struct MerkleStore<T: KvMap<RpoDigest, Node> = BTreeMap<RpoDigest, Node>> {
     nodes: T,
 }
 
-impl<T: MerkleMapT> Default for GenericMerkleStore<T> {
+impl<T: KvMap<RpoDigest, Node>> Default for MerkleStore<T> {
     fn default() -> Self {
         Self::new()
     }
 }
 
-impl<T: MerkleMapT> GenericMerkleStore<T> {
+impl<T: KvMap<RpoDigest, Node>> MerkleStore<T> {
     // CONSTRUCTORS
     // --------------------------------------------------------------------------------------------
 
-    /// Creates an empty `GenericMerkleStore` instance.
-    pub fn new() -> GenericMerkleStore<T> {
+    /// Creates an empty `MerkleStore` instance.
+    pub fn new() -> MerkleStore<T> {
         // pre-populate the store with the empty hashes
        let nodes = empty_hashes().into_iter().collect();
-        GenericMerkleStore { nodes }
+        MerkleStore { nodes }
     }
 
     // PUBLIC ACCESSORS
@@ -154,10 +119,10 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// Returns the node at `index` rooted on the tree `root`.
     ///
     /// # Errors
-    ///
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn get_node(&self, root: RpoDigest, index: NodeIndex) -> Result<RpoDigest, MerkleError> {
         let mut hash = root;
 
@@ -181,7 +146,8 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// # Errors
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn get_path(&self, root: RpoDigest, index: NodeIndex) -> Result<ValuePath, MerkleError> {
         let mut hash = root;
         let mut path = Vec::with_capacity(index.depth().into());
@@ -225,7 +191,7 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// - The path from the root continues to a depth greater than `tree_depth`.
     /// - The provided `tree_depth` is greater than `64.
     /// - The provided `index` is not valid for a depth equivalent to `tree_depth`. For more
     ///   information, check [NodeIndex::new].
     pub fn get_leaf_depth(
         &self,
         root: RpoDigest,
@@ -289,12 +255,12 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// nodes which are descendants of the specified roots.
     ///
     /// The roots for which no descendants exist in this Merkle store are ignored.
-    pub fn subset<I, R>(&self, roots: I) -> GenericMerkleStore<T>
+    pub fn subset<I, R>(&self, roots: I) -> MerkleStore<T>
     where
         I: Iterator<Item = R>,
         R: Borrow<RpoDigest>,
     {
-        let mut store = GenericMerkleStore::new();
+        let mut store = MerkleStore::new();
         for root in roots {
             let root = *root.borrow();
             store.clone_tree_from(root, self);
@@ -302,7 +268,7 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
         store
     }
 
-    /// Iterator over the inner nodes of the [GenericMerkleStore].
+    /// Iterator over the inner nodes of the [MerkleStore].
     pub fn inner_nodes(&self) -> impl Iterator<Item = InnerNodeInfo> + '_ {
         self.nodes.iter().map(|(r, n)| InnerNodeInfo {
             value: *r,
@@ -343,7 +309,7 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// This will compute the sibling elements for each Merkle `path` and include all the nodes
     /// into the store.
     ///
-    /// For further reference, check [GenericMerkleStore::add_merkle_path].
+    /// For further reference, check [MerkleStore::add_merkle_path].
     pub fn add_merkle_paths<I>(&mut self, paths: I) -> Result<(), MerkleError>
     where
         I: IntoIterator<Item = (u64, RpoDigest, MerklePath)>,
@@ -356,7 +322,7 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
 
     /// Appends the provided [MerklePathSet] into the store.
     ///
-    /// For further reference, check [GenericMerkleStore::add_merkle_path].
+    /// For further reference, check [MerkleStore::add_merkle_path].
     pub fn add_merkle_path_set(
         &mut self,
         path_set: &MerklePathSet,
@@ -371,10 +337,10 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     /// Sets a node to `value`.
     ///
     /// # Errors
-    ///
     /// This method can return the following errors:
     /// - `RootNotInStore` if the `root` is not present in the store.
-    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in the store.
+    /// - `NodeNotInStore` if a node needed to traverse from `root` to `index` is not present in
+    ///   the store.
     pub fn set_node(
         &mut self,
         mut root: RpoDigest,
@@ -412,6 +378,14 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
         Ok(parent)
     }
 
+    // DESTRUCTURING
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns the inner storage of this MerkleStore while consuming `self`.
+    pub fn into_inner(self) -> T {
+        self.nodes
+    }
+
     // HELPER METHODS
     // --------------------------------------------------------------------------------------------
 
@@ -431,19 +405,122 @@ impl<T: MerkleMapT> GenericMerkleStore<T> {
     }
 }
 
-// RECORDING MERKLE STORE FINALIZER
-// ===============================================================================================
+// CONVERSIONS
+// ================================================================================================
 
-impl RecordingMerkleStore {
-    /// Consumes the [DataRecorder] and returns a [BTreeMap] containing the key-value pairs from
-    /// the initial data set that were read during recording.
-    pub fn into_proof(self) -> MerkleMap {
-        self.nodes.into_proof()
+impl<T: KvMap<RpoDigest, Node>> From<&MerkleTree> for MerkleStore<T> {
+    fn from(value: &MerkleTree) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
     }
 }
 
-// EMPTY HASHES
+impl<T: KvMap<RpoDigest, Node>> From<&SimpleSmt> for MerkleStore<T> {
+    fn from(value: &SimpleSmt) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> From<&Mmr> for MerkleStore<T> {
+    fn from(value: &Mmr) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> From<&TieredSmt> for MerkleStore<T> {
+    fn from(value: &TieredSmt) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> From<T> for MerkleStore<T> {
+    fn from(values: T) -> Self {
+        let nodes = values.into_iter().chain(empty_hashes().into_iter()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> FromIterator<InnerNodeInfo> for MerkleStore<T> {
+    fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
+        let nodes = combine_nodes_with_empty_hashes(iter.into_iter()).collect();
+        Self { nodes }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> FromIterator<(RpoDigest, Node)> for MerkleStore<T> {
+    fn from_iter<I: IntoIterator<Item = (RpoDigest, Node)>>(iter: I) -> Self {
+        let nodes = iter.into_iter().chain(empty_hashes().into_iter()).collect();
+        Self { nodes }
+    }
+}
+
+// ITERATORS
 // ================================================================================================
 
+impl<T: KvMap<RpoDigest, Node>> Extend<InnerNodeInfo> for MerkleStore<T> {
+    fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
+        self.nodes.extend(iter.into_iter().map(|info| {
+            (
+                info.value,
+                Node {
+                    left: info.left,
+                    right: info.right,
+                },
+            )
+        }));
+    }
+}
+
+// SERIALIZATION
+// ================================================================================================
+
+impl Serializable for Node {
+    fn write_into<W: ByteWriter>(&self, target: &mut W) {
+        self.left.write_into(target);
+        self.right.write_into(target);
+    }
+}
+
+impl Deserializable for Node {
+    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
+        let left = RpoDigest::read_from(source)?;
+        let right = RpoDigest::read_from(source)?;
+        Ok(Node { left, right })
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> Serializable for MerkleStore<T> {
+    fn write_into<W: ByteWriter>(&self, target: &mut W) {
+        target.write_u64(self.nodes.len() as u64);
+
+        for (k, v) in self.nodes.iter() {
+            k.write_into(target);
+            v.write_into(target);
+        }
+    }
+}
+
+impl<T: KvMap<RpoDigest, Node>> Deserializable for MerkleStore<T> {
+    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
+        let len = source.read_u64()?;
+        let mut nodes: Vec<(RpoDigest, Node)> = Vec::with_capacity(len as usize);
+
+        for _ in 0..len {
+            let key = RpoDigest::read_from(source)?;
+            let value = Node::read_from(source)?;
+            nodes.push((key, value));
+        }
+
+        Ok(nodes.into_iter().collect())
+    }
+}
+
+// HELPER FUNCTIONS
+// ================================================================================================
 
 /// Creates empty hashes for all the subtrees of a tree with a max depth of 255.
 fn empty_hashes() -> impl IntoIterator<Item = (RpoDigest, Node)> {
     let subtrees = EmptySubtreeRoots::empty_hashes(255);
@@ -478,122 +555,3 @@ fn combine_nodes_with_empty_hashes(
         })
         .chain(empty_hashes().into_iter())
 }
-
-// CONVERSIONS
-// ================================================================================================
-
-impl<T: MerkleMapT> From<&MerkleTree> for GenericMerkleStore<T> {
-    fn from(value: &MerkleTree) -> Self {
-        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
-        GenericMerkleStore { nodes }
-    }
-}
-
-impl<T: MerkleMapT> From<&SimpleSmt> for GenericMerkleStore<T> {
-    fn from(value: &SimpleSmt) -> Self {
-        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
-        GenericMerkleStore { nodes }
-    }
-}
-
-impl<T: MerkleMapT> From<&Mmr> for GenericMerkleStore<T> {
-    fn from(value: &Mmr) -> Self {
-        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
-        GenericMerkleStore { nodes }
-    }
-}
-
-impl<T: MerkleMapT> From<&TieredSmt> for GenericMerkleStore<T> {
-    fn from(value: &TieredSmt) -> Self {
-        let nodes = combine_nodes_with_empty_hashes(value.inner_nodes()).collect();
-        GenericMerkleStore { nodes }
-    }
-}
-
-impl<T: MerkleMapT> FromIterator<InnerNodeInfo> for GenericMerkleStore<T> {
-    fn from_iter<I: IntoIterator<Item = InnerNodeInfo>>(iter: I) -> Self {
-        let nodes = combine_nodes_with_empty_hashes(iter).collect();
-        GenericMerkleStore { nodes }
-    }
-}
-
-impl From<MerkleStore> for RecordingMerkleStore {
-    fn from(value: MerkleStore) -> Self {
-        GenericMerkleStore {
-            nodes: RecordingMerkleMap::new(value.nodes.into_iter()),
-        }
-    }
-}
-
-impl FromIterator<(RpoDigest, Node)> for RecordingMerkleMap {
-    fn from_iter<T: IntoIterator<Item = (RpoDigest, Node)>>(iter: T) -> Self {
-        RecordingMerkleMap::new(iter)
-    }
-}
-
-impl From<MerkleMap> for MerkleStore {
-    fn from(value: MerkleMap) -> Self {
-        GenericMerkleStore { nodes: value }
-    }
-}
-
-// ITERATORS
-// ================================================================================================
-
-impl<T: MerkleMapT> Extend<InnerNodeInfo> for GenericMerkleStore<T> {
-    fn extend<I: IntoIterator<Item = InnerNodeInfo>>(&mut self, iter: I) {
-        self.nodes.extend(iter.into_iter().map(|info| {
-            (
-                info.value,
-                Node {
-                    left: info.left,
-                    right: info.right,
-                },
-            )
-        }));
-    }
-}
-
-// SERIALIZATION
-// ================================================================================================
-
-impl Serializable for Node {
-    fn write_into<W: ByteWriter>(&self, target: &mut W) {
-        self.left.write_into(target);
-        self.right.write_into(target);
-    }
-}
-
-impl Deserializable for Node {
-    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
-        let left = RpoDigest::read_from(source)?;
-        let right = RpoDigest::read_from(source)?;
-        Ok(Node { left, right })
-    }
-}
-
-impl<T: MerkleMapT> Serializable for GenericMerkleStore<T> {
-    fn write_into<W: ByteWriter>(&self, target: &mut W) {
-        target.write_u64(self.nodes.len() as u64);
-
-        for (k, v) in self.nodes.iter() {
-            k.write_into(target);
-            v.write_into(target);
-        }
-    }
-}
-
-impl Deserializable for GenericMerkleStore<MerkleMap> {
-    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
-        let len = source.read_u64()?;
-        let mut nodes: MerkleMap = BTreeMap::new();
-
-        for _ in 0..len {
-            let key = RpoDigest::read_from(source)?;
-            let value = Node::read_from(source)?;
-            nodes.insert(key, value);
-        }
-
-        Ok(GenericMerkleStore { nodes })
-    }
-}
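The hunk above also gives `MerkleStore` blanket `Serializable`/`Deserializable` impls driven entirely by the `KvMap` backend. A minimal round-trip sketch, assuming the default `BTreeMap` backend and the winter-utils re-exports shown in the last file of this diff (`merkle_tree` is a placeholder for a previously constructed `MerkleTree`):

```rust
use miden_crypto::merkle::DefaultMerkleStore;
use miden_crypto::utils::{Deserializable, Serializable};

// build a store from an existing tree via the new From<&MerkleTree> impl
let store = DefaultMerkleStore::from(&merkle_tree);

// to_bytes / read_from_bytes come from the re-exported winter-utils traits
let bytes = store.to_bytes();
let restored = DefaultMerkleStore::read_from_bytes(&bytes).unwrap();
assert_eq!(store, restored);
```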
@@ -1,6 +1,6 @@
 use super::{
-    EmptySubtreeRoots, MerkleError, MerklePath, MerkleStore, NodeIndex, RecordingMerkleStore,
-    RpoDigest,
+    DefaultMerkleStore as MerkleStore, EmptySubtreeRoots, MerkleError, MerklePath, NodeIndex,
+    RecordingMerkleStore, RpoDigest,
 };
 use crate::{
     hash::rpo::Rpo256,
@@ -38,7 +38,7 @@ const VALUES8: [RpoDigest; 8] = [
 #[test]
 fn test_root_not_in_store() -> Result<(), MerkleError> {
     let mtree = MerkleTree::new(digests_to_words(&VALUES4))?;
-    let store = MerkleStore::default();
+    let store = MerkleStore::from(&mtree);
     assert_eq!(
         store.get_node(VALUES4[0], NodeIndex::make(mtree.depth(), 0)),
         Err(MerkleError::RootNotInStore(VALUES4[0])),
@@ -826,6 +826,7 @@ fn test_recorder() {
         KEYS8.into_iter().zip(VALUES8.into_iter().map(|x| x.into()).rev()),
     )
     .unwrap();
 
     let mut recorder: RecordingMerkleStore =
         mtree.inner_nodes().chain(smtree.inner_nodes()).collect();
+
@@ -845,7 +846,8 @@ fn test_recorder() {
     assert_eq!(recorder.get_node(root, index_2).unwrap(), new_value);
 
     // construct the proof
-    let proof = recorder.into_proof();
+    let rec_map = recorder.into_inner();
+    let proof = rec_map.into_proof();
     let merkle_store: MerkleStore = proof.into();
 
     // make sure the proof contains all nodes from both trees
@@ -1,178 +1,31 @@
-use super::utils::{
+use core::cell::RefCell;
+use winter_utils::{
     collections::{btree_map::IntoIter, BTreeMap, BTreeSet},
     Box,
 };
-use core::{
-    cell::RefCell,
-    iter::{Chain, Filter},
-};
 
 // KEY-VALUE MAP TRAIT
 // ================================================================================================
 
 /// A trait that defines the interface for a key-value map.
-pub trait KvMap<K, V> {
+pub trait KvMap<K: Ord + Clone, V: Clone>:
+    Extend<(K, V)> + FromIterator<(K, V)> + IntoIterator<Item = (K, V)>
+{
     fn get(&self, key: &K) -> Option<&V>;
     fn contains_key(&self, key: &K) -> bool;
     fn len(&self) -> usize;
     fn is_empty(&self) -> bool {
         self.len() == 0
     }
-    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
     fn insert(&mut self, key: K, value: V) -> Option<V>;
-}
 
+    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_>;
-// RECORDING MAP
-// ================================================================================================
-
-/// A [RecordingMap] that records read requests to the underlying key-value map.
-/// The data recorder is used to generate a proof for read requests.
-///
-/// The [RecordingMap] is composed of three parts:
-/// - `data`: which contains the initial key-value pairs from the underlying data set.
-/// - `delta`: which contains key-value pairs which have been created after instantiation.
-/// - `updated_keys`: which tracks keys from `data` which have been updated in `delta`.
-/// - `trace`: which contains the keys from the initial data set (`data`) that are read.
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct RecordingMap<K, V> {
-    data: BTreeMap<K, V>,
-    delta: BTreeMap<K, V>,
-    updated_keys: BTreeSet<K>,
-    trace: RefCell<BTreeSet<K>>,
-}
-
-impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
-    // CONSTRUCTOR
-    // --------------------------------------------------------------------------------------------
-    /// Returns a new [RecordingMap] instance initialized with the provided key-value pairs.
-    /// ([BTreeMap]).
-    pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
-        RecordingMap {
-            data: init.into_iter().collect(),
-            delta: BTreeMap::new(),
-            updated_keys: BTreeSet::new(),
-            trace: RefCell::new(BTreeSet::new()),
-        }
-    }
-
-    // FINALIZER
-    // --------------------------------------------------------------------------------------------
-    /// Consumes the [DataRecorder] and returns a [BTreeMap] containing the key-value pairs from
-    /// the initial data set that were read during recording.
-    pub fn into_proof(self) -> BTreeMap<K, V> {
-        self.data
-            .into_iter()
-            .filter(|(k, _)| self.trace.borrow().contains(k))
-            .collect::<BTreeMap<_, _>>()
-    }
-}
-
-impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
-    // ACCESSORS
-    // --------------------------------------------------------------------------------------------
-    /// Returns a reference to the value associated with the given key if the value exists. If the
-    /// key is part of the initial data set, the key access is recorded.
-    fn get(&self, key: &K) -> Option<&V> {
-        if let Some(value) = self.delta.get(key) {
-            return Some(value);
-        }
-
-        match self.data.get(key) {
-            None => None,
-            Some(value) => {
-                self.trace.borrow_mut().insert(key.clone());
-                Some(value)
-            }
-        }
-    }
-
-    /// Returns a boolean to indicate whether the given key exists in the data set. If the key is
-    /// part of the initial data set, the key access is recorded.
-    fn contains_key(&self, key: &K) -> bool {
-        if self.delta.contains_key(key) {
-            return true;
-        }
-
-        match self.data.contains_key(key) {
-            true => {
-                self.trace.borrow_mut().insert(key.clone());
-                true
-            }
-            false => false,
-        }
-    }
-
-    /// Returns the number of key-value pairs in the data set.
-    fn len(&self) -> usize {
-        self.data.len() + self.delta.len() - self.updated_keys.len()
-    }
-
-    /// Returns an iterator over the key-value pairs in the data set.
-    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
-        Box::new(
-            self.data
-                .iter()
-                .filter(|(k, _)| !self.updated_keys.contains(k))
-                .chain(self.delta.iter()),
-        )
-    }
-
-    // MUTATORS
-    // --------------------------------------------------------------------------------------------
-
-    /// Inserts a key-value pair into the data set. If the key already exists in the data set, the
-    /// value is updated and the old value is returned.
-    fn insert(&mut self, key: K, value: V) -> Option<V> {
-        if let Some(value) = self.delta.insert(key.clone(), value) {
-            return Some(value);
-        }
-
-        match self.data.get(&key) {
-            None => None,
-            Some(value) => {
-                self.trace.borrow_mut().insert(key.clone());
-                self.updated_keys.insert(key);
-                Some(value.clone())
-            }
-        }
-    }
-}
-
-// RECORDING MAP TRAIT IMPLS
-// ================================================================================================
-
-impl<K: Clone + Ord, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
-    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
-        iter.into_iter().for_each(move |(k, v)| {
-            self.insert(k, v);
-        });
-    }
-}
-
-impl<K: Ord + Clone, V: Clone> Default for RecordingMap<K, V> {
-    fn default() -> Self {
-        RecordingMap::new(BTreeMap::new())
-    }
-}
-
-impl<K: Ord + 'static, V> IntoIterator for RecordingMap<K, V> {
-    type Item = (K, V);
-    type IntoIter =
-        Chain<Filter<IntoIter<K, V>, Box<dyn FnMut(&Self::Item) -> bool>>, IntoIter<K, V>>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        #[allow(clippy::type_complexity)]
-        let filter_updated: Box<dyn FnMut(&Self::Item) -> bool> =
-            Box::new(move |(k, _)| !self.updated_keys.contains(k));
-        let data_iter = self.data.into_iter().filter(filter_updated);
-        let updates_iter = self.delta.into_iter();
-
-        data_iter.chain(updates_iter)
-    }
 }
 
 // BTREE MAP `KvMap` IMPLEMENTATION
 // ================================================================================================
-impl<K: Ord, V> KvMap<K, V> for BTreeMap<K, V> {
+impl<K: Ord + Clone, V: Clone> KvMap<K, V> for BTreeMap<K, V> {
     fn get(&self, key: &K) -> Option<&V> {
         self.get(key)
     }
@@ -185,19 +38,153 @@ impl<K: Ord, V> KvMap<K, V> for BTreeMap<K, V> {
         self.len()
     }
 
+    fn insert(&mut self, key: K, value: V) -> Option<V> {
+        self.insert(key, value)
+    }
+
     fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
         Box::new(self.iter())
     }
+}
+
+// RECORDING MAP
+// ================================================================================================
+
+/// A [RecordingMap] that records read requests to the underlying key-value map.
+///
+/// The data recorder is used to generate a proof for read requests.
+///
+/// The [RecordingMap] is composed of three parts:
+/// - `data`: which contains the current set of key-value pairs in the map.
+/// - `updates`: which tracks keys for which values have been since the map was instantiated.
+///   updates include both insertions and updates of values under existing keys.
+/// - `trace`: which contains the key-value pairs from the original data which have been accesses
+///   since the map was instantiated.
+#[derive(Debug, Default, Clone, Eq, PartialEq)]
+pub struct RecordingMap<K, V> {
+    data: BTreeMap<K, V>,
+    updates: BTreeSet<K>,
+    trace: RefCell<BTreeMap<K, V>>,
+}
+
+impl<K: Ord + Clone, V: Clone> RecordingMap<K, V> {
+    // CONSTRUCTOR
+    // --------------------------------------------------------------------------------------------
+    /// Returns a new [RecordingMap] instance initialized with the provided key-value pairs.
+    /// ([BTreeMap]).
+    pub fn new(init: impl IntoIterator<Item = (K, V)>) -> Self {
+        RecordingMap {
+            data: init.into_iter().collect(),
+            updates: BTreeSet::new(),
+            trace: RefCell::new(BTreeMap::new()),
+        }
+    }
+
+    // FINALIZER
+    // --------------------------------------------------------------------------------------------
+
+    /// Consumes the [RecordingMap] and returns a [BTreeMap] containing the key-value pairs from
+    /// the initial data set that were read during recording.
+    pub fn into_proof(self) -> BTreeMap<K, V> {
+        self.trace.take()
+    }
+
+    // TEST HELPERS
+    // --------------------------------------------------------------------------------------------
+
+    #[cfg(test)]
+    pub fn trace_len(&self) -> usize {
+        self.trace.borrow().len()
+    }
+
+    #[cfg(test)]
+    pub fn updates_len(&self) -> usize {
+        self.updates.len()
+    }
+}
+
+impl<K: Ord + Clone, V: Clone> KvMap<K, V> for RecordingMap<K, V> {
+    // PUBLIC ACCESSORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns a reference to the value associated with the given key if the value exists.
+    ///
+    /// If the key is part of the initial data set, the key access is recorded.
+    fn get(&self, key: &K) -> Option<&V> {
+        self.data.get(key).map(|value| {
+            if !self.updates.contains(key) {
+                self.trace.borrow_mut().insert(key.clone(), value.clone());
+            }
+            value
+        })
+    }
+
+    /// Returns a boolean to indicate whether the given key exists in the data set.
+    ///
+    /// If the key is part of the initial data set, the key access is recorded.
+    fn contains_key(&self, key: &K) -> bool {
+        self.get(key).is_some()
+    }
+
+    /// Returns the number of key-value pairs in the data set.
+    fn len(&self) -> usize {
+        self.data.len()
+    }
+
+    // MUTATORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Inserts a key-value pair into the data set.
+    ///
+    /// If the key already exists in the data set, the value is updated and the old value is
+    /// returned.
     fn insert(&mut self, key: K, value: V) -> Option<V> {
-        self.insert(key, value)
+        let new_update = self.updates.insert(key.clone());
+        self.data.insert(key.clone(), value).map(|old_value| {
+            if new_update {
+                self.trace.borrow_mut().insert(key, old_value.clone());
+            }
+            old_value
+        })
+    }
+
+    // ITERATION
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns an iterator over the key-value pairs in the data set.
+    fn iter(&self) -> Box<dyn Iterator<Item = (&K, &V)> + '_> {
+        Box::new(self.data.iter())
+    }
+}
+
+impl<K: Clone + Ord, V: Clone> Extend<(K, V)> for RecordingMap<K, V> {
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        iter.into_iter().for_each(move |(k, v)| {
+            self.insert(k, v);
+        });
+    }
+}
+
+impl<K: Clone + Ord, V: Clone> FromIterator<(K, V)> for RecordingMap<K, V> {
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+        Self::new(iter)
+    }
+}
+
+impl<K: Clone + Ord, V: Clone> IntoIterator for RecordingMap<K, V> {
+    type Item = (K, V);
+    type IntoIter = IntoIter<K, V>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.data.into_iter()
     }
 }
 
 // TESTS
 // ================================================================================================
 
 #[cfg(test)]
-mod test_recorder {
+mod tests {
     use super::*;
 
     const ITEMS: [(u64, u64); 5] = [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)];
@@ -255,19 +242,49 @@ mod test_recorder {
         // length of the map should be equal to the number of items
         assert_eq!(map.len(), ITEMS.len());
 
-        // inserting entry with key that already exists should not change the length
+        // inserting entry with key that already exists should not change the length, but it does
+        // add entries to the trace and update sets
         map.insert(4, 5);
         assert_eq!(map.len(), ITEMS.len());
+        assert_eq!(map.trace_len(), 1);
+        assert_eq!(map.updates_len(), 1);
 
-        // inserting entry with new key should increase the length
+        // inserting entry with new key should increase the length; it should also record the key
+        // as an updated key, but the trace length does not change since old values were not touched
         map.insert(5, 5);
         assert_eq!(map.len(), ITEMS.len() + 1);
+        assert_eq!(map.trace_len(), 1);
+        assert_eq!(map.updates_len(), 2);
 
-        // get some items so that they are saved in the trace
+        // get some items so that they are saved in the trace; this should record original items
+        // in the trace, but should not affect the set of updates
         let get_items = [0, 1, 2];
         for key in get_items.iter() {
             map.contains_key(key);
         }
+        assert_eq!(map.trace_len(), 4);
+        assert_eq!(map.updates_len(), 2);
+
+        // read the same items again, this should not have any effect on either length, trace, or
+        // the set of updates
+        let get_items = [0, 1, 2];
+        for key in get_items.iter() {
+            map.contains_key(key);
+        }
+        assert_eq!(map.trace_len(), 4);
+        assert_eq!(map.updates_len(), 2);
+
+        // read a newly inserted item; this should not affect either length, trace, or the set of
+        // updates
+        let _val = map.get(&5).unwrap();
+        assert_eq!(map.trace_len(), 4);
+        assert_eq!(map.updates_len(), 2);
+
+        // update a newly inserted item; this should not affect either length, trace, or the set
+        // of updates
+        map.insert(5, 11);
+        assert_eq!(map.trace_len(), 4);
+        assert_eq!(map.updates_len(), 2);
 
         // Note: The length reported by the proof will be different to the length originally
         // reported by the map.
@@ -1,5 +1,4 @@
-use super::Word;
-use crate::utils::string::String;
+use super::{utils::string::String, Word};
 use core::fmt::{self, Write};
 
 #[cfg(not(feature = "std"))]
@@ -8,13 +7,23 @@ pub use alloc::format;
 #[cfg(feature = "std")]
 pub use std::format;
 
+mod kv_map;
+
 // RE-EXPORTS
 // ================================================================================================
 pub use winter_utils::{
-    collections, string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable,
-    DeserializationError, Serializable, SliceReader,
+    string, uninit_vector, Box, ByteReader, ByteWriter, Deserializable, DeserializationError,
+    Serializable, SliceReader,
 };
 
+pub mod collections {
+    pub use super::kv_map::*;
+    pub use winter_utils::collections::*;
+}
+
+// UTILITY FUNCTIONS
+// ================================================================================================
+
 /// Converts a [Word] into hex.
 pub fn word_to_hex(w: &Word) -> Result<String, fmt::Error> {
     let mut s = String::new();
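For reference, the reworked `RecordingMap` in the two hunks above keeps a single `data` map plus an `updates` key set and a `trace` of original entries: `get`/`contains_key` copy an untouched original entry into the trace, `insert` records the old value the first time an original key is overwritten, and `into_proof` simply returns the accumulated trace. A small sketch of that behaviour (the import path follows the `utils::collections` re-export added above; keys and values are arbitrary):

```rust
use miden_crypto::utils::collections::{KvMap, RecordingMap};

let mut map = RecordingMap::new([(1u64, 10u64), (2, 20)]);

let _ = map.get(&1); // first read of an original entry -> (1, 10) goes into the trace
map.insert(2, 21);   // overwrite of an original entry  -> (2, 20) goes into the trace
map.insert(3, 30);   // brand-new key -> tracked in `updates`, nothing added to the trace

// only the touched original entries are returned as the proof
let proof = map.into_proof();
assert_eq!(proof.len(), 2);
```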