//! Stateless Binary Merkle Tree (BMT).
//!
//! The Binary Merkle Tree is constructed level-by-level. The first level consists of position-hashed
//! leaf digests. On each additional level, pairs of nodes from the previous level are hashed together
//! (if a level contains an odd number of nodes, the last node is duplicated). The finalized root of
//! the tree incorporates the leaf count to prevent proof malleability: `root = hash(leaf_count || tree_root)`.
//!
//! For example, given three leaves A, B, and C, the tree is constructed as follows:
//!
//! ```text
//! Level 2 (tree_root): [hash(hash(hash(0,A),hash(1,B)),hash(hash(2,C),hash(2,C)))]
//! Level 1:             [hash(hash(0,A),hash(1,B)),hash(hash(2,C),hash(2,C))]
//! Level 0 (leaves):    [hash(0,A),hash(1,B),hash(2,C)]
//! Finalized root:      hash(3 || tree_root)
//! ```
//!
//! A proof for one or more leaves is generated by collecting the siblings needed to reconstruct the root.
//! An external process can then use this proof (with some trusted root) to verify that the leaves
//! are part of the tree.
//!
//! # Example
//!
//! ```rust
//! use commonware_storage::bmt::{Builder, Tree};
//! use commonware_cryptography::{Sha256, sha256::Digest, Hasher as _};
//!
//! // Create transactions and compute their digests
//! let txs = [b"tx1", b"tx2", b"tx3", b"tx4"];
//! let digests: Vec<Digest> = txs.iter().map(|tx| Sha256::hash(*tx)).collect();
//!
//! // Build a Merkle Tree from the digests
//! let mut builder = Builder::<Sha256>::new(digests.len());
//! for digest in &digests {
//!     builder.add(digest);
//! }
//! let tree = builder.build();
//! let root = tree.root();
//!
//! // Generate a proof for the leaf at index 1
//! let mut hasher = Sha256::default();
//! let proof = tree.proof(1).unwrap();
//! assert!(proof.verify_element_inclusion(&mut hasher, &digests[1], 1, &root).is_ok());
//! ```

use alloc::collections::btree_set::BTreeSet;
use commonware_codec::{EncodeSize, Read, ReadExt, ReadRangeExt, Write};
use commonware_cryptography::{Digest, Hasher};
use commonware_runtime::{Buf, BufMut};
use commonware_utils::{non_empty_vec, vec::NonEmptyVec};
use thiserror::Error;

/// There should never be more than 255 levels in a proof (which would mean the Binary Merkle Tree
/// has more than 2^255 leaves).
pub const MAX_LEVELS: usize = u8::MAX as usize;

/// Errors that can occur when working with a Binary Merkle Tree (BMT).
#[derive(Error, Debug)]
pub enum Error {
    #[error("invalid position: {0}")]
    InvalidPosition(u32),
    #[error("invalid proof: {0} != {1}")]
    InvalidProof(String, String),
    #[error("no leaves")]
    NoLeaves,
    #[error("unaligned proof")]
    UnalignedProof,
    #[error("duplicate position: {0}")]
    DuplicatePosition(u32),
}

/// Constructor for a Binary Merkle Tree (BMT).
pub struct Builder<H: Hasher> {
    hasher: H,
    leaves: Vec<H::Digest>,
}

impl<H: Hasher> Builder<H> {
    /// Creates a new Binary Merkle Tree builder.
    pub fn new(leaves: usize) -> Self {
        Self {
            hasher: H::new(),
            leaves: Vec::with_capacity(leaves),
        }
    }

    /// Adds a leaf to the Binary Merkle Tree.
    ///
    /// When added, the leaf is hashed together with its position.
    pub fn add(&mut self, leaf: &H::Digest) -> u32 {
        let position: u32 = self.leaves.len().try_into().expect("too many leaves");
        self.hasher.update(&position.to_be_bytes());
        self.hasher.update(leaf);
        self.leaves.push(self.hasher.finalize());
        position
    }

    /// Builds the Binary Merkle Tree.
    ///
    /// It is valid to build a tree with no leaves, in which case
    /// just an "empty" node is included (no leaves will be provable).
    pub fn build(self) -> Tree<H::Digest> {
        Tree::new(self.hasher, self.leaves)
    }
}

/// Constructed Binary Merkle Tree (BMT).
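///
/// # Example
///
/// A minimal usage sketch (mirroring the module-level example, assuming `Sha256` digests):
///
/// ```rust
/// use commonware_storage::bmt::Builder;
/// use commonware_cryptography::{Sha256, sha256::Digest, Hasher as _};
///
/// // Build a tree over a few digests
/// let digests: Vec<Digest> = (0..4u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
/// let mut builder = Builder::<Sha256>::new(digests.len());
/// for digest in &digests {
///     builder.add(digest);
/// }
/// let tree = builder.build();
/// let root = tree.root();
///
/// // Prove two non-contiguous leaves with a single multi-proof
/// let mut hasher = Sha256::default();
/// let proof = tree.multi_proof([0u32, 2]).unwrap();
/// let elements = [(digests[0], 0u32), (digests[2], 2)];
/// assert!(proof.verify_multi_inclusion(&mut hasher, &elements, &root).is_ok());
/// ```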
#[derive(Clone, Debug)]
pub struct Tree<D: Digest> {
    /// Records whether the tree is empty.
    empty: bool,

    /// The digests at each level of the tree (from leaves to root).
    levels: NonEmptyVec<NonEmptyVec<D>>,

    /// The finalized root digest, which incorporates the leaf count.
    ///
    /// This is computed as `H(leaf_count || tree_root)` to prevent
    /// proof malleability where proofs that declare different leaf
    /// counts could verify against the same root.
    root: D,
}

impl<D: Digest> Tree<D> {
    /// Builds a Merkle Tree from a slice of position-hashed leaf digests.
    fn new<H: Hasher<Digest = D>>(mut hasher: H, mut leaves: Vec<D>) -> Self {
        // If no leaves, add an empty node.
        //
        // Because this node only includes a position, there is no way a valid proof
        // can be generated that references it.
        let mut empty = false;
        let leaf_count = leaves.len() as u32;
        if leaves.is_empty() {
            leaves.push(hasher.finalize());
            empty = true;
        }

        // Create the first level
        let mut levels = non_empty_vec![non_empty_vec![@leaves]];

        // Construct the tree level-by-level
        let mut current_level = levels.last();
        while !current_level.is_singleton() {
            let mut next_level = Vec::with_capacity(current_level.len().get().div_ceil(2));
            for chunk in current_level.chunks(2) {
                // Hash the left child
                hasher.update(&chunk[0]);

                // Hash the right child
                if chunk.len() == 2 {
                    hasher.update(&chunk[1]);
                } else {
                    // If no right child exists, duplicate left child.
                    hasher.update(&chunk[0]);
                };

                // Compute the parent digest
                next_level.push(hasher.finalize());
            }

            // Add the computed level to the tree
            levels.push(non_empty_vec![@next_level]);
            current_level = levels.last();
        }

        // Compute the finalized root: H(leaf_count || tree_root)
        // This binds the root to the tree size, preventing malleability attacks.
        let tree_root = levels.last().first();
        hasher.update(&leaf_count.to_be_bytes());
        hasher.update(tree_root);
        let root = hasher.finalize();

        Self {
            empty,
            levels,
            root,
        }
    }

    /// Returns the finalized root of the tree.
    ///
    /// The root incorporates the leaf count via `H(leaf_count || tree_root)`,
    /// which prevents proof malleability attacks where different tree sizes
    /// could produce valid proofs for the same root.
    pub const fn root(&self) -> D {
        self.root
    }

    /// Generates a Merkle proof for the leaf at `position`.
    ///
    /// This is a single-element multi-proof, which includes the minimal siblings
    /// needed to reconstruct the root.
    pub fn proof(&self, position: u32) -> Result<Proof<D>, Error> {
        self.multi_proof(core::iter::once(position))
    }

    /// Generates a Merkle range proof for a contiguous set of leaves from `start`
    /// to `end` (inclusive).
    ///
    /// The proof contains the minimal set of sibling digests needed to reconstruct
    /// the root for all elements in the range. This is more efficient than individual
    /// proofs when proving multiple consecutive elements.
    pub fn range_proof(&self, start: u32, end: u32) -> Result<Proof<D>, Error> {
        // For empty trees, return an empty proof
        if self.empty {
            if start == 0 && end == 0 {
                return Ok(Proof::default());
            }
            return Err(Error::InvalidPosition(start));
        }

        // Validate range bounds
        if start > end {
            return Err(Error::InvalidPosition(start));
        }
        let leaf_count = self.levels.first().len().get() as u32;
        if start >= leaf_count {
            return Err(Error::InvalidPosition(start));
        }
        if end >= leaf_count {
            return Err(Error::InvalidPosition(end));
        }

        // Compute required siblings without enumerating every leaf in the range.
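        //
        // Illustrative sketch (hypothetical 8-leaf tree, proving leaves 2..=5):
        // - Level 0: the range starts on an even index (2) and ends on an odd index (5),
        //   so no leaf-level siblings are needed.
        // - Level 1: the range maps to nodes 1..=2, so the proof needs node 0 (left of 1)
        //   and node 3 (right of 2).
        // - Level 2: the range covers both remaining nodes, so nothing more is needed.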
        let sibling_positions = siblings_required_for_range_proof(leaf_count, start, end)?;
        let siblings: Vec<D> = sibling_positions
            .iter()
            .map(|&(level, index)| self.levels[level][index])
            .collect();

        Ok(Proof {
            leaf_count,
            siblings,
        })
    }

    /// Generates a Merkle proof for multiple non-contiguous leaves at the given `positions`.
    ///
    /// The proof contains the minimal set of sibling digests needed to reconstruct
    /// the root for all elements at the specified positions. This is more efficient
    /// than individual proofs when proving multiple elements because shared siblings
    /// are deduplicated.
    ///
    /// Positions are sorted internally; duplicate positions will return an error.
    pub fn multi_proof<I, P>(&self, positions: I) -> Result<Proof<D>, Error>
    where
        I: IntoIterator<Item = P>,
        P: core::borrow::Borrow<u32>,
    {
        let mut positions = positions.into_iter().peekable();

        // Handle empty positions first - can't prove zero elements
        let first = *positions.peek().ok_or(Error::NoLeaves)?.borrow();

        // Handle empty tree case
        if self.empty {
            return Err(Error::InvalidPosition(first));
        }
        let leaf_count = self.levels.first().len().get() as u32;

        // Get required sibling positions (this validates positions and checks for duplicates)
        let sibling_positions =
            siblings_required_for_multi_proof(leaf_count, positions.map(|p| *p.borrow()))?;

        // Collect sibling digests in order
        let siblings: Vec<D> = sibling_positions
            .iter()
            .map(|&(level, index)| self.levels[level][index])
            .collect();

        Ok(Proof {
            leaf_count,
            siblings,
        })
    }
}

/// A Merkle proof for multiple non-contiguous leaves in a Binary Merkle Tree.
///
/// This proof type is more space-efficient than generating individual proofs
/// for each leaf because sibling nodes that are shared between multiple paths
/// are deduplicated.
///
/// The proof contains the leaf count and sibling digests required for verification.
/// The leaf count is incorporated into the root hash during finalization, so
/// modifying it will cause verification to fail (preventing malleability attacks).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Proof<D: Digest> {
    /// The number of leaves in the tree.
    ///
    /// This value is incorporated into the root hash during finalization,
    /// so modifying it will cause verification to fail (prevents malleability).
    pub leaf_count: u32,

    /// The deduplicated sibling digests required to verify all elements,
    /// ordered by their position in the tree (level-major, then index within level).
    pub siblings: Vec<D>,
}

impl<D: Digest> Default for Proof<D> {
    fn default() -> Self {
        Self {
            leaf_count: 0,
            siblings: Vec::new(),
        }
    }
}

impl<D: Digest> Write for Proof<D> {
    fn write(&self, writer: &mut impl BufMut) {
        self.leaf_count.write(writer);
        self.siblings.write(writer);
    }
}

impl<D: Digest> Read for Proof<D> {
    /// The maximum number of items being proven.
    ///
    /// The upper bound on sibling hashes is derived as `max_items * MAX_LEVELS`.
    type Cfg = usize;

    fn read_cfg(
        reader: &mut impl Buf,
        max_items: &Self::Cfg,
    ) -> Result<Self, commonware_codec::Error> {
        let leaf_count = u32::read(reader)?;
        let max_siblings = max_items.saturating_mul(MAX_LEVELS);
        let siblings = Vec::<D>::read_range(reader, ..=max_siblings)?;
        Ok(Self {
            leaf_count,
            siblings,
        })
    }
}

impl<D: Digest> EncodeSize for Proof<D> {
    fn encode_size(&self) -> usize {
        self.leaf_count.encode_size() + self.siblings.encode_size()
    }
}

#[cfg(feature = "arbitrary")]
impl<D: Digest> arbitrary::Arbitrary<'_> for Proof<D>
where
    D: for<'a> arbitrary::Arbitrary<'a>,
{
    fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
        Ok(Self {
            leaf_count: u.arbitrary()?,
            siblings: u.arbitrary()?,
        })
    }
}

/// Returns the number of levels in a tree with `leaf_count` leaves.
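/// For `leaf_count > 1`, this equals `⌈log2(leaf_count)⌉ + 1` (e.g., 8 leaves give 4 levels).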
/// A tree with 1 leaf has 1 level, a tree with 2 leaves has 2 levels, etc.
const fn levels_in_tree(leaf_count: u32) -> usize {
    (u32::BITS - (leaf_count.saturating_sub(1)).leading_zeros() + 1) as usize
}

/// Returns the sorted, deduplicated positions of siblings required to prove
/// inclusion of leaves at the given positions.
///
/// Each position in the result is encoded as `(level, index)` where level 0 is the leaf level.
fn siblings_required_for_multi_proof(
    leaf_count: u32,
    positions: impl IntoIterator<Item = u32>,
) -> Result<BTreeSet<(usize, usize)>, Error> {
    // Validate positions and check for duplicates.
    let mut current = BTreeSet::new();
    for pos in positions {
        if pos >= leaf_count {
            return Err(Error::InvalidPosition(pos));
        }
        if !current.insert(pos as usize) {
            return Err(Error::DuplicatePosition(pos));
        }
    }
    if current.is_empty() {
        return Err(Error::NoLeaves);
    }

    // Track positions we can compute at each level and record missing siblings.
    // This keeps the work proportional to the number of positions, not the tree size.
    let mut sibling_positions = BTreeSet::new();
    let levels_count = levels_in_tree(leaf_count);
    let mut level_size = leaf_count as usize;
    for level in 0..levels_count - 1 {
        for &index in &current {
            let sibling_index = if index.is_multiple_of(2) {
                if index + 1 < level_size {
                    index + 1
                } else {
                    index
                }
            } else {
                index - 1
            };
            if sibling_index != index && !current.contains(&sibling_index) {
                sibling_positions.insert((level, sibling_index));
            }
        }
        current = current.iter().map(|idx| idx / 2).collect();
        level_size = level_size.div_ceil(2);
    }
    Ok(sibling_positions)
}

/// Returns the sorted, deduplicated positions of siblings required to prove
/// inclusion of a contiguous range of leaves from `start` to `end` (inclusive).
fn siblings_required_for_range_proof(
    leaf_count: u32,
    start: u32,
    end: u32,
) -> Result<BTreeSet<(usize, usize)>, Error> {
    if leaf_count == 0 {
        return Err(Error::NoLeaves);
    }
    if start > end {
        return Err(Error::InvalidPosition(start));
    }
    if start >= leaf_count {
        return Err(Error::InvalidPosition(start));
    }
    if end >= leaf_count {
        return Err(Error::InvalidPosition(end));
    }

    let mut sibling_positions = BTreeSet::new();
    let levels_count = levels_in_tree(leaf_count);
    let mut level_start = start as usize;
    let mut level_end = end as usize;
    let mut level_size = leaf_count as usize;
    for level in 0..levels_count - 1 {
        if !level_start.is_multiple_of(2) {
            sibling_positions.insert((level, level_start - 1));
        }
        if level_end.is_multiple_of(2) {
            let right = level_end + 1;
            if right < level_size {
                sibling_positions.insert((level, right));
            }
        }
        level_start /= 2;
        level_end /= 2;
        level_size = level_size.div_ceil(2);
    }
    Ok(sibling_positions)
}

impl<D: Digest> Proof<D> {
    /// Verifies that a given `leaf` at `position` is included in a Binary Merkle Tree
    /// with `root` using the provided `hasher`.
    ///
    /// The proof consists of sibling hashes stored from the leaf up to the root. At each
    /// level, if the current node is a left child (even index), the sibling is combined
    /// to the right; if it is a right child (odd index), the sibling is combined to the
    /// left.
    ///
    /// The `leaf_count` stored in the proof is incorporated into the finalized root
    /// computation, so any modification to it will cause verification to fail.
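    ///
    /// For example, using the three-leaf tree (A, B, C) from the module docs, a proof for
    /// B at position 1 carries `siblings = [hash(0,A), hash(hash(2,C),hash(2,C))]` and is
    /// verified as:
    ///
    /// ```text
    /// level 0: position 1 is odd  -> computed = hash(hash(0,A), hash(1,B))
    /// level 1: position 0 is even -> computed = hash(computed, hash(hash(2,C),hash(2,C)))
    /// final:   hash(3 || computed) must equal the trusted root
    /// ```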
    pub fn verify_element_inclusion<H: Hasher<Digest = D>>(
        &self,
        hasher: &mut H,
        leaf: &D,
        mut position: u32,
        root: &D,
    ) -> Result<(), Error> {
        // Validate the position
        if position >= self.leaf_count {
            return Err(Error::InvalidPosition(position));
        }

        // Compute the position-hashed leaf
        hasher.update(&position.to_be_bytes());
        hasher.update(leaf);
        let mut computed = hasher.finalize();

        // Track the level size to handle odd-sized levels
        let mut level_size = self.leaf_count as usize;
        let mut sibling_iter = self.siblings.iter();

        // Traverse from leaf to root
        while level_size > 1 {
            // Check if this is the last node at an odd-sized level (no real sibling)
            let is_last_odd = position.is_multiple_of(2) && position as usize + 1 >= level_size;
            let (left_node, right_node) = if is_last_odd {
                // Node is duplicated - no sibling is consumed from the proof
                (&computed, &computed)
            } else if position.is_multiple_of(2) {
                // Even position: sibling is to the right
                let sibling = sibling_iter.next().ok_or(Error::UnalignedProof)?;
                (&computed, sibling)
            } else {
                // Odd position: sibling is to the left
                let sibling = sibling_iter.next().ok_or(Error::UnalignedProof)?;
                (sibling, &computed)
            };

            // Compute the parent digest
            hasher.update(left_node);
            hasher.update(right_node);
            computed = hasher.finalize();

            // Move up the tree
            position /= 2;
            level_size = level_size.div_ceil(2);
        }

        // Ensure all siblings were consumed
        if sibling_iter.next().is_some() {
            return Err(Error::UnalignedProof);
        }

        // Finalize the root by incorporating the leaf count: H(leaf_count || tree_root)
        // This binds the proof to the specific tree size, preventing malleability attacks.
        hasher.update(&self.leaf_count.to_be_bytes());
        hasher.update(&computed);
        let finalized = hasher.finalize();
        if finalized == *root {
            Ok(())
        } else {
            Err(Error::InvalidProof(finalized.to_string(), root.to_string()))
        }
    }

    /// Verifies that the given `elements` at their respective positions are included
    /// in a Binary Merkle Tree with `root`.
    ///
    /// Elements can be provided in any order; positions are sorted internally.
    /// Duplicate positions will cause verification to fail.
    ///
    /// The `leaf_count` stored in the proof is incorporated into the finalized root
    /// computation, so any modification to it will cause verification to fail.
    pub fn verify_multi_inclusion<H: Hasher<Digest = D>>(
        &self,
        hasher: &mut H,
        elements: &[(D, u32)],
        root: &D,
    ) -> Result<(), Error> {
        // Handle the empty case
        if elements.is_empty() {
            if self.leaf_count == 0 && self.siblings.is_empty() {
                // Compute the finalized empty root: H(0 || empty_tree_root)
                let empty_tree_root = hasher.finalize();
                hasher.update(&0u32.to_be_bytes());
                hasher.update(&empty_tree_root);
                let finalized = hasher.finalize();
                if finalized == *root {
                    return Ok(());
                } else {
                    return Err(Error::InvalidProof(finalized.to_string(), root.to_string()));
                }
            }
            return Err(Error::NoLeaves);
        }

        // 1. Position-hash each element, sort by position, and check duplicates/bounds
        let mut sorted: Vec<(u32, D)> = Vec::with_capacity(elements.len());
        for (leaf, position) in elements {
            if *position >= self.leaf_count {
                return Err(Error::InvalidPosition(*position));
            }
            hasher.update(&position.to_be_bytes());
            hasher.update(leaf);
            sorted.push((*position, hasher.finalize()));
        }
        sorted.sort_unstable_by_key(|(pos, _)| *pos);

        // Check for duplicates (adjacent elements with the same position after sorting)
        for i in 1..sorted.len() {
            if sorted[i - 1].0 == sorted[i].0 {
                return Err(Error::DuplicatePosition(sorted[i].0));
            }
        }

        // 2. Iterate up the tree.
        // Since we process left-to-right and parent_pos = pos / 2, next_level stays sorted.
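        //
        // Illustrative sketch (hypothetical 8-leaf tree, proving positions 2 and 3):
        // at level 0 the pair (2, 3) is hashed together without consuming a sibling, its
        // parent at position 1 then consumes a left sibling from the proof, and the node at
        // position 0 of the next level consumes a right sibling before finalization.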
        let levels = levels_in_tree(self.leaf_count);
        let mut level_size = self.leaf_count;
        let mut sibling_iter = self.siblings.iter();
        let mut current = sorted;
        let mut next_level: Vec<(u32, D)> = Vec::with_capacity(current.len());
        for _ in 0..levels - 1 {
            let mut idx = 0;
            while idx < current.len() {
                let (pos, digest) = current[idx];
                let parent_pos = pos / 2;

                // Determine if we have the left or right child
                let (left, right) = if pos.is_multiple_of(2) {
                    // We are the LEFT child
                    let left = digest;

                    // Check if we have the right child in our current set
                    let right = if idx + 1 < current.len() && current[idx + 1].0 == pos + 1 {
                        idx += 1;
                        current[idx].1
                    } else if pos + 1 >= level_size {
                        // If no right child exists in the tree, duplicate the left
                        left
                    } else {
                        // Otherwise, we must consume a sibling
                        *sibling_iter.next().ok_or(Error::UnalignedProof)?
                    };
                    (left, right)
                } else {
                    // We are the RIGHT child.
                    // This implies the LEFT child was missing from `current`, so it must be a sibling.
                    let right = digest;
                    let left = *sibling_iter.next().ok_or(Error::UnalignedProof)?;
                    (left, right)
                };

                // Hash the parent
                hasher.update(&left);
                hasher.update(&right);
                next_level.push((parent_pos, hasher.finalize()));
                idx += 1;
            }

            // Prepare for the next level
            core::mem::swap(&mut current, &mut next_level);
            next_level.clear();
            level_size = level_size.div_ceil(2);
        }

        // 3. Verify the root
        if sibling_iter.next().is_some() {
            return Err(Error::UnalignedProof);
        }
        if current.len() != 1 {
            return Err(Error::UnalignedProof);
        }

        // Finalize the root by incorporating the leaf count: H(leaf_count || tree_root)
        // This binds the proof to the specific tree size, preventing malleability attacks.
        let tree_root = current[0].1;
        hasher.update(&self.leaf_count.to_be_bytes());
        hasher.update(&tree_root);
        let finalized = hasher.finalize();
        if finalized == *root {
            Ok(())
        } else {
            Err(Error::InvalidProof(finalized.to_string(), root.to_string()))
        }
    }

    /// Verifies that a contiguous range of `leaves` starting at `position` is included
    /// in a Binary Merkle Tree with `root`.
    ///
    /// This is a convenience method for verifying range proofs. The leaves must be
    /// in order starting from `position`.
    ///
    /// The `leaf_count` stored in the proof is incorporated into the finalized root
    /// computation, so any modification to it will cause verification to fail.
    pub fn verify_range_inclusion<H: Hasher<Digest = D>>(
        &self,
        hasher: &mut H,
        position: u32,
        leaves: &[D],
        root: &D,
    ) -> Result<(), Error> {
        // For empty trees, only position 0 with empty leaves is valid
        if leaves.is_empty() && position != 0 {
            return Err(Error::InvalidPosition(position));
        }
        if !leaves.is_empty() {
            let leaves_len =
                u32::try_from(leaves.len()).map_err(|_| Error::InvalidPosition(position))?;
            let end = position
                .checked_add(leaves_len - 1)
                .ok_or(Error::InvalidPosition(position))?;
            if end >= self.leaf_count {
                return Err(Error::InvalidPosition(end));
            }
        }

        // Convert to the format expected by verify_multi_inclusion
        let elements: Vec<(D, u32)> = leaves
            .iter()
            .enumerate()
            .map(|(i, leaf)| (*leaf, position + i as u32))
            .collect();
        self.verify_multi_inclusion(hasher, &elements, root)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use commonware_codec::{Decode, Encode};
    use commonware_cryptography::sha256::{Digest, Sha256};
    use rstest::rstest;

    /// Regression test for https://github.com/commonwarexyz/monorepo/issues/2837
    ///
    /// Before the fix, two proofs with identical siblings but different leaf_count
    /// values would both verify successfully against the same root, enabling
    /// proof malleability attacks.
#[test] fn issue_2837_regression() { // Create a tree with 255 leaves (as in the issue report) let digests: Vec = (0..255u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); let mut builder = Builder::::new(255); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get a valid proof for position 0 let original_proof = tree.proof(0).unwrap(); assert_eq!(original_proof.leaf_count, 255); // Original proof should verify let mut hasher = Sha256::default(); assert!( original_proof .verify_element_inclusion(&mut hasher, &digests[0], 0, &root) .is_ok(), "Original proof should verify" ); // Create a malleated proof with leaf_count=254 but same siblings // (This is the exact attack from issue #2837) let malleated_proof = Proof { leaf_count: 254, siblings: original_proof.siblings.clone(), }; // Malleated proof should NOT verify because the root now incorporates // the leaf_count: root = H(leaf_count || tree_root) let result = malleated_proof.verify_element_inclusion(&mut hasher, &digests[0], 0, &root); assert!( result.is_err(), "Malleated proof with wrong leaf_count must fail verification" ); } #[test] fn test_tampered_proof_no_siblings() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); let element = &digests[0]; // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Build proof let mut proof = tree.proof(0).unwrap(); // Tamper with proof proof.siblings = Vec::new(); // Fail verification with an empty proof. let mut hasher = Sha256::default(); assert!(proof .verify_element_inclusion(&mut hasher, element, 0, &root) .is_err()); } #[test] fn test_tampered_proof_extra_sibling() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); let element = &digests[0]; // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Build proof let mut proof = tree.proof(0).unwrap(); // Tamper with proof proof.siblings.push(*element); // Fail verification with extra sibling let mut hasher = Sha256::default(); assert!(proof .verify_element_inclusion(&mut hasher, element, 0, &root) .is_err()); } #[test] fn test_invalid_proof_wrong_element() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Generate a valid proof for leaf at index 2. let proof = tree.proof(2).unwrap(); // Use a wrong element (e.g. hash of a different transaction). let mut hasher = Sha256::default(); let wrong_leaf = Sha256::hash(b"wrong_tx"); assert!(proof .verify_element_inclusion(&mut hasher, &wrong_leaf, 2, &root) .is_err()); } #[test] fn test_invalid_proof_wrong_index() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Generate a valid proof for leaf at index 1. 
let proof = tree.proof(1).unwrap(); // Use an incorrect index (e.g. 2 instead of 1). let mut hasher = Sha256::default(); assert!(proof .verify_element_inclusion(&mut hasher, &digests[1], 2, &root) .is_err()); } #[test] fn test_invalid_proof_wrong_root() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Generate a valid proof for leaf at index 0. let proof = tree.proof(0).unwrap(); // Use a wrong root (hash of a different input). let mut hasher = Sha256::default(); let wrong_root = Sha256::hash(b"wrong_root"); assert!(proof .verify_element_inclusion(&mut hasher, &digests[0], 0, &wrong_root) .is_err()); } #[test] fn test_invalid_proof_serialization_truncated() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Generate a valid proof for leaf at index 1. let proof = tree.proof(1).unwrap(); let mut serialized = proof.encode(); // Truncate one byte. serialized.truncate(serialized.len() - 1); assert!(Proof::::decode_cfg(&mut serialized, &1).is_err()); } #[test] fn test_invalid_proof_serialization_extra() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Generate a valid proof for leaf at index 1. let proof = tree.proof(1).unwrap(); let mut serialized = proof.encode_mut(); // Append an extra byte. serialized.extend_from_slice(&[0u8]); assert!(Proof::::decode_cfg(&mut serialized, &1).is_err()); } #[test] fn test_invalid_proof_modified_hash() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3", b"tx4"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Generate a valid proof for leaf at index 2. let mut proof = tree.proof(2).unwrap(); // Modify the first hash in the proof. let mut hasher = Sha256::default(); proof.siblings[0] = Sha256::hash(b"modified"); assert!(proof .verify_element_inclusion(&mut hasher, &digests[2], 2, &root) .is_err()); } #[test] fn test_odd_tree_duplicate_index_proof() { // Create transactions and digests let txs = [b"tx1", b"tx2", b"tx3"]; let digests: Vec = txs.iter().map(|tx| Sha256::hash(*tx)).collect(); // Build tree let mut builder = Builder::::new(txs.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // The tree was built with 3 leaves; index 2 is the last valid index. let proof = tree.proof(2).unwrap(); // Verification should succeed for the proper index 2. let mut hasher = Sha256::default(); assert!(proof .verify_element_inclusion(&mut hasher, &digests[2], 2, &root) .is_ok()); // Should not be able to generate a proof for an out-of-range index (e.g. 3). assert!(tree.proof(3).is_err()); // Attempting to verify using an out-of-range index (e.g. 3, which would correspond // to a duplicate leaf that doesn't actually exist) should fail. 
assert!(proof .verify_element_inclusion(&mut hasher, &digests[2], 3, &root) .is_err()); } #[test] fn test_range_proof_basic() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Test range proof for elements 2-5 let range_proof = tree.range_proof(2, 5).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..6]; assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_ok()); // Serialize and deserialize let mut serialized = range_proof.encode(); let deserialized = Proof::::decode_cfg(&mut serialized, &4).unwrap(); assert!(deserialized .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_ok()); } #[test] fn test_range_proof_single_element() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Test single element range proof for (i, digest) in digests.iter().enumerate() { let range_proof = tree.range_proof(i as u32, i as u32).unwrap(); let mut hasher = Sha256::default(); let result = range_proof.verify_range_inclusion(&mut hasher, i as u32, &[*digest], &root); assert!(result.is_ok()); } } #[test] fn test_range_proof_full_tree() { // Create test data let digests: Vec = (0..7u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Test full tree range proof let range_proof = tree.range_proof(0, (digests.len() - 1) as u32).unwrap(); let mut hasher = Sha256::default(); assert!(range_proof .verify_range_inclusion(&mut hasher, 0, &digests, &root) .is_ok()); } #[test] fn test_range_proof_edge_cases() { // Create test data let digests: Vec = (0..15u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test first half let range_proof = tree.range_proof(0, 7).unwrap(); assert!(range_proof .verify_range_inclusion(&mut hasher, 0, &digests[0..8], &root) .is_ok()); // Test second half let range_proof = tree.range_proof(8, 14).unwrap(); assert!(range_proof .verify_range_inclusion(&mut hasher, 8, &digests[8..15], &root) .is_ok()); // Test last elements let range_proof = tree.range_proof(13, 14).unwrap(); assert!(range_proof .verify_range_inclusion(&mut hasher, 13, &digests[13..15], &root) .is_ok()); } #[test] fn test_range_proof_invalid_range() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Test invalid ranges assert!(tree.range_proof(8, 8).is_err()); // Start out of bounds assert!(tree.range_proof(0, 8).is_err()); // End out of bounds assert!(tree.range_proof(5, 8).is_err()); // End out of bounds assert!(tree.range_proof(2, 1).is_err()); // Start > end } #[test] fn test_range_proof_tampering() { // Create test data let digests: Vec = (0..8u32).map(|i| 
Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof let range_proof = tree.range_proof(2, 4).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..5]; // Test with wrong leaves let wrong_leaves = vec![ Sha256::hash(b"wrong1"), Sha256::hash(b"wrong2"), Sha256::hash(b"wrong3"), ]; assert!(range_proof .verify_range_inclusion(&mut hasher, 2, &wrong_leaves, &root) .is_err()); // Test with wrong number of leaves assert!(range_proof .verify_range_inclusion(&mut hasher, 2, &digests[2..4], &root) .is_err()); // Test with tampered proof let mut tampered_proof = range_proof.clone(); assert!(!tampered_proof.siblings.is_empty()); // Tamper with the first sibling tampered_proof.siblings[0] = Sha256::hash(b"tampered"); assert!(tampered_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_err()); // Test with wrong root let wrong_root = Sha256::hash(b"wrong_root"); assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &wrong_root) .is_err()); } #[test] fn test_range_proof_various_sizes() { // Test range proofs for trees of various sizes for tree_size in [1, 2, 3, 4, 5, 7, 8, 15, 16, 31, 32, 63, 64] { let digests: Vec = (0..tree_size as u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test various range sizes for range_size in 1..=tree_size.min(8) { for start in 0..=(tree_size - range_size) { let range_proof = tree .range_proof(start as u32, (start + range_size - 1) as u32) .unwrap(); let end = start + range_size; assert!( range_proof .verify_range_inclusion( &mut hasher, start as u32, &digests[start..end], &root ) .is_ok(), "Failed for tree_size={tree_size}, start={start}, range_size={range_size}" ); } } } } #[test] fn test_range_proof_malicious_wrong_position() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof for position 2 to 4 let range_proof = tree.range_proof(2, 4).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..5]; // Try to verify with wrong position assert!(range_proof .verify_range_inclusion(&mut hasher, 3, range_leaves, &root) .is_err()); assert!(range_proof .verify_range_inclusion(&mut hasher, 1, range_leaves, &root) .is_err()); } #[test] fn test_range_proof_malicious_reordered_leaves() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof for position 2 to 4 let range_proof = tree.range_proof(2, 4).unwrap(); let mut hasher = Sha256::default(); // Try to verify with reordered leaves let reordered_leaves = vec![digests[3], digests[2], digests[4]]; assert!(range_proof .verify_range_inclusion(&mut hasher, 2, &reordered_leaves, &root) .is_err()); } #[test] fn test_range_proof_malicious_extra_siblings() { // Create test data let digests: Vec = (0..8u32).map(|i| 
Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof let mut range_proof = tree.range_proof(2, 3).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..4]; // Tamper by adding extra siblings range_proof.siblings.push(Sha256::hash(b"extra")); assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_err()); } #[test] fn test_range_proof_malicious_missing_siblings() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof for a single element (which needs siblings) let mut range_proof = tree.range_proof(2, 2).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..3]; // The proof should have siblings assert!(!range_proof.siblings.is_empty()); // Remove a sibling range_proof.siblings.pop(); assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_err()); } #[test] fn test_range_proof_integer_overflow_protection() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Test overflow in range_proof generation assert!(tree.range_proof(u32::MAX, u32::MAX).is_err()); assert!(tree.range_proof(u32::MAX - 1, u32::MAX).is_err()); assert!(tree.range_proof(7, u32::MAX).is_err()); } #[test] fn test_range_proof_malicious_wrong_tree_structure() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Get valid range proof let mut range_proof = tree.range_proof(2, 3).unwrap(); let mut hasher = Sha256::default(); let range_leaves = &digests[2..4]; // Add extra sibling (simulating proof from different tree structure) range_proof.siblings.push(Sha256::hash(b"fake_level")); assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_err()); // Remove a sibling let mut range_proof = tree.range_proof(2, 2).unwrap(); let range_leaves = &digests[2..3]; assert!(!range_proof.siblings.is_empty()); range_proof.siblings.pop(); assert!(range_proof .verify_range_inclusion(&mut hasher, 2, range_leaves, &root) .is_err()); } #[test] fn test_range_proof_boundary_conditions() { // Test various power-of-2 boundary conditions for tree_size in [1, 2, 4, 8, 16, 32] { let digests: Vec = (0..tree_size as u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test edge cases // First element only let proof = tree.range_proof(0, 0).unwrap(); assert!(proof .verify_range_inclusion(&mut hasher, 0, &digests[0..1], &root) .is_ok()); // Last element only let last_idx = tree_size - 1; let proof = tree.range_proof(last_idx as u32, last_idx as u32).unwrap(); assert!(proof .verify_range_inclusion( &mut hasher, last_idx as u32, 
&digests[last_idx..tree_size], &root ) .is_ok()); // Full tree let proof = tree.range_proof(0, (tree_size - 1) as u32).unwrap(); assert!(proof .verify_range_inclusion(&mut hasher, 0, &digests, &root) .is_ok()); } } #[test] fn test_empty_tree_proof() { // Build an empty tree let builder = Builder::::new(0); let tree = builder.build(); // Empty tree should fail for any position since there are no elements assert!(tree.proof(0).is_err()); assert!(tree.proof(1).is_err()); assert!(tree.proof(100).is_err()); } #[test] fn test_empty_tree_range_proof() { // Build an empty tree let builder = Builder::::new(0); let tree = builder.build(); let root = tree.root(); // Empty tree should return default proof only for (0, 0) let range_proof = tree.range_proof(0, 0).unwrap(); assert!(range_proof.siblings.is_empty()); assert_eq!(range_proof, Proof::default()); // All other combinations should fail let invalid_ranges = vec![ (0, 1), (0, 10), (1, 1), (1, 2), (5, 5), (10, 10), (0, u32::MAX), (u32::MAX, u32::MAX), ]; for (start, end) in invalid_ranges { assert!(tree.range_proof(start, end).is_err()); } // Verify empty range proof against empty tree root let mut hasher = Sha256::default(); let empty_leaves: &[Digest] = &[]; assert!(range_proof .verify_range_inclusion(&mut hasher, 0, empty_leaves, &root) .is_ok()); // Should fail with non-empty leaves let non_empty_leaves = vec![Sha256::hash(b"leaf")]; assert!(range_proof .verify_range_inclusion(&mut hasher, 0, &non_empty_leaves, &root) .is_err()); // Should fail with wrong root let wrong_root = Sha256::hash(b"wrong"); assert!(range_proof .verify_range_inclusion(&mut hasher, 0, empty_leaves, &wrong_root) .is_err()); // Should fail with wrong position assert!(range_proof .verify_range_inclusion(&mut hasher, 1, empty_leaves, &root) .is_err()); } #[test] fn test_empty_range_proof_serialization() { let proof = Proof::::default(); let mut serialized = proof.encode(); let deserialized = Proof::::decode_cfg(&mut serialized, &0).unwrap(); assert_eq!(proof, deserialized); } #[test] fn test_empty_tree_root_consistency() { // Create multiple empty trees and verify they have the same root let mut roots = Vec::new(); for _ in 0..5 { let builder = Builder::::new(0); let tree = builder.build(); roots.push(tree.root()); } // All empty trees should have the same root for i in 1..roots.len() { assert_eq!(roots[0], roots[i]); } // The root should be the hash of empty data let mut hasher = Sha256::default(); hasher.update(0u32.to_be_bytes().as_slice()); hasher.update(Sha256::hash(b"").as_ref()); let expected_root = hasher.finalize(); assert_eq!(roots[0], expected_root); } #[rstest] #[case::need_left_sibling(1, 2)] // Range starting at odd index (needs left sibling) #[case::need_right_sibling(4, 4)] // Range starting at even index #[case::full_tree(0, 16)] // Full tree (no siblings needed at leaf level) fn test_range_proof_siblings_usage(#[case] start: u32, #[case] count: u32) { // This test ensures that all siblings in a range proof are actually used during verification let digests: Vec = (0..16u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); let range_proof = tree.range_proof(start, start + count - 1).unwrap(); let end = start as usize + count as usize; // Verify the proof works assert!(range_proof .verify_range_inclusion(&mut hasher, start, &digests[start as usize..end], &root) 
.is_ok()); // For each sibling, try tampering with it and verify the proof fails for sibling_idx in 0..range_proof.siblings.len() { let mut tampered_proof = range_proof.clone(); tampered_proof.siblings[sibling_idx] = Sha256::hash(b"tampered"); assert!(tampered_proof .verify_range_inclusion(&mut hasher, start, &digests[start as usize..end], &root) .is_err()); } } // Test trees with odd sizes that require duplicate nodes #[rstest] fn test_range_proof_duplicate_node_edge_cases( #[values(3, 5, 7, 9, 11, 13, 15)] tree_size: usize, ) { let digests: Vec = (0..tree_size as u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test range including the last element (which may require duplicate handling) let start = tree_size - 2; let proof = tree .range_proof(start as u32, (tree_size - 1) as u32) .unwrap(); assert!(proof .verify_range_inclusion(&mut hasher, start as u32, &digests[start..tree_size], &root) .is_ok()); } #[test] fn test_multi_proof_basic() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); // Test multi-proof for non-contiguous positions [0, 3, 5] let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); let mut hasher = Sha256::default(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!(multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok()); } #[test] fn test_multi_proof_single_element() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test single element multi-proof for each position for (i, digest) in digests.iter().enumerate() { let multi_proof = tree.multi_proof([i as u32]).unwrap(); let elements = [(*digest, i as u32)]; assert!( multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok(), "Failed for position {i}" ); } } #[test] fn test_multi_proof_all_elements() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test multi-proof for all elements let positions: Vec = (0..digests.len() as u32).collect(); let multi_proof = tree.multi_proof(&positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!(multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok()); // When proving all elements, we shouldn't need any siblings (all can be computed) assert!(multi_proof.siblings.is_empty()); } #[test] fn test_multi_proof_adjacent_elements() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = 
builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test adjacent positions (should deduplicate shared siblings) let positions = [2, 3]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!(multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok()); } #[test] fn test_multi_proof_sparse_positions() { // Create test data let digests: Vec = (0..16u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test widely separated positions let positions = [0, 7, 8, 15]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!(multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok()); } #[test] fn test_multi_proof_empty_tree() { // Build empty tree let builder = Builder::::new(0); let tree = builder.build(); // Empty tree with empty positions should return NoLeaves error // (we can't prove zero elements) assert!(matches!( tree.multi_proof(std::iter::empty::()), Err(Error::NoLeaves) )); // Empty tree with any position should fail with InvalidPosition assert!(matches!( tree.multi_proof([0]), Err(Error::InvalidPosition(0)) )); } #[test] fn test_multi_proof_empty_positions() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Empty positions should return error assert!(matches!( tree.multi_proof(std::iter::empty::()), Err(Error::NoLeaves) )); } #[test] fn test_multi_proof_duplicate_positions_error() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Duplicate positions should return error assert!(matches!( tree.multi_proof([1, 1]), Err(Error::DuplicatePosition(1)) )); assert!(matches!( tree.multi_proof([0, 2, 2, 5]), Err(Error::DuplicatePosition(2)) )); } #[test] fn test_multi_proof_unsorted_input() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test with unsorted positions (should work - internal sorting) let positions = [5, 0, 3]; let multi_proof = tree.multi_proof(positions).unwrap(); // Verify with unsorted elements (should work - internal sorting) let unsorted_elements = [(digests[5], 5), (digests[0], 0), (digests[3], 3)]; assert!(multi_proof .verify_multi_inclusion(&mut hasher, &unsorted_elements, &root) .is_ok()); } #[test] fn test_multi_proof_various_sizes() { // Test multi-proofs for trees of various sizes for tree_size in [1, 2, 3, 4, 5, 7, 8, 15, 16, 31, 32] { let digests: Vec = (0..tree_size as u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = 
builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test various position combinations // First and last if tree_size >= 2 { let positions = [0, (tree_size - 1) as u32]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!( multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok(), "Failed for tree_size={tree_size}, positions=[0, {}]", tree_size - 1 ); } // Every other element if tree_size >= 4 { let positions: Vec = (0..tree_size as u32).step_by(2).collect(); let multi_proof = tree.multi_proof(&positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!( multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok(), "Failed for tree_size={tree_size}, every other element" ); } } } #[test] fn test_multi_proof_wrong_elements() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); // Verify with wrong elements let wrong_elements = [ (Sha256::hash(b"wrong1"), 0), (digests[3], 3), (digests[5], 5), ]; assert!(multi_proof .verify_multi_inclusion(&mut hasher, &wrong_elements, &root) .is_err()); } #[test] fn test_multi_proof_wrong_positions() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); // Verify with wrong positions (same elements, different positions) let wrong_positions = [ (digests[0], 1), // wrong position (digests[3], 3), (digests[5], 5), ]; assert!(multi_proof .verify_multi_inclusion(&mut hasher, &wrong_positions, &root) .is_err()); } #[test] fn test_multi_proof_wrong_root() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); // Verify with wrong root let wrong_root = Sha256::hash(b"wrong_root"); assert!(multi_proof .verify_multi_inclusion(&mut hasher, &elements, &wrong_root) .is_err()); } #[test] fn test_multi_proof_tampering() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); 
// Tamper with sibling assert!(!multi_proof.siblings.is_empty()); let mut modified = multi_proof.clone(); modified.siblings[0] = Sha256::hash(b"tampered"); assert!(modified .verify_multi_inclusion(&mut hasher, &elements, &root) .is_err()); // Add extra sibling let mut extra = multi_proof.clone(); extra.siblings.push(Sha256::hash(b"extra")); assert!(extra .verify_multi_inclusion(&mut hasher, &elements, &root) .is_err()); // Remove a sibling let mut missing = multi_proof; missing.siblings.pop(); assert!(missing .verify_multi_inclusion(&mut hasher, &elements, &root) .is_err()); } #[test] fn test_multi_proof_deduplication() { // Create test data let digests: Vec = (0..16u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Get individual proofs let individual_siblings: usize = [0u32, 1, 8, 9] .iter() .map(|&p| tree.proof(p).unwrap().siblings.len()) .sum(); // Get multi-proof for same positions let multi_proof = tree.multi_proof([0, 1, 8, 9]).unwrap(); // Multi-proof should have fewer siblings due to deduplication assert!( multi_proof.siblings.len() < individual_siblings, "Multi-proof ({}) should have fewer siblings than sum of individual proofs ({})", multi_proof.siblings.len(), individual_siblings ); } #[test] fn test_multi_proof_serialization() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); // Serialize and deserialize let serialized = multi_proof.encode(); let deserialized = Proof::::decode_cfg(serialized, &positions.len()).unwrap(); assert_eq!(multi_proof, deserialized); // Verify deserialized proof works let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!(deserialized .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok()); } #[test] fn test_multi_proof_serialization_truncated() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Generate proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); // Serialize and truncate let mut serialized = multi_proof.encode(); serialized.truncate(serialized.len() - 1); // Should fail to deserialize assert!(Proof::::decode_cfg(&mut serialized, &positions.len()).is_err()); } #[test] fn test_multi_proof_serialization_extra() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Generate proof let positions = [0, 3, 5]; let multi_proof = tree.multi_proof(positions).unwrap(); // Serialize and add extra byte let mut serialized = multi_proof.encode_mut(); serialized.extend_from_slice(&[0u8]); // Should fail to deserialize assert!(Proof::::decode_cfg(&mut serialized, &positions.len()).is_err()); } #[test] fn test_multi_proof_decode_insufficient_data() { let mut serialized = Vec::new(); 
serialized.extend_from_slice(&8u32.encode()); // leaf_count serialized.extend_from_slice(&1usize.encode()); // claims 1 sibling but no data follows // Should fail because the buffer claims 1 sibling but doesn't have the data let err = Proof::::decode_cfg(serialized.as_slice(), &1).unwrap_err(); assert!(matches!(err, commonware_codec::Error::EndOfBuffer)); } #[test] fn test_multi_proof_invalid_position() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); // Test out of bounds position assert!(matches!( tree.multi_proof([0, 8]), Err(Error::InvalidPosition(8)) )); assert!(matches!( tree.multi_proof([100]), Err(Error::InvalidPosition(100)) )); } #[test] fn test_multi_proof_verify_invalid_position() { // Create test data let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 3]; let multi_proof = tree.multi_proof(positions).unwrap(); // Try to verify with out of bounds position let invalid_elements = [(digests[0], 0), (digests[3], 100)]; assert!(multi_proof .verify_multi_inclusion(&mut hasher, &invalid_elements, &root) .is_err()); } #[test] fn test_multi_proof_odd_tree_sizes() { // Test odd-sized trees that require node duplication for tree_size in [3, 5, 7, 9, 11, 13, 15] { let digests: Vec = (0..tree_size as u32) .map(|i| Sha256::hash(&i.to_be_bytes())) .collect(); // Build tree let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Test with positions including the last element let positions = [0, (tree_size - 1) as u32]; let multi_proof = tree.multi_proof(positions).unwrap(); let elements: Vec<(Digest, u32)> = positions .iter() .map(|&p| (digests[p as usize], p)) .collect(); assert!( multi_proof .verify_multi_inclusion(&mut hasher, &elements, &root) .is_ok(), "Failed for tree_size={tree_size}" ); } } #[test] fn test_multi_proof_verify_empty_elements() { // Create a valid proof and try to verify with empty elements let digests: Vec = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect(); let mut builder = Builder::::new(digests.len()); for digest in &digests { builder.add(digest); } let tree = builder.build(); let root = tree.root(); let mut hasher = Sha256::default(); // Generate valid proof let positions = [0, 3]; let multi_proof = tree.multi_proof(positions).unwrap(); // Try to verify with empty elements let empty_elements: &[(Digest, u32)] = &[]; assert!(multi_proof .verify_multi_inclusion(&mut hasher, empty_elements, &root) .is_err()); } #[test] fn test_multi_proof_default_verify() { // Default (empty) proof should only verify against empty tree let mut hasher = Sha256::default(); let default_proof = Proof::::default(); // Empty elements against default proof let empty_elements: &[(Digest, u32)] = &[]; // Build empty tree to get the empty root let builder = Builder::::new(0); let empty_tree = builder.build(); let empty_root = empty_tree.root(); assert!(default_proof .verify_multi_inclusion(&mut hasher, empty_elements, &empty_root) .is_ok()); // Should fail with wrong root let wrong_root = 
        // Should fail with wrong root
        let wrong_root = Sha256::hash(b"not_empty");
        assert!(default_proof
            .verify_multi_inclusion(&mut hasher, empty_elements, &wrong_root)
            .is_err());
    }

    #[test]
    fn test_multi_proof_single_leaf_tree() {
        // Edge case: tree with exactly one leaf
        let digest = Sha256::hash(b"only_leaf");

        // Build single-leaf tree
        let mut builder = Builder::<Sha256>::new(1);
        builder.add(&digest);
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate multi-proof for the only leaf
        let multi_proof = tree.multi_proof([0]).unwrap();

        // Single leaf tree: leaf_count should be 1
        assert_eq!(multi_proof.leaf_count, 1);

        // Single leaf tree: no siblings needed (leaf is the root after position hashing)
        assert!(
            multi_proof.siblings.is_empty(),
            "Single leaf tree should have no siblings"
        );

        // Verify the proof
        let elements = [(digest, 0u32)];
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &elements, &root)
                .is_ok(),
            "Single leaf multi-proof verification failed"
        );

        // Verify with wrong digest fails
        let wrong_digest = Sha256::hash(b"wrong");
        let wrong_elements = [(wrong_digest, 0u32)];
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &wrong_elements, &root)
                .is_err(),
            "Should fail with wrong digest"
        );

        // Verify with wrong position fails
        let wrong_position_elements = [(digest, 1u32)];
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &wrong_position_elements, &root)
                .is_err(),
            "Should fail with invalid position"
        );
    }

    #[test]
    fn test_multi_proof_malicious_leaf_count_zero() {
        // Attacker sets leaf_count = 0 but provides siblings
        let digests: Vec<Digest> = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate valid proof and tamper with leaf_count
        let positions = [0, 3];
        let mut multi_proof = tree.multi_proof(positions).unwrap();
        multi_proof.leaf_count = 0;

        let elements: Vec<(Digest, u32)> = positions
            .iter()
            .map(|&p| (digests[p as usize], p))
            .collect();

        // Should fail - leaf_count=0 but we have elements
        assert!(multi_proof
            .verify_multi_inclusion(&mut hasher, &elements, &root)
            .is_err());
    }

    #[test]
    fn test_multi_proof_malicious_leaf_count_larger() {
        // Attacker inflates leaf_count to claim proof is for larger tree
        let digests: Vec<Digest> = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate valid proof and inflate leaf_count
        let positions = [0, 3];
        let mut multi_proof = tree.multi_proof(positions).unwrap();
        let original_leaf_count = multi_proof.leaf_count;
        multi_proof.leaf_count = 1000;

        let elements: Vec<(Digest, u32)> = positions
            .iter()
            .map(|&p| (digests[p as usize], p))
            .collect();

        // Should fail - inflated leaf_count changes required siblings
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &elements, &root)
                .is_err(),
            "Should reject proof with inflated leaf_count ({} -> {})",
            original_leaf_count,
            multi_proof.leaf_count
        );
    }

    #[test]
    fn test_multi_proof_malicious_leaf_count_smaller() {
        // Attacker deflates leaf_count to claim proof is for smaller tree
        let digests: Vec<Digest> = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate valid proof and deflate leaf_count
        let positions = [0, 3];
        let mut multi_proof = tree.multi_proof(positions).unwrap();
        multi_proof.leaf_count = 4; // Smaller than actual tree

        let elements: Vec<(Digest, u32)> = positions
            .iter()
            .map(|&p| (digests[p as usize], p))
            .collect();

        // Should fail - deflated leaf_count changes tree structure
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &elements, &root)
                .is_err(),
            "Should reject proof with deflated leaf_count"
        );
    }

    #[test]
    fn test_multi_proof_mismatched_element_count() {
        // Provide more or fewer elements than the proof was generated for
        let digests: Vec<Digest> = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate proof for 2 positions
        let positions = [0, 3];
        let multi_proof = tree.multi_proof(positions).unwrap();

        // Try to verify with only 1 element (too few)
        let too_few = [(digests[0], 0u32)];
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &too_few, &root)
                .is_err(),
            "Should reject when fewer elements provided than proof was generated for"
        );

        // Try to verify with 3 elements (too many)
        let too_many = [(digests[0], 0u32), (digests[3], 3), (digests[5], 5)];
        assert!(
            multi_proof
                .verify_multi_inclusion(&mut hasher, &too_many, &root)
                .is_err(),
            "Should reject when more elements provided than proof was generated for"
        );
    }

    #[test]
    fn test_multi_proof_swapped_siblings() {
        // Swap the order of siblings in the proof
        let digests: Vec<Digest> = (0..8u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate valid proof with multiple siblings
        let positions = [0, 5];
        let mut multi_proof = tree.multi_proof(positions).unwrap();

        // Ensure we have at least 2 siblings to swap
        if multi_proof.siblings.len() >= 2 {
            // Swap first two siblings
            multi_proof.siblings.swap(0, 1);

            let elements: Vec<(Digest, u32)> = positions
                .iter()
                .map(|&p| (digests[p as usize], p))
                .collect();
            assert!(
                multi_proof
                    .verify_multi_inclusion(&mut hasher, &elements, &root)
                    .is_err(),
                "Should reject proof with swapped siblings"
            );
        }
    }

    #[test]
    fn test_multi_proof_dos_large_leaf_count() {
        // Attacker sets massive leaf_count trying to cause DoS via memory allocation
        // The verify function should NOT allocate proportional to leaf_count
        let digests: Vec<Digest> = (0..4u32).map(|i| Sha256::hash(&i.to_be_bytes())).collect();
        let mut builder = Builder::<Sha256>::new(digests.len());
        for digest in &digests {
            builder.add(digest);
        }
        let tree = builder.build();
        let root = tree.root();
        let mut hasher = Sha256::default();

        // Generate valid proof
        let positions = [0, 2];
        let mut multi_proof = tree.multi_proof(positions).unwrap();

        // Set massive leaf_count (attacker trying to exhaust memory)
        multi_proof.leaf_count = u32::MAX;

        let elements: Vec<(Digest, u32)> = positions
            .iter()
            .map(|&p| (digests[p as usize], p))
            .collect();

        // This should fail quickly without allocating massive memory
        // The function is O(elements * levels), not O(leaf_count)
        let result = multi_proof.verify_multi_inclusion(&mut hasher, &elements, &root);
        assert!(result.is_err(), "Should reject malicious large leaf_count");
    }

    #[cfg(feature = "arbitrary")]
    mod conformance {
        use super::*;
        use commonware_codec::conformance::CodecConformance;
        use commonware_conformance::Conformance;
        use commonware_cryptography::sha256::Digest as Sha256Digest;

        fn test_merkle_tree(n: usize) -> Digest {
            // Build tree
            let mut digests = Vec::with_capacity(n);
            let mut builder = Builder::<Sha256>::new(n);
            for i in 0..n {
                let digest = Sha256::hash(&i.to_be_bytes());
                builder.add(&digest);
                digests.push(digest);
            }
            let tree = builder.build();
            let root = tree.root();

            // For each leaf, generate and verify its proof
            let mut hasher = Sha256::default();
            for (i, leaf) in digests.iter().enumerate() {
                // Generate proof
                let proof = tree.proof(i as u32).unwrap();
                assert!(
                    proof
                        .verify_element_inclusion(&mut hasher, leaf, i as u32, &root)
                        .is_ok(),
                    "correct fail for size={n} leaf={i}"
                );

                // Serialize and deserialize the proof
                let serialized = proof.encode();
                let deserialized = Proof::<Digest>::decode_cfg(serialized, &1).unwrap();
                assert!(
                    deserialized
                        .verify_element_inclusion(&mut hasher, leaf, i as u32, &root)
                        .is_ok(),
                    "deserialize fail for size={n} leaf={i}"
                );

                // Modify a sibling hash and ensure the proof fails
                if !proof.siblings.is_empty() {
                    let mut update_tamper = proof.clone();
                    update_tamper.siblings[0] = Sha256::hash(b"tampered");
                    assert!(
                        update_tamper
                            .verify_element_inclusion(&mut hasher, leaf, i as u32, &root)
                            .is_err(),
                        "modify fail for size={n} leaf={i}"
                    );
                }

                // Add a sibling hash and ensure the proof fails
                let mut add_tamper = proof.clone();
                add_tamper.siblings.push(Sha256::hash(b"tampered"));
                assert!(
                    add_tamper
                        .verify_element_inclusion(&mut hasher, leaf, i as u32, &root)
                        .is_err(),
                    "add fail for size={n} leaf={i}"
                );

                // Remove a sibling hash and ensure the proof fails
                if !proof.siblings.is_empty() {
                    let mut remove_tamper = proof.clone();
                    remove_tamper.siblings.pop();
                    assert!(
                        remove_tamper
                            .verify_element_inclusion(&mut hasher, leaf, i as u32, &root)
                            .is_err(),
                        "remove fail for size={n} leaf={i}"
                    );
                }
            }

            // Test proof for larger than size
            assert!(tree.proof(n as u32).is_err());

            // Return the root so we can ensure we don't silently change.
            root
        }

        struct RootConformance;

        impl Conformance for RootConformance {
            async fn commit(seed: u64) -> Vec<u8> {
                let root = test_merkle_tree(seed as usize);
                root.to_vec()
            }
        }

        commonware_conformance::conformance_tests! {
            CodecConformance<Proof<Sha256Digest>>,
            RootConformance => 200
        }
    }
}