Skip to content

Commit 88ffb7c

Browse files
perf(offers): reduce payer proof Merkle allocations
Streamline the payer-proof Merkle reconstruction path to avoid intermediate allocations while preserving the selective-disclosure behavior.
1 parent 28a55cb commit 88ffb7c

File tree

1 file changed

+56
-66
lines changed

1 file changed

+56
-66
lines changed

lightning/src/offers/merkle.rs

Lines changed: 56 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -342,7 +342,6 @@ pub(super) struct SelectiveDisclosure {
342342
struct TlvMerkleData {
343343
tlv_type: u64,
344344
per_tlv_hash: sha256::Hash,
345-
nonce_hash: sha256::Hash,
346345
is_included: bool,
347346
}
348347

@@ -373,27 +372,23 @@ pub(super) fn compute_selective_disclosure(
373372
let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
374373

375374
let mut tlv_data: Vec<TlvMerkleData> = Vec::new();
375+
let mut leaf_hashes: Vec<sha256::Hash> = Vec::new();
376376
for record in tlv_stream.filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) {
377377
let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record.record_bytes);
378378
let nonce_hash = tagged_hash_from_engine(nonce_tag.clone(), record.type_bytes);
379379
let per_tlv_hash =
380380
tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
381381

382382
let is_included = included_types.contains(&record.r#type);
383-
tlv_data.push(TlvMerkleData {
384-
tlv_type: record.r#type,
385-
per_tlv_hash,
386-
nonce_hash,
387-
is_included,
388-
});
383+
if is_included {
384+
leaf_hashes.push(nonce_hash);
385+
}
386+
tlv_data.push(TlvMerkleData { tlv_type: record.r#type, per_tlv_hash, is_included });
389387
}
390388

391389
if tlv_data.is_empty() {
392390
return Err(SelectiveDisclosureError::EmptyTlvStream);
393391
}
394-
395-
let leaf_hashes: Vec<_> =
396-
tlv_data.iter().filter(|d| d.is_included).map(|d| d.nonce_hash).collect();
397392
let omitted_markers = compute_omitted_markers(&tlv_data);
398393
let (merkle_root, missing_hashes) = build_tree_with_disclosure(&tlv_data, &branch_tag);
399394

@@ -447,6 +442,8 @@ fn build_tree_with_disclosure(
447442
let num_nodes = tlv_data.len();
448443
debug_assert!(num_nodes > 0, "TLV stream must contain at least one record");
449444

445+
let num_omitted = tlv_data.iter().filter(|d| !d.is_included).count();
446+
450447
let mut nodes: Vec<TreeNode> = tlv_data
451448
.iter()
452449
.map(|data| TreeNode {
@@ -456,7 +453,7 @@ fn build_tree_with_disclosure(
456453
})
457454
.collect();
458455

459-
let mut missing_with_types: Vec<(u64, sha256::Hash)> = Vec::new();
456+
let mut missing_with_types: Vec<(u64, sha256::Hash)> = Vec::with_capacity(num_omitted);
460457

461458
for level in 0.. {
462459
let step = 2 << level;
@@ -522,32 +519,65 @@ pub(super) fn reconstruct_merkle_root<'a>(
522519
return Err(SelectiveDisclosureError::LeafHashCountMismatch);
523520
}
524521

525-
let positions = reconstruct_positions_from_records(included_records, omitted_markers);
526-
527-
let num_nodes = positions.len();
528-
529522
let leaf_tag = tagged_hash_engine(sha256::Hash::hash("LnLeaf".as_bytes()));
530523
let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
531524

525+
// Build TreeNode vec directly by interleaving included/omitted positions,
526+
// eliminating the intermediate Vec<bool> from reconstruct_positions_from_records.
527+
let num_nodes = 1 + included_records.len() + omitted_markers.len();
532528
let mut nodes: Vec<TreeNode> = Vec::with_capacity(num_nodes);
533-
let mut leaf_hash_idx = 0;
534-
for (i, &incl) in positions.iter().enumerate() {
535-
let hash = if incl {
536-
let (_, record_bytes) = included_records[leaf_hash_idx];
529+
530+
// TLV0 is always omitted
531+
nodes.push(TreeNode { hash: None, included: false, min_type: 0 });
532+
533+
let mut inc_idx = 0;
534+
let mut mrk_idx = 0;
535+
let mut prev_marker: u64 = 0;
536+
let mut node_idx: u64 = 1;
537+
538+
while inc_idx < included_records.len() || mrk_idx < omitted_markers.len() {
539+
if mrk_idx >= omitted_markers.len() {
540+
// No more markers, remaining positions are included
541+
let (_, record_bytes) = included_records[inc_idx];
537542
let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
538-
let nonce_hash = leaf_hashes[leaf_hash_idx];
539-
leaf_hash_idx += 1;
540-
Some(tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash))
543+
let nonce_hash = leaf_hashes[inc_idx];
544+
let hash = tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
545+
nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
546+
inc_idx += 1;
547+
} else if inc_idx >= included_records.len() {
548+
// No more included types, remaining positions are omitted
549+
nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
550+
prev_marker = omitted_markers[mrk_idx];
551+
mrk_idx += 1;
541552
} else {
542-
None
543-
};
544-
nodes.push(TreeNode { hash, included: incl, min_type: i as u64 });
553+
let marker = omitted_markers[mrk_idx];
554+
let (inc_type, _) = included_records[inc_idx];
555+
556+
if marker == prev_marker + 1 {
557+
// Continuation of current run -> omitted position
558+
nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
559+
prev_marker = marker;
560+
mrk_idx += 1;
561+
} else {
562+
// Jump detected -> included position comes first
563+
let (_, record_bytes) = included_records[inc_idx];
564+
let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
565+
let nonce_hash = leaf_hashes[inc_idx];
566+
let hash =
567+
tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
568+
nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
569+
prev_marker = inc_type;
570+
inc_idx += 1;
571+
}
572+
}
573+
node_idx += 1;
545574
}
546575

547576
// First pass: walk the tree to discover which positions need missing hashes.
548577
// We mutate nodes[].included and nodes[].min_type directly since the second
549578
// pass only reads nodes[].hash, making this safe without a separate allocation.
550-
let mut needs_hash: Vec<(u64, usize)> = Vec::new();
579+
let num_omitted = omitted_markers.len() + 1; // +1 for implicit TLV0
580+
let mut needs_hash: Vec<(u64, usize)> = Vec::with_capacity(num_omitted);
551581

552582
for level in 0.. {
553583
let step = 2 << level;
@@ -707,46 +737,6 @@ fn reconstruct_positions(included_types: &[u64], omitted_markers: &[u64]) -> Vec
707737
positions
708738
}
709739

710-
/// Like `reconstruct_positions`, but extracts types directly from included records,
711-
/// avoiding a separate Vec allocation for the types.
712-
fn reconstruct_positions_from_records(
713-
included_records: &[(u64, &[u8])], omitted_markers: &[u64],
714-
) -> Vec<bool> {
715-
let total = 1 + included_records.len() + omitted_markers.len();
716-
let mut positions = Vec::with_capacity(total);
717-
positions.push(false); // TLV0 is always omitted
718-
719-
let mut inc_idx = 0;
720-
let mut mrk_idx = 0;
721-
let mut prev_marker: u64 = 0;
722-
723-
while inc_idx < included_records.len() || mrk_idx < omitted_markers.len() {
724-
if mrk_idx >= omitted_markers.len() {
725-
positions.push(true);
726-
inc_idx += 1;
727-
} else if inc_idx >= included_records.len() {
728-
positions.push(false);
729-
prev_marker = omitted_markers[mrk_idx];
730-
mrk_idx += 1;
731-
} else {
732-
let marker = omitted_markers[mrk_idx];
733-
let (inc_type, _) = included_records[inc_idx];
734-
735-
if marker == prev_marker + 1 {
736-
positions.push(false);
737-
prev_marker = marker;
738-
mrk_idx += 1;
739-
} else {
740-
positions.push(true);
741-
prev_marker = inc_type;
742-
inc_idx += 1;
743-
}
744-
}
745-
}
746-
747-
positions
748-
}
749-
750740
#[cfg(test)]
751741
mod tests {
752742
use super::{TlvStream, SIGNATURE_TYPES};

0 commit comments

Comments
 (0)