1use std::io::{Cursor, Read, Write};
6
7use crate::primitives::utils::{from_hex, to_hex};
8use crate::transaction::beef_tx::BeefTx;
9use crate::transaction::error::TransactionError;
10use crate::transaction::merkle_path::MerklePath;
11use crate::transaction::{read_u32_le, read_varint, write_u32_le, write_varint};
12
/// BEEF version-1 marker (0xEFBE0001; written little-endian it reads `0100BEEF`).
pub const BEEF_V1: u32 = 4022206465;
/// BEEF version-2 marker (0xEFBE0002; written little-endian it reads `0200BEEF`).
pub const BEEF_V2: u32 = 4022206466;
/// Prefix for "atomic" BEEF: followed by the 32-byte subject txid, then a
/// regular V1/V2 BEEF stream (see `from_binary`/`to_binary`).
pub const ATOMIC_BEEF: u32 = 0x01010101;
19
/// In-memory BEEF document: a set of merkle proofs ("bumps") plus the
/// transactions they prove, optionally marked atomic around one subject txid.
#[derive(Debug, Clone)]
pub struct Beef {
    /// Serialization version (`BEEF_V1` or `BEEF_V2`).
    pub version: u32,
    /// Merkle paths referenced by `BeefTx::bump_index`.
    pub bumps: Vec<MerklePath>,
    /// Transactions (or txid-only placeholders) carried by this BEEF.
    pub txs: Vec<BeefTx>,
    /// When set, this BEEF is atomic: hex txid of the subject transaction.
    pub atomic_txid: Option<String>,
}
35
36impl Beef {
37 pub fn new(version: u32) -> Self {
39 Beef {
40 version,
41 bumps: Vec::new(),
42 txs: Vec::new(),
43 atomic_txid: None,
44 }
45 }
46
    /// Parses a BEEF document from a binary stream.
    ///
    /// Accepts an optional `ATOMIC_BEEF` prefix (followed by the 32-byte
    /// subject txid), then requires a `BEEF_V1` or `BEEF_V2` version word,
    /// a varint-counted list of bumps, and a varint-counted list of txs.
    ///
    /// # Errors
    /// Returns `TransactionError` on I/O failure, an unknown version word,
    /// or malformed bump/tx payloads.
    pub fn from_binary(reader: &mut impl Read) -> Result<Self, TransactionError> {
        let mut version = read_u32_le(reader)?;
        let mut atomic_txid = None;

        if version == ATOMIC_BEEF {
            // Atomic prefix: the subject txid is stored as 32 little-endian
            // bytes; reverse to get the conventional big-endian hex form.
            let mut txid_bytes = [0u8; 32];
            reader.read_exact(&mut txid_bytes)?;
            txid_bytes.reverse();
            atomic_txid = Some(to_hex(&txid_bytes));
            // The real version word follows the txid.
            version = read_u32_le(reader)?;
        }

        if version != BEEF_V1 && version != BEEF_V2 {
            return Err(TransactionError::BeefError(format!(
                "Serialized BEEF must start with {} or {} but starts with {}",
                BEEF_V1, BEEF_V2, version
            )));
        }

        let mut beef = Beef::new(version);

        // Bumps come first so txs can reference them by index.
        let bump_count = read_varint(reader)
            .map_err(|e| TransactionError::InvalidFormat(e.to_string()))?
            as usize;
        for _ in 0..bump_count {
            let bump = MerklePath::from_binary(reader)?;
            beef.bumps.push(bump);
        }

        let tx_count = read_varint(reader)
            .map_err(|e| TransactionError::InvalidFormat(e.to_string()))?
            as usize;
        for _ in 0..tx_count {
            // V2 tx records carry extra framing; V1 are raw txs + bump refs.
            let beef_tx = if version == BEEF_V2 {
                BeefTx::from_binary_v2(reader)?
            } else {
                BeefTx::from_binary_v1(reader)?
            };
            beef.txs.push(beef_tx);
        }

        beef.atomic_txid = atomic_txid;

        // Wire each tx's inputs to source txs found elsewhere in this BEEF.
        beef.link_source_transactions();

        Ok(beef)
    }
101
102 pub fn to_binary(&self, writer: &mut impl Write) -> Result<(), TransactionError> {
104 if let Some(ref txid) = self.atomic_txid {
106 write_u32_le(writer, ATOMIC_BEEF)?;
107 let mut txid_bytes =
108 from_hex(txid).map_err(|e| TransactionError::InvalidFormat(e.to_string()))?;
109 txid_bytes.reverse(); writer.write_all(&txid_bytes)?;
111 }
112
113 write_u32_le(writer, self.version)?;
114
115 write_varint(writer, self.bumps.len() as u64)?;
117 for bump in &self.bumps {
118 bump.to_binary(writer)?;
119 }
120
121 write_varint(writer, self.txs.len() as u64)?;
123 for tx in &self.txs {
124 if self.version == BEEF_V2 {
125 tx.to_binary_v2(writer)?;
126 } else {
127 tx.to_binary_v1(writer)?;
128 }
129 }
130
131 Ok(())
132 }
133
134 pub fn from_hex(hex: &str) -> Result<Self, TransactionError> {
136 let bytes = from_hex(hex).map_err(|e| TransactionError::InvalidFormat(e.to_string()))?;
137 let mut cursor = Cursor::new(bytes);
138 Self::from_binary(&mut cursor)
139 }
140
141 pub fn to_hex(&self) -> Result<String, TransactionError> {
143 let mut buf = Vec::new();
144 self.to_binary(&mut buf)?;
145 Ok(to_hex(&buf))
146 }
147
    /// Consumes this BEEF and extracts its "subject" transaction.
    ///
    /// The subject is the tx whose id matches `atomic_txid` when set,
    /// otherwise the last entry of `txs`. The subject's merkle path and its
    /// inputs' source transactions (with their own merkle paths) are
    /// populated from this BEEF's bumps and tx set, one level deep.
    ///
    /// # Errors
    /// Fails when the atomic txid is missing from `txs`, when the BEEF has
    /// no transactions, when the subject is txid-only, or when any
    /// referenced `bump_index` is out of bounds.
    pub fn into_transaction(
        self,
    ) -> Result<crate::transaction::transaction::Transaction, TransactionError> {
        // Pick the subject: the atomic txid if present, else the last tx.
        let subject_idx = if let Some(ref atomic_txid) = self.atomic_txid {
            self.txs
                .iter()
                .position(|btx| btx.txid == *atomic_txid)
                .ok_or_else(|| {
                    TransactionError::BeefError(format!(
                        "atomic txid {} not found in BEEF",
                        atomic_txid
                    ))
                })?
        } else {
            if self.txs.is_empty() {
                return Err(TransactionError::BeefError(
                    "BEEF contains no transactions".into(),
                ));
            }
            self.txs.len() - 1
        };

        // A txid-only placeholder carries no tx body to return.
        let mut tx = self.txs[subject_idx]
            .tx
            .clone()
            .ok_or_else(|| TransactionError::BeefError("subject tx is txid-only".into()))?;

        // Attach the subject's merkle path from its bump, if it names one
        // and the tx does not already carry a path.
        if let Some(bi) = self.txs[subject_idx].bump_index {
            if bi >= self.bumps.len() {
                return Err(TransactionError::BeefError(format!(
                    "bump_index {} out of bounds (only {} bumps)",
                    bi,
                    self.bumps.len()
                )));
            }
            if tx.merkle_path.is_none() {
                tx.merkle_path = Some(self.bumps[bi].clone());
            }
        }

        // Link each input to its source tx found in this BEEF, attaching the
        // source's merkle path when its bump index is valid.
        for input in &mut tx.inputs {
            if let Some(ref source_txid) = input.source_txid {
                if input.source_transaction.is_none() {
                    for btx in &self.txs {
                        if btx.txid == *source_txid {
                            if let Some(ref source_tx) = btx.tx {
                                let mut linked = source_tx.clone();
                                if let Some(bi) = btx.bump_index {
                                    if bi >= self.bumps.len() {
                                        return Err(TransactionError::BeefError(format!(
                                            "bump_index {} out of bounds (only {} bumps) for source tx {}",
                                            bi, self.bumps.len(), btx.txid
                                        )));
                                    }
                                    if linked.merkle_path.is_none() {
                                        linked.merkle_path = Some(self.bumps[bi].clone());
                                    }
                                }
                                input.source_transaction = Some(Box::new(linked));
                            }
                            // First txid match wins; stop scanning.
                            break;
                        }
                    }
                }
            }
        }

        Ok(tx)
    }
225
226 pub fn sort_txs(&mut self) {
231 use std::collections::{HashMap, VecDeque};
232
233 let n = self.txs.len();
234 if n <= 1 {
235 return;
236 }
237
238 let txid_to_idx: HashMap<&str, usize> = self
240 .txs
241 .iter()
242 .enumerate()
243 .map(|(i, btx)| (btx.txid.as_str(), i))
244 .collect();
245
246 let mut in_degree = vec![0usize; n];
248 let mut dependents: Vec<Vec<usize>> = vec![Vec::new(); n];
250
251 for (i, btx) in self.txs.iter().enumerate() {
252 for input_txid in &btx.input_txids {
253 if let Some(&dep_idx) = txid_to_idx.get(input_txid.as_str()) {
254 if dep_idx != i {
255 in_degree[i] += 1;
256 dependents[dep_idx].push(i);
257 }
258 }
259 }
260 }
261
262 let mut queue: VecDeque<usize> = VecDeque::new();
264 for (i, °) in in_degree.iter().enumerate() {
265 if deg == 0 {
266 queue.push_back(i);
267 }
268 }
269
270 let mut sorted_indices: Vec<usize> = Vec::with_capacity(n);
271 while let Some(idx) = queue.pop_front() {
272 sorted_indices.push(idx);
273 for &dep in &dependents[idx] {
274 in_degree[dep] -= 1;
275 if in_degree[dep] == 0 {
276 queue.push_back(dep);
277 }
278 }
279 }
280
281 if sorted_indices.len() < n {
283 for i in 0..n {
284 if !sorted_indices.contains(&i) {
285 sorted_indices.push(i);
286 }
287 }
288 }
289
290 let old_txs = std::mem::take(&mut self.txs);
292 self.txs = sorted_indices
293 .into_iter()
294 .map(|i| old_txs[i].clone())
295 .collect();
296 }
297
298 pub fn find_txid(&self, txid: &str) -> Option<&BeefTx> {
300 self.txs.iter().find(|btx| btx.txid == txid)
301 }
302
303 pub fn merge_bump(&mut self, bump: &MerklePath) -> Result<usize, TransactionError> {
313 let mut bump_index: Option<usize> = None;
314
315 for (i, existing) in self.bumps.iter_mut().enumerate() {
316 if existing.block_height == bump.block_height {
317 let root_a = existing.compute_root(None)?;
318 let root_b = bump.compute_root(None)?;
319 if root_a == root_b {
320 existing.combine(bump)?;
321 bump_index = Some(i);
322 break;
323 }
324 }
325 }
326
327 if bump_index.is_none() {
328 bump_index = Some(self.bumps.len());
329 self.bumps.push(bump.clone());
330 }
331
332 let bi = bump_index.expect("bump_index was just set");
333
334 let bump_ref = &self.bumps[bi];
336 let leaf_txids: Vec<String> = bump_ref.path[0]
337 .iter()
338 .filter_map(|leaf| leaf.hash.clone())
339 .collect();
340
341 for btx in &mut self.txs {
342 if btx.bump_index.is_none() && leaf_txids.contains(&btx.txid) {
343 btx.bump_index = Some(bi);
344 }
345 }
346
347 Ok(bi)
348 }
349
350 pub fn remove_existing_txid(&mut self, txid: &str) {
352 if let Some(pos) = self.txs.iter().position(|btx| btx.txid == txid) {
353 self.txs.remove(pos);
354 }
355 }
356
357 pub fn merge_raw_tx(
363 &mut self,
364 raw_tx: &[u8],
365 bump_index: Option<usize>,
366 ) -> Result<BeefTx, TransactionError> {
367 let mut cursor = std::io::Cursor::new(raw_tx);
368 let tx = crate::transaction::transaction::Transaction::from_binary(&mut cursor)?;
369 let new_tx = BeefTx::from_tx(tx, bump_index)?;
370 self.remove_existing_txid(&new_tx.txid);
371 let txid = new_tx.txid.clone();
372 self.txs.push(new_tx);
373
374 if bump_index.is_none() {
376 self.try_to_validate_bump_index(&txid);
377 }
378
379 Ok(self.txs.last().cloned().expect("just pushed"))
380 }
381
382 pub fn merge_beef(&mut self, other: &Beef) -> Result<(), TransactionError> {
387 for bump in &other.bumps {
388 self.merge_bump(bump)?;
389 }
390
391 for btx in &other.txs {
392 if btx.is_txid_only() {
393 if self.find_txid(&btx.txid).is_none() {
395 self.txs.push(BeefTx::from_txid(btx.txid.clone()));
396 }
397 } else if let Some(ref tx) = btx.tx {
398 let new_bump_index = self.find_bump_index_for_txid(&btx.txid);
400 let new_btx = BeefTx::from_tx(tx.clone(), new_bump_index)?;
401 self.remove_existing_txid(&btx.txid);
402 let txid = new_btx.txid.clone();
403 self.txs.push(new_btx);
404 if new_bump_index.is_none() {
405 self.try_to_validate_bump_index(&txid);
406 }
407 }
408 }
409
410 Ok(())
411 }
412
413 pub fn merge_beef_from_binary(&mut self, data: &[u8]) -> Result<(), TransactionError> {
415 let mut cursor = std::io::Cursor::new(data);
416 let other = Beef::from_binary(&mut cursor)?;
417 self.merge_beef(&other)
418 }
419
420 pub fn to_binary_atomic(&self, txid: &str) -> Result<Vec<u8>, TransactionError> {
427 if self.find_txid(txid).is_none() {
429 return Err(TransactionError::BeefError(format!(
430 "{} does not exist in this Beef",
431 txid
432 )));
433 }
434
435 let mut atomic_beef = self.clone();
437 atomic_beef.atomic_txid = Some(txid.to_string());
438
439 if let Some(pos) = atomic_beef.txs.iter().position(|btx| btx.txid == txid) {
441 atomic_beef.txs.truncate(pos + 1);
442 }
443
444 let mut buf = Vec::new();
445 atomic_beef.to_binary(&mut buf)?;
446 Ok(buf)
447 }
448
449 fn try_to_validate_bump_index(&mut self, txid: &str) {
451 for (i, bump) in self.bumps.iter().enumerate() {
452 let found = bump.path[0]
453 .iter()
454 .any(|leaf| leaf.hash.as_deref() == Some(txid));
455 if found {
456 if let Some(btx) = self.txs.iter_mut().find(|btx| btx.txid == txid) {
457 btx.bump_index = Some(i);
458 }
459 return;
460 }
461 }
462 }
463
464 fn find_bump_index_for_txid(&self, txid: &str) -> Option<usize> {
466 for (i, bump) in self.bumps.iter().enumerate() {
467 let found = bump.path[0]
468 .iter()
469 .any(|leaf| leaf.hash.as_deref() == Some(txid));
470 if found {
471 return Some(i);
472 }
473 }
474 None
475 }
476
    /// After parsing, wires each tx's inputs to the matching source tx found
    /// elsewhere in this BEEF (one level deep; already-linked inputs and
    /// txid-only entries are left alone).
    ///
    /// Lookup data is snapshotted up front because `self.txs` is borrowed
    /// mutably while sources are consulted.
    fn link_source_transactions(&mut self) {
        // txid -> index pairs over the current tx list.
        let txid_map: Vec<(String, usize)> = self
            .txs
            .iter()
            .enumerate()
            .map(|(i, btx)| (btx.txid.clone(), i))
            .collect();

        // Clones of every parsed tx, used as link sources.
        let tx_clones: Vec<Option<crate::transaction::transaction::Transaction>> =
            self.txs.iter().map(|btx| btx.tx.clone()).collect();

        for btx in &mut self.txs {
            if let Some(ref mut tx) = btx.tx {
                for input in &mut tx.inputs {
                    if let Some(ref source_txid) = input.source_txid {
                        if input.source_transaction.is_none() {
                            if let Some((_, idx)) =
                                txid_map.iter().find(|(tid, _)| tid == source_txid)
                            {
                                // txid-only entries have no tx to link.
                                if let Some(ref source_tx) = tx_clones[*idx] {
                                    input.source_transaction = Some(Box::new(source_tx.clone()));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
515}
516
517#[cfg(test)]
518mod tests {
519 use super::*;
520 use serde::Deserialize;
521
    /// One entry from `beef_valid.json`: a named hex-encoded BEEF with
    /// expected counts. `version` here is the vector's simple 1/2 tag, not
    /// the raw u32 marker constants.
    #[derive(Deserialize)]
    struct BeefVector {
        name: String,
        hex: String,
        version: u32,
        bump_count: usize,
        tx_count: usize,
        // Optional expected subject txid; absent in some vectors.
        #[serde(default)]
        txid: Option<String>,
    }
532
    /// Loads the shared BEEF test vectors compiled into the test binary.
    fn load_test_vectors() -> Vec<BeefVector> {
        let json = include_str!("../../test-vectors/beef_valid.json");
        serde_json::from_str(json).expect("failed to parse beef_valid.json")
    }
537
    // Every V1 vector must parse with the expected counts and re-serialize
    // to exactly the original hex.
    #[test]
    fn test_beef_v1_round_trip() {
        let vectors = load_test_vectors();
        for v in vectors.iter().filter(|v| v.version == 1) {
            let beef = Beef::from_hex(&v.hex)
                .unwrap_or_else(|e| panic!("failed to parse '{}': {}", v.name, e));
            assert_eq!(
                beef.bumps.len(),
                v.bump_count,
                "bump count mismatch for '{}'",
                v.name
            );
            assert_eq!(
                beef.txs.len(),
                v.tx_count,
                "tx count mismatch for '{}'",
                v.name
            );

            let result_hex = beef
                .to_hex()
                .unwrap_or_else(|e| panic!("failed to serialize '{}': {}", v.name, e));
            assert_eq!(result_hex, v.hex, "round-trip failed for '{}'", v.name);
        }
    }
563
    // All vectors (any version) must parse with the expected bump/tx counts,
    // and the last tx's id must match the vector's txid when provided.
    #[test]
    fn test_beef_tx_count() {
        let vectors = load_test_vectors();
        for v in &vectors {
            let beef = Beef::from_hex(&v.hex)
                .unwrap_or_else(|e| panic!("failed to parse '{}': {}", v.name, e));
            assert_eq!(
                beef.bumps.len(),
                v.bump_count,
                "bump count mismatch for '{}'",
                v.name
            );
            assert_eq!(
                beef.txs.len(),
                v.tx_count,
                "tx count mismatch for '{}'",
                v.name
            );

            if let Some(ref expected_txid) = v.txid {
                let last_tx = &beef.txs[beef.txs.len() - 1];
                assert_eq!(
                    &last_tx.txid, expected_txid,
                    "txid mismatch for '{}'",
                    v.name
                );
            }
        }
    }
594
    // Merging two parsed BEEFs into a fresh one must retain every txid from
    // both sources and at least one bump.
    #[test]
    fn test_merge_beef_combines_bumps_and_txs() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef_a");
        let beef_b = Beef::from_hex(&vectors[1].hex).expect("parse beef_b");

        let mut merged = Beef::new(BEEF_V2);
        merged.merge_beef(&beef_a).expect("merge beef_a");
        merged.merge_beef(&beef_b).expect("merge beef_b");

        assert!(
            merged.txs.len() >= beef_a.txs.len(),
            "merged should have at least as many txs as beef_a"
        );
        assert!(
            merged.bumps.len() >= 1,
            "merged should have at least one bump"
        );

        for btx in &beef_a.txs {
            assert!(
                merged.find_txid(&btx.txid).is_some(),
                "merged should contain txid {} from beef_a",
                btx.txid
            );
        }
        for btx in &beef_b.txs {
            assert!(
                merged.find_txid(&btx.txid).is_some(),
                "merged should contain txid {} from beef_b",
                btx.txid
            );
        }
    }
632
    // Merging the same BEEF twice must replace entries, not duplicate them.
    #[test]
    fn test_merge_beef_deduplicates_same_txid() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        let mut merged = Beef::new(BEEF_V2);
        merged.merge_beef(&beef_a).expect("merge first");
        let count_after_first = merged.txs.len();

        merged.merge_beef(&beef_a).expect("merge second");
        assert_eq!(
            merged.txs.len(),
            count_after_first,
            "merging same beef twice should not duplicate txs"
        );
    }
650
    // merge_beef_from_binary on a serialized vector must yield the same
    // bump/tx counts as parsing it directly.
    #[test]
    fn test_merge_beef_from_binary() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let binary = crate::primitives::utils::from_hex(&vectors[0].hex).expect("hex decode");

        let mut merged = Beef::new(BEEF_V2);
        merged
            .merge_beef_from_binary(&binary)
            .expect("merge from binary");

        assert_eq!(merged.txs.len(), beef_a.txs.len());
        assert_eq!(merged.bumps.len(), beef_a.bumps.len());
    }
665
    // Re-serializing a parsed tx and merging its raw bytes must produce an
    // entry with the same txid.
    #[test]
    fn test_merge_raw_tx() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        if let Some(ref tx) = beef.txs[0].tx {
            let mut raw_tx_buf = Vec::new();
            tx.to_binary(&mut raw_tx_buf).expect("serialize tx");

            let mut new_beef = Beef::new(BEEF_V2);
            let result = new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge raw tx");
            assert_eq!(result.txid, beef.txs[0].txid);
            assert_eq!(new_beef.txs.len(), 1);
        }
    }
684
    // Merging the same raw tx twice must replace the existing entry rather
    // than append a duplicate.
    #[test]
    fn test_merge_raw_tx_replaces_existing() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        if let Some(ref tx) = beef.txs[0].tx {
            let mut raw_tx_buf = Vec::new();
            tx.to_binary(&mut raw_tx_buf).expect("serialize tx");

            let mut new_beef = Beef::new(BEEF_V2);
            new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge first");
            new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge second");

            assert_eq!(
                new_beef.txs.len(),
                1,
                "merging same raw tx twice should replace, not duplicate"
            );
        }
    }
709
    // Atomic output must start with the ATOMIC_BEEF prefix, carry the
    // subject txid as little-endian bytes, and parse back to the same BEEF.
    #[test]
    fn test_to_binary_atomic() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        if let Some(ref expected_txid) = vectors[0].txid {
            let atomic = beef
                .to_binary_atomic(expected_txid)
                .expect("to_binary_atomic");

            // Prefix (4 bytes) + txid (32 bytes) must at least be present.
            assert!(atomic.len() > 36, "atomic output too short");
            let prefix = u32::from_le_bytes([atomic[0], atomic[1], atomic[2], atomic[3]]);
            assert_eq!(prefix, ATOMIC_BEEF, "should start with ATOMIC_BEEF prefix");

            let mut txid_bytes =
                crate::primitives::utils::from_hex(expected_txid).expect("hex decode txid");
            txid_bytes.reverse();
            assert_eq!(
                &atomic[4..36],
                &txid_bytes[..],
                "atomic should contain txid in LE"
            );

            let mut cursor = Cursor::new(&atomic);
            let parsed = Beef::from_binary(&mut cursor).expect("parse atomic beef");
            assert_eq!(
                parsed.atomic_txid.as_deref(),
                Some(expected_txid.as_str()),
                "parsed atomic txid should match"
            );
            assert_eq!(
                parsed.txs.len(),
                beef.txs.len(),
                "parsed atomic should have same tx count"
            );
        }
    }
750
    // Requesting an atomic serialization around a txid that is not in the
    // BEEF must fail.
    #[test]
    fn test_to_binary_atomic_nonexistent_txid() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        let result = beef
            .to_binary_atomic("0000000000000000000000000000000000000000000000000000000000000000");
        assert!(result.is_err(), "should error for nonexistent txid");
    }
760
    // find_txid must locate a known txid and miss an all-zero one.
    #[test]
    fn test_find_txid() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        if let Some(ref expected_txid) = vectors[0].txid {
            assert!(
                beef.find_txid(expected_txid).is_some(),
                "should find existing txid"
            );
        }

        assert!(
            beef.find_txid("0000000000000000000000000000000000000000000000000000000000000000")
                .is_none(),
            "should not find nonexistent txid"
        );
    }
779
    // With no atomic txid, into_transaction must return the last tx.
    #[test]
    fn test_into_transaction_returns_last_tx() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let expected_txid = beef.txs.last().unwrap().txid.clone();
        let tx = beef.into_transaction().expect("into_transaction");
        assert_eq!(
            tx.id().unwrap(),
            expected_txid,
            "should return last (subject) tx"
        );
    }
792
    // Transaction::from_beef must yield the same subject tx as parsing the
    // BEEF and calling into_transaction.
    #[test]
    fn test_from_beef_hex() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let expected_txid = beef.txs.last().unwrap().txid.clone();
        let tx = crate::transaction::transaction::Transaction::from_beef(&vectors[0].hex)
            .expect("from_beef");
        assert_eq!(
            tx.id().unwrap(),
            expected_txid,
            "from_beef should return subject tx"
        );
    }
806
    // After sort_txs, no bump-proven tx may appear after an unproven one.
    #[test]
    fn test_sort_txs_proven_before_unproven() {
        let vectors = load_test_vectors();
        let mut beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        beef.sort_txs();
        let mut seen_unproven = false;
        for btx in &beef.txs {
            if btx.bump_index.is_some() {
                assert!(!seen_unproven, "proven tx should not come after unproven");
            } else {
                seen_unproven = true;
            }
        }
    }
822
    // Sorting twice must not change the order produced by the first sort.
    #[test]
    fn test_sort_txs_idempotent() {
        let vectors = load_test_vectors();
        let mut beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        beef.sort_txs();
        let first_order: Vec<String> = beef.txs.iter().map(|t| t.txid.clone()).collect();
        beef.sort_txs();
        let second_order: Vec<String> = beef.txs.iter().map(|t| t.txid.clone()).collect();
        assert_eq!(first_order, second_order, "sort_txs should be idempotent");
    }
833
    // Merging the same bump twice must reuse index 0 and keep a single bump.
    #[test]
    fn test_merge_bump() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");

        let mut new_beef = Beef::new(BEEF_V2);
        let idx = new_beef.merge_bump(&beef.bumps[0]).expect("merge bump");
        assert_eq!(idx, 0, "first bump should be at index 0");
        assert_eq!(new_beef.bumps.len(), 1);

        let idx2 = new_beef
            .merge_bump(&beef.bumps[0])
            .expect("merge bump again");
        assert_eq!(idx2, 0, "same bump should merge to index 0");
        assert_eq!(
            new_beef.bumps.len(),
            1,
            "should still be 1 bump after re-merge"
        );
    }
856
    // Linked source transactions of the subject must carry the merkle path
    // resolved from the BEEF's bumps.
    #[test]
    fn test_into_transaction_sets_merkle_path_from_bumps() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[1].hex).expect("parse vector 1");
        assert_eq!(beef.txs.len(), 2, "vector 1 should have 2 txs");

        let proven_count = beef.txs.iter().filter(|t| t.bump_index.is_some()).count();
        assert!(proven_count >= 1, "at least one tx should have a bump");

        let tx = beef.into_transaction().expect("into_transaction");

        for input in &tx.inputs {
            if let Some(ref source_txid) = input.source_txid {
                if let Some(ref source_tx) = input.source_transaction {
                    assert!(
                        source_tx.merkle_path.is_some(),
                        "source tx {} should have merkle_path set from BEEF bump",
                        source_txid
                    );
                }
            }
        }
    }
887
    // A subject tx that references a bump must come out of into_transaction
    // with its merkle_path populated.
    #[test]
    fn test_into_transaction_sets_merkle_path_on_subject() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse vector 0");
        assert!(
            beef.txs[0].bump_index.is_some(),
            "vector 0 tx should have a bump"
        );

        let tx = beef.into_transaction().expect("into_transaction");
        assert!(
            tx.merkle_path.is_some(),
            "subject tx with bump should have merkle_path set"
        );
    }
905}