use std::io::{Cursor, Read, Write};
use crate::primitives::utils::{from_hex, to_hex};
use crate::transaction::beef_tx::BeefTx;
use crate::transaction::error::TransactionError;
use crate::transaction::merkle_path::MerklePath;
use crate::transaction::{read_u32_le, read_varint, write_u32_le, write_varint};
/// BEEF v1 version marker (0xEFBE0001); written little-endian it appears
/// on the wire as the bytes 01 00 BE EF.
pub const BEEF_V1: u32 = 0xEFBE_0001; // 4022206465
/// BEEF v2 version marker (0xEFBE0002); written little-endian it appears
/// on the wire as the bytes 02 00 BE EF.
pub const BEEF_V2: u32 = 0xEFBE_0002; // 4022206466
/// Prefix marking an Atomic BEEF payload: the 32-byte subject txid follows
/// this word, then an ordinary BEEF body (see `from_binary`).
pub const ATOMIC_BEEF: u32 = 0x0101_0101;
/// In-memory BEEF container: merkle-path proofs ("bumps") plus the
/// transactions they prove, with optional Atomic-BEEF subject txid.
#[derive(Debug, Clone)]
pub struct Beef {
    // Serialization version marker; expected to be BEEF_V1 or BEEF_V2.
    pub version: u32,
    // Merkle paths proving block inclusion; `BeefTx::bump_index` refers
    // into this vector.
    pub bumps: Vec<MerklePath>,
    // Carried transactions. When `atomic_txid` is unset, the last entry is
    // treated as the subject transaction (see `into_transaction`).
    pub txs: Vec<BeefTx>,
    // When set, serialization is prefixed with ATOMIC_BEEF and this txid
    // (hex, big-endian display order; reversed to little-endian on the wire).
    pub atomic_txid: Option<String>,
}
impl Beef {
pub fn new(version: u32) -> Self {
Beef {
version,
bumps: Vec::new(),
txs: Vec::new(),
atomic_txid: None,
}
}
/// Deserializes a BEEF payload from `reader`.
///
/// Accepts an optional Atomic-BEEF wrapper (ATOMIC_BEEF word plus 32-byte
/// subject txid) ahead of the regular V1/V2 body. After parsing, inputs of
/// embedded transactions are linked to source transactions carried in the
/// same payload.
///
/// # Errors
/// Returns `TransactionError::BeefError` for an unrecognized version marker
/// and propagates bump/transaction parsing failures.
pub fn from_binary(reader: &mut impl Read) -> Result<Self, TransactionError> {
    let first_word = read_u32_le(reader)?;
    // An Atomic BEEF prepends the marker and the subject txid (little-endian
    // on the wire, reversed here to display order) before the real version.
    let (version, atomic_txid) = if first_word == ATOMIC_BEEF {
        let mut txid = [0u8; 32];
        reader.read_exact(&mut txid)?;
        txid.reverse();
        (read_u32_le(reader)?, Some(to_hex(&txid)))
    } else {
        (first_word, None)
    };
    if version != BEEF_V1 && version != BEEF_V2 {
        return Err(TransactionError::BeefError(format!(
            "Serialized BEEF must start with {} or {} but starts with {}",
            BEEF_V1, BEEF_V2, version
        )));
    }
    let mut beef = Beef::new(version);
    beef.atomic_txid = atomic_txid;
    let bump_count = read_varint(reader)
        .map_err(|e| TransactionError::InvalidFormat(e.to_string()))?
        as usize;
    for _ in 0..bump_count {
        beef.bumps.push(MerklePath::from_binary(reader)?);
    }
    let tx_count = read_varint(reader)
        .map_err(|e| TransactionError::InvalidFormat(e.to_string()))?
        as usize;
    for _ in 0..tx_count {
        let parsed = match version {
            BEEF_V2 => BeefTx::from_binary_v2(reader)?,
            _ => BeefTx::from_binary_v1(reader)?,
        };
        beef.txs.push(parsed);
    }
    beef.link_source_transactions();
    Ok(beef)
}
/// Serializes this BEEF to `writer`, emitting the Atomic-BEEF wrapper first
/// when `atomic_txid` is set.
///
/// # Errors
/// Fails on an invalid hex `atomic_txid` or any underlying write error.
pub fn to_binary(&self, writer: &mut impl Write) -> Result<(), TransactionError> {
    if let Some(txid_hex) = self.atomic_txid.as_deref() {
        write_u32_le(writer, ATOMIC_BEEF)?;
        // The txid is kept in big-endian display hex but goes on the wire
        // little-endian, hence the reverse.
        let mut raw =
            from_hex(txid_hex).map_err(|e| TransactionError::InvalidFormat(e.to_string()))?;
        raw.reverse();
        writer.write_all(&raw)?;
    }
    write_u32_le(writer, self.version)?;
    write_varint(writer, self.bumps.len() as u64)?;
    self.bumps
        .iter()
        .try_for_each(|bump| bump.to_binary(writer))?;
    write_varint(writer, self.txs.len() as u64)?;
    for tx in self.txs.iter() {
        match self.version {
            BEEF_V2 => tx.to_binary_v2(writer)?,
            _ => tx.to_binary_v1(writer)?,
        }
    }
    Ok(())
}
/// Parses a BEEF payload from a hex-encoded string.
pub fn from_hex(hex: &str) -> Result<Self, TransactionError> {
    let bytes = from_hex(hex).map_err(|e| TransactionError::InvalidFormat(e.to_string()))?;
    Self::from_binary(&mut Cursor::new(bytes))
}
/// Serializes this BEEF and returns the bytes hex-encoded.
pub fn to_hex(&self) -> Result<String, TransactionError> {
    let mut bytes = Vec::new();
    self.to_binary(&mut bytes)?;
    Ok(to_hex(&bytes))
}
pub fn into_transaction(
self,
) -> Result<crate::transaction::transaction::Transaction, TransactionError> {
let subject_idx = if let Some(ref atomic_txid) = self.atomic_txid {
self.txs
.iter()
.position(|btx| btx.txid == *atomic_txid)
.ok_or_else(|| {
TransactionError::BeefError(format!(
"atomic txid {} not found in BEEF",
atomic_txid
))
})?
} else {
if self.txs.is_empty() {
return Err(TransactionError::BeefError(
"BEEF contains no transactions".into(),
));
}
self.txs.len() - 1
};
let mut tx = self.txs[subject_idx]
.tx
.clone()
.ok_or_else(|| TransactionError::BeefError("subject tx is txid-only".into()))?;
for input in &mut tx.inputs {
if let Some(ref source_txid) = input.source_txid {
if input.source_transaction.is_none() {
for btx in &self.txs {
if btx.txid == *source_txid {
if let Some(ref source_tx) = btx.tx {
input.source_transaction = Some(Box::new(source_tx.clone()));
}
break;
}
}
}
}
}
Ok(tx)
}
pub fn sort_txs(&mut self) {
use std::collections::{HashMap, VecDeque};
let n = self.txs.len();
if n <= 1 {
return;
}
let txid_to_idx: HashMap<&str, usize> = self
.txs
.iter()
.enumerate()
.map(|(i, btx)| (btx.txid.as_str(), i))
.collect();
let mut in_degree = vec![0usize; n];
let mut dependents: Vec<Vec<usize>> = vec![Vec::new(); n];
for (i, btx) in self.txs.iter().enumerate() {
for input_txid in &btx.input_txids {
if let Some(&dep_idx) = txid_to_idx.get(input_txid.as_str()) {
if dep_idx != i {
in_degree[i] += 1;
dependents[dep_idx].push(i);
}
}
}
}
let mut queue: VecDeque<usize> = VecDeque::new();
for (i, °) in in_degree.iter().enumerate() {
if deg == 0 {
queue.push_back(i);
}
}
let mut sorted_indices: Vec<usize> = Vec::with_capacity(n);
while let Some(idx) = queue.pop_front() {
sorted_indices.push(idx);
for &dep in &dependents[idx] {
in_degree[dep] -= 1;
if in_degree[dep] == 0 {
queue.push_back(dep);
}
}
}
if sorted_indices.len() < n {
for i in 0..n {
if !sorted_indices.contains(&i) {
sorted_indices.push(i);
}
}
}
let old_txs = std::mem::take(&mut self.txs);
self.txs = sorted_indices
.into_iter()
.map(|i| old_txs[i].clone())
.collect();
}
/// Looks up a transaction in this BEEF by its txid.
pub fn find_txid(&self, txid: &str) -> Option<&BeefTx> {
    for btx in &self.txs {
        if btx.txid == txid {
            return Some(btx);
        }
    }
    None
}
/// Merges `bump` into this BEEF.
///
/// If an existing bump has the same block height and merkle root, the two
/// are combined in place; otherwise `bump` is appended. Any transaction
/// without a bump index whose txid appears as a leaf of the resulting bump
/// is then pointed at it.
///
/// Returns the index of the bump that now covers `bump`.
///
/// # Errors
/// Propagates root-computation and combine failures from `MerklePath`.
pub fn merge_bump(&mut self, bump: &MerklePath) -> Result<usize, TransactionError> {
    let mut merged_at: Option<usize> = None;
    for (i, existing) in self.bumps.iter_mut().enumerate() {
        if existing.block_height != bump.block_height {
            continue;
        }
        // Roots are only computed for height-matching candidates, exactly
        // as in the original per-candidate comparison.
        if existing.compute_root(None)? == bump.compute_root(None)? {
            existing.combine(bump)?;
            merged_at = Some(i);
            break;
        }
    }
    let bi = match merged_at {
        Some(i) => i,
        None => {
            self.bumps.push(bump.clone());
            self.bumps.len() - 1
        }
    };
    // Attach the bump to any still-unproven transactions it covers.
    let covered = &self.bumps[bi];
    for btx in &mut self.txs {
        if btx.bump_index.is_none()
            && covered.path[0]
                .iter()
                .any(|leaf| leaf.hash.as_deref() == Some(btx.txid.as_str()))
        {
            btx.bump_index = Some(bi);
        }
    }
    Ok(bi)
}
/// Drops the first transaction whose txid matches `txid`, if any.
fn remove_existing_txid(&mut self, txid: &str) {
    let found = self.txs.iter().position(|btx| btx.txid == txid);
    if let Some(i) = found {
        self.txs.remove(i);
    }
}
/// Parses `raw_tx` and inserts it into this BEEF, replacing any existing
/// entry with the same txid.
///
/// When no `bump_index` is supplied, the existing bumps are scanned for a
/// leaf matching the new txid so the transaction can still be marked proven.
///
/// Returns a clone of the entry as stored (including any bump index
/// assigned during validation).
pub fn merge_raw_tx(
    &mut self,
    raw_tx: &[u8],
    bump_index: Option<usize>,
) -> Result<BeefTx, TransactionError> {
    let parsed = crate::transaction::transaction::Transaction::from_binary(
        &mut std::io::Cursor::new(raw_tx),
    )?;
    let new_tx = BeefTx::from_tx(parsed, bump_index)?;
    let txid = new_tx.txid.clone();
    self.remove_existing_txid(&txid);
    self.txs.push(new_tx);
    if bump_index.is_none() {
        self.try_to_validate_bump_index(&txid);
    }
    // Cloned after validation so the returned value reflects bump_index.
    Ok(self.txs.last().cloned().expect("just pushed"))
}
pub fn merge_beef(&mut self, other: &Beef) -> Result<(), TransactionError> {
for bump in &other.bumps {
self.merge_bump(bump)?;
}
for btx in &other.txs {
if btx.is_txid_only() {
if self.find_txid(&btx.txid).is_none() {
self.txs.push(BeefTx::from_txid(btx.txid.clone()));
}
} else if let Some(ref tx) = btx.tx {
let new_bump_index = self.find_bump_index_for_txid(&btx.txid);
let new_btx = BeefTx::from_tx(tx.clone(), new_bump_index)?;
self.remove_existing_txid(&btx.txid);
let txid = new_btx.txid.clone();
self.txs.push(new_btx);
if new_bump_index.is_none() {
self.try_to_validate_bump_index(&txid);
}
}
}
Ok(())
}
/// Deserializes a BEEF from `data` and merges it into this one.
pub fn merge_beef_from_binary(&mut self, data: &[u8]) -> Result<(), TransactionError> {
    let other = Beef::from_binary(&mut std::io::Cursor::new(data))?;
    self.merge_beef(&other)
}
/// Serializes this BEEF in Atomic form for `txid`: the ATOMIC_BEEF prefix,
/// the little-endian txid, then a copy of this BEEF truncated just past the
/// subject transaction.
///
/// NOTE(review): the truncation assumes `txs` is dependency-ordered (as
/// `sort_txs` produces); if the subject's dependencies appear after it,
/// they would be dropped — confirm callers sort before calling this.
///
/// # Errors
/// Returns `TransactionError::BeefError` when `txid` is not present.
pub fn to_binary_atomic(&self, txid: &str) -> Result<Vec<u8>, TransactionError> {
    // A single scan both validates presence and finds the truncation point;
    // the original scanned the tx list twice (find_txid, then position).
    let pos = self
        .txs
        .iter()
        .position(|btx| btx.txid == txid)
        .ok_or_else(|| {
            TransactionError::BeefError(format!("{} does not exist in this Beef", txid))
        })?;
    let mut atomic_beef = self.clone();
    atomic_beef.atomic_txid = Some(txid.to_string());
    atomic_beef.txs.truncate(pos + 1);
    let mut buf = Vec::new();
    atomic_beef.to_binary(&mut buf)?;
    Ok(buf)
}
/// Scans the existing bumps for a leaf matching `txid` and, when one is
/// found, records that bump's index on the corresponding transaction.
///
/// Delegates to `find_bump_index_for_txid` instead of duplicating its scan;
/// like the original, it uses the first bump containing the txid.
fn try_to_validate_bump_index(&mut self, txid: &str) {
    if let Some(bump_idx) = self.find_bump_index_for_txid(txid) {
        if let Some(btx) = self.txs.iter_mut().find(|btx| btx.txid == txid) {
            btx.bump_index = Some(bump_idx);
        }
    }
}
/// Returns the index of the first bump containing a leaf whose hash equals
/// `txid`, or `None` when no bump proves that transaction.
fn find_bump_index_for_txid(&self, txid: &str) -> Option<usize> {
    self.bumps.iter().position(|bump| {
        bump.path[0]
            .iter()
            .any(|leaf| leaf.hash.as_deref() == Some(txid))
    })
}
/// Wires each transaction input's `source_transaction` to the matching
/// transaction carried in this BEEF, when the input's source txid is known
/// here and the link is not already set.
fn link_source_transactions(&mut self) {
    use std::collections::HashMap;
    // Index txids for O(1) lookup — the original did a linear Vec scan per
    // input. entry().or_insert keeps the FIRST occurrence of a duplicate
    // txid, matching the original `find` semantics.
    let mut txid_to_idx: HashMap<String, usize> = HashMap::with_capacity(self.txs.len());
    for (i, btx) in self.txs.iter().enumerate() {
        txid_to_idx.entry(btx.txid.clone()).or_insert(i);
    }
    // Snapshot the raw transactions so sources can be read while `self.txs`
    // is being mutated below.
    let tx_clones: Vec<Option<crate::transaction::transaction::Transaction>> =
        self.txs.iter().map(|btx| btx.tx.clone()).collect();
    for btx in &mut self.txs {
        if let Some(ref mut tx) = btx.tx {
            for input in &mut tx.inputs {
                if let Some(ref source_txid) = input.source_txid {
                    if input.source_transaction.is_none() {
                        if let Some(&idx) = txid_to_idx.get(source_txid) {
                            if let Some(ref source_tx) = tx_clones[idx] {
                                input.source_transaction = Some(Box::new(source_tx.clone()));
                            }
                        }
                    }
                }
            }
        }
    }
}
}
// Unit tests driven by shared JSON vectors in test-vectors/beef_valid.json.
// Each vector supplies a hex-encoded BEEF plus expected counts/txid.
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Deserialize;

    // Shape of one entry in beef_valid.json.
    #[derive(Deserialize)]
    struct BeefVector {
        // Human-readable label used in assertion messages.
        name: String,
        // Full BEEF payload, hex-encoded.
        hex: String,
        // Vector-level version tag (1 or 2), not the on-wire magic number.
        version: u32,
        // Expected number of merkle-path bumps.
        bump_count: usize,
        // Expected number of transactions.
        tx_count: usize,
        // Expected subject txid, when the vector provides one.
        #[serde(default)]
        txid: Option<String>,
    }

    // Loads and parses the shared JSON test vectors.
    fn load_test_vectors() -> Vec<BeefVector> {
        let json = include_str!("../../test-vectors/beef_valid.json");
        serde_json::from_str(json).expect("failed to parse beef_valid.json")
    }

    // V1 vectors must parse and re-serialize to the identical hex.
    #[test]
    fn test_beef_v1_round_trip() {
        let vectors = load_test_vectors();
        for v in vectors.iter().filter(|v| v.version == 1) {
            let beef = Beef::from_hex(&v.hex)
                .unwrap_or_else(|e| panic!("failed to parse '{}': {}", v.name, e));
            assert_eq!(
                beef.bumps.len(),
                v.bump_count,
                "bump count mismatch for '{}'",
                v.name
            );
            assert_eq!(
                beef.txs.len(),
                v.tx_count,
                "tx count mismatch for '{}'",
                v.name
            );
            let result_hex = beef
                .to_hex()
                .unwrap_or_else(|e| panic!("failed to serialize '{}': {}", v.name, e));
            assert_eq!(result_hex, v.hex, "round-trip failed for '{}'", v.name);
        }
    }

    // Every vector's bump/tx counts (and subject txid, when given) must match.
    #[test]
    fn test_beef_tx_count() {
        let vectors = load_test_vectors();
        for v in &vectors {
            let beef = Beef::from_hex(&v.hex)
                .unwrap_or_else(|e| panic!("failed to parse '{}': {}", v.name, e));
            assert_eq!(
                beef.bumps.len(),
                v.bump_count,
                "bump count mismatch for '{}'",
                v.name
            );
            assert_eq!(
                beef.txs.len(),
                v.tx_count,
                "tx count mismatch for '{}'",
                v.name
            );
            if let Some(ref expected_txid) = v.txid {
                // The last tx is the subject transaction by convention.
                let last_tx = &beef.txs[beef.txs.len() - 1];
                assert_eq!(
                    &last_tx.txid, expected_txid,
                    "txid mismatch for '{}'",
                    v.name
                );
            }
        }
    }

    // Merging two BEEFs should yield the union of their bumps and txs.
    #[test]
    fn test_merge_beef_combines_bumps_and_txs() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef_a");
        let beef_b = Beef::from_hex(&vectors[1].hex).expect("parse beef_b");
        let mut merged = Beef::new(BEEF_V2);
        merged.merge_beef(&beef_a).expect("merge beef_a");
        merged.merge_beef(&beef_b).expect("merge beef_b");
        assert!(
            merged.txs.len() >= beef_a.txs.len(),
            "merged should have at least as many txs as beef_a"
        );
        assert!(
            merged.bumps.len() >= 1,
            "merged should have at least one bump"
        );
        for btx in &beef_a.txs {
            assert!(
                merged.find_txid(&btx.txid).is_some(),
                "merged should contain txid {} from beef_a",
                btx.txid
            );
        }
        for btx in &beef_b.txs {
            assert!(
                merged.find_txid(&btx.txid).is_some(),
                "merged should contain txid {} from beef_b",
                btx.txid
            );
        }
    }

    // Merging the same BEEF twice must replace, not duplicate, entries.
    #[test]
    fn test_merge_beef_deduplicates_same_txid() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let mut merged = Beef::new(BEEF_V2);
        merged.merge_beef(&beef_a).expect("merge first");
        let count_after_first = merged.txs.len();
        merged.merge_beef(&beef_a).expect("merge second");
        assert_eq!(
            merged.txs.len(),
            count_after_first,
            "merging same beef twice should not duplicate txs"
        );
    }

    // merge_beef_from_binary should be equivalent to parse-then-merge.
    #[test]
    fn test_merge_beef_from_binary() {
        let vectors = load_test_vectors();
        let beef_a = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let binary = crate::primitives::utils::from_hex(&vectors[0].hex).expect("hex decode");
        let mut merged = Beef::new(BEEF_V2);
        merged
            .merge_beef_from_binary(&binary)
            .expect("merge from binary");
        assert_eq!(merged.txs.len(), beef_a.txs.len());
        assert_eq!(merged.bumps.len(), beef_a.bumps.len());
    }

    // A raw tx merged into an empty BEEF keeps its txid and is stored once.
    #[test]
    fn test_merge_raw_tx() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        if let Some(ref tx) = beef.txs[0].tx {
            let mut raw_tx_buf = Vec::new();
            tx.to_binary(&mut raw_tx_buf).expect("serialize tx");
            let mut new_beef = Beef::new(BEEF_V2);
            let result = new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge raw tx");
            assert_eq!(result.txid, beef.txs[0].txid);
            assert_eq!(new_beef.txs.len(), 1);
        }
    }

    // Re-merging the same raw tx must replace the existing entry.
    #[test]
    fn test_merge_raw_tx_replaces_existing() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        if let Some(ref tx) = beef.txs[0].tx {
            let mut raw_tx_buf = Vec::new();
            tx.to_binary(&mut raw_tx_buf).expect("serialize tx");
            let mut new_beef = Beef::new(BEEF_V2);
            new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge first");
            new_beef
                .merge_raw_tx(&raw_tx_buf, None)
                .expect("merge second");
            assert_eq!(
                new_beef.txs.len(),
                1,
                "merging same raw tx twice should replace, not duplicate"
            );
        }
    }

    // Atomic serialization: prefix + LE txid, and it round-trips via from_binary.
    #[test]
    fn test_to_binary_atomic() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        if let Some(ref expected_txid) = vectors[0].txid {
            let atomic = beef
                .to_binary_atomic(expected_txid)
                .expect("to_binary_atomic");
            // 4-byte prefix + 32-byte txid = 36 bytes minimum of wrapper.
            assert!(atomic.len() > 36, "atomic output too short");
            let prefix = u32::from_le_bytes([atomic[0], atomic[1], atomic[2], atomic[3]]);
            assert_eq!(prefix, ATOMIC_BEEF, "should start with ATOMIC_BEEF prefix");
            let mut txid_bytes =
                crate::primitives::utils::from_hex(expected_txid).expect("hex decode txid");
            txid_bytes.reverse();
            assert_eq!(
                &atomic[4..36],
                &txid_bytes[..],
                "atomic should contain txid in LE"
            );
            let mut cursor = Cursor::new(&atomic);
            let parsed = Beef::from_binary(&mut cursor).expect("parse atomic beef");
            assert_eq!(
                parsed.atomic_txid.as_deref(),
                Some(expected_txid.as_str()),
                "parsed atomic txid should match"
            );
            assert_eq!(
                parsed.txs.len(),
                beef.txs.len(),
                "parsed atomic should have same tx count"
            );
        }
    }

    // Requesting an atomic serialization for an unknown txid must fail.
    #[test]
    fn test_to_binary_atomic_nonexistent_txid() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let result = beef
            .to_binary_atomic("0000000000000000000000000000000000000000000000000000000000000000");
        assert!(result.is_err(), "should error for nonexistent txid");
    }

    // find_txid: present txids found, absent txids not.
    #[test]
    fn test_find_txid() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        if let Some(ref expected_txid) = vectors[0].txid {
            assert!(
                beef.find_txid(expected_txid).is_some(),
                "should find existing txid"
            );
        }
        assert!(
            beef.find_txid("0000000000000000000000000000000000000000000000000000000000000000")
                .is_none(),
            "should not find nonexistent txid"
        );
    }

    // Without an atomic txid, into_transaction returns the last tx.
    #[test]
    fn test_into_transaction_returns_last_tx() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let expected_txid = beef.txs.last().unwrap().txid.clone();
        let tx = beef.into_transaction().expect("into_transaction");
        assert_eq!(
            tx.id().unwrap(),
            expected_txid,
            "should return last (subject) tx"
        );
    }

    // Transaction::from_beef should agree with Beef::from_hex + subject tx.
    #[test]
    fn test_from_beef_hex() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let expected_txid = beef.txs.last().unwrap().txid.clone();
        let tx = crate::transaction::transaction::Transaction::from_beef(&vectors[0].hex)
            .expect("from_beef");
        assert_eq!(
            tx.id().unwrap(),
            expected_txid,
            "from_beef should return subject tx"
        );
    }

    // After sort_txs, no proven (bumped) tx follows an unproven one.
    // NOTE(review): relies on vectors where proven txs precede unproven —
    // this checks ordering only, not a full topological property.
    #[test]
    fn test_sort_txs_proven_before_unproven() {
        let vectors = load_test_vectors();
        let mut beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        beef.sort_txs();
        let mut seen_unproven = false;
        for btx in &beef.txs {
            if btx.bump_index.is_some() {
                assert!(!seen_unproven, "proven tx should not come after unproven");
            } else {
                seen_unproven = true;
            }
        }
    }

    // Sorting an already-sorted BEEF must not change the order.
    #[test]
    fn test_sort_txs_idempotent() {
        let vectors = load_test_vectors();
        let mut beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        beef.sort_txs();
        let first_order: Vec<String> = beef.txs.iter().map(|t| t.txid.clone()).collect();
        beef.sort_txs();
        let second_order: Vec<String> = beef.txs.iter().map(|t| t.txid.clone()).collect();
        assert_eq!(first_order, second_order, "sort_txs should be idempotent");
    }

    // merge_bump: first insert appends; re-merging the same bump combines
    // into the existing slot instead of appending a duplicate.
    #[test]
    fn test_merge_bump() {
        let vectors = load_test_vectors();
        let beef = Beef::from_hex(&vectors[0].hex).expect("parse beef");
        let mut new_beef = Beef::new(BEEF_V2);
        let idx = new_beef.merge_bump(&beef.bumps[0]).expect("merge bump");
        assert_eq!(idx, 0, "first bump should be at index 0");
        assert_eq!(new_beef.bumps.len(), 1);
        let idx2 = new_beef
            .merge_bump(&beef.bumps[0])
            .expect("merge bump again");
        assert_eq!(idx2, 0, "same bump should merge to index 0");
        assert_eq!(
            new_beef.bumps.len(),
            1,
            "should still be 1 bump after re-merge"
        );
    }
}