1#![allow(dead_code)]
4use arweave_rs_randomx::RandomXVM;
5use arweave_rs_types::{*, consensus::*};
6use color_eyre::eyre::{eyre, Result};
7use arweave_rs_indexes::*;
8use merkle::*;
9use openssl::sha;
10use arweave_rs_packing::{*, feistel::*};
11
12pub mod merkle;
13
14pub fn pre_validate_block(
19 block_header: &ArweaveBlockHeader,
20 previous_block_header: &ArweaveBlockHeader,
21 block_index: &BlockIndex<Initialized>,
22 randomx_vm: Option<&RandomXVM>,
23) -> Result<[u8; 32]> {
24 let block_height = block_header.height;
28
29 if !proof_size_is_valid(&previous_block_header.poa, block_height - 1) {
31 return Err(eyre!("previous blocks PoA proof has invalid size"));
32 }
33
34 if !proof_size_is_valid(&previous_block_header.poa2, block_height - 1) {
35 return Err(eyre!("previous blocks PoA2 proof has invalid size"));
36 }
37
38 if !proof_size_is_valid(&block_header.poa, block_height) {
40 return Err(eyre!("PoA proof has invalid size"));
41 }
42
43 if !proof_size_is_valid(&block_header.poa2, block_height) {
44 return Err(eyre!("PoA2 proof has invalid size"));
45 }
46
47 let chunk = &block_header.poa.chunk;
49 if !chunk_hash_is_valid(&block_header.chunk_hash, chunk, block_height) {
50 return Err(eyre!("chunk_hash does not match poa.chunk bytes"));
51 }
52
53 if block_header.chunk2_hash.is_some() {
55 let chunk = &block_header.poa2.chunk;
56 let chunk2_hash = block_header.chunk2_hash.unwrap_or_default();
57 if !chunk_hash_is_valid(&chunk2_hash, chunk, block_height) {
58 return Err(eyre!("chunk2_hash does not match poa2.chunk bytes"));
59 }
60 }
61
62 if !block_hash_is_valid(block_header) {
68 return Err(eyre!("indep_hash does not match calculated block_hash"));
69 }
70
71 if block_header.previous_block != previous_block_header.indep_hash {
85 return Err(eyre!("previous blocks indep_hash is not the parent block"));
86 }
87
88 if !last_retarget_is_valid(block_header, previous_block_header) {
90 return Err(eyre!("last_retarget is invalid"));
91 }
92
93 if !difficulty_is_valid(block_header, previous_block_header) {
95 return Err(eyre!("block difficulty is invalid"));
96 }
97
98 if !cumulative_diff_is_valid(block_header, previous_block_header) {
100 return Err(eyre!("cumulative_diff is invalid"));
101 }
102
103 let quick_pow_result = quick_pow_is_valid(block_header, previous_block_header, randomx_vm);
105
106 let (mining_hash, solution_hash) = match quick_pow_result {
107 Ok(tuple) => tuple,
108 Err(err) => return Err(err),
109 };
110
111 if !seed_data_is_valid(block_header, previous_block_header) {
113 return Err(eyre!("seed_data is invalid"));
114 }
115
116 if !partition_number_is_valid(block_header) {
118 return Err(eyre!("partition_number is invalid"));
119 }
120
121 if !nonce_is_valid(block_header) {
123 return Err(eyre!("nonce is invalid"));
124 }
125
126 let (recall_byte_1, recall_byte_2) = match recall_bytes_is_valid(block_header, &mining_hash) {
128 Ok(tuple) => tuple,
129 Err(err) => return Err(err),
130 };
131
132 if !poa_is_valid(
134 &block_header.poa,
135 recall_byte_1,
136 block_index,
137 &block_header.reward_addr,
138 randomx_vm,
139 ) {
140 return Err(eyre!("poa is invalid"));
141 }
142
143 if let Some(recall_byte_2) = recall_byte_2 {
145 if !poa_is_valid(
146 &block_header.poa2,
147 recall_byte_2,
148 block_index,
149 &block_header.reward_addr,
150 randomx_vm,
151 ) {
152 return Err(eyre!("poa2 is invalid"));
153 }
154 }
155
156 Ok(solution_hash)
157}
158
159fn compute_solution_hash(mining_hash: &[u8; 32], hash_preimage: &H256) -> [u8; 32] {
160 let mut hasher = sha::Sha256::new();
161 hasher.update(mining_hash);
162 hasher.update(hash_preimage.as_bytes());
163 hasher.finish()
164}
165
166fn proof_size_is_valid(poa_data: &PoaData, block_height: u64) -> bool {
167 if block_height < FORK_2_7_HEIGHT {
169 return true;
170 }
171
172 let tx_path = &poa_data.tx_path;
173 let data_path = &poa_data.data_path;
174 let chunk = &poa_data.chunk;
175
176 tx_path.len() <= MAX_TX_PATH_SIZE
177 && data_path.len() <= MAX_DATA_PATH_SIZE
178 && chunk.len() <= (DATA_CHUNK_SIZE as usize)
179}
180
181fn chunk_hash_is_valid(chunk_hash: &H256, chunk: &Base64, block_height: u64) -> bool {
182 if block_height < FORK_2_7_HEIGHT {
183 return true;
184 }
185
186 let mut hasher = sha::Sha256::new();
187 hasher.update(chunk.0.as_slice());
188 let hash = H256::from(hasher.finish());
189 hash == *chunk_hash
190}
191
192fn last_retarget_is_valid(
193 block_header: &ArweaveBlockHeader,
194 previous_block_header: &ArweaveBlockHeader,
195) -> bool {
196 if is_retarget_height(block_header) {
197 block_header.last_retarget == block_header.timestamp
198 } else {
199 block_header.last_retarget == previous_block_header.last_retarget
200 }
201}
202
203fn difficulty_is_valid(
204 block_header: &ArweaveBlockHeader,
205 previous_block_header: &ArweaveBlockHeader,
206) -> bool {
207 if is_retarget_height(block_header) {
208 let result = calculate_difficulty(block_header, previous_block_header);
209 match result {
210 Ok(computed_diff) => {
211 if computed_diff == block_header.diff {
212 true
213 } else {
214 println!(
215 "\ncomputed: {}\n actual: {}",
216 computed_diff, block_header.diff
217 );
218 false
219 }
220 }
221 Err(_) => false,
222 }
223 } else {
224 block_header.diff == previous_block_header.diff
225 && block_header.last_retarget == previous_block_header.last_retarget
226 }
227}
228
229fn calculate_difficulty(
230 block_header: &ArweaveBlockHeader,
231 previous_block_header: &ArweaveBlockHeader,
232) -> Result<U256> {
233 let height = block_header.height;
234 let timestamp = block_header.timestamp;
235
236 if height < FORK_2_5_HEIGHT {
237 return Err(eyre!(
238 "Can't calculate difficulty for block height prior to Fork 2.5"
239 ));
240 }
241 let previous_diff = previous_block_header.diff;
242 let previous_last_retarget = previous_block_header.last_retarget;
243
244 let max_timestamp_deviation = JOIN_CLOCK_TOLERANCE * 2 + CLOCK_DRIFT_MAX;
247
248 let target_time = RETARGET_BLOCKS * TARGET_TIME;
250
251 let actual_time = std::cmp::max(timestamp - previous_last_retarget, max_timestamp_deviation);
253
254 if actual_time < RETARGET_TOLERANCE_UPPER_BOUND && actual_time > RETARGET_TOLERANCE_LOWER_BOUND
255 {
256 Ok(previous_diff)
258 } else {
259 let min_diff = U256::from(MIN_SPORA_DIFFICULTY);
261 let max_diff = U256::max_value();
262 let diff_inverse = ((max_diff - previous_diff + 1) * actual_time) / target_time;
265 let computed_diff = max_diff - diff_inverse + 1;
266 Ok(computed_diff.clamp(min_diff, max_diff))
267 }
268}
269
270fn cumulative_diff_is_valid(
271 block_header: &ArweaveBlockHeader,
272 previous_block_header: &ArweaveBlockHeader,
273) -> bool {
274 let cumulative_diff = compute_cumulative_diff(block_header, previous_block_header);
275 cumulative_diff == block_header.cumulative_diff
276}
277
278fn compute_cumulative_diff(
279 block_header: &ArweaveBlockHeader,
280 previous_block_header: &ArweaveBlockHeader,
281) -> U256 {
282 let max_diff = U256::max_value();
284 let delta = max_diff / (max_diff - block_header.diff);
285 previous_block_header.cumulative_diff + delta
286}
287
288fn quick_pow_is_valid(
289 block_header: &ArweaveBlockHeader,
290 previous_block_header: &ArweaveBlockHeader,
291 randomx_vm: Option<&RandomXVM>,
292) -> Result<([u8; 32], [u8; 32])> {
293 let nonce_limiter_info = &block_header.nonce_limiter_info;
295 let vdf_output = nonce_limiter_info.output;
296 let mining_address: H256 = block_header.reward_addr;
297 let partition_number: u32 = block_header.partition_number as u32;
298
299 let previous_nonce_limiter_info = &previous_block_header.nonce_limiter_info;
301 let previous_vdf_seed: H384 = previous_nonce_limiter_info.seed;
302
303 let mining_hash = compute_mining_hash(
304 vdf_output,
305 partition_number,
306 previous_vdf_seed,
307 mining_address,
308 randomx_vm,
309 );
310
311 let hash_preimage = block_header.hash_preimage;
313 let solution_hash = compute_solution_hash(&mining_hash, &hash_preimage);
314
315 let solution_hash_value_big: U256 = U256::from_big_endian(&solution_hash);
316
317 let diff: U256 = block_header.diff;
318 if solution_hash_value_big > diff {
319 Ok((mining_hash, solution_hash))
320 } else {
321 Err(eyre!(
322 "Block solution_hash does not satisfy proof of work difficulty check"
323 ))
324 }
325}
326
327fn seed_data_is_valid(
328 block_header: &ArweaveBlockHeader,
329 previous_block_header: &ArweaveBlockHeader,
330) -> bool {
331 let nonce_info = &block_header.nonce_limiter_info;
332 let expected_seed_data = get_seed_data(
333 block_header.nonce_limiter_info.global_step_number,
334 previous_block_header,
335 );
336
337 if expected_seed_data.seed == nonce_info.seed
339 && expected_seed_data.next_seed == nonce_info.next_seed
340 && expected_seed_data.next_partition_upper_bound == nonce_info.next_zone_upper_bound
341 && expected_seed_data.partition_upper_bound == nonce_info.zone_upper_bound
342 && expected_seed_data.vdf_difficulty == nonce_info.vdf_difficulty.unwrap_or(VDF_SHA_1S)
343 {
344 true
345 } else {
346 println!(
347 "expected seed: {:?}\nfound seed: {:?}",
348 expected_seed_data.seed, nonce_info.seed
349 );
350 false
351 }
352}
353
354fn partition_number_is_valid(block_header: &ArweaveBlockHeader) -> bool {
355 let max = std::cmp::max(
356 0,
357 block_header.nonce_limiter_info.zone_upper_bound / PARTITION_SIZE - 1,
358 );
359 block_header.partition_number <= max
360}
361
362fn nonce_is_valid(block_header: &ArweaveBlockHeader) -> bool {
363 let max = RECALL_RANGE_SIZE / DATA_CHUNK_SIZE;
364 let nonce_value = block_header.nonce.0 as u32;
365 nonce_value < max
366}
367
368fn recall_bytes_is_valid(
369 block_header: &ArweaveBlockHeader,
370 mining_hash: &[u8; 32],
371) -> Result<(U256, Option<U256>)> {
372 let (recall_range1_start, recall_range2_start) = get_recall_range(
373 mining_hash,
374 block_header.partition_number,
375 block_header.nonce_limiter_info.zone_upper_bound,
376 );
377
378 let recall_byte_1 = recall_range1_start + block_header.nonce.0 * DATA_CHUNK_SIZE as u64;
379 let recall_byte_2 = recall_range2_start + block_header.nonce.0 * DATA_CHUNK_SIZE as u64;
380
381 if let Some(b2) = block_header.recall_byte2 {
382 if recall_byte_2 == b2 && recall_byte_1 == U256::from(block_header.recall_byte) {
383 Ok((recall_byte_1, Some(recall_byte_2)))
384 } else {
385 Err(eyre!("invalid recall byte 2"))
386 }
387 } else if recall_byte_1 == U256::from(block_header.recall_byte) {
388 Ok((recall_byte_1, None))
389 } else {
390 Err(eyre!("invalid recall byte 1"))
391 }
392}
393
394fn poa_is_valid(
395 poa_data: &PoaData,
396 recall_byte: U256,
397 block_index: &BlockIndex<Initialized>,
398 reward_addr: &H256,
399 randomx_vm: Option<&RandomXVM>,
400) -> bool {
401 let block_bounds = block_index.get_block_bounds(recall_byte.as_u128());
403 let start = block_bounds.block_start_offset;
404 let end = block_bounds.block_end_offset;
405
406 if (start..=end).contains(&recall_byte.as_u128()) {
409 } else {
414 return false;
415 }
416
417 let byte_offset_in_block = get_byte_offset(recall_byte, block_bounds.block_start_offset, block_bounds.block_end_offset);
419 let tx_path_result = match validate_path(
427 block_bounds.tx_root.0,
428 &poa_data.tx_path,
429 byte_offset_in_block,
430 ) {
431 Ok(result) => result,
432 Err(_) => {
433 println!("tx_path is invalid");
434 return false;
435 }
436 };
437
438 let byte_offset_in_tx = byte_offset_in_block - tx_path_result.left_bound;
440 let tx_start = 0;
441 let tx_end = tx_path_result.right_bound - tx_path_result.left_bound;
442 if (tx_start..=tx_end).contains(&byte_offset_in_tx) || (tx_start == 0 && tx_end == 0) {
446 } else {
448 return false;
449 }
450
451 let data_path_result = match validate_path(
455 tx_path_result.leaf_hash,
456 &poa_data.data_path,
457 byte_offset_in_tx,
458 ) {
459 Ok(result) => result,
460 Err(_) => return false,
461 };
462
463 let chunk_size = (data_path_result.right_bound - data_path_result.left_bound) as usize;
465 let chunk_offset =
466 block_bounds.block_start_offset + tx_path_result.left_bound + data_path_result.right_bound;
467
468 let input = get_chunk_entropy_input(chunk_offset.into(), &block_bounds.tx_root, reward_addr);
474 let randomx_program_count = RANDOMX_PACKING_ROUNDS_2_6;
475 let entropy = compute_entropy(&input, randomx_program_count, randomx_vm);
476
477
478 let ciphertext = poa_data.chunk.as_slice();
481 let decrypted_chunk = feistel_decrypt(ciphertext, &entropy);
482
483 let (decrypted_chunk, _) = decrypted_chunk.split_at(chunk_size.min(decrypted_chunk.len()));
486
487 let chunk_hash = generate_chunk_id(decrypted_chunk);
490
491 chunk_hash == data_path_result.leaf_hash
493}
494
/// Byte serialization of a double-signing proof, as embedded in the block
/// signed-hash preimage (see `block_hash_is_valid`).
trait DoubleSigningProofBytes {
    // Returns the proof's wire-format bytes.
    fn bytes(&self) -> Vec<u8>;
}
498
impl DoubleSigningProofBytes for DoubleSigningProof {
    fn bytes(&self) -> Vec<u8> {
        // A proof with no pub_key serializes as a single 0 marker byte.
        if self.pub_key.is_none() {
            return vec![0];
        }

        let mut buff: Vec<u8> = Vec::new();

        // Marker byte 1, then the proof fields in wire order: pub_key and
        // each signature as fixed 64-byte buffers, each cumulative diff as
        // a 2-byte-length-prefixed big int, and each preimage padded or
        // truncated to 8 raw bytes (extend_raw_buf keeps trailing bytes).
        buff.extend_raw_buf(1, &[1])
            .extend_optional_raw_buf(64, &self.pub_key)
            .extend_optional_raw_buf(64, &self.sig1)
            .extend_big(2, &self.cdiff1.unwrap_or_default())
            .extend_big(2, &self.prev_cdiff1.unwrap_or_default())
            .extend_raw_buf(8, self.preimage1.unwrap_or_default().as_bytes())
            .extend_optional_raw_buf(64, &self.sig2)
            .extend_big(2, &self.cdiff2.unwrap_or_default())
            .extend_big(2, &self.prev_cdiff2.unwrap_or_default())
            .extend_raw_buf(8, self.preimage2.unwrap_or_default().as_bytes());
        buff
    }
}
522
523trait ExtendBytes {
527 fn extend_raw_buf(&mut self, raw_size: usize, val: &[u8]) -> &mut Self;
528 fn extend_optional_raw_buf(&mut self, raw_size: usize, val: &Option<Base64>) -> &mut Self;
529 fn extend_raw_big(&mut self, raw_size: usize, val: &U256) -> &mut Self;
530 fn extend_u64(&mut self, size_bytes: usize, val: &u64) -> &mut Self;
531 fn extend_big(&mut self, size_bytes: usize, val: &U256) -> &mut Self;
532 fn extend_optional_big(&mut self, size_bytes: usize, val: &Option<U256>) -> &mut Self;
533 fn extend_optional_hash(&mut self, size_bytes: usize, val: &Option<H256>) -> &mut Self;
534 fn extend_buf(&mut self, size_bytes: usize, val: &[u8]) -> &mut Self;
535 fn extend_buf_list(&mut self, size_bytes: usize, val: &[Base64]) -> &mut Self;
536 fn extend_hash_list(&mut self, val: &[H256]) -> &mut Self;
537 fn trim_leading_zero_bytes(slice: &[u8]) -> &[u8] {
538 let mut non_zero_index = slice.iter().position(|&x| x != 0).unwrap_or(slice.len());
539 non_zero_index = std::cmp::min(non_zero_index, slice.len() - 1);
540 &slice[non_zero_index..]
541 }
542}
543
/// Implements the fixed-width / length-prefixed serialization primitives on
/// a plain byte buffer; used to build the block signed-hash preimage.
impl ExtendBytes for Vec<u8> {
    // Appends exactly `raw_size` bytes: `val` right-aligned, left-padded
    // with zeros; if `val` is longer, only its trailing bytes are kept.
    fn extend_raw_buf(&mut self, raw_size: usize, val: &[u8]) -> &mut Self {
        let mut bytes = vec![0u8; raw_size];

        // If val is too long, keep only its trailing raw_size bytes.
        let start = if val.len() > raw_size {
            val.len() - raw_size
        } else {
            0
        };

        // Left-pad with zeros when val is shorter than raw_size.
        let insert = raw_size.saturating_sub(val.len());
        bytes[insert..].copy_from_slice(&val[start..]);

        self.extend_from_slice(&bytes);
        self
    }

    // `None` contributes an empty value, which extend_raw_buf zero-pads to
    // the full raw_size.
    fn extend_optional_raw_buf(&mut self, raw_size: usize, val: &Option<Base64>) -> &mut Self {
        let mut bytes: Vec<u8> = Vec::new();
        if let Some(val_bytes) = val {
            bytes.extend_from_slice(val_bytes.as_slice());
        }
        self.extend_raw_buf(raw_size, &bytes)
    }

    // Big-endian-encodes the U256 and keeps its low raw_size bytes.
    fn extend_raw_big(&mut self, raw_size: usize, val: &U256) -> &mut Self {
        let mut bytes = [0u8; 32];
        val.to_big_endian(&mut bytes);
        self.extend_raw_buf(raw_size, &bytes)
    }

    // Length-prefixed minimal big-endian u64: writes the byte count in
    // num_size_bytes bytes, then the value with leading zeros trimmed
    // (zero encodes as count 1 followed by a single 0x00).
    fn extend_u64(&mut self, num_size_bytes: usize, val: &u64) -> &mut Self {
        let bytes = &val.to_be_bytes();
        let bytes = Self::trim_leading_zero_bytes(bytes);
        let num_val_bytes = bytes.len();
        let size_bytes = num_val_bytes.to_be_bytes();
        // Keep only the low num_size_bytes bytes of the usize count.
        let start = size_bytes.len().saturating_sub(num_size_bytes);
        self.extend_from_slice(&Vec::from(&size_bytes[start..]));
        self.extend_from_slice(bytes);
        self
    }

    // Same length-prefixed scheme as extend_u64, for U256 values.
    fn extend_big(&mut self, num_size_bytes: usize, val: &U256) -> &mut Self {
        let mut be_bytes = [0u8; 32];
        val.to_big_endian(&mut be_bytes);
        let bytes = Self::trim_leading_zero_bytes(&be_bytes);
        let num_val_bytes = bytes.len();
        let size_bytes = num_val_bytes.to_be_bytes();
        let start = size_bytes.len().saturating_sub(num_size_bytes);
        self.extend_from_slice(&Vec::from(&size_bytes[start..]));
        self.extend_from_slice(bytes);
        self
    }

    // `None` serializes as a zero length prefix with no value bytes — note
    // this differs from Some(0), which is count 1 plus a 0x00 byte.
    fn extend_optional_big(&mut self, size_bytes: usize, val: &Option<U256>) -> &mut Self {
        if let Some(big_int) = val {
            self.extend_big(size_bytes, big_int)
        } else {
            self.extend_buf(size_bytes, &[])
        }
    }

    // Length prefix (num_size_bytes wide), then the buffer verbatim.
    fn extend_buf(&mut self, num_size_bytes: usize, val: &[u8]) -> &mut Self {
        let bytes = val;
        let num_val_bytes = bytes.len();
        let size_bytes = num_val_bytes.to_be_bytes();
        let start = size_bytes.len().saturating_sub(num_size_bytes);
        self.extend_from_slice(&Vec::from(&size_bytes[start..]));
        self.extend_from_slice(bytes);
        self
    }

    // `None` serializes as a zero length prefix, like extend_optional_big.
    fn extend_optional_hash(&mut self, size_bytes: usize, val: &Option<H256>) -> &mut Self {
        let mut bytes: Vec<u8> = Vec::new();
        if let Some(val_bytes) = val {
            bytes.extend_from_slice(&val_bytes[..]);
        }
        self.extend_buf(size_bytes, &bytes)
    }

    // u16 element count, then each element length-prefixed. Elements are
    // appended in REVERSE order — consumers rely on this exact layout.
    fn extend_buf_list(&mut self, size_bytes: usize, data: &[Base64]) -> &mut Self {
        let num_elements = data.len() as u16;
        self.extend_from_slice(&num_elements.to_be_bytes());
        for elem in data.iter().rev() {
            self.extend_buf(size_bytes, elem.as_slice());
        }
        self
    }

    // u16 element count, then each hash's raw bytes in forward order.
    fn extend_hash_list(&mut self, data: &[H256]) -> &mut Self {
        let num_elements = data.len() as u16;
        self.extend_from_slice(&num_elements.to_be_bytes());
        for elem in data.iter() {
            self.extend_from_slice(elem.as_bytes());
        }
        self
    }
}
652
/// Recomputes the block's independent hash and compares it to `indep_hash`.
///
/// The preimage is a field-by-field serialization of the header (the exact
/// field order and widths below are consensus-critical); it is SHA-256
/// hashed to get the signed hash, which is then SHA-384 hashed together
/// with the block signature to produce the independent hash.
fn block_hash_is_valid(block_header: &ArweaveBlockHeader) -> bool {
    let b = block_header;
    let nonce_info = &b.nonce_limiter_info;
    // diff is serialized as its full 32-byte big-endian form, length-prefixed.
    let mut diff_bytes: [u8; 32] = Default::default();
    b.diff.to_big_endian(&mut diff_bytes);

    let proof_bytes = b.double_signing_proof.bytes();

    // Serialize every header field in wire order. extend_buf/extend_u64/
    // extend_big are length-prefixed; extend_raw_buf/extend_raw_big are
    // fixed-width (see the ExtendBytes impl).
    let mut buff: Vec<u8> = Vec::new();
    buff.extend_buf(1, b.previous_block.as_bytes())
        .extend_u64(1, &b.timestamp)
        .extend_u64(2, &b.nonce.0)
        .extend_u64(1, &b.height)
        .extend_buf(2, &diff_bytes)
        .extend_big(2, &b.cumulative_diff)
        .extend_u64(1, &b.last_retarget)
        .extend_buf(1, b.hash.as_bytes())
        .extend_u64(2, &b.block_size)
        .extend_u64(2, &b.weave_size)
        .extend_buf(1, b.reward_addr.as_bytes())
        .extend_optional_hash(1, &b.tx_root)
        .extend_buf(1, b.wallet_list.as_bytes())
        .extend_buf(1, b.hash_list_merkle.as_bytes())
        .extend_u64(1, &b.reward_pool)
        .extend_u64(1, &b.packing_2_5_threshold)
        .extend_u64(1, &b.strict_data_split_threshold)
        .extend_u64(1, &b.usd_to_ar_rate[0])
        .extend_u64(1, &b.usd_to_ar_rate[1])
        .extend_u64(1, &b.scheduled_usd_to_ar_rate[0])
        .extend_u64(1, &b.scheduled_usd_to_ar_rate[1])
        .extend_buf_list(2, &b.tags.0)
        .extend_buf_list(1, &b.txs.0)
        .extend_u64(1, &b.reward)
        .extend_u64(2, &b.recall_byte)
        .extend_buf(1, b.hash_preimage.as_bytes())
        .extend_optional_big(2, &b.recall_byte2)
        .extend_buf(2, b.reward_key.as_slice())
        .extend_u64(1, &b.partition_number)
        // Nonce limiter (VDF) fields are fixed-width.
        .extend_raw_buf(32, nonce_info.output.as_bytes())
        .extend_raw_buf(8, &nonce_info.global_step_number.to_be_bytes())
        .extend_raw_buf(48, nonce_info.seed.as_bytes())
        .extend_raw_buf(48, nonce_info.next_seed.as_bytes())
        .extend_raw_buf(32, &nonce_info.zone_upper_bound.to_be_bytes())
        .extend_raw_buf(32, &nonce_info.next_zone_upper_bound.to_be_bytes())
        .extend_buf(1, b.nonce_limiter_info.prev_output.as_bytes())
        .extend_hash_list(&b.nonce_limiter_info.checkpoints.0)
        .extend_hash_list(&b.nonce_limiter_info.last_step_checkpoints.0)
        .extend_buf(1, b.previous_solution_hash.as_bytes())
        .extend_big(1, &b.price_per_gib_minute)
        .extend_big(1, &b.scheduled_price_per_gib_minute)
        .extend_raw_buf(32, b.reward_history_hash.as_bytes())
        .extend_big(1, &b.debt_supply)
        .extend_raw_big(3, &b.kryder_plus_rate_multiplier)
        .extend_raw_big(1, &b.kryder_plus_rate_multiplier_latch)
        .extend_raw_big(3, &b.denomination)
        .extend_u64(1, &b.redenomination_height)
        // Double-signing proof bytes are appended verbatim.
        .extend_raw_buf(proof_bytes.len(), &proof_bytes)
        .extend_big(2, &b.previous_cumulative_diff)
        .extend_big(2, &b.merkle_rebase_support_threshold)
        .extend_buf(3, b.poa.data_path.as_slice())
        .extend_buf(3, b.poa.tx_path.as_slice())
        .extend_buf(3, b.poa2.data_path.as_slice())
        .extend_buf(3, b.poa2.tx_path.as_slice())
        .extend_raw_buf(32, b.chunk_hash.as_bytes())
        .extend_optional_hash(1, &b.chunk2_hash)
        .extend_raw_buf(32, b.block_time_history_hash.as_bytes())
        .extend_u64(1, &nonce_info.vdf_difficulty.unwrap_or_default())
        .extend_u64(1, &nonce_info.next_vdf_difficulty.unwrap_or_default());

    // signed_hash = SHA-256(preimage)
    let mut hasher = sha::Sha256::new();
    hasher.update(&buff);
    let signed_hash = hasher.finish();

    // indep_hash = SHA-384(signed_hash || signature)
    let mut hasher = sha::Sha384::new();
    hasher.update(&signed_hash);
    hasher.update(b.signature.as_slice());
    let hash = H384::from(hasher.finish());

    hash == b.indep_hash
}
743
744fn is_retarget_height(block_header: &ArweaveBlockHeader) -> bool {
745 let height = block_header.height;
746 height % RETARGET_BLOCKS == 0 && height != 0
747}
748
/// Returns the index of the first byte that differs between the two
/// slices, comparing only up to the shorter length; `None` when the
/// compared prefixes are identical.
fn first_mismatch_index(vec1: &[u8], vec2: &[u8]) -> Option<usize> {
    vec1.iter()
        .zip(vec2.iter())
        .position(|(val1, val2)| val1 != val2)
}