1use bee::debug::ChainState;
37use bee::postage::PostageBatch;
38use num_bigint::BigInt;
39
40use crate::components::stamps::{format_bytes, format_ttl_seconds};
41
/// Assumed Gnosis Chain block time, used to convert between blocks and seconds.
pub const GNOSIS_BLOCK_TIME_SECS: i64 = 5;

/// Number of PLUR in one BZZ (BZZ has 16 decimal places).
pub const PLUR_PER_BZZ: f64 = 1e16;
/// Dry-run result of adding PLUR to an existing postage batch (no chain action).
#[derive(Debug, Clone, PartialEq)]
pub struct TopupPreview {
    /// Shortened hex batch id for display.
    pub batch_id_short: String,
    /// Batch depth; a topup does not change it.
    pub current_depth: u8,
    /// TTL reported by the node before the topup (raw value; may be negative
    /// when the node cannot compute it — TODO confirm node semantics).
    pub current_ttl_seconds: i64,
    /// Per-chunk PLUR amount to add.
    pub delta_amount: BigInt,
    /// Extra lifetime the delta buys at the current chain price.
    pub extra_ttl_seconds: i64,
    /// Projected TTL after the topup.
    pub new_ttl_seconds: i64,
    /// Total cost in BZZ (delta × 2^depth chunks, converted from PLUR).
    pub cost_bzz: f64,
}
68
69impl TopupPreview {
70 pub fn summary(&self) -> String {
72 format!(
73 "topup-preview {}: +{:.4} BZZ (delta {} PLUR/chunk), TTL {} → {}",
74 self.batch_id_short,
75 self.cost_bzz,
76 self.delta_amount,
77 format_ttl_seconds(self.current_ttl_seconds),
78 format_ttl_seconds(self.new_ttl_seconds),
79 )
80 }
81}
82
/// Dry-run result of diluting a batch (raising its depth; costs no BZZ but
/// divides the remaining TTL).
#[derive(Debug, Clone, PartialEq)]
pub struct DilutePreview {
    /// Shortened hex batch id for display.
    pub batch_id_short: String,
    /// Depth before the dilute.
    pub old_depth: u8,
    /// Depth after the dilute (strictly greater than `old_depth`).
    pub new_depth: u8,
    /// Theoretical capacity at `old_depth` (2^depth chunks × 4096 bytes).
    pub old_capacity_bytes: u128,
    /// Theoretical capacity at `new_depth`.
    pub new_capacity_bytes: u128,
    /// TTL before the dilute, clamped to be non-negative.
    pub old_ttl_seconds: i64,
    /// TTL after the dilute: old TTL divided by 2^(new_depth - old_depth).
    pub new_ttl_seconds: i64,
}
93
94impl DilutePreview {
95 pub fn summary(&self) -> String {
96 format!(
97 "dilute-preview {}: depth {}→{}, capacity {}→{}, TTL {}→{}, cost 0 BZZ",
98 self.batch_id_short,
99 self.old_depth,
100 self.new_depth,
101 format_bytes(self.old_capacity_bytes),
102 format_bytes(self.new_capacity_bytes),
103 format_ttl_seconds(self.old_ttl_seconds),
104 format_ttl_seconds(self.new_ttl_seconds),
105 )
106 }
107}
108
/// Dry-run result of extending a batch's TTL by a requested duration.
#[derive(Debug, Clone, PartialEq)]
pub struct ExtendPreview {
    /// Shortened hex batch id for display.
    pub batch_id_short: String,
    /// Batch depth; an extension does not change it.
    pub depth: u8,
    /// TTL reported by the node before the extension (raw value).
    pub current_ttl_seconds: i64,
    /// Requested extension duration.
    pub extension_seconds: i64,
    /// Per-chunk PLUR amount needed to buy the extension at the current price.
    pub needed_amount_plur: BigInt,
    /// Total cost in BZZ for all 2^depth chunks.
    pub cost_bzz: f64,
    /// Projected TTL after the extension.
    pub new_ttl_seconds: i64,
}
121
122impl ExtendPreview {
123 pub fn summary(&self) -> String {
124 format!(
125 "extend-preview {} +{}: cost {:.4} BZZ ({} PLUR/chunk), TTL {} → {}",
126 self.batch_id_short,
127 format_ttl_seconds(self.extension_seconds),
128 self.cost_bzz,
129 self.needed_amount_plur,
130 format_ttl_seconds(self.current_ttl_seconds),
131 format_ttl_seconds(self.new_ttl_seconds),
132 )
133 }
134}
135
/// Dry-run result of buying a fresh postage batch with explicit depth/amount.
#[derive(Debug, Clone, PartialEq)]
pub struct BuyPreview {
    /// Requested batch depth (validated to be within Bee's 17..=41 range).
    pub depth: u8,
    /// Per-chunk PLUR amount to pay.
    pub amount_plur: BigInt,
    /// Theoretical capacity at `depth` (2^depth chunks × 4096 bytes).
    pub capacity_bytes: u128,
    /// Lifetime the amount buys at the current chain price.
    pub ttl_seconds: i64,
    /// Total cost in BZZ for all 2^depth chunks.
    pub cost_bzz: f64,
}
144
145impl BuyPreview {
146 pub fn summary(&self) -> String {
147 format!(
148 "buy-preview depth={} amount={} PLUR/chunk: capacity {}, TTL {}, cost {:.4} BZZ",
149 self.depth,
150 self.amount_plur,
151 format_bytes(self.capacity_bytes),
152 format_ttl_seconds(self.ttl_seconds),
153 self.cost_bzz,
154 )
155 }
156}
157
/// Dry-run maintenance plan for an existing batch, produced by [`plan_batch`]:
/// current state, the thresholds it was judged against, and the recommended
/// action with its total cost.
#[derive(Debug, Clone, PartialEq)]
pub struct PlanPreview {
    /// Shortened hex batch id for display.
    pub batch_id_short: String,
    /// Depth before any action.
    pub current_depth: u8,
    /// Usage as a fraction in [0, 1] (scaled to percent for display).
    pub current_usage_pct: f64,
    /// TTL before any action, clamped to be non-negative.
    pub current_ttl_seconds: i64,
    /// Usage threshold (fraction in [0, 1]) the plan was computed against.
    pub usage_threshold_pct: f64,
    /// TTL threshold the plan was computed against.
    pub ttl_threshold_seconds: i64,
    /// How many depth levels a dilute would add.
    pub extra_depth: u8,
    /// The recommended action (possibly `PlanAction::None`).
    pub action: PlanAction,
    /// Total BZZ cost of the recommended action (0.0 for None/Dilute).
    pub total_cost_bzz: f64,
    /// Human-readable explanation of why this action was chosen.
    pub reason: String,
}
187
/// Recommended maintenance action for a batch, as computed by [`plan_batch`].
#[derive(Debug, Clone, PartialEq)]
pub enum PlanAction {
    /// Batch is healthy against both thresholds (or nothing can be done).
    None,
    /// Add PLUR to extend the batch's TTL.
    Topup {
        /// Per-chunk PLUR amount to add.
        delta_amount_plur: BigInt,
        /// Projected TTL after the topup.
        new_ttl_seconds: i64,
        /// Total cost in BZZ.
        cost_bzz: f64,
    },
    /// Raise the batch depth to gain capacity (no BZZ cost, halves TTL per level).
    Dilute {
        /// Target depth after the dilute.
        new_depth: u8,
        /// Projected TTL once the dilute divides the remaining balance.
        post_dilute_ttl_seconds: i64,
    },
    /// Topup first so the TTL survives the dilute, then dilute for capacity.
    TopupThenDilute {
        /// Per-chunk PLUR amount to add before diluting.
        topup_delta_amount_plur: BigInt,
        /// Total cost in BZZ of the topup step (the dilute itself is free).
        topup_cost_bzz: f64,
        /// Target depth after the dilute.
        new_depth: u8,
        /// Projected TTL after both steps.
        post_dilute_ttl_seconds: i64,
    },
}
212
impl PlanPreview {
    /// One-line, human-readable rendering of the plan: current state vs the
    /// thresholds, the proposed action, total cost in BZZ, and the reason.
    pub fn summary(&self) -> String {
        // Render the action-specific fragment first; it is spliced into the
        // final line below.
        let action_line = match &self.action {
            PlanAction::None => "no action needed".to_string(),
            PlanAction::Topup {
                delta_amount_plur,
                cost_bzz,
                new_ttl_seconds,
                ..
            } => format!(
                "topup +{} PLUR/chunk → TTL {} (cost {cost_bzz:.4} BZZ)",
                delta_amount_plur,
                format_ttl_seconds(*new_ttl_seconds),
            ),
            PlanAction::Dilute {
                new_depth,
                post_dilute_ttl_seconds,
            } => format!(
                "dilute → depth {new_depth} (TTL {} after, no BZZ)",
                format_ttl_seconds(*post_dilute_ttl_seconds),
            ),
            PlanAction::TopupThenDilute {
                topup_delta_amount_plur,
                topup_cost_bzz,
                new_depth,
                post_dilute_ttl_seconds,
            } => format!(
                "topup +{topup_delta_amount_plur} PLUR/chunk + dilute → depth {new_depth} (TTL {} after, cost {topup_cost_bzz:.4} BZZ)",
                format_ttl_seconds(*post_dilute_ttl_seconds),
            ),
        };
        // Usage/threshold fields are fractions in [0, 1]; scale to percent here.
        format!(
            "plan-batch {}: usage {:.1}% (thr {:.0}%), TTL {} (thr {}); {action_line}; total {:.4} BZZ — {}",
            self.batch_id_short,
            self.current_usage_pct * 100.0,
            self.usage_threshold_pct * 100.0,
            format_ttl_seconds(self.current_ttl_seconds),
            format_ttl_seconds(self.ttl_threshold_seconds),
            self.total_cost_bzz,
            self.reason,
        )
    }
}
256
/// Default usage fraction at which a batch should be diluted for capacity.
pub const DEFAULT_USAGE_THRESHOLD: f64 = 0.85;
/// Default remaining-TTL floor (one day) below which a topup is recommended.
pub const DEFAULT_TTL_THRESHOLD_SECONDS: i64 = 24 * 60 * 60;
/// Default number of depth levels a dilute adds (+2 depth = 4× capacity).
pub const DEFAULT_EXTRA_DEPTH: u8 = 2;
263
/// Dry-run maintenance plan for an existing batch: decides between doing
/// nothing, topping up, diluting, or topping up and then diluting, judged
/// against the given usage and TTL thresholds. Never performs a chain action.
///
/// `usage_threshold` is a fraction in [0, 1]; `ttl_threshold_seconds` must be
/// positive; `extra_depth` is how many depth levels a dilute would add.
///
/// Errors on out-of-range thresholds, an unloaded chain price, or when
/// `depth + extra_depth` would exceed Bee's depth ceiling of 41.
pub fn plan_batch(
    batch: &PostageBatch,
    chain_state: &ChainState,
    usage_threshold: f64,
    ttl_threshold_seconds: i64,
    extra_depth: u8,
) -> Result<PlanPreview, String> {
    if !(0.0..=1.0).contains(&usage_threshold) {
        return Err(format!(
            "usage_threshold {usage_threshold} out of range [0, 1]"
        ));
    }
    if ttl_threshold_seconds <= 0 {
        return Err("ttl_threshold must be a positive duration".into());
    }
    if chain_state.current_price <= BigInt::from(0) {
        return Err("chain price not loaded yet — try again in a moment".into());
    }
    // Guard against a zero/garbage bucket depth from the node; 16 is assumed
    // to be Bee's bucket depth — TODO confirm against the node's value.
    let bucket_depth = batch.bucket_depth.max(16);
    let usage_pct = stamp_usage(batch.utilization, batch.depth, bucket_depth);
    // batch_ttl may be negative (unknown); clamp so the arithmetic stays sane.
    let current_ttl = batch.batch_ttl.max(0);

    let new_depth = batch.depth.saturating_add(extra_depth);
    if new_depth > 41 {
        return Err(format!(
            "current depth {} + extra_depth {extra_depth} exceeds Bee's depth ceiling 41",
            batch.depth
        ));
    }

    let needs_dilute = usage_pct >= usage_threshold;
    // Diluting by `extra_depth` levels spreads the same balance over
    // 2^extra_depth as many chunks, dividing the remaining TTL accordingly.
    let dilute_factor = 1i64 << extra_depth;
    let post_dilute_ttl = current_ttl / dilute_factor.max(1);

    // Immutable batches cannot be diluted: when usage is high the best we can
    // do is keep the batch alive (topup) and explain why in `reason`.
    if batch.immutable && needs_dilute {
        if current_ttl >= ttl_threshold_seconds {
            return Ok(PlanPreview {
                batch_id_short: short_batch_id(batch),
                current_depth: batch.depth,
                current_usage_pct: usage_pct,
                current_ttl_seconds: current_ttl,
                usage_threshold_pct: usage_threshold,
                ttl_threshold_seconds,
                extra_depth,
                action: PlanAction::None,
                total_cost_bzz: 0.0,
                reason: format!(
                    "immutable batch above usage threshold ({:.1}%) — can't dilute, but TTL still above threshold",
                    usage_pct * 100.0
                ),
            });
        }
        // TTL is also low: topup just enough to climb back to the threshold.
        let needed = ttl_threshold_seconds.saturating_sub(current_ttl).max(1);
        let amount =
            amount_for_ttl_extension(needed, &chain_state.current_price, GNOSIS_BLOCK_TIME_SECS);
        let cost = cost_bzz(&amount, batch.depth);
        return Ok(PlanPreview {
            batch_id_short: short_batch_id(batch),
            current_depth: batch.depth,
            current_usage_pct: usage_pct,
            current_ttl_seconds: current_ttl,
            usage_threshold_pct: usage_threshold,
            ttl_threshold_seconds,
            extra_depth,
            action: PlanAction::Topup {
                delta_amount_plur: amount,
                new_ttl_seconds: current_ttl + needed,
                cost_bzz: cost,
            },
            total_cost_bzz: cost,
            reason: "immutable batch above usage threshold + TTL below threshold — topup only"
                .to_string(),
        });
    }

    // For mutable batches, judge the TTL as it would be AFTER any dilute we
    // are about to propose, so a dilute never silently starves the batch.
    let effective_ttl_after = if needs_dilute {
        post_dilute_ttl
    } else {
        current_ttl
    };
    let needs_topup = effective_ttl_after < ttl_threshold_seconds;

    match (needs_topup, needs_dilute) {
        // Healthy on both axes: nothing to do.
        (false, false) => Ok(PlanPreview {
            batch_id_short: short_batch_id(batch),
            current_depth: batch.depth,
            current_usage_pct: usage_pct,
            current_ttl_seconds: current_ttl,
            usage_threshold_pct: usage_threshold,
            ttl_threshold_seconds,
            extra_depth,
            action: PlanAction::None,
            total_cost_bzz: 0.0,
            reason: "batch is healthy against both thresholds".into(),
        }),
        // Only the TTL is low: topup the shortfall.
        (true, false) => {
            let needed = ttl_threshold_seconds.saturating_sub(current_ttl).max(1);
            let amount = amount_for_ttl_extension(
                needed,
                &chain_state.current_price,
                GNOSIS_BLOCK_TIME_SECS,
            );
            let cost = cost_bzz(&amount, batch.depth);
            Ok(PlanPreview {
                batch_id_short: short_batch_id(batch),
                current_depth: batch.depth,
                current_usage_pct: usage_pct,
                current_ttl_seconds: current_ttl,
                usage_threshold_pct: usage_threshold,
                ttl_threshold_seconds,
                extra_depth,
                action: PlanAction::Topup {
                    delta_amount_plur: amount,
                    new_ttl_seconds: current_ttl + needed,
                    cost_bzz: cost,
                },
                total_cost_bzz: cost,
                reason: format!(
                    "TTL below threshold ({}) — topup",
                    format_ttl_seconds(ttl_threshold_seconds)
                ),
            })
        }
        // Only usage is high, and the TTL survives the dilute: dilute for free.
        (false, true) => Ok(PlanPreview {
            batch_id_short: short_batch_id(batch),
            current_depth: batch.depth,
            current_usage_pct: usage_pct,
            current_ttl_seconds: current_ttl,
            usage_threshold_pct: usage_threshold,
            ttl_threshold_seconds,
            extra_depth,
            action: PlanAction::Dilute {
                new_depth,
                post_dilute_ttl_seconds: post_dilute_ttl,
            },
            total_cost_bzz: 0.0,
            reason: format!(
                "usage above threshold ({:.0}%) — dilute",
                usage_threshold * 100.0
            ),
        }),
        // Usage is high AND the post-dilute TTL would be too low: first topup
        // so that after dividing by the dilute factor the TTL still meets the
        // threshold, then dilute.
        (true, true) => {
            let target_pre_dilute_ttl = ttl_threshold_seconds.saturating_mul(dilute_factor.max(1));
            let needed = target_pre_dilute_ttl.saturating_sub(current_ttl).max(1);
            let amount = amount_for_ttl_extension(
                needed,
                &chain_state.current_price,
                GNOSIS_BLOCK_TIME_SECS,
            );
            let cost = cost_bzz(&amount, batch.depth);
            let post_dilute_ttl = (current_ttl + needed) / dilute_factor.max(1);
            Ok(PlanPreview {
                batch_id_short: short_batch_id(batch),
                current_depth: batch.depth,
                current_usage_pct: usage_pct,
                current_ttl_seconds: current_ttl,
                usage_threshold_pct: usage_threshold,
                ttl_threshold_seconds,
                extra_depth,
                action: PlanAction::TopupThenDilute {
                    topup_delta_amount_plur: amount,
                    topup_cost_bzz: cost,
                    new_depth,
                    post_dilute_ttl_seconds: post_dilute_ttl,
                },
                total_cost_bzz: cost,
                reason:
                    "usage above threshold + post-dilute TTL would fall below — topup then dilute"
                        .to_string(),
            })
        }
    }
}
450
/// Usage fraction of a batch: `utilization` divided by the bucket size
/// `2^(depth - bucket_depth)`. Returns 0.0 when `depth <= bucket_depth`
/// (degenerate geometry reported by the node — presumably "not usable yet";
/// TODO confirm).
fn stamp_usage(utilization: u32, depth: u8, bucket_depth: u8) -> f64 {
    match depth.checked_sub(bucket_depth) {
        // depth <= bucket_depth: no meaningful bucket size to divide by.
        None | Some(0) => 0.0,
        Some(bits) => f64::from(utilization) / (1u64 << bits) as f64,
    }
}
461
/// Suggested batch purchase for a desired size and duration, produced by
/// [`buy_suggest`]. Capacity/TTL may exceed the targets because depth is a
/// power of two and the block count is rounded up.
#[derive(Debug, Clone, PartialEq)]
pub struct BuySuggestion {
    /// Requested payload size in bytes.
    pub target_bytes: u128,
    /// Requested lifetime in seconds.
    pub target_seconds: i64,
    /// Suggested depth (at least 17, the Bee minimum).
    pub depth: u8,
    /// Suggested per-chunk PLUR amount.
    pub amount_plur: BigInt,
    /// Theoretical capacity the suggested depth provides.
    pub capacity_bytes: u128,
    /// Lifetime the suggested amount buys at the current chain price.
    pub ttl_seconds: i64,
    /// Total cost in BZZ for all 2^depth chunks.
    pub cost_bzz: f64,
}
481
482impl BuySuggestion {
483 pub fn summary(&self) -> String {
484 format!(
485 "buy-suggest {} / {}: depth={} amount={} PLUR/chunk → capacity {}, TTL {}, cost {:.4} BZZ",
486 format_bytes(self.target_bytes),
487 format_ttl_seconds(self.target_seconds),
488 self.depth,
489 self.amount_plur,
490 format_bytes(self.capacity_bytes),
491 format_ttl_seconds(self.ttl_seconds),
492 self.cost_bzz,
493 )
494 }
495}
496
/// Theoretical batch capacity in bytes: 2^depth chunks of 4096 bytes each.
/// (Effective capacity is lower; this is the raw geometric bound.)
pub fn theoretical_capacity_bytes(depth: u8) -> u128 {
    const CHUNK_SIZE_BYTES: u128 = 4096;
    CHUNK_SIZE_BYTES << depth
}
502
503pub fn cost_bzz(amount_per_chunk: &BigInt, depth: u8) -> f64 {
507 let total_plur: BigInt = amount_per_chunk * (BigInt::from(1u32) << depth as usize);
508 total_plur.to_string().parse::<f64>().unwrap_or(0.0) / PLUR_PER_BZZ
509}
510
511pub fn ttl_seconds(amount_per_chunk: &BigInt, current_price: &BigInt, blocktime: i64) -> i64 {
515 if current_price <= &BigInt::from(0) {
516 return 0;
517 }
518 let ttl_blocks: BigInt = amount_per_chunk / current_price;
519 let secs: BigInt = &ttl_blocks * BigInt::from(blocktime);
520 secs.to_string().parse::<i64>().unwrap_or(i64::MAX)
521}
522
523pub fn amount_for_ttl_extension(
526 extra_seconds: i64,
527 current_price: &BigInt,
528 blocktime: i64,
529) -> BigInt {
530 if extra_seconds <= 0 || blocktime <= 0 {
531 return BigInt::from(0);
532 }
533 let extra_blocks = BigInt::from(extra_seconds / blocktime);
534 extra_blocks * current_price
535}
536
537pub fn topup_preview(
542 batch: &PostageBatch,
543 delta_amount: BigInt,
544 chain_state: &ChainState,
545) -> Result<TopupPreview, String> {
546 if chain_state.current_price <= BigInt::from(0) {
547 return Err("chain price not loaded yet — try again in a moment".into());
548 }
549 if delta_amount <= BigInt::from(0) {
550 return Err("topup amount must be a positive PLUR value".into());
551 }
552 let extra_ttl_seconds = ttl_seconds(
553 &delta_amount,
554 &chain_state.current_price,
555 GNOSIS_BLOCK_TIME_SECS,
556 );
557 let new_ttl_seconds = batch.batch_ttl.max(0).saturating_add(extra_ttl_seconds);
558 let cost = cost_bzz(&delta_amount, batch.depth);
559 Ok(TopupPreview {
560 batch_id_short: short_batch_id(batch),
561 current_depth: batch.depth,
562 current_ttl_seconds: batch.batch_ttl,
563 delta_amount,
564 extra_ttl_seconds,
565 new_ttl_seconds,
566 cost_bzz: cost,
567 })
568}
569
570pub fn dilute_preview(batch: &PostageBatch, new_depth: u8) -> Result<DilutePreview, String> {
575 if new_depth <= batch.depth {
576 return Err(format!(
577 "new depth {} must be greater than current depth {} (dilute can only raise depth)",
578 new_depth, batch.depth
579 ));
580 }
581 if new_depth > 41 {
582 return Err(format!(
583 "depth {new_depth} exceeds Bee's depth ceiling (41) — refusing to preview"
584 ));
585 }
586 let delta = (new_depth - batch.depth) as u32;
587 let factor = 1u128 << delta;
588 let old_capacity = theoretical_capacity_bytes(batch.depth);
589 let new_capacity = theoretical_capacity_bytes(new_depth);
590 let old_ttl = batch.batch_ttl.max(0);
591 let new_ttl = old_ttl / (factor.min(i64::MAX as u128) as i64).max(1);
592 Ok(DilutePreview {
593 batch_id_short: short_batch_id(batch),
594 old_depth: batch.depth,
595 new_depth,
596 old_capacity_bytes: old_capacity,
597 new_capacity_bytes: new_capacity,
598 old_ttl_seconds: old_ttl,
599 new_ttl_seconds: new_ttl,
600 })
601}
602
603pub fn extend_preview(
606 batch: &PostageBatch,
607 extension_seconds: i64,
608 chain_state: &ChainState,
609) -> Result<ExtendPreview, String> {
610 if extension_seconds <= 0 {
611 return Err("extension must be a positive duration".into());
612 }
613 if chain_state.current_price <= BigInt::from(0) {
614 return Err("chain price not loaded yet — try again in a moment".into());
615 }
616 let needed_amount = amount_for_ttl_extension(
617 extension_seconds,
618 &chain_state.current_price,
619 GNOSIS_BLOCK_TIME_SECS,
620 );
621 let cost = cost_bzz(&needed_amount, batch.depth);
622 let new_ttl_seconds = batch.batch_ttl.max(0).saturating_add(extension_seconds);
623 Ok(ExtendPreview {
624 batch_id_short: short_batch_id(batch),
625 depth: batch.depth,
626 current_ttl_seconds: batch.batch_ttl,
627 extension_seconds,
628 needed_amount_plur: needed_amount,
629 cost_bzz: cost,
630 new_ttl_seconds,
631 })
632}
633
634pub fn buy_preview(
637 depth: u8,
638 amount_plur: BigInt,
639 chain_state: &ChainState,
640) -> Result<BuyPreview, String> {
641 if depth < 17 {
642 return Err(format!(
643 "depth {depth} is below Bee's minimum (17) — refusing to preview"
644 ));
645 }
646 if depth > 41 {
647 return Err(format!(
648 "depth {depth} exceeds Bee's depth ceiling (41) — refusing to preview"
649 ));
650 }
651 if amount_plur <= BigInt::from(0) {
652 return Err("amount must be a positive PLUR value".into());
653 }
654 if chain_state.current_price <= BigInt::from(0) {
655 return Err("chain price not loaded yet — try again in a moment".into());
656 }
657 let capacity_bytes = theoretical_capacity_bytes(depth);
658 let ttl = ttl_seconds(
659 &amount_plur,
660 &chain_state.current_price,
661 GNOSIS_BLOCK_TIME_SECS,
662 );
663 let cost = cost_bzz(&amount_plur, depth);
664 Ok(BuyPreview {
665 depth,
666 amount_plur,
667 capacity_bytes,
668 ttl_seconds: ttl,
669 cost_bzz: cost,
670 })
671}
672
673pub fn buy_suggest(
685 target_bytes: u128,
686 target_seconds: i64,
687 chain_state: &ChainState,
688) -> Result<BuySuggestion, String> {
689 if target_bytes == 0 {
690 return Err("target size must be positive".into());
691 }
692 if target_seconds <= 0 {
693 return Err("target duration must be positive".into());
694 }
695 if chain_state.current_price <= BigInt::from(0) {
696 return Err("chain price not loaded yet — try again in a moment".into());
697 }
698
699 let chunks_needed = target_bytes.div_ceil(4096);
701 let raw_depth = if chunks_needed <= 1 {
705 0
706 } else {
707 128 - (chunks_needed - 1).leading_zeros()
709 };
710 if raw_depth > 41 {
711 return Err(format!(
712 "target {} exceeds Bee's max batch capacity (depth 41 ≈ 8 PiB)",
713 format_bytes(target_bytes)
714 ));
715 }
716 let depth: u8 = raw_depth.max(17) as u8;
717 let capacity_bytes = theoretical_capacity_bytes(depth);
718
719 let target_blocks =
721 target_seconds.saturating_add(GNOSIS_BLOCK_TIME_SECS - 1) / GNOSIS_BLOCK_TIME_SECS;
722 let amount = BigInt::from(target_blocks) * &chain_state.current_price;
723
724 let ttl_seconds = ttl_seconds(&amount, &chain_state.current_price, GNOSIS_BLOCK_TIME_SECS);
726 let cost = cost_bzz(&amount, depth);
727
728 Ok(BuySuggestion {
729 target_bytes,
730 target_seconds,
731 depth,
732 amount_plur: amount,
733 capacity_bytes,
734 ttl_seconds,
735 cost_bzz: cost,
736 })
737}
738
739fn short_batch_id(batch: &PostageBatch) -> String {
740 let hex = batch.batch_id.to_hex();
741 if hex.len() > 8 {
742 format!("{}…", &hex[..8])
743 } else {
744 hex
745 }
746}
747
748pub fn parse_size_bytes(s: &str) -> Result<u128, String> {
760 let s = s.trim();
761 if s.is_empty() {
762 return Err("size cannot be empty".into());
763 }
764 let compact: String = s.chars().filter(|c| !c.is_whitespace()).collect();
767 let (num_part, mul) = split_size(&compact)
768 .ok_or_else(|| format!("invalid size {s:?} (try 5GiB, 2TiB, 500MiB, 4096)"))?;
769 let n: u128 = num_part
770 .parse()
771 .map_err(|_| format!("invalid size {s:?} (numeric part {num_part:?} unparseable)"))?;
772 if n == 0 {
773 return Err("size must be positive".into());
774 }
775 n.checked_mul(mul).ok_or_else(|| {
776 format!("size {s:?} overflowed u128 — that's larger than any plausible Bee batch")
777 })
778}
779
/// Split a compact size string into its leading digits and the multiplier
/// implied by the trailing unit. Returns `None` for an unknown unit; an empty
/// digit part is passed through for the caller to reject.
fn split_size(s: &str) -> Option<(&str, u128)> {
    // The boundary is the first non-digit character (or end of string).
    let boundary = s.find(|c: char| !c.is_ascii_digit()).unwrap_or(s.len());
    let (digits, suffix) = s.split_at(boundary);
    let multiplier: u128 = match suffix.to_ascii_lowercase().as_str() {
        "" | "b" => 1,
        "k" | "kib" => 1 << 10,
        "kb" => 10u128.pow(3),
        "m" | "mib" => 1 << 20,
        "mb" => 10u128.pow(6),
        "g" | "gib" => 1 << 30,
        "gb" => 10u128.pow(9),
        "t" | "tib" => 1 << 40,
        "tb" => 10u128.pow(12),
        "p" | "pib" => 1 << 50,
        "pb" => 10u128.pow(15),
        _ => return None,
    };
    Some((digits, multiplier))
}
808
/// Parse a human-entered duration like "30d", "12h", "90m", "45s", or a bare
/// number of seconds, case-insensitively, into seconds. Rejects empty,
/// non-numeric, and non-positive input; multiplication saturates on overflow.
pub fn parse_duration_seconds(s: &str) -> Result<i64, String> {
    let s = s.trim();
    if s.is_empty() {
        return Err("duration cannot be empty".into());
    }
    // Peel an optional single-letter unit suffix off the end. Matched letters
    // are ASCII, so the one-byte slice below is always on a char boundary.
    let last = s.chars().last().map(|c| c.to_ascii_lowercase());
    let (num_part, multiplier) = match last {
        Some('s') => (&s[..s.len() - 1], 1),
        Some('m') => (&s[..s.len() - 1], 60),
        Some('h') => (&s[..s.len() - 1], 3_600),
        Some('d') => (&s[..s.len() - 1], 86_400),
        _ => (s, 1),
    };
    let n: i64 = num_part
        .parse()
        .map_err(|_| format!("invalid duration {s:?} (try 30d / 12h / 90m / 45s / 5000)"))?;
    if n <= 0 {
        return Err(format!("duration must be positive, got {n}"));
    }
    Ok(n.saturating_mul(multiplier))
}
837
838pub fn parse_plur_amount(s: &str) -> Result<BigInt, String> {
842 let s = s.trim();
843 if s.is_empty() {
844 return Err("amount cannot be empty".into());
845 }
846 s.parse::<BigInt>()
847 .map_err(|_| format!("invalid PLUR amount {s:?} (digits only, e.g. 100000000000)"))
848}
849
850pub fn match_batch_prefix<'a>(
855 batches: &'a [PostageBatch],
856 prefix: &str,
857) -> Result<&'a PostageBatch, String> {
858 let prefix = prefix.trim().trim_end_matches('…').to_ascii_lowercase();
859 if prefix.is_empty() {
860 return Err("batch id prefix cannot be empty".into());
861 }
862 let matches: Vec<&PostageBatch> = batches
863 .iter()
864 .filter(|b| {
865 b.batch_id
866 .to_hex()
867 .to_ascii_lowercase()
868 .starts_with(&prefix)
869 })
870 .collect();
871 match matches.as_slice() {
872 [] => Err(format!(
873 "no batch matches prefix {prefix:?} (try the 8-char hex shown in S2)"
874 )),
875 [single] => Ok(single),
876 many => Err(format!(
877 "{} batches match prefix {prefix:?}: {} — type a longer prefix",
878 many.len(),
879 many.iter()
880 .map(|b| short_batch_id(b))
881 .collect::<Vec<_>>()
882 .join(", ")
883 )),
884 }
885}
886
887#[cfg(test)]
888mod tests {
889 use super::*;
890
891 fn make_batch(amount: Option<BigInt>, depth: u8, batch_ttl: i64) -> PostageBatch {
892 PostageBatch {
893 batch_id: bee::swarm::BatchId::new(&[0xab; 32]).unwrap(),
894 amount,
895 start: 0,
896 owner: String::new(),
897 depth,
898 bucket_depth: depth.saturating_sub(6),
899 immutable: true,
900 batch_ttl,
901 utilization: 0,
902 usable: true,
903 exists: true,
904 label: "test".into(),
905 block_number: 0,
906 }
907 }
908
909 fn chain(current_price_plur: u64) -> ChainState {
910 ChainState {
911 block: 100,
912 chain_tip: 100,
913 current_price: BigInt::from(current_price_plur),
914 total_amount: BigInt::from(0),
915 }
916 }
917
918 #[test]
919 fn capacity_at_depth_22_is_16_gib() {
920 assert_eq!(theoretical_capacity_bytes(22), 16 * 1024 * 1024 * 1024);
922 }
923
924 #[test]
925 fn cost_bzz_matches_canonical_formula() {
926 let amount = BigInt::from(100_000_000_000_000u64);
929 let bzz = cost_bzz(&amount, 22);
930 assert!(
931 (bzz - 41943.04).abs() < 0.0001,
932 "expected ~41943.04 BZZ, got {bzz}"
933 );
934 }
935
936 #[test]
937 fn ttl_seconds_basic() {
938 let secs = ttl_seconds(
941 &BigInt::from(1_000_000u64),
942 &BigInt::from(1u64),
943 GNOSIS_BLOCK_TIME_SECS,
944 );
945 assert_eq!(secs, 5_000_000);
946 }
947
948 #[test]
949 fn ttl_seconds_zero_price_returns_zero() {
950 let secs = ttl_seconds(
951 &BigInt::from(1_000_000u64),
952 &BigInt::from(0u64),
953 GNOSIS_BLOCK_TIME_SECS,
954 );
955 assert_eq!(secs, 0);
956 }
957
958 #[test]
959 fn amount_for_extension_is_inverse_of_ttl() {
960 let amt = amount_for_ttl_extension(5_000_000, &BigInt::from(1u64), GNOSIS_BLOCK_TIME_SECS);
963 assert_eq!(amt, BigInt::from(1_000_000u64));
964 }
965
966 #[test]
967 fn topup_preview_typical_case() {
968 let batch = make_batch(Some(BigInt::from(0)), 22, 86_400);
972 let preview = topup_preview(&batch, BigInt::from(10_000_000_000u64), &chain(1)).unwrap();
973 assert_eq!(preview.current_depth, 22);
974 assert_eq!(preview.extra_ttl_seconds, 50_000_000_000);
975 assert!((preview.cost_bzz - 4.194304).abs() < 0.0001);
976 assert_eq!(preview.new_ttl_seconds, 86_400 + 50_000_000_000);
977 }
978
979 #[test]
980 fn topup_preview_rejects_zero_price() {
981 let batch = make_batch(None, 22, 86_400);
982 let err = topup_preview(&batch, BigInt::from(1_000), &chain(0)).unwrap_err();
983 assert!(err.contains("chain price"));
984 }
985
986 #[test]
987 fn topup_preview_rejects_zero_delta() {
988 let batch = make_batch(None, 22, 86_400);
989 let err = topup_preview(&batch, BigInt::from(0), &chain(1)).unwrap_err();
990 assert!(err.contains("positive PLUR"));
991 }
992
993 #[test]
994 fn dilute_preview_doubles_capacity_halves_ttl() {
995 let batch = make_batch(None, 22, 100_000);
997 let preview = dilute_preview(&batch, 23).unwrap();
998 assert_eq!(preview.old_capacity_bytes * 2, preview.new_capacity_bytes);
999 assert_eq!(preview.old_ttl_seconds / 2, preview.new_ttl_seconds);
1000 assert!(preview.summary().contains("cost 0 BZZ"));
1001 }
1002
1003 #[test]
1004 fn dilute_preview_rejects_lower_or_equal_depth() {
1005 let batch = make_batch(None, 22, 100_000);
1006 assert!(dilute_preview(&batch, 22).is_err());
1007 assert!(dilute_preview(&batch, 21).is_err());
1008 }
1009
1010 #[test]
1011 fn dilute_preview_rejects_above_depth_ceiling() {
1012 let batch = make_batch(None, 22, 100_000);
1013 assert!(dilute_preview(&batch, 42).is_err());
1014 }
1015
1016 #[test]
1017 fn extend_preview_typical_case() {
1018 let batch = make_batch(None, 22, 86_400);
1022 let preview = extend_preview(&batch, 5_000_000, &chain(1)).unwrap();
1023 assert_eq!(preview.needed_amount_plur, BigInt::from(1_000_000u64));
1024 assert!((preview.cost_bzz - 4.194304e-4).abs() < 1e-9);
1025 assert_eq!(preview.new_ttl_seconds, 86_400 + 5_000_000);
1026 }
1027
1028 #[test]
1029 fn extend_preview_rejects_zero_extension() {
1030 let batch = make_batch(None, 22, 86_400);
1031 assert!(extend_preview(&batch, 0, &chain(1)).is_err());
1032 assert!(extend_preview(&batch, -10, &chain(1)).is_err());
1033 }
1034
1035 #[test]
1036 fn buy_preview_typical_case() {
1037 let preview = buy_preview(22, BigInt::from(100_000_000_000_000u64), &chain(1)).unwrap();
1040 assert_eq!(preview.capacity_bytes, 16 * 1024 * 1024 * 1024);
1041 assert_eq!(preview.ttl_seconds, 500_000_000_000_000);
1042 assert!((preview.cost_bzz - 41943.04).abs() < 0.0001);
1043 }
1044
1045 #[test]
1046 fn buy_preview_rejects_below_minimum_depth() {
1047 assert!(buy_preview(16, BigInt::from(100), &chain(1)).is_err());
1048 }
1049
1050 #[test]
1051 fn buy_preview_rejects_above_ceiling() {
1052 assert!(buy_preview(42, BigInt::from(100), &chain(1)).is_err());
1053 }
1054
1055 #[test]
1056 fn buy_preview_rejects_zero_amount() {
1057 assert!(buy_preview(22, BigInt::from(0), &chain(1)).is_err());
1058 }
1059
1060 #[test]
1061 fn parse_size_plain_integer_is_bytes() {
1062 assert_eq!(parse_size_bytes("4096").unwrap(), 4096);
1063 assert!(parse_size_bytes("0").is_err());
1064 assert!(parse_size_bytes("").is_err());
1065 }
1066
1067 #[test]
1068 fn parse_size_binary_suffixes() {
1069 assert_eq!(parse_size_bytes("1KiB").unwrap(), 1024);
1070 assert_eq!(parse_size_bytes("1MiB").unwrap(), 1024u128.pow(2));
1071 assert_eq!(parse_size_bytes("1GiB").unwrap(), 1024u128.pow(3));
1072 assert_eq!(parse_size_bytes("1TiB").unwrap(), 1024u128.pow(4));
1073 assert_eq!(parse_size_bytes("1G").unwrap(), 1024u128.pow(3));
1076 assert_eq!(parse_size_bytes("4K").unwrap(), 4096);
1077 }
1078
1079 #[test]
1080 fn parse_size_decimal_suffixes() {
1081 assert_eq!(parse_size_bytes("1KB").unwrap(), 1_000);
1082 assert_eq!(parse_size_bytes("1MB").unwrap(), 1_000_000);
1083 assert_eq!(parse_size_bytes("1GB").unwrap(), 1_000_000_000);
1084 }
1085
1086 #[test]
1087 fn parse_size_handles_whitespace_and_case() {
1088 assert_eq!(parse_size_bytes(" 5 GiB ").unwrap(), 5 * 1024u128.pow(3));
1089 assert_eq!(parse_size_bytes("5gib").unwrap(), 5 * 1024u128.pow(3));
1090 assert_eq!(parse_size_bytes("2 TIB").unwrap(), 2 * 1024u128.pow(4));
1091 }
1092
1093 #[test]
1094 fn parse_size_rejects_unknown_unit() {
1095 assert!(parse_size_bytes("5xyz").is_err());
1096 assert!(parse_size_bytes("abc").is_err());
1097 }
1098
1099 #[test]
1100 fn buy_suggest_typical_5gib_30d() {
1101 let s = buy_suggest(5 * 1024u128.pow(3), 30 * 86_400, &chain(1)).unwrap();
1106 assert_eq!(s.depth, 21);
1107 assert_eq!(s.capacity_bytes, 8 * 1024u128.pow(3));
1108 assert_eq!(s.amount_plur, BigInt::from(518_400u32));
1109 assert_eq!(s.ttl_seconds, 30 * 86_400);
1110 }
1111
1112 #[test]
1113 fn buy_suggest_4gib_exact_uses_depth_20() {
1114 let s = buy_suggest(4 * 1024u128.pow(3), 86_400, &chain(1)).unwrap();
1116 assert_eq!(s.depth, 20);
1117 assert_eq!(s.capacity_bytes, 4 * 1024u128.pow(3));
1118 }
1119
1120 #[test]
1121 fn buy_suggest_tiny_target_clamps_to_min_depth_17() {
1122 let s = buy_suggest(4096, 86_400, &chain(1)).unwrap();
1124 assert_eq!(s.depth, 17);
1125 assert!(s.capacity_bytes >= 4096);
1126 }
1127
1128 #[test]
1129 fn buy_suggest_rejects_above_max_depth() {
1130 let huge = 16 * 1024u128.pow(5); assert!(buy_suggest(huge, 86_400, &chain(1)).is_err());
1133 }
1134
1135 #[test]
1136 fn buy_suggest_rounds_duration_up_in_blocks() {
1137 let s = buy_suggest(4096, 7, &chain(1)).unwrap();
1140 assert_eq!(s.amount_plur, BigInt::from(2u32));
1141 assert_eq!(s.ttl_seconds, 10);
1142 }
1143
1144 #[test]
1145 fn buy_suggest_rejects_zero_or_negative_inputs() {
1146 assert!(buy_suggest(0, 86_400, &chain(1)).is_err());
1147 assert!(buy_suggest(4096, 0, &chain(1)).is_err());
1148 assert!(buy_suggest(4096, -5, &chain(1)).is_err());
1149 }
1150
1151 #[test]
1152 fn buy_suggest_rejects_zero_chain_price() {
1153 assert!(buy_suggest(4096, 86_400, &chain(0)).is_err());
1154 }
1155
1156 #[test]
1157 fn buy_suggest_summary_is_compact() {
1158 let s = buy_suggest(5 * 1024u128.pow(3), 30 * 86_400, &chain(1)).unwrap();
1159 let line = s.summary();
1160 assert!(line.starts_with("buy-suggest"));
1161 assert!(line.contains("5.0 GiB"));
1162 assert!(line.contains("30d 0h"));
1163 assert!(line.contains("depth=21"));
1164 assert!(!line.contains('\n'));
1165 }
1166
1167 #[test]
1168 fn parse_duration_handles_units() {
1169 assert_eq!(parse_duration_seconds("5000").unwrap(), 5_000);
1170 assert_eq!(parse_duration_seconds("45s").unwrap(), 45);
1171 assert_eq!(parse_duration_seconds("90m").unwrap(), 5_400);
1172 assert_eq!(parse_duration_seconds("12h").unwrap(), 43_200);
1173 assert_eq!(parse_duration_seconds("30d").unwrap(), 2_592_000);
1174 assert_eq!(parse_duration_seconds(" 7D ").unwrap(), 604_800);
1176 }
1177
1178 #[test]
1179 fn parse_duration_rejects_invalid() {
1180 assert!(parse_duration_seconds("").is_err());
1181 assert!(parse_duration_seconds("abc").is_err());
1182 assert!(parse_duration_seconds("0d").is_err());
1183 assert!(parse_duration_seconds("-5h").is_err());
1184 }
1185
1186 #[test]
1187 fn parse_plur_handles_large_amounts() {
1188 let amt = parse_plur_amount("100000000000000").unwrap();
1189 assert_eq!(amt, BigInt::from(100_000_000_000_000u64));
1190 }
1191
1192 #[test]
1193 fn parse_plur_rejects_garbage() {
1194 assert!(parse_plur_amount("").is_err());
1195 assert!(parse_plur_amount("1e14").is_err()); assert!(parse_plur_amount("123abc").is_err());
1197 }
1198
1199 #[test]
1200 fn match_batch_prefix_unique_returns_single() {
1201 let b1 = make_batch_with_id([0xab; 32]);
1202 let b2 = make_batch_with_id([0xcd; 32]);
1203 let batches = vec![b1.clone(), b2.clone()];
1204 let m = match_batch_prefix(&batches, "abab").unwrap();
1205 assert_eq!(m.batch_id, b1.batch_id);
1206 }
1207
1208 #[test]
1209 fn match_batch_prefix_handles_trailing_ellipsis() {
1210 let b1 = make_batch_with_id([0xab; 32]);
1213 let batches = vec![b1.clone()];
1214 let m = match_batch_prefix(&batches, "abababab…").unwrap();
1215 assert_eq!(m.batch_id, b1.batch_id);
1216 }
1217
1218 #[test]
1219 fn match_batch_prefix_ambiguous_errors_with_listing() {
1220 let b1 = make_batch_with_id([0xab; 32]);
1221 let b2 = make_batch_with_id([0xab; 32]); let batches = vec![b1, b2];
1223 let err = match_batch_prefix(&batches, "ab").unwrap_err();
1224 assert!(err.contains("match prefix"));
1225 }
1226
1227 #[test]
1228 fn match_batch_prefix_no_match_errors() {
1229 let b1 = make_batch_with_id([0xab; 32]);
1230 let batches = vec![b1];
1231 let err = match_batch_prefix(&batches, "ff").unwrap_err();
1232 assert!(err.contains("no batch matches"));
1233 }
1234
1235 fn make_batch_with_id(bytes: [u8; 32]) -> PostageBatch {
1236 PostageBatch {
1237 batch_id: bee::swarm::BatchId::new(&bytes).unwrap(),
1238 amount: None,
1239 start: 0,
1240 owner: String::new(),
1241 depth: 22,
1242 bucket_depth: 16,
1243 immutable: true,
1244 batch_ttl: 86_400,
1245 utilization: 0,
1246 usable: true,
1247 exists: true,
1248 label: "test".into(),
1249 block_number: 0,
1250 }
1251 }
1252
1253 #[test]
1254 fn summary_strings_are_compact_and_human_readable() {
1255 let batch = make_batch(None, 22, 86_400);
1258 let p = topup_preview(&batch, BigInt::from(10u64), &chain(1)).unwrap();
1259 let s = p.summary();
1260 assert!(s.starts_with("topup-preview"));
1261 assert!(!s.contains('\n'));
1262
1263 let p = dilute_preview(&batch, 23).unwrap();
1264 let s = p.summary();
1265 assert!(s.starts_with("dilute-preview"));
1266 assert!(!s.contains('\n'));
1267
1268 let p = extend_preview(&batch, 86_400, &chain(1)).unwrap();
1269 let s = p.summary();
1270 assert!(s.starts_with("extend-preview"));
1271 assert!(!s.contains('\n'));
1272
1273 let p = buy_preview(22, BigInt::from(10_000), &chain(1)).unwrap();
1274 let s = p.summary();
1275 assert!(s.starts_with("buy-preview"));
1276 assert!(!s.contains('\n'));
1277 }
1278
1279 fn mutable_batch(amount: u64, depth: u8, batch_ttl: i64, utilization: u32) -> PostageBatch {
1280 let mut b = make_batch(Some(BigInt::from(amount)), depth, batch_ttl);
1281 b.immutable = false;
1282 b.utilization = utilization;
1283 b
1284 }
1285
1286 #[test]
1287 fn plan_batch_healthy_returns_no_action() {
1288 let batch = mutable_batch(1_000_000, 22, 30 * 86_400, 0);
1290 let plan = plan_batch(
1291 &batch,
1292 &chain(1),
1293 DEFAULT_USAGE_THRESHOLD,
1294 DEFAULT_TTL_THRESHOLD_SECONDS,
1295 DEFAULT_EXTRA_DEPTH,
1296 )
1297 .unwrap();
1298 assert_eq!(plan.action, PlanAction::None);
1299 assert_eq!(plan.total_cost_bzz, 0.0);
1300 assert!(plan.reason.contains("healthy"));
1301 }
1302
1303 #[test]
1304 fn plan_batch_low_ttl_only_topup() {
1305 let batch = mutable_batch(1_000_000, 22, 3600, 0);
1308 let plan = plan_batch(
1309 &batch,
1310 &chain(1),
1311 DEFAULT_USAGE_THRESHOLD,
1312 DEFAULT_TTL_THRESHOLD_SECONDS,
1313 DEFAULT_EXTRA_DEPTH,
1314 )
1315 .unwrap();
1316 match plan.action {
1317 PlanAction::Topup {
1318 ref delta_amount_plur,
1319 ..
1320 } => {
1321 assert!(*delta_amount_plur > BigInt::from(0));
1322 }
1323 other => panic!("expected Topup, got {other:?}"),
1324 }
1325 assert!(plan.total_cost_bzz > 0.0);
1326 }
1327
1328 #[test]
1329 fn plan_batch_high_usage_only_dilute() {
1330 let batch = mutable_batch(1_000_000, 22, 365 * 86_400, 64);
1335 let plan = plan_batch(
1336 &batch,
1337 &chain(1),
1338 DEFAULT_USAGE_THRESHOLD,
1339 DEFAULT_TTL_THRESHOLD_SECONDS,
1340 DEFAULT_EXTRA_DEPTH,
1341 )
1342 .unwrap();
1343 match plan.action {
1344 PlanAction::Dilute { new_depth, .. } => {
1345 assert_eq!(new_depth, 24);
1346 }
1347 other => panic!("expected Dilute, got {other:?}"),
1348 }
1349 assert_eq!(plan.total_cost_bzz, 0.0);
1350 }
1351
1352 #[test]
1353 fn plan_batch_high_usage_low_ttl_topup_then_dilute() {
1354 let batch = mutable_batch(1_000_000, 22, 2 * 24 * 3600, 64);
1357 let plan = plan_batch(
1358 &batch,
1359 &chain(1),
1360 DEFAULT_USAGE_THRESHOLD,
1361 DEFAULT_TTL_THRESHOLD_SECONDS,
1362 DEFAULT_EXTRA_DEPTH,
1363 )
1364 .unwrap();
1365 match plan.action {
1366 PlanAction::TopupThenDilute {
1367 ref topup_delta_amount_plur,
1368 new_depth,
1369 ..
1370 } => {
1371 assert!(*topup_delta_amount_plur > BigInt::from(0));
1372 assert_eq!(new_depth, 24);
1373 }
1374 other => panic!("expected TopupThenDilute, got {other:?}"),
1375 }
1376 assert!(plan.total_cost_bzz > 0.0);
1377 }
1378
1379 #[test]
1380 fn plan_batch_immutable_high_usage_skips_dilute() {
1381 let mut batch = mutable_batch(1_000_000, 22, 30 * 86_400, 64);
1382 batch.immutable = true;
1383 let plan = plan_batch(
1384 &batch,
1385 &chain(1),
1386 DEFAULT_USAGE_THRESHOLD,
1387 DEFAULT_TTL_THRESHOLD_SECONDS,
1388 DEFAULT_EXTRA_DEPTH,
1389 )
1390 .unwrap();
1391 assert_eq!(plan.action, PlanAction::None);
1394 assert!(plan.reason.contains("immutable"));
1395 }
1396
1397 #[test]
1398 fn plan_batch_rejects_out_of_range_threshold() {
1399 let batch = mutable_batch(1_000_000, 22, 30 * 86_400, 0);
1400 assert!(plan_batch(&batch, &chain(1), 1.5, DEFAULT_TTL_THRESHOLD_SECONDS, 2).is_err());
1401 assert!(plan_batch(&batch, &chain(1), -0.1, 86400, 2).is_err());
1402 }
1403
1404 #[test]
1405 fn plan_batch_summary_is_one_line() {
1406 let batch = mutable_batch(1_000_000, 22, 3600, 64);
1407 let plan = plan_batch(
1408 &batch,
1409 &chain(1),
1410 DEFAULT_USAGE_THRESHOLD,
1411 DEFAULT_TTL_THRESHOLD_SECONDS,
1412 DEFAULT_EXTRA_DEPTH,
1413 )
1414 .unwrap();
1415 let s = plan.summary();
1416 assert!(s.starts_with("plan-batch"));
1417 assert!(!s.contains('\n'), "summary must be a single line: {s}");
1418 }
1419}