// forest/dev/subcommands/export_state_tree_cmd.rs
use crate::{
    chain::{ChainStore, index::ResolveNullTipset},
    cid_collections::FileBackedCidHashSet,
    cli_shared::{chain_path, read_config},
    daemon::db_util::load_all_forest_cars,
    db::{
        CAR_DB_DIR_NAME,
        car::{ManyCar, forest::FOREST_CAR_FILE_EXTENSION},
        db_engine::{db_root, open_db},
    },
    genesis::read_genesis_header,
    ipld::IpldStream,
    networks::{ChainConfig, NetworkChain},
    shim::{clock::ChainEpoch, executor::Receipt},
};
use anyhow::Context as _;
use clap::Args;
use itertools::Itertools;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};
use tokio::io::AsyncWriteExt as _;

28#[derive(Debug, Args)]
30pub struct ExportStateTreeCommand {
31 #[arg(long, required = true)]
33 chain: NetworkChain,
34 #[arg(long)]
36 db: Option<PathBuf>,
37 #[arg(long)]
39 from: ChainEpoch,
40 #[arg(long)]
42 to: ChainEpoch,
43 #[arg(short, long)]
45 output: Option<PathBuf>,
46}
48impl ExportStateTreeCommand {
49 pub async fn run(self) -> anyhow::Result<()> {
50 let Self {
51 chain,
52 db,
53 from,
54 to,
55 output,
56 } = self;
57 let output = output.unwrap_or_else(|| {
58 Path::new(&format!(
59 "statetree_{chain}_{to}_{from}{FOREST_CAR_FILE_EXTENSION}"
60 ))
61 .to_owned()
62 });
63 let db_root_path = if let Some(db) = db {
64 db
65 } else {
66 let (_, config) = read_config(None, Some(chain.clone()))?;
67 db_root(&chain_path(&config))?
68 };
69 let forest_car_db_dir = db_root_path.join(CAR_DB_DIR_NAME);
70 let db: Arc<ManyCar<crate::db::parity_db::ParityDb>> =
71 Arc::new(ManyCar::new(open_db(db_root_path, &Default::default())?));
72 load_all_forest_cars(&db, &forest_car_db_dir)?;
73
74 let chain_config = Arc::new(ChainConfig::from_chain(&chain));
75 let genesis_header =
76 read_genesis_header(None, chain_config.genesis_bytes(&db).await?.as_deref(), &db)
77 .await?;
78 let chain_store = Arc::new(ChainStore::new(
79 db.clone(),
80 db.clone(),
81 db.clone(),
82 chain_config,
83 genesis_header,
84 )?);
85
86 let start_ts = chain_store.chain_index().tipset_by_height(
87 from,
88 chain_store.heaviest_tipset(),
89 ResolveNullTipset::TakeNewer,
90 )?;
91
92 let mut ipld_roots = vec![];
93 for (child, ts) in start_ts
94 .chain(&db)
95 .tuple_windows()
96 .take_while(|(_, parent)| parent.epoch() >= to)
97 {
98 ipld_roots.extend([*child.parent_state(), *child.parent_message_receipts()]);
99 ipld_roots.extend(ts.block_headers().iter().map(|h| h.messages));
100 let receipts = Receipt::get_receipts(&db, *child.parent_message_receipts())
101 .with_context(|| {
102 format!(
103 "failed to get receipts, root: {}, epoch: {}, tipset key: {}",
104 *child.parent_message_receipts(),
105 ts.epoch(),
106 ts.key(),
107 )
108 })?;
109 ipld_roots.extend(receipts.into_iter().filter_map(|r| r.events_root()));
110 }
111 let roots = nunny::vec![ipld_roots.first().cloned().context("no ipld roots found")?];
112 let stream = IpldStream::new(
113 db,
114 ipld_roots.clone(),
115 FileBackedCidHashSet::new_in_temp_dir()?,
116 );
117 let frames = crate::db::car::forest::Encoder::compress_stream_default(stream);
118 let tmp =
119 tempfile::NamedTempFile::new_in(output.parent().unwrap_or_else(|| Path::new(".")))?
120 .into_temp_path();
121 let mut writer = tokio::io::BufWriter::new(tokio::fs::File::create(&tmp).await?);
122 crate::db::car::forest::Encoder::write(&mut writer, roots, frames).await?;
123 writer.flush().await?;
124 tmp.persist(output)?;
125
126 Ok(())
127 }
128}