reth_cli_commands/import.rs

//! Command that initializes the node by importing a chain from a file.
use crate::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs};
use alloy_primitives::B256;
use clap::Parser;
use futures::{Stream, StreamExt};
use reth_beacon_consensus::EthBeaconConsensus;
use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_cli::chainspec::ChainSpecParser;
use reth_config::Config;
use reth_consensus::Consensus;
use reth_db::tables;
use reth_db_api::transaction::DbTx;
use reth_downloaders::{
    bodies::bodies::BodiesDownloaderBuilder,
    file_client::{ChunkedFileReader, FileClient, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE},
    headers::reverse_headers::ReverseHeadersDownloaderBuilder,
};
use reth_evm::execute::BlockExecutorProvider;
use reth_network_p2p::{
    bodies::downloader::BodyDownloader,
    headers::downloader::{HeaderDownloader, SyncTarget},
};
use reth_node_core::version::SHORT_VERSION;
use reth_node_events::node::NodeEvent;
use reth_provider::{
    providers::ProviderNodeTypes, BlockNumReader, ChainSpecProvider, HeaderProvider, ProviderError,
    ProviderFactory, StageCheckpointReader,
};
use reth_prune::PruneModes;
use reth_stages::{prelude::*, Pipeline, StageId, StageSet};
use reth_static_file::StaticFileProducer;
use std::{path::PathBuf, sync::Arc};
use tokio::sync::watch;
use tracing::{debug, error, info};

/// Syncs RLP-encoded blocks from a file.
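///
/// # Example
///
/// An illustrative invocation, assuming an RLP-encoded block file already exists on disk (the
/// chain and path shown are placeholders):
///
/// ```text
/// # illustrative; adjust the chain and file path to your setup
/// reth import --chain mainnet ./chain.rlp
/// ```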
#[derive(Debug, Parser)]
pub struct ImportCommand<C: ChainSpecParser> {
    #[command(flatten)]
    env: EnvironmentArgs<C>,

    /// Disables stages that require state.
    #[arg(long, verbatim_doc_comment)]
    no_state: bool,

    /// Chunk byte length to read from file.
    #[arg(long, value_name = "CHUNK_LEN", verbatim_doc_comment)]
    chunk_len: Option<u64>,

    /// The path to a block file for import.
    ///
    /// The online stages (headers and bodies) are replaced by a file import, after which the
    /// remaining stages are executed.
    #[arg(value_name = "IMPORT_PATH", verbatim_doc_comment)]
    path: PathBuf,
}

impl<C: ChainSpecParser<ChainSpec: EthChainSpec + EthereumHardforks>> ImportCommand<C> {
    /// Execute `import` command
    pub async fn execute<N, E, F>(self, executor: F) -> eyre::Result<()>
    where
        N: CliNodeTypes<ChainSpec = C::ChainSpec>,
        E: BlockExecutorProvider<Primitives = N::Primitives>,
        F: FnOnce(Arc<N::ChainSpec>) -> E,
    {
        info!(target: "reth::cli", "reth {} starting", SHORT_VERSION);

        if self.no_state {
            info!(target: "reth::cli", "Disabled stages requiring state");
        }

        debug!(target: "reth::cli",
            chunk_byte_len=self.chunk_len.unwrap_or(DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE),
            "Chunking chain import"
        );

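        // open the data directory with read-write access and load the node configuration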
        let Environment { provider_factory, config, .. } = self.env.init::<N>(AccessRights::RW)?;

        let executor = executor(provider_factory.chain_spec());
        let consensus = Arc::new(EthBeaconConsensus::new(self.env.chain.clone()));
        info!(target: "reth::cli", "Consensus engine initialized");

        // open the chain file for chunked reading
        let mut reader = ChunkedFileReader::new(&self.path, self.chunk_len).await?;

        let mut total_decoded_blocks = 0;
        let mut total_decoded_txns = 0;

        while let Some(file_client) = reader.next_chunk::<FileClient>().await? {
            // create a new FileClient from the chunk read from the file
            info!(target: "reth::cli", "Importing chain file chunk");

            let tip = file_client.tip().ok_or(eyre::eyre!("file client has no tip"))?;
            info!(target: "reth::cli", "Chain file chunk read");

            total_decoded_blocks += file_client.headers_len();
            total_decoded_txns += file_client.total_transactions();

            let (mut pipeline, events) = build_import_pipeline(
                &config,
                provider_factory.clone(),
                &consensus,
                Arc::new(file_client),
                StaticFileProducer::new(provider_factory.clone(), PruneModes::default()),
                self.no_state,
                executor.clone(),
            )?;

            // override the tip
            pipeline.set_tip(tip);
            debug!(target: "reth::cli", ?tip, "Tip manually set");

            let provider = provider_factory.provider()?;

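            // log pipeline events from a task, starting at the last `Finish` stage checkpoint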
            let latest_block_number =
                provider.get_stage_checkpoint(StageId::Finish)?.map(|ch| ch.block_number);
            tokio::spawn(reth_node_events::node::handle_events(None, latest_block_number, events));

            // Run pipeline
            info!(target: "reth::cli", "Starting sync pipeline");
            tokio::select! {
                res = pipeline.run() => res?,
                _ = tokio::signal::ctrl_c() => {},
            }
        }

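        // compare decoded counts against the database tables to detect a partial import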
        let provider = provider_factory.provider()?;

        let total_imported_blocks = provider.tx_ref().entries::<tables::HeaderNumbers>()?;
        let total_imported_txns = provider.tx_ref().entries::<tables::TransactionHashNumbers>()?;

        if total_decoded_blocks != total_imported_blocks ||
            total_decoded_txns != total_imported_txns
        {
            error!(target: "reth::cli",
                total_decoded_blocks,
                total_imported_blocks,
                total_decoded_txns,
                total_imported_txns,
                "Chain was partially imported"
            );
        }

        info!(target: "reth::cli",
            total_imported_blocks,
            total_imported_txns,
            "Chain file imported"
        );

        Ok(())
    }
}

/// Builds the import pipeline.
///
/// If execution is enabled (`disable_exec` is `false`), all stages will run. Otherwise, only the
/// stages that don't require state will run.
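///
/// Returns the configured pipeline together with a stream of its events.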
pub fn build_import_pipeline<N, C, E>(
    config: &Config,
    provider_factory: ProviderFactory<N>,
    consensus: &Arc<C>,
    file_client: Arc<FileClient>,
    static_file_producer: StaticFileProducer<ProviderFactory<N>>,
    disable_exec: bool,
    executor: E,
) -> eyre::Result<(Pipeline<N>, impl Stream<Item = NodeEvent>)>
where
    N: ProviderNodeTypes + CliNodeTypes,
    C: Consensus + 'static,
    E: BlockExecutorProvider<Primitives = N::Primitives>,
{
    if !file_client.has_canonical_blocks() {
        eyre::bail!("unable to import non canonical blocks");
    }

    // Retrieve latest header found in the database.
    let last_block_number = provider_factory.last_block_number()?;
    let local_head = provider_factory
        .sealed_header(last_block_number)?
        .ok_or_else(|| ProviderError::HeaderNotFound(last_block_number.into()))?;

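    // Both downloaders are fed by the in-memory file client rather than network peers.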
    let mut header_downloader = ReverseHeadersDownloaderBuilder::new(config.stages.headers)
        .build(file_client.clone(), consensus.clone())
        .into_task();
    // TODO: The pipeline should correctly configure the downloader on its own.
    // Find the possibility to remove unnecessary pre-configuration.
    header_downloader.update_local_head(local_head);
    header_downloader.update_sync_target(SyncTarget::Tip(file_client.tip().unwrap()));

    let mut body_downloader = BodiesDownloaderBuilder::new(config.stages.bodies)
        .build(file_client.clone(), consensus.clone(), provider_factory.clone())
        .into_task();
    // TODO: The pipeline should correctly configure the downloader on its own.
    // Find the possibility to remove unnecessary pre-configuration.
    body_downloader
        .set_download_range(file_client.min_block().unwrap()..=file_client.max_block().unwrap())
        .expect("failed to set download range");

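    // channel that forwards the tip set on the pipeline (via `set_tip`) to the headers stage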
    let (tip_tx, tip_rx) = watch::channel(B256::ZERO);

    let max_block = file_client.max_block().unwrap_or(0);

    let pipeline = Pipeline::builder()
        .with_tip_sender(tip_tx)
        // we want to sync all blocks the file client provides or 0 if empty
        .with_max_block(max_block)
        .with_fail_on_unwind(true)
        .add_stages(
            DefaultStages::new(
                provider_factory.clone(),
                tip_rx,
                consensus.clone(),
                header_downloader,
                body_downloader,
                executor,
                config.stages.clone(),
                PruneModes::default(),
            )
            .builder()
            .disable_all_if(&StageId::STATE_REQUIRED, || disable_exec),
        )
        .build(provider_factory, static_file_producer);

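    // expose pipeline events as `NodeEvent`s for the CLI event handler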
    let events = pipeline.events().map(Into::into);

    Ok((pipeline, events))
}

#[cfg(test)]
mod tests {
    use super::*;
    use reth_ethereum_cli::chainspec::{EthereumChainSpecParser, SUPPORTED_CHAINS};

    #[test]
    fn parse_common_import_command_chain_args() {
        for chain in SUPPORTED_CHAINS {
            let args: ImportCommand<EthereumChainSpecParser> =
                ImportCommand::parse_from(["reth", "--chain", chain, "."]);
            assert_eq!(
                Ok(args.env.chain.chain),
                chain.parse::<reth_chainspec::Chain>(),
                "failed to parse chain {chain}"
            );
        }
    }
}