//! spider_cli 1.7.14
//!
//! Multithreaded web crawler written in Rust.
use clap::Subcommand;

// CLI subcommands parsed via clap's derive API.
//
// The `///` doc comments on the variant and its fields double as the
// generated `--help` text at runtime, so they are intentionally left
// byte-for-byte untouched here.
#[derive(Subcommand)]
pub enum Commands {
    /// crawl the website.
    // NOTE(review): `CRAWL` is SCREAMING_CASE rather than the idiomatic
    // `Crawl` (UpperCamelCase). Renaming would break any `Commands::CRAWL`
    // matches elsewhere in the crate, so it is kept as-is; the clap-derived
    // subcommand name should be "crawl" either way — confirm against callers
    // before renaming.
    CRAWL {
        /// sequentially one by one crawl pages
        // Boolean flag (`-s` / `--sync`): crawl pages one at a time rather
        // than concurrently, per the help text above.
        #[clap(short, long)]
        sync: bool,
        /// stdout all links crawled
        // Boolean flag (`-o` / `--output-links`): print every crawled link
        // to stdout, per the help text above.
        #[clap(short, long)]
        output_links: bool,
    },
}