mod page;
pub use self::page::Page;

use std::env;
use std::io::Read;
use std::ascii::AsciiExt;
use std::default::Default;
use std::collections::BTreeMap;

use url::Url;
use reqwest::Client as HttpClient;
use reqwest::header::UserAgent;
use reqwest::StatusCode;
use reqwest::Response as HttpResponse;
use robotparser::RobotFileParser;
use html5ever::tokenizer::Tokenizer;
use html5ever::tokenizer::buffer_queue::BufferQueue;
use sidekiq::Client as SidekiqClient;
use sidekiq::ClientOpts as SidekiqClientOpts;
use sidekiq::RedisPool;
use encoding::{Encoding, DecoderTrap};
use encoding::all::UTF_8;
use url_serde::Serde;

const MAMAN_ENV: &'static str = "MAMAN_ENV";
const MAMAN_ENV_DEFAULT: &'static str = "development";

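/// A web crawler: starting from `base_url`, it fetches pages allowed by the
/// site's robots.txt and enqueues each visited `Page` as a Sidekiq job.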
pub struct Spider<'a> {
    pub base_url: Url,
    pub visited_urls: Vec<Serde<Url>>,
    pub unvisited_urls: Vec<Serde<Url>>,
    pub env: String,
    pub limit: isize,
    sidekiq: SidekiqClient,
    robot_parser: RobotFileParser<'a>,
}

impl<'a> Spider<'a> {
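    /// Creates a `Spider` for `base_url`. The Sidekiq namespace comes from the
    /// `MAMAN_ENV` environment variable (defaulting to "development"), and
    /// `limit` caps how many pages are visited (0 means no limit).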
    pub fn new(redis_pool: RedisPool, base_url: Url, limit: isize) -> Spider<'a> {
        let maman_env = env::var(MAMAN_ENV).unwrap_or_else(|_| MAMAN_ENV_DEFAULT.to_owned());
        let robots_txt = base_url.join("/robots.txt").unwrap();
        let robot_file_parser = RobotFileParser::new(robots_txt);
        let client_opts = SidekiqClientOpts { namespace: Some(maman_env.to_string()) };
        let sidekiq = SidekiqClient::new(redis_pool, client_opts);
        Spider {
            base_url: base_url,
            visited_urls: Vec::new(),
            unvisited_urls: Vec::new(),
            sidekiq: sidekiq,
            env: maman_env,
            robot_parser: robot_file_parser,
            limit: limit,
        }
    }

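    /// Records a fetched page: marks its URL as visited, queues the URLs it
    /// links to for later crawling, and pushes the page as a Sidekiq job.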
    pub fn visit_page(&mut self, page: Page) {
        self.visited_urls.push(page.url.clone());
        for u in &page.urls {
            self.unvisited_urls.push(u.clone());
        }
        if let Err(err) = self.sidekiq.push(page.to_job()) {
            error!("SidekiqClient push failed: {}", err);
        }
    }

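    /// Runs the crawl: reads robots.txt, visits the base URL, then keeps
    /// popping unvisited URLs and visiting them until the queue is empty or
    /// the page limit is reached.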
    pub fn crawl(&mut self) {
        self.robot_parser.read();
        let base_url = self.base_url.clone();
        if let Some(response) = Spider::load_url(self.base_url.as_ref()) {
            self.visit(&base_url, response);
            while let Some(url) = self.unvisited_urls.pop() {
                if self.continue_to_crawl() {
                    if !self.visited_urls.contains(&url) {
                        if let Some(response) = Spider::load_url(url.as_ref()) {
                            self.visit(&url, response);
                        }
                    }
                } else {
                    break;
                }
            }
        }
    }

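    /// Feeds `document` through the html5ever tokenizer with `page` as the
    /// token sink, letting the page collect the links found in the markup.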
    pub fn read_page(page: Page, document: &str) -> Tokenizer<Page> {
        let mut tok = Tokenizer::new(page, Default::default());
        let mut input = BufferQueue::new();
        input.push_back(String::from(document).into());
        let _ = tok.feed(&mut input);
        tok.end();
        tok
    }

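    /// Visits a single URL if robots.txt allows it, converting the HTTP
    /// response into a `Page` and recording it.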
    fn visit(&mut self, page_url: &Url, response: HttpResponse) {
        if self.can_visit(page_url) {
            info!("{}", page_url);
            if let Some(page) = Spider::read_response(page_url, response) {
                self.visit_page(page);
            }
        }
    }

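    /// Returns true while crawling should continue: no limit was set (0) or
    /// fewer pages than the limit have been visited so far.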
    fn continue_to_crawl(&self) -> bool {
        self.limit == 0 || (self.visited_urls.len() as isize) < self.limit
    }

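    /// Asks the robots.txt parser whether this crawler may fetch the URL path.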
    fn can_visit(&self, page_url: &Url) -> bool {
        self.robot_parser
            .can_fetch(maman_name!(), page_url.path())
    }

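    /// Builds a `Page` from an HTTP response: collects the headers with
    /// lower-cased names, decodes the body as UTF-8 (ignoring undecodable
    /// bytes), and tokenizes it so the page can extract its links.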
    fn read_response(page_url: &Url, mut response: HttpResponse) -> Option<Page> {
        let mut headers = BTreeMap::new();
        {
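            // This inner scope ends the immutable borrow taken by `headers()`
            // before `read_to_end` borrows `response` mutably below.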
            for h in response.headers().iter() {
                headers.insert(h.name().to_ascii_lowercase(), h.value_string());
            }
        }
        let mut document = vec![];
        let _ = response.read_to_end(&mut document);
        match UTF_8.decode(&document, DecoderTrap::Ignore) {
            Ok(doc) => {
                let page = Page::new(page_url.clone(),
                                     doc.to_string(),
                                     headers.clone(),
                                     response.status().to_string(),
                                     response.version().to_string());
                let tokenizer = Spider::read_page(page, &doc);
                Some(tokenizer.sink)
            }
            Err(_) => None,
        }
    }

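    /// Performs a GET request for `url` with the crawler's user agent and
    /// returns the response only for 200 OK or 304 Not Modified.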
    fn load_url(url: &str) -> Option<HttpResponse> {
        let client = HttpClient::new().expect("HttpClient failed to construct");
        let request = client
            .get(url)
            .header(UserAgent(maman_user_agent!().to_owned()));
        match request.send() {
            Ok(response) => {
                match *response.status() {
                    StatusCode::Ok | StatusCode::NotModified => Some(response),
                    _ => None,
                }
            }
            Err(_) => None,
        }
    }
}