var searchIndex = {}; searchIndex["robotparser"] = {"doc":"robots.txt parser for Rust","items":[[3,"RequestRate","robotparser","",null,null],[12,"requests","","",0,null],[12,"seconds","","",0,null],[3,"RobotFileParser","","robots.txt file parser",null,null],[11,"clone","","",0,null],[11,"eq","","",0,null],[11,"ne","","",0,null],[11,"fmt","","",0,null],[11,"clone","","",1,null],[11,"eq","","",1,null],[11,"ne","","",1,null],[11,"fmt","","",1,null],[11,"new","","",1,{"inputs":[{"name":"t"}],"output":{"name":"robotfileparser"}}],[11,"mtime","","Returns the time the robots.txt file was last fetched.",1,null],[11,"modified","","Sets the time the robots.txt file was last fetched to the\ncurrent time.",1,null],[11,"set_url","","Sets the URL referring to a robots.txt file.",1,null],[11,"read","","Reads the robots.txt URL and feeds it to the parser.",1,null],[11,"from_response","","Reads the HTTP response and feeds it to the parser.",1,null],[11,"parse","","",1,null],[11,"can_fetch","","Using the parsed robots.txt, decide if the useragent can fetch the url.",1,null],[11,"get_crawl_delay","","Returns the crawl delay for this user agent as a `Duration`, or None if no crawl delay is defined.",1,null],[11,"get_sitemaps","","Returns the sitemaps for this user agent as a `Vec<Url>`.",1,null],[11,"get_req_rate","","Returns the request rate for this user agent as a `RequestRate`, or None if no request rate is defined.",1,null]],"paths":[[3,"RequestRate"],[3,"RobotFileParser"]]}; initSearch(searchIndex);
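// The index above enumerates the public API of the `robotparser` crate: a
// `RobotFileParser` with `new`, `set_url`, `read`, `parse`, `can_fetch`,
// `get_crawl_delay`, `get_sitemaps` and `get_req_rate`, plus a `RequestRate`
// struct with `requests` and `seconds` fields. A minimal Rust usage sketch,
// inferred only from those item names and doc strings; the exact signatures,
// the user-agent string and the example URLs are assumptions, not taken from
// this index:
//
//     use robotparser::RobotFileParser;
//
//     fn main() {
//         // Point the parser at a robots.txt URL and fetch it.
//         let parser = RobotFileParser::new("https://www.example.com/robots.txt");
//         parser.read();
//
//         // Ask whether a given user agent may fetch a given URL.
//         if parser.can_fetch("my-crawler", "https://www.example.com/some/page") {
//             println!("allowed to fetch");
//         }
//
//         // Crawl delay is optional per user agent (None if not defined).
//         if let Some(delay) = parser.get_crawl_delay("my-crawler") {
//             println!("crawl delay: {:?}", delay);
//         }
//     }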