use dbfile::DBfile;
use dbfile_derive::{dbnote, DBfile};
use requests2::{Cache, Requests, Store, Value};
use std::collections::HashMap;
/// Demonstrates CSS-selector parsing against two news sites.
///
/// Registers named parsers on a shared `Cache`: two for 163.com
/// (news list and tab area) and one for qq.com that exercises three
/// selector flavors (attribute-equals, chained classes, id) and
/// groups the results into a single `Value::MAP`. Results are then
/// printed from the cache.
pub fn run_selector() {
    let cache = Cache::new();

    // 163.com: collect the text of matched nodes, keeping only nodes
    // whose text is non-empty.
    let rq2 = Requests::new(&cache)
        .connect("https://news.163.com/")
        .default_headers();
    rq2.parser(
        |p| p.find_all(".news_default_news", |n| n.text() != "", "text"),
        "163-news",
    );
    rq2.parser(
        |p| p.find_all(".ns_area.list", |n| n.text() != "", "text"),
        "163-tab",
    );
    cache.print_json();

    // qq.com: three selector styles, keyed descriptively in one map.
    let rq = Requests::new(&cache)
        .connect("https://www.qq.com")
        .default_headers();
    rq.parser(
        |p| {
            let attr_eq = p.find_all("[bosszone=th_1]", |n| n.text() != "", "text");
            let class1_class2 = p.find_all(".tit.fl", |n| n.text() != "", "text");
            let id = p.find_all("#searchTab", |n| n.text() != "", "text");
            let map = HashMap::from([
                ("attr".to_owned(), attr_eq),
                ("class_class".to_owned(), class1_class2),
                ("id".to_owned(), id),
            ]);
            Value::MAP(map)
        },
        "qq",
    );
    cache.print();

    // Fix: this binding was named `_value` — the underscore prefix
    // marks a binding as intentionally unused, yet it IS used in the
    // println! below (clippy::used_underscore_binding).
    let value = cache.get("qq");
    println!("Value: {:?}", value);
}
/// Fetches qq.com, scrapes its nav links, and persists them.
///
/// Defines a local `Link` record (mapped by the `DBfile` derive to
/// the `test_link` postgres table with primary key `href`), creates
/// the table from the first scraped link, writes the CSV header once,
/// and sends every link to the database via `to_db`.
pub fn run_free_parse_and_store_to_csv() {
    let data = Cache::new();
    let client = Requests::new(&data);
    // Network I/O: keep the hard failure, but replace the bare
    // unwrap() with an expect() that says what actually failed.
    let rq = client
        .connect("https://www.qq.com/")
        .default_headers()
        .send()
        .expect("failed to fetch https://www.qq.com/");

    // One scraped anchor; `href` is the primary key in `test_link`.
    #[derive(DBfile, Debug)]
    #[dbnote(table_name = "test_link", driver = "postgres", primary_key = "href")]
    struct Link<'a> {
        href: &'a str,
        link_name: String,
        title: &'a str,
    }

    rq.free_parse(|p| {
        let title = p.select("title").text();
        let links = p
            .select_all("li.nav-item a")
            .iter()
            .map(|x| Link {
                title: "",
                href: x.attr("href").unwrap_or_default(),
                link_name: x.text(),
            })
            .collect::<Vec<Link>>();

        // Fix: the original indexed `links[0]` unconditionally, which
        // panics when the selector matches no elements. Bail out
        // early instead — nothing matched, nothing to persist.
        match links.first() {
            Some(first) => first.create_table(),
            None => return,
        }

        for (idx, mut link) in links.into_iter().enumerate() {
            if idx == 0 {
                // Only the first row carries the page title and
                // emits the CSV header line.
                link.title = &title;
                link.write_csv_head();
            }
            link.to_db();
        }
    });
}