pub mod config;
mod fingerprints;
mod metadata;
mod page;
mod readability;
#[cfg(test)]
mod tests;
use self::config::{ConfigCollection, ConfigEntry};
use self::page::Page;
pub use self::readability::Readability;
use crate::article::Article;
use crate::constants::{self, CHARSET};
use crate::error::FullTextParserError;
use crate::util::Util;
use encoding_rs::Encoding;
use fingerprints::Fingerprints;
use libxml::parser::Parser;
use libxml::tree::{Document, Node, NodeType};
use libxml::xpath::Context;
use reqwest::header::HeaderMap;
use reqwest::{Client, Response, Url};
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::str::from_utf8;
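/// Extracts full article content from web pages, driven by per-site
/// "full-text RSS" config files with `global.txt` as the fallback rule set.
///
/// A minimal offline sketch, assuming this type is re-exported at the crate
/// root (adjust the `use` path to the actual crate layout):
///
/// ```ignore
/// use article_scraper::FullTextParser; // re-export path assumed
///
/// # async fn example() {
/// // `None` falls back to the config files bundled with the crate
/// let parser = FullTextParser::new(None).await;
/// let html = "<html><body><article><p>Hello</p></article></body></html>".to_owned();
/// let article = parser.parse_offline(vec![html], None, None).unwrap();
/// println!("{:?}", article.html);
/// # }
/// ```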
pub struct FullTextParser {
config_files: ConfigCollection,
}
impl FullTextParser {
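/// Creates a new parser. Site config files are loaded from `config_path` on
/// top of the defaults shipped with the crate; `None` uses only the defaults.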
pub async fn new(config_path: Option<&Path>) -> Self {
let config_files = ConfigCollection::parse(config_path).await;
Self { config_files }
}
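/// Scrapes the article at `url`: picks the matching site config (falling
/// back to fingerprint detection on the downloaded HTML), follows redirects,
/// downloads all pages of the article and hands them to [`Self::parse_offline`].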
pub(crate) async fn parse(
&self,
url: &url::Url,
client: &Client,
) -> Result<Article, FullTextParserError> {
tracing::debug!(%url, "Scraping article");
let config = self.get_grabber_config(url);
let global_config = self
.config_files
.get("global.txt")
.ok_or(FullTextParserError::Config)?;
let headers = Util::generate_headers(config, global_config)?;
let (response, new_url) = Self::get_response(url, client, headers).await?;
let url = if let Some(new_url) = new_url {
tracing::debug!(%url, %new_url, "Url redirects");
new_url
} else {
url.clone()
};
if !Util::check_content_type(&response)? {
return Err(FullTextParserError::ContentType);
}
let html = Self::get_body(response).await?;
if html.is_empty() {
tracing::error!("Empty response body");
return Err(FullTextParserError::Http);
}
let config = config.or_else(|| {
Fingerprints::detect(&html).and_then(|url| self.get_grabber_config(&url))
});
let pages = self
.download_all_pages(html, client, config, global_config, &url)
.await?;
self.parse_offline(pages, config, Some(url))
}
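/// Extracts an article from already-downloaded HTML `pages` without network
/// access. When `url` is `None`, a fake base URL is used so relative links
/// can still be resolved.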
pub fn parse_offline(
&self,
pages: Vec<String>,
config: Option<&ConfigEntry>,
url: Option<Url>,
) -> Result<Article, FullTextParserError> {
let url = url.unwrap_or_else(|| url::Url::parse("http://fakehost/test/base/").unwrap());
let config = config.or_else(|| self.get_grabber_config(&url));
let global_config = self
.config_files
.get("global.txt")
.ok_or(FullTextParserError::Config)?;
let mut article = Article {
title: None,
author: None,
url: url.clone(),
date: None,
thumbnail_url: None,
html: None,
};
// relax libxml's node reference-count guard: the parser holds several
// handles to the same nodes while restructuring the tree
libxml::tree::node::set_node_rc_guard(10);
let mut document = Document::new().map_err(|()| FullTextParserError::Xml)?;
let mut root =
Node::new("article", None, &document).map_err(|()| FullTextParserError::Xml)?;
document.set_root_element(&root);
for page_html in pages {
self.parse_page(&mut article, &page_html, &mut root, config, global_config)?;
}
let context = Context::new(&document).map_err(|()| {
tracing::error!("Failed to create xpath context for extracted article");
FullTextParserError::Xml
})?;
if let Err(error) = Self::prevent_self_closing_tags(&context) {
tracing::error!("Preventing self closing tags failed - '{error}'");
return Err(error);
}
Self::post_process_document(&document)?;
article.html = Some(Util::serialize_node(&document, &root));
Ok(article)
}
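/// Collects the HTML of all pages belonging to an article. If the config
/// points to a single-page variant, only that variant is returned; otherwise
/// next-page links are followed until none remains.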
async fn download_all_pages(
&self,
html: String,
client: &Client,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
article_url: &Url,
) -> Result<Vec<String>, FullTextParserError> {
let mut html = html;
let mut pages = vec![html.clone()];
while let Ok(page_result) = self.evaluate_page(&html, config, global_config, article_url) {
if let Page::Single(single_page_url) = page_result {
let single_page_html =
Self::download(&single_page_url, client, config, global_config).await?;
return Ok(vec![single_page_html]);
} else if let Page::Multi(next_page_url) = page_result {
if let Some(next_page_url) = next_page_url {
let next_page_html =
Self::download(&next_page_url, client, config, global_config).await?;
pages.push(next_page_html.clone());
html = next_page_html;
continue;
} else {
break;
}
}
}
Ok(pages)
}
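/// Checks a page for a configured single-page link, then for a next-page
/// link, and reports the result as a [`Page`] variant.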
fn evaluate_page(
&self,
html: &str,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
article_url: &Url,
) -> Result<Page, FullTextParserError> {
let document = Self::parse_html(html, config, global_config)?;
let xpath_ctx = Self::get_xpath_ctx(&document)?;
let rule = Util::select_rule(
config.and_then(|c| c.single_page_link.as_deref()),
global_config.single_page_link.as_deref(),
);
if let Some(xpath_single_page_link) = rule {
tracing::trace!(
xpath_single_page_link,
"Single page link xpath specified in config",
);
if let Some(single_page_url) = Util::find_page_url(&xpath_ctx, xpath_single_page_link) {
tracing::trace!(%single_page_url, "Single page link found");
return Ok(Page::Single(single_page_url));
}
}
let next_page_url =
self.check_for_next_page(&xpath_ctx, config, global_config, article_url);
Ok(Page::Multi(next_page_url))
}
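/// Parses one page of the article: extracts metadata and a thumbnail, cleans
/// the DOM and appends the body content to `root`, falling back to the
/// readability algorithm when the configured xpaths match nothing.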
fn parse_page(
&self,
article: &mut Article,
html: &str,
root: &mut Node,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
) -> Result<(), FullTextParserError> {
let document = Self::parse_html(html, config, global_config)?;
let xpath_ctx = Self::get_xpath_ctx(&document)?;
metadata::extract(&xpath_ctx, config, Some(global_config), article);
if article.thumbnail_url.is_none() {
article.thumbnail_url = Self::check_for_thumbnail(&xpath_ctx);
}
Self::prep_content(
&xpath_ctx,
config,
global_config,
&article.url,
&document,
article.title.as_deref(),
);
let found_body =
Self::extract_body(&xpath_ctx, root, config, global_config).unwrap_or(false);
if !found_body {
tracing::warn!("ftr failed to find content. trying readabilty");
if let Err(error) = Readability::extract_body(document, root, article.title.as_deref())
{
tracing::error!("Both ftr and readability failed to find content: {error}");
return Err(error);
}
}
Ok(())
}
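/// Applies the string replacements from the configs to the raw HTML and
/// parses the result into a libxml [`Document`].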
pub(crate) fn parse_html(
html: &str,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
) -> Result<Document, FullTextParserError> {
let mut html = html.to_owned();
if let Some(config) = config {
for replace in &config.replace {
html = html.replace(&replace.to_replace, &replace.replace_with);
}
}
for replace in &global_config.replace {
html = html.replace(&replace.to_replace, &replace.replace_with);
}
let document = Self::parse_html_string_patched(html.as_str()).map_err(|err| {
tracing::error!(?err, "Parsing HTML failed for downloaded HTML");
FullTextParserError::Xml
})?;
if document.get_root_element().is_none() {
tracing::error!("document without root");
Err(FullTextParserError::Xml)
} else {
Ok(document)
}
}
fn try_usize_to_i32(value: usize) -> Result<i32, libxml::parser::XmlParseError> {
if cfg!(target_pointer_width = "16") || (value < i32::MAX as usize) {
Ok(value as i32)
} else {
Err(libxml::parser::XmlParseError::DocumentTooLarge)
}
}
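/// Parses an HTML/XML string by calling into libxml directly, always passing
/// `utf-8` as the document encoding and enabling the RECOVER and NOERROR
/// parser options. The body was already re-encoded to UTF-8 by
/// [`Self::get_body`], so libxml should not second-guess the encoding.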
pub(crate) fn parse_html_string_patched(
input: &str,
) -> Result<Document, libxml::parser::XmlParseError> {
unsafe {
libxml::bindings::xmlInitParser();
}
let parser = Parser::default_html();
let input_bytes: &[u8] = input.as_ref();
let input_ptr = input_bytes.as_ptr() as *const std::os::raw::c_char;
let input_len = Self::try_usize_to_i32(input_bytes.len())?;
let encoding = std::ffi::CString::new("utf-8").unwrap();
let encoding_ptr = encoding.as_ptr();
let url_ptr = std::ptr::null();
// HTML_PARSE_RECOVER (1) | HTML_PARSE_NOERROR (32): recover from parse
// errors and suppress libxml's error output
let options = 1 + 32;
match parser.format {
libxml::parser::ParseFormat::XML => unsafe {
let doc_ptr = libxml::bindings::xmlReadMemory(
input_ptr,
input_len,
url_ptr,
encoding_ptr,
options,
);
if doc_ptr.is_null() {
Err(libxml::parser::XmlParseError::GotNullPointer)
} else {
Ok(Document::new_ptr(doc_ptr))
}
},
libxml::parser::ParseFormat::HTML => unsafe {
let docptr = libxml::bindings::htmlReadMemory(
input_ptr,
input_len,
url_ptr,
encoding_ptr,
options,
);
if docptr.is_null() {
Err(libxml::parser::XmlParseError::GotNullPointer)
} else {
Ok(Document::new_ptr(docptr))
}
},
}
}
pub(crate) fn get_xpath_ctx(doc: &Document) -> Result<Context, FullTextParserError> {
Context::new(doc).map_err(|()| {
tracing::error!("Creating xpath context failed for downloaded HTML");
FullTextParserError::Xml
})
}
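/// Sends a GET request for `url`, manually following `location` redirects up
/// to `constants::MAX_REDIRECTS`. Returns the response and, if any redirect
/// occurred, the final URL.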
async fn get_response(
url: &url::Url,
client: &Client,
headers: HeaderMap,
) -> Result<(Response, Option<Url>), FullTextParserError> {
Self::get_response_impl(url, client, headers, 0).await
}
async fn get_response_impl(
url: &url::Url,
client: &Client,
headers: HeaderMap,
depth: u32,
) -> Result<(Response, Option<Url>), FullTextParserError> {
let response = client
.get(url.as_str())
.headers(headers.clone())
.send()
.await
.map_err(|err| {
tracing::error!(%url, ?err, "Downloading HTML failed: GET");
FullTextParserError::Http
})?;
let status = response.status();
tracing::debug!(%status);
if status.is_redirection()
&& let Some(new_location) = response
.headers()
.get("location")
.and_then(|header| std::str::from_utf8(header.as_bytes()).ok())
{
tracing::debug!(new_location);
if depth >= constants::MAX_REDIRECTS {
tracing::error!(depth, "max redirects reached");
return Err(FullTextParserError::Http);
}
if new_location == url.as_str() {
tracing::error!(new_location, "redirect url is same as original");
return Err(FullTextParserError::Http);
}
let Ok(new_url) = Url::parse(new_location) else {
tracing::error!(new_location, "not a valid url");
return Err(FullTextParserError::Http);
};
tracing::debug!(new_location, "redirect");
let (response, redirected_url) = Box::pin(Self::get_response_impl(
&new_url,
client,
headers,
depth + 1,
))
.await?;
if redirected_url.is_some() {
return Ok((response, redirected_url));
}
return Ok((response, Some(new_url)));
}
Ok((response, None))
}
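/// Reads the response body as UTF-8. Non-UTF-8 bodies are decoded via the
/// charset found in the HTML `<meta>` tag or the `Content-Type` header, and
/// the charset declaration inside the HTML is rewritten to `utf-8`.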
async fn get_body(response: Response) -> Result<String, FullTextParserError> {
let headers = response.headers().clone();
if headers
.get(reqwest::header::CONTENT_LENGTH)
.and_then(|hv| hv.to_str().ok())
.and_then(|str| str.parse::<i64>().ok())
.map(|content_length| content_length == 0)
.unwrap_or(false)
{
tracing::error!("Empty response body");
return Err(FullTextParserError::Http);
}
let bytes = response
.bytes()
.await
.map_err(|_| FullTextParserError::Http)?;
match from_utf8(&bytes) {
Ok(utf8_str) => {
tracing::trace!(utf8_str, "Valid utf-8 string");
Ok(utf8_str.into())
}
Err(error) => {
let lossy_string = String::from_utf8_lossy(&bytes);
tracing::trace!(%lossy_string, "Invalid utf-8 string");
if let Some(encoding) = Self::get_encoding_from_html(&lossy_string) {
tracing::debug!(encoding, "Encoding extracted from HTML");
if let Some(decoded_html) = Self::decode_html(&bytes, encoding) {
let decoded_html = decoded_html.replacen(
&format!("charset=\"{encoding}\""),
"charset=\"utf-8\"",
1,
);
return Ok(decoded_html);
}
}
if let Some(encoding) = Self::get_encoding_from_http_header(&headers) {
tracing::debug!(encoding, "Encoding extracted from headers");
if let Some(decoded_html) = Self::decode_html(&bytes, encoding) {
let decoded_html = decoded_html.replacen(
&format!("charset=\"{encoding}\""),
"charset=\"utf-8\"",
1,
);
return Ok(decoded_html);
}
}
Err(FullTextParserError::Utf8(error))
}
}
}
pub async fn download(
url: &url::Url,
client: &Client,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
) -> Result<String, FullTextParserError> {
let headers = Util::generate_headers(config, global_config)?;
let (response, _) = Self::get_response(url, client, headers).await?;
let body = Self::get_body(response).await?;
if body.is_empty() {
tracing::error!("Empty response body");
Err(FullTextParserError::Http)
} else {
Ok(body)
}
}
fn get_encoding_from_http_header(headers: &reqwest::header::HeaderMap) -> Option<&str> {
headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|header| header.to_str().ok())
.and_then(|content_type| CHARSET.captures(content_type))
.and_then(|captures| captures.get(1))
.map(|regex_match| regex_match.as_str())
}
fn get_encoding_from_html(html: &str) -> Option<&str> {
let regex =
regex::Regex::new(r#"<meta.*?charset="*(.*?)""#).expect("Failed to parse regex");
if let Some(captures) = regex.captures(html)
&& let Some(regex_match) = captures.get(1)
{
return Some(regex_match.as_str());
}
None
}
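/// Decodes `bytes` with the named encoding, returning `None` if the label is
/// unknown or the data contains malformed sequences.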
fn decode_html(bytes: &[u8], encoding: &str) -> Option<String> {
if let Some(encoding) = Encoding::for_label(encoding.as_bytes()) {
let (decoded_html, _, invalid_chars) = encoding.decode(bytes);
if !invalid_chars {
return Some(decoded_html.into_owned());
}
}
tracing::warn!(encoding, "Could not decode HTML");
None
}
fn get_host_name(url: &url::Url) -> Result<String, FullTextParserError> {
match url.host_str() {
Some(name) => {
// drop a leading "www." (the bare host "www." is kept as-is)
let name = match name.strip_prefix("www.") {
Some(rest) if !rest.is_empty() => rest,
_ => name,
};
Ok(name.into())
}
None => {
tracing::error!("Getting config failed due to bad Url");
Err(FullTextParserError::Config)
}
}
}
fn get_grabber_config(&self, url: &url::Url) -> Option<&ConfigEntry> {
let conf = Self::get_host_name(url)
.ok()
.map(|url| url + ".txt")
.and_then(|name| self.config_files.get(&name));
if conf.is_none() {
tracing::warn!(%url, "No config found");
}
conf
}
pub fn thumbnail_from_html(html: &str) -> Option<String> {
if let Ok(doc) = Self::parse_html_string_patched(html)
&& let Ok(ctx) = Self::get_xpath_ctx(&doc)
{
return Self::check_for_thumbnail(&ctx);
}
None
}
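/// Searches for a thumbnail URL: `twitter:image` / `og:image` metadata
/// first, then a heuristic scoring of all `<img>` elements, then the lead
/// image xpath as a last resort.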
pub fn check_for_thumbnail(context: &Context) -> Option<String> {
if let Ok(thumb) = Util::get_attribute(
context,
"//meta[contains(@name, 'twitter:image')]",
"content",
) {
return Some(thumb);
}
if let Ok(thumb) =
Util::get_attribute(context, "//meta[contains(@name, 'og:image')]", "content")
{
return Some(thumb);
}
if let Ok(thumb) = Util::get_attribute(
context,
"//meta[contains(@property, 'twitter:image')]",
"content",
) {
return Some(thumb);
}
if let Ok(thumb) = Util::get_attribute(
context,
"//meta[contains(@property, 'og:image')]",
"content",
) {
return Some(thumb);
}
if let Ok(thumb) =
Util::get_attribute(context, "//link[contains(@rel, 'image_src')]", "href")
{
return Some(thumb);
}
if let Ok(img_nodes) = Util::evaluate_xpath(context, "//img", true) {
let mut scores: HashMap<String, i32> = HashMap::new();
let len = img_nodes.len();
for (index, img_node) in img_nodes.into_iter().enumerate() {
let Some(src) = img_node.get_attribute("src") else {
continue;
};
let score = Util::score_image_url(&src)
+ Util::score_img_attr(&img_node)
+ Util::score_by_parents(&img_node)
+ Util::score_by_sibling(&img_node)
+ Util::score_by_dimensions(&img_node)
+ Util::score_by_position(len, index)
+ Util::score_by_alt(&img_node);
scores.insert(src, score);
}
if let Some((top_src, top_score)) =
scores.into_iter().max_by_key(|(_src, score)| *score)
&& top_score > 0
{
let top_url = top_src.trim();
if Url::parse(top_url).is_ok() {
return Some(top_url.to_string());
}
}
}
if let Ok(link_nodes) = Util::evaluate_xpath(context, constants::LEAD_IMAGE_URL_XPATH, true)
&& let Some(first_link_node) = link_nodes.first()
{
if let Some(src) = first_link_node.get_attribute("src") {
let src = src.trim().to_string();
if Url::parse(&src).is_ok() {
return Some(src);
}
}
if let Some(href) = first_link_node.get_attribute("href") {
let href = href.trim().to_string();
if Url::parse(&href).is_ok() {
return Some(href);
}
}
if let Some(val) = first_link_node.get_attribute("value") {
let val = val.trim().to_string();
if Url::parse(&val).is_ok() {
return Some(val);
}
}
}
None
}
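/// Undoes common lazy-loading schemes, in the spirit of readability.js:
/// drops tiny base64 placeholder `src` values and copies real image URLs
/// from other attributes into `src`/`srcset`.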
fn fix_lazy_images(context: &Context, doc: &Document) -> Result<(), FullTextParserError> {
let mut img_nodes = Util::evaluate_xpath(context, "//img", false)?;
let pic_nodes = Util::evaluate_xpath(context, "//picture", false)?;
let fig_nodes = Util::evaluate_xpath(context, "//figure", false)?;
img_nodes.extend(pic_nodes);
img_nodes.extend(fig_nodes);
for mut node in img_nodes {
let tag_name = node.get_name().to_uppercase();
if let Some(src) = node.get_attribute("src") {
if let Some(mime) = constants::BASE64_DATA_URL
.captures(&src)
.and_then(|c| c.get(1).map(|c| c.as_str()))
&& mime == "image/svg+xml"
{
continue;
}
let mut src_could_be_removed = false;
for (name, val) in node.get_attributes() {
if name == "src" {
continue;
}
if constants::IS_IMAGE.is_match(&val) {
src_could_be_removed = true;
break;
}
}
if src_could_be_removed && let Some(b64_match) = constants::IS_BASE64.find(&src) {
// skip the "base64," marker (7 bytes) to find the start of the payload
let b64_start = b64_match.start() + 7;
let b64_length = src.len() - b64_start;
// payloads this short are almost certainly placeholders, not real images
if b64_length < 133 {
_ = node.remove_attribute("src");
}
}
}
let class_contains_lazy = node
.get_attribute("class")
.map(|c| c.to_lowercase().contains("lazy"))
.unwrap_or(false);
let has_src = node.has_attribute("src");
let has_srcset = node.has_attribute("srcset");
if (has_src || has_srcset) && !class_contains_lazy {
continue;
}
for (name, val) in node.get_attributes() {
if name == "src" || name == "srcset" || name == "alt" {
continue;
}
let mut copy_to: Option<&str> = None;
if constants::COPY_TO_SRCSET.is_match(&val) {
copy_to = Some("srcset");
} else if constants::COPY_TO_SRC.is_match(&val) {
copy_to = Some("src");
}
if let Some(copy_to) = copy_to {
if tag_name == "IMG" || tag_name == "PICTURE" {
_ = node.set_attribute(copy_to, &val);
} else if tag_name == "FIGURE"
&& !Util::has_any_descendent_tag(&node, &HashSet::from(["IMG", "PICTURE"]))
{
let mut img = Node::new("img", None, doc).unwrap();
_ = img.set_attribute(copy_to, &val);
_ = node.add_child(&mut img);
}
}
}
}
Ok(())
}
fn fix_iframe_size(context: &Context, site_name: &str) -> Result<(), FullTextParserError> {
let xpath = &format!("//iframe[contains(@src, '{site_name}')]");
let node_vec = Util::evaluate_xpath(context, xpath, false)?;
for mut node in node_vec {
if node.is_null() {
continue;
}
let video_wrapper = node
.get_parent()
.and_then(|mut parent| parent.new_child(None, "div").ok());
if let Some(mut video_wrapper) = video_wrapper {
let success = video_wrapper
.set_property("class", "videoWrapper")
.ok()
.and_then(|()| node.set_property("width", "480").ok())
.and_then(|()| node.set_property("height", "360").ok())
.and_then(|()| node.set_property("aspect-ratio", "auto").ok())
.and_then(|()| {
// reparent the iframe into the new wrapper <div>
node.unlink();
video_wrapper.add_child(&mut node).ok()
})
.is_some();
if !success {
tracing::debug!("Failed to add iframe as child of video wrapper <div>");
}
} else {
tracing::warn!("Failed to get parent of iframe");
}
}
Ok(())
}
fn remove_attribute(
context: &Context,
tag: Option<&str>,
attribute: &str,
) -> Result<(), FullTextParserError> {
let xpath_tag = tag.unwrap_or("*");
let xpath = &format!("//{xpath_tag}[@{attribute}]");
let node_vec = Util::evaluate_xpath(context, xpath, false)?;
for mut node in node_vec {
if let Err(err) = node.remove_property(attribute) {
tracing::warn!(
attribute,
%err,
"Failed to remove attribute from node"
);
}
}
Ok(())
}
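/// Repairs the given attribute on all nodes matching `xpath`: resolves
/// relative URLs against `article_url`, fixes up `srcset` entries the same
/// way, and replaces `javascript:` links with their plain text content.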
fn repair_urls(
context: &Context,
xpath: &str,
attribute: &str,
article_url: &url::Url,
document: &Document,
) -> Result<(), FullTextParserError> {
let node_vec = Util::evaluate_xpath(context, xpath, false)?;
for mut node in node_vec {
if node.is_null() {
continue;
}
if let Some(url) = node.get_attribute(attribute) {
let trimmed_url = url.trim();
// leave in-page fragment links untouched
if trimmed_url.starts_with('#') || trimmed_url.starts_with("\\#") {
continue;
}
let is_relative_url = url::Url::parse(trimmed_url)
.err()
.map(|err| err == url::ParseError::RelativeUrlWithoutBase)
.unwrap_or(false);
let is_javascript = trimmed_url.contains("javascript:");
if let Some(srcset) = node.get_attribute("srcset") {
let res = constants::SRC_SET_URL
.captures_iter(&srcset)
.map(|cap| {
let cap0 = cap.get(0).map_or("", |m| m.as_str());
let cap1 = cap.get(1).map_or("", |m| m.as_str());
let cap2 = cap.get(2).map_or("", |m| m.as_str());
let cap3 = cap.get(3).map_or("", |m| m.as_str());
let is_relative_url = url::Url::parse(cap1)
.err()
.map(|err| err == url::ParseError::RelativeUrlWithoutBase)
.unwrap_or(false);
if is_relative_url {
let completed_url = article_url
.join(cap1)
.map(|u| u.as_str().to_owned())
.unwrap_or_default();
format!("{completed_url}{cap2}{cap3}")
} else {
cap0.to_string()
}
})
.collect::<Vec<String>>()
.join(" ");
_ = node.set_attribute("srcset", res.as_str());
}
if is_relative_url {
let completed_url = match article_url.join(trimmed_url) {
Ok(joined_url) => joined_url,
Err(_) => continue,
};
_ = node.set_attribute(attribute, completed_url.as_str());
} else if is_javascript {
let mut child_nodes = node.get_child_nodes();
let child_count = child_nodes.len();
let first_child_is_text = child_nodes
.first()
.and_then(|n| n.get_type())
.map(|t| t == NodeType::TextNode)
.unwrap_or(false);
if let Some(mut parent) = node.get_parent() {
let new_node = if child_count == 1 && first_child_is_text {
let link_content = node.get_content();
Node::new_text(&link_content, document)
.expect("Failed to create new text node")
} else {
let mut container = Node::new("span", None, document)
.expect("Failed to create new span container node");
for mut child in child_nodes.drain(..) {
child.unlink();
_ = container.add_child(&mut child);
}
container
};
_ = parent.replace_child_node(new_node, node);
}
} else if let Ok(parsed_url) = Url::parse(trimmed_url) {
_ = node.set_attribute(attribute, parsed_url.as_str());
} else {
_ = node.set_attribute(attribute, trimmed_url);
};
}
}
Ok(())
}
fn fix_urls(context: &Context, url: &Url, document: &Document) {
_ = Self::repair_urls(context, "//img", "src", url, document);
_ = Self::repair_urls(context, "//a", "src", url, document);
_ = Self::repair_urls(context, "//a", "href", url, document);
_ = Self::repair_urls(context, "//object", "data", url, document);
_ = Self::repair_urls(context, "//iframe", "src", url, document);
}
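/// Cleans a freshly parsed page in place before body extraction: demotes
/// `<h1>` to `<h2>`, applies the configured strip rules, un-lazies images,
/// strips scripts, forms, styling and other clutter, and makes URLs absolute.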
pub(crate) fn prep_content(
context: &Context,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
url: &Url,
document: &Document,
title: Option<&str>,
) {
if let Ok(h1_nodes) = Util::evaluate_xpath(context, "//h1", false) {
for mut h1_node in h1_nodes {
_ = h1_node.set_name("h2");
}
}
if let Ok(h2_nodes) = Util::evaluate_xpath(context, "//h2", false) {
for mut h2_node in h2_nodes {
if h2_node.is_null() {
continue;
}
if Util::header_duplicates_title(&h2_node, title) {
h2_node.unlink();
}
}
}
if let Ok(font_nodes) = Util::evaluate_xpath(context, "//font", false) {
for mut font_node in font_nodes {
_ = font_node.set_name("span");
}
}
_ = Util::mark_data_tables(context);
if let Some(config) = config {
for xpath_strip in &config.xpath_strip {
_ = Util::strip_node(context, xpath_strip);
}
}
for xpath_strip in &global_config.xpath_strip {
_ = Util::strip_node(context, xpath_strip);
}
if let Some(config) = config {
for xpath_strip_class in &config.strip_id_or_class {
_ = Util::strip_id_or_class(context, xpath_strip_class);
}
}
for xpath_strip_class in &global_config.strip_id_or_class {
_ = Util::strip_id_or_class(context, xpath_strip_class);
}
if let Some(config) = config {
for xpath_strip_img_src in &config.strip_image_src {
_ = Util::strip_node(
context,
&format!("//img[contains(@src,'{xpath_strip_img_src}')]"),
);
}
}
for xpath_strip_img_src in &global_config.strip_image_src {
_ = Util::strip_node(
context,
&format!("//img[contains(@src,'{xpath_strip_img_src}')]"),
);
}
_ = Self::unwrap_noscript_images(context);
_ = Util::strip_node(context, "//noscript");
_ = Self::fix_lazy_images(context, document);
_ = Self::fix_iframe_size(context, "youtube.com");
_ = Self::remove_attribute(context, Some("html"), "class");
_ = Self::remove_attribute(context, Some("a"), "onclick");
_ = Self::remove_attribute(context, Some("img"), "decoding");
_ = Self::remove_attribute(context, Some("img"), "loading");
_ = Util::strip_node(
context,
"//*[contains(@class,' entry-unrelated ') or contains(@class,' instapaper_ignore ')]",
);
_ = Util::strip_node(context, "//*[contains(@style,'display:none')]");
_ = Util::strip_node(context, "//*[contains(@style,'display: none')]");
_ = Self::remove_attribute(context, None, "style");
_ = Util::strip_node(context, "//form");
_ = Util::strip_node(context, "//input");
_ = Util::strip_node(context, "//textarea");
_ = Util::strip_node(context, "//select");
_ = Util::strip_node(context, "//button");
_ = Util::strip_node(context, "//comment()");
_ = Util::strip_node(context, "//script");
_ = Util::strip_node(context, "//style");
_ = Util::strip_node(context, "//a[not(node())]");
_ = Util::strip_node(context, "//*[@type='text/css']");
_ = Util::strip_node(context, "//iframe");
_ = Util::strip_node(context, "//object");
_ = Util::strip_node(context, "//embed");
_ = Util::strip_node(context, "//footer");
_ = Util::strip_node(context, "//link");
_ = Util::strip_node(context, "//aside");
if let Some(root) = document.get_root_element() {
Util::replace_brs(&root, document);
Util::replace_emoji_images(&root, document);
}
Self::fix_urls(context, url, document);
}
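/// Replaces lazy-load placeholder images with the real markup found in
/// single-image `<noscript>` siblings, preserving clashing attributes of the
/// placeholder as `data-old-*`.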
fn unwrap_noscript_images(ctx: &Context) -> Result<(), FullTextParserError> {
let img_nodes = Util::evaluate_xpath(ctx, "//img", false)?;
for mut img_node in img_nodes {
if img_node.is_null() {
continue;
}
let attrs = img_node.get_attributes();
let keep = attrs.iter().any(|(name, value)| {
name == "src"
|| name == "srcset"
|| name == "data-src"
|| name == "data-srcset"
|| constants::IS_IMAGE.is_match(value)
});
if !keep {
img_node.unlink();
}
}
let noscript_nodes = Util::evaluate_xpath(ctx, "//noscript", false)?;
for mut noscript_node in noscript_nodes {
if noscript_node.is_null() {
continue;
}
if !Util::is_single_image(&noscript_node) {
continue;
}
if let Some(prev) = noscript_node.get_prev_element_sibling()
&& Util::is_single_image(&prev)
{
{
let mut prev_img = prev.clone();
if prev_img.get_name().to_uppercase() != "IMG"
&& let Some(img_node) = Util::get_elements_by_tag_name(&prev_img, "img")
.into_iter()
.next()
{
prev_img = img_node;
}
let new_img = Util::get_elements_by_tag_name(&noscript_node, "img")
.into_iter()
.next();
if let Some(mut new_img) = new_img {
for (key, value) in prev_img.get_attributes() {
if value.is_empty() {
continue;
}
if key == "src"
|| key == "srcset"
|| constants::IS_IMAGE.is_match(&value)
{
if new_img.get_attribute(&key).as_deref() == Some(&value) {
continue;
}
let mut attr_name = key;
if new_img.has_attribute(&attr_name) {
attr_name = format!("data-old-{attr_name}");
}
new_img.set_attribute(&attr_name, &value).map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
}
}
}
}
if let Some(mut parent) = noscript_node.get_parent()
&& let Some(first_child) = noscript_node.get_first_element_child()
{
parent
.replace_child_node(first_child, prev)
.map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
noscript_node.unlink();
}
}
}
Ok(())
}
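/// Moves the article body into `root`, trying the site config's body xpaths
/// first and the global config's only when none of them matched.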
fn extract_body(
context: &Context,
root: &mut Node,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
) -> Result<bool, FullTextParserError> {
let mut found_something = false;
if let Some(config) = config {
for xpath_body in &config.xpath_body {
if Self::extract_body_single(context, root, xpath_body)? {
found_something = true;
}
}
}
if !found_something {
for xpath_body in &global_config.xpath_body {
if Self::extract_body_single(context, root, xpath_body)? {
found_something = true;
}
}
}
Ok(found_something)
}
fn extract_body_single(
context: &Context,
root: &mut Node,
xpath: &str,
) -> Result<bool, FullTextParserError> {
let mut found_something = false;
let mut ancestor = xpath.to_string();
if ancestor.starts_with("//") {
ancestor = ancestor.chars().skip(2).collect();
}
let query = if xpath.starts_with('(') {
xpath
} else {
&format!("{xpath}[not(ancestor::{ancestor})]")
};
{
let node_vec = Util::evaluate_xpath(context, query, false)?;
for mut node in node_vec {
if node.is_null() || node.is_unlinked() {
continue;
}
if node.get_property("style").is_some() && node.remove_property("style").is_err() {
return Err(FullTextParserError::Xml);
}
Self::post_process_page(&mut node)?;
node.unlink();
if root.add_child(&mut node).is_ok() {
found_something = true;
} else {
tracing::error!("Failed to add body to prepared document");
return Err(FullTextParserError::Xml);
}
}
}
Ok(found_something)
}
fn check_for_next_page(
&self,
context: &Context,
config: Option<&ConfigEntry>,
global_config: &ConfigEntry,
article_url: &url::Url,
) -> Option<url::Url> {
if let Some(config) = config {
if let Some(next_page_xpath) = config.next_page_link.as_deref()
&& let Ok(next_page_string) = Util::get_attribute(context, next_page_xpath, "href")
{
tracing::debug!(next_page_string);
if let Some(next_page_url) = Self::parse_url(&next_page_string, article_url) {
return Some(next_page_url);
}
}
} else if let Some(next_page_xpath) = global_config.next_page_link.as_deref()
&& let Ok(next_page_string) = Util::get_attribute(context, next_page_xpath, "href")
&& let Some(next_page_url) = Self::parse_url(&next_page_string, article_url)
{
return Some(next_page_url);
}
None
}
fn parse_url(url: &str, article_url: &url::Url) -> Option<url::Url> {
let is_relative_url = url::Url::parse(url)
.err()
.map(|err| err == url::ParseError::RelativeUrlWithoutBase)
.unwrap_or(false);
if is_relative_url {
article_url.join(url.trim()).ok()
} else {
url::Url::parse(url).ok()
}
}
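/// Gives childless non-void elements an empty child so the serializer emits
/// an explicit closing tag instead of a self-closing one, which HTML
/// renderers would treat as unclosed.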
pub(crate) fn prevent_self_closing_tags(context: &Context) -> Result<(), FullTextParserError> {
let xpath = "//*[not(node())]";
let node_vec = Util::evaluate_xpath(context, xpath, false)?;
for mut node in node_vec {
let name = node.get_name().to_uppercase();
if constants::VALID_SELF_CLOSING_TAGS.contains(name.as_str()) {
continue;
}
_ = node.add_text_child(None, "empty", "");
}
Ok(())
}
pub(crate) fn post_process_document(document: &Document) -> Result<(), FullTextParserError> {
if let Some(mut root) = document.get_root_element() {
Self::simplify_nested_elements(&mut root)?;
Self::clean_attributes(&mut root)?;
Self::remove_single_cell_tables(&mut root);
Self::remove_extra_p_and_div(&mut root);
}
Ok(())
}
pub(crate) fn post_process_page(node: &mut Node) -> Result<(), FullTextParserError> {
Util::clean_headers(node);
Util::replace_schema_org_orbjects(node);
Util::clean_conditionally(node, "fieldset");
Util::clean_conditionally(node, "table");
Util::clean_conditionally(node, "ul");
Util::clean_conditionally(node, "div");
Self::remove_share_elements(node);
Self::clean_attributes(node)?;
Self::remove_single_cell_tables(node);
Self::remove_extra_p_and_div(node);
Self::remove_empty_nodes(node);
Ok(())
}
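/// Unwraps tables that contain exactly one cell, replacing them with a `<p>`
/// (or a `<div>` when the cell holds non-phrasing content).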
pub(crate) fn remove_single_cell_tables(root: &mut Node) {
let mut node_iter = Some(root.clone());
while let Some(node) = node_iter {
let tag_name = node.get_name().to_uppercase();
if tag_name == "TABLE" {
let t_body = if Util::has_single_tag_inside_element(&node, "TBODY") {
node.get_child_elements().into_iter().next().unwrap()
} else {
node.clone()
};
if Util::has_single_tag_inside_element(&t_body, "TR") {
let row = t_body.get_child_elements().first().cloned();
if let Some(row) = row
&& Util::has_single_tag_inside_element(&row, "TD")
{
let cell = row.get_child_elements().first().cloned();
if let Some(mut cell) = cell {
let all_phrasing_content = cell
.get_child_elements()
.into_iter()
.all(|child| Util::is_phrasing_content(&child));
cell.set_name(if all_phrasing_content { "P" } else { "DIV" })
.unwrap();
if let Some(mut parent) = node.get_parent() {
node_iter = Util::next_node(&node, true);
parent.replace_child_node(cell, node.clone()).unwrap();
continue;
}
}
}
}
}
node_iter = Util::next_node(&node, false);
}
}
pub(crate) fn remove_extra_p_and_div(root: &mut Node) {
let mut node_iter = Some(root.clone());
while let Some(mut node) = node_iter {
let tag_name = node.get_name().to_uppercase();
if tag_name == "P" || tag_name == "DIV" {
let img_count = Util::get_elements_by_tag_name(&node, "img").len();
let video_count = Util::get_elements_by_tag_name(&node, "video").len();
let embed_count = Util::get_elements_by_tag_name(&node, "embed").len();
let object_count = Util::get_elements_by_tag_name(&node, "object").len();
let iframe_count = Util::get_elements_by_tag_name(&node, "iframe").len();
let total_count =
img_count + video_count + embed_count + object_count + iframe_count;
if total_count == 0 && Util::get_inner_text(&node, false).trim().is_empty() {
node_iter = Util::remove_and_next(&mut node);
continue;
}
}
node_iter = Util::next_node(&node, false);
}
}
pub(crate) fn remove_share_elements(root: &mut Node) {
let mut node_iter = Some(root.clone());
while let Some(mut node) = node_iter {
let match_string = format!(
"{} {}",
node.get_attribute("class").unwrap_or_default(),
node.get_attribute("id").unwrap_or_default()
);
if constants::SHARE_ELEMENTS.is_match(&match_string)
&& node.get_content().len() < constants::DEFAULT_CHAR_THRESHOLD
{
node_iter = Util::remove_and_next(&mut node);
} else {
node_iter = Util::next_node(&node, false);
}
}
}
pub(crate) fn clean_attributes(root: &mut Node) -> Result<(), FullTextParserError> {
let mut node_iter = Some(root.clone());
while let Some(mut node) = node_iter {
let tag_name = node.get_name().to_uppercase();
for attr in constants::PRESENTATIONAL_ATTRIBUTES {
_ = node.remove_attribute(attr);
}
if constants::DEPRECATED_SIZE_ATTRIBUTE_ELEMS.contains(tag_name.as_str()) {
_ = node.remove_attribute("width");
_ = node.remove_attribute("height");
}
node.remove_attribute("class").map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
node.remove_attribute("align").map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
node.remove_attribute(constants::SCORE_ATTR)
.map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
node.remove_attribute(constants::DATA_TABLE_ATTR)
.map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
if node.get_name().to_lowercase() == "a"
&& node
.get_property("href")
.map(|href| !href.starts_with('#'))
.unwrap_or(false)
{
_ = node.set_property("target", "_blank");
}
node_iter = Util::next_node(&node, false);
}
Ok(())
}
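/// Collapses pointless nesting: removes empty `<div>`/`<section>` elements
/// and merges ones whose only child is itself a `<div>` or `<section>`.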
fn simplify_nested_elements(root: &mut Node) -> Result<(), FullTextParserError> {
let mut node_iter = Some(root.clone());
while let Some(mut node) = node_iter {
let tag_name = node.get_name().to_uppercase();
if tag_name == "ARTICLE" || node.get_parent().is_none() {
node_iter = Util::next_node(&node, false);
continue;
}
if tag_name != "DIV" && tag_name != "SECTION" {
node_iter = Util::next_node(&node, false);
continue;
}
if Util::is_element_without_content(&node) {
node_iter = Util::remove_and_next(&mut node);
continue;
} else if (Util::has_single_tag_inside_element(&node, "DIV")
|| Util::has_single_tag_inside_element(&node, "SECTION"))
&& let Some(mut parent) = node.get_parent()
&& let Some(mut child) = node.get_child_elements().into_iter().next()
{
for (k, v) in node.get_attributes().into_iter() {
child.set_attribute(&k, &v).map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
}
parent
.replace_child_node(child, node.clone())
.map_err(|error| {
tracing::error!(%error);
FullTextParserError::Xml
})?;
node_iter = Util::next_node(&parent, false);
continue;
}
node_iter = Util::next_node(&node, false);
}
Ok(())
}
pub(crate) fn remove_empty_nodes(root: &mut Node) {
let mut node_iter = Some(root.clone());
while let Some(mut node) = node_iter {
let tag_name = node.get_name().to_uppercase();
if constants::VALID_EMPTY_TAGS.contains(tag_name.as_str()) {
node_iter = Util::next_node(&node, false);
continue;
}
if Util::is_element_without_children(&node) {
node_iter = Util::remove_and_next(&mut node);
continue;
}
node_iter = Util::next_node(&node, false);
}
}
}