#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
mod action_cache;
pub mod database;
mod default_portal;
mod error;
pub mod feed_api;
mod feed_api_implementations;
pub mod models;
mod password_encryption;
mod schema;
pub mod util;
use crate::database::Database;
use crate::default_portal::DefaultPortal;
use chrono;
use parking_lot::{Mutex, RwLock};
use std::sync::Arc;
use crate::action_cache::ActionCache;
pub use crate::error::{NewsFlashError, NewsFlashErrorKind};
pub use crate::feed_api::error::{FeedApiError, FeedApiErrorKind};
use crate::feed_api::{FeedApi, Portal};
use crate::models::{
Article, ArticleFilter, ArticleID, Category, CategoryID, CategoryType, Config, FatArticle, FavIcon, Feed, FeedID, FeedMapping, LoginData, Marked,
PluginCapabilities, PluginID, PluginInfo, Read, Tag, TagID, Tagging, Url, NEWSFLASH_TOPLEVEL,
};
use crate::util::favicon_cache::FavIconCache;
pub use crate::util::feed_parser::{self, FeedParserError, ParsedUrl};
use crate::util::mercury_parser::MercuryParser;
use crate::util::opml;
use crate::util::html2text::Html2Text;
#[cfg(feature = "readability-fallback")]
use readabiltiy_fork::extractor as readability;
use article_scraper::ArticleScraper;
use failure::{Fail, ResultExt};
use feed_api_implementations::FeedApiImplementations;
use log::{error, info, warn};
use reqwest::Client;
use std::collections::hash_map::HashMap;
use std::path::PathBuf;
/// Sub-directory (below the data dir) where the article scraper keeps its files.
static SCRAPER_DATA_DIR: &str = "scraper_data";

/// Convenience alias for results produced by this crate.
type NewsFlashResult<T> = Result<T, NewsFlashError>;
/// Central handle of the NewsFlash backend: owns the local database, the
/// currently loaded feed-service API and the shared caches.
pub struct NewsFlash {
    // Local article/feed database, shared with helpers via `Arc`.
    db: Arc<Database>,
    // Currently active service backend plugin.
    api: RwLock<Box<dyn FeedApi>>,
    // Persisted configuration (backend id, last sync time, sync amount, ...).
    config: RwLock<Config>,
    // Favicon download/cache helper.
    icons: FavIconCache,
    // Fetches full article content from the web.
    scraper: ArticleScraper,
    // Buffers user actions performed while a sync is running.
    sync_cache: Mutex<ActionCache>,
    // Flag raised for the duration of a sync operation.
    sync_ongoing: Arc<RwLock<bool>>,
}
impl NewsFlash {
/// List all compiled-in service backends, keyed by plugin id.
///
/// NOTE(review): `info()` is unwrapped here — a backend whose metadata
/// cannot be produced panics the whole listing; consider skipping such
/// entries instead.
pub fn list_backends() -> HashMap<PluginID, PluginInfo> {
    let mut map: HashMap<PluginID, PluginInfo> = HashMap::new();
    for api_meta in FeedApiImplementations::list() {
        map.insert(api_meta.id(), api_meta.info().unwrap());
    }
    map
}
/// Create a NewsFlash instance backed by the plugin `id`.
///
/// Creates `data_dir` and `config_dir` if missing, opens the database and
/// the configuration, and instantiates the requested backend.
///
/// # Errors
/// `IO` when directories or the config cannot be created/read, `Database`
/// when the DB cannot be opened, `LoadBackend`/`Portal` when the plugin
/// cannot be instantiated.
pub fn new(data_dir: &PathBuf, config_dir: &PathBuf, id: &PluginID) -> NewsFlashResult<Self> {
    // Make sure both directories exist before touching any files.
    std::fs::DirBuilder::new()
        .recursive(true)
        .create(&data_dir)
        .context(NewsFlashErrorKind::IO)?;
    std::fs::DirBuilder::new()
        .recursive(true)
        .create(&config_dir)
        .context(NewsFlashErrorKind::IO)?;
    let db = Database::new(data_dir).context(NewsFlashErrorKind::Database)?;
    let db = Arc::new(db);
    let api = NewsFlash::load_backend(id, config_dir, db.clone())?;
    let icons = FavIconCache::new(&db).context(NewsFlashErrorKind::IO)?;
    // The scraper keeps its own cache below the data dir.
    let scraper = ArticleScraper::new(data_dir.join(SCRAPER_DATA_DIR));
    let config = Config::open(&config_dir).context(NewsFlashErrorKind::IO)?;
    let sync_cache = Mutex::new(ActionCache::new());
    let base = NewsFlash {
        db,
        api: RwLock::new(api),
        config: RwLock::new(config),
        icons,
        scraper,
        sync_cache,
        sync_ongoing: Arc::new(RwLock::new(false)),
    };
    Ok(base)
}
/// Open an already-configured NewsFlash instance: read the stored backend
/// id from `config_dir` and delegate to [`NewsFlash::new`].
///
/// Fails with `LoadBackend` when no backend has been configured yet.
pub fn try_load(data_dir: &PathBuf, config_dir: &PathBuf) -> NewsFlashResult<Self> {
    let plugin_id = Config::open(&config_dir)
        .context(NewsFlashErrorKind::IO)?
        .get_backend()
        .ok_or(NewsFlashErrorKind::LoadBackend)?;
    Self::new(data_dir, config_dir, &plugin_id)
}
/// Instantiate the backend implementation registered under `backend_id`.
///
/// NOTE(review): the parameter is named `data_dir`, but the caller in
/// `NewsFlash::new` passes the *config* directory here — confirm which
/// directory `get_instance` actually expects.
fn load_backend(backend_id: &PluginID, data_dir: &PathBuf, db: Arc<Database>) -> NewsFlashResult<Box<dyn FeedApi>> {
    info!("Loading backend {}", backend_id);
    if let Some(meta_data) = FeedApiImplementations::get(&backend_id) {
        let portal = NewsFlash::default_portal(db).context(NewsFlashErrorKind::Portal)?;
        let backend = meta_data.get_instance(data_dir, portal).context(NewsFlashErrorKind::LoadBackend)?;
        return Ok(backend);
    } else {
        error!("No meta object for id '{}' found", backend_id);
    }
    Err(NewsFlashErrorKind::LoadBackend)?
}
/// Plugin id of the currently configured backend, if any.
pub fn id(&self) -> Option<PluginID> {
    let config = self.config.read();
    config.get_backend()
}
/// User name reported by the active backend, if it knows one.
pub fn user_name(&self) -> Option<String> {
    let api = self.api.read();
    api.user_name()
}
/// Capability flags advertised by the active backend.
pub fn features(&self) -> NewsFlashResult<PluginCapabilities> {
    let features = self.api.read().features().context(NewsFlashErrorKind::API)?;
    Ok(features)
}
/// Stored credentials of the active backend, if any.
pub fn get_login_data(&self) -> Option<LoginData> {
    let api = self.api.read();
    api.get_login_data()
}
/// `true` while a sync operation is in progress.
pub fn is_sync_ongoing(&self) -> bool {
    let ongoing = self.sync_ongoing.read();
    *ongoing
}
/// `true` when the local database holds no data yet.
pub fn is_database_empty(&self) -> NewsFlashResult<bool> {
    Ok(self.db.is_empty().context(NewsFlashErrorKind::Database)?)
}
/// Build the portal object handed to backends for local DB access.
fn default_portal(db: Arc<Database>) -> NewsFlashResult<Box<dyn Portal>> {
    Ok(Box::new(DefaultPortal::new(db)))
}
/// Ask every known backend implementation to translate `error` into a
/// human-readable message; the first implementation that recognises the
/// error wins.
pub fn parse_error(error: &dyn Fail) -> Option<String> {
    FeedApiImplementations::list()
        .into_iter()
        .find_map(|api_meta| api_meta.parse_error(error))
}
/// Whether `error` indicates a broken login/session (as opposed to a
/// purely local failure such as DB or IO errors).
///
/// The match is deliberately exhaustive so adding a new error kind forces
/// a decision here.
pub fn error_login_related(error: &NewsFlashError) -> bool {
    match error.kind() {
        NewsFlashErrorKind::LoadBackend | NewsFlashErrorKind::NotLoggedIn | NewsFlashErrorKind::Login | NewsFlashErrorKind::API => true,
        NewsFlashErrorKind::Database
        | NewsFlashErrorKind::GrabContent
        | NewsFlashErrorKind::Icon
        | NewsFlashErrorKind::ImageDownload
        | NewsFlashErrorKind::IO
        | NewsFlashErrorKind::Portal
        | NewsFlashErrorKind::OPML
        | NewsFlashErrorKind::Syncing
        | NewsFlashErrorKind::Unknown
        | NewsFlashErrorKind::Config => false,
    }
}
/// Fetch (or load from the cache) the favicon for `feed`.
pub async fn get_icon_info(&self, feed: &Feed, client: &Client) -> NewsFlashResult<FavIcon> {
    let info = self.icons.get_icon(feed, &self.api, client).await.context(NewsFlashErrorKind::Icon)?;
    Ok(info)
}
/// Log in to the backend with `data`; on success the backend id is
/// persisted in the configuration so `try_load` can restore the session.
pub async fn login(&self, data: LoginData, client: &Client) -> NewsFlashResult<()> {
    // Extract the plugin id before `data` is moved into the API call.
    let id = match &data {
        LoginData::OAuth(data) => data.id.clone(),
        LoginData::Password(data) => data.id.clone(),
        LoginData::None(id) => id.clone(),
    };
    self.api.write().login(data, client).await.context(NewsFlashErrorKind::Login)?;
    // Only persist the backend id once the login actually succeeded.
    self.config.write().set_backend(Some(&id)).context(NewsFlashErrorKind::Login)?;
    Ok(())
}
/// Log out from the backend and wipe the local database.
///
/// NOTE(review): the configured backend id is cleared *before* the remote
/// logout — if the API call fails the config is already reset; confirm
/// this ordering is intentional.
pub async fn logout(&self, client: &Client) -> NewsFlashResult<()> {
    self.config.write().set_backend(None).context(NewsFlashErrorKind::Config)?;
    self.api.write().logout(client).await.context(NewsFlashErrorKind::API)?;
    self.db.reset().context(NewsFlashErrorKind::Database)?;
    Ok(())
}
/// Perform the first full synchronization after login.
///
/// Raises the `sync_ongoing` flag for the duration of the sync; every
/// error path lowers it again via `map_err` before `?` propagates.
/// Returns the number of new articles written to the database.
///
/// NOTE(review): a panic between raising and lowering the flag would
/// leave `sync_ongoing` stuck at `true` — a drop guard would be safer.
pub async fn initial_sync(&self, client: &Client) -> NewsFlashResult<i64> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        *self.sync_ongoing.write() = true;
        let result = self
            .api
            .read()
            .initial_sync(client)
            .await
            .map_err(|error| {
                // Lower the flag before propagating the error.
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::API)?;
        // Fold user actions cached during the sync into the fetched state.
        let result = self.sync_cache.lock().process_sync_result(result);
        // Replay the cached actions against the service, then clear them.
        self.sync_cache
            .lock()
            .execute_api_actions(&self.api, &self.config, client)
            .await
            .map_err(|error| {
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::API)?;
        self.sync_cache.lock().reset();
        let new_article_count = self
            .db
            .write_sync_result(result)
            .map_err(|error| {
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::Database)?;
        *self.sync_ongoing.write() = false;
        return Ok(new_article_count);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Perform an incremental synchronization since the last recorded sync.
///
/// Mirrors `initial_sync` but limits the fetch via the configured sync
/// amount and the last sync timestamp, and stores the new timestamp on
/// success. Returns the number of new articles.
///
/// NOTE(review): same hazard as `initial_sync` — a panic while the flag
/// is raised leaves `sync_ongoing` stuck at `true`.
pub async fn sync(&self, client: &Client) -> NewsFlashResult<i64> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        *self.sync_ongoing.write() = true;
        // Timestamp taken *before* the fetch so nothing published during
        // the sync falls into a gap.
        let now = chrono::Utc::now();
        let max_count = self.config.read().get_sync_amount();
        let last_sync = self.config.read().get_last_sync();
        let result = self
            .api
            .read()
            .sync(max_count, last_sync, client)
            .await
            .map_err(|error| {
                // Lower the flag before propagating the error.
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::API)?;
        // Fold user actions cached during the sync into the fetched state.
        let result = self.sync_cache.lock().process_sync_result(result);
        // Replay the cached actions against the service, then clear them.
        self.sync_cache
            .lock()
            .execute_api_actions(&self.api, &self.config, client)
            .await
            .map_err(|error| {
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::API)?;
        self.sync_cache.lock().reset();
        let new_article_count = self
            .db
            .write_sync_result(result)
            .map_err(|error| {
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::Database)?;
        self.config
            .write()
            .set_last_sync(now)
            .map_err(|error| {
                *self.sync_ongoing.write() = false;
                error
            })
            .context(NewsFlashErrorKind::Config)?;
        *self.sync_ongoing.write() = false;
        return Ok(new_article_count);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Set the read state of `articles` on the service and in the local DB.
///
/// While a sync is running the change is only queued in the action cache
/// (replayed against the API when the sync finishes); the local DB is
/// updated in both cases.
pub async fn set_article_read(&self, articles: &[ArticleID], read: Read, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            for article_id in articles {
                match read {
                    Read::Read => self.sync_cache.lock().add_article_marked_read(article_id),
                    Read::Unread => self.sync_cache.lock().add_article_marked_unread(article_id),
                }
            }
        } else {
            self.api
                .read()
                .set_article_read(&articles, read, client)
                .await
                .context(NewsFlashErrorKind::API)?;
        }
        self.db.set_article_read(&articles, read).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Set the starred/marked state of `articles` on the service and locally.
///
/// While a sync is running the change is only queued in the action cache;
/// the local DB is updated in both cases.
pub async fn set_article_marked(&self, articles: &[ArticleID], marked: Marked, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            for article_id in articles {
                match marked {
                    Marked::Marked => self.sync_cache.lock().add_article_mark(article_id),
                    Marked::Unmarked => self.sync_cache.lock().add_article_unmark(article_id),
                }
            }
        } else {
            self.api
                .read()
                .set_article_marked(&articles, marked, client)
                .await
                .context(NewsFlashErrorKind::API)?;
        }
        self.db.set_article_marked(&articles, marked).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Mark all articles of the given feeds as read (service + local DB).
///
/// While a sync is running the change is only queued in the action cache.
pub async fn set_feed_read(&self, feeds: &[FeedID], client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            for feed_id in feeds {
                self.sync_cache.lock().add_feed_mark_read(feed_id);
            }
        } else {
            // Only articles known since the last sync are affected remotely.
            let last_sync = self.config.read().get_last_sync();
            self.api
                .read()
                .set_feed_read(&feeds, last_sync, client)
                .await
                .context(NewsFlashErrorKind::API)?;
        }
        self.db.set_feed_read(&feeds).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Mark all articles of the given categories as read (service + local DB).
///
/// While a sync is running the change is only queued in the action cache.
pub async fn set_category_read(&self, categories: &[CategoryID], client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            for category_id in categories {
                self.sync_cache.lock().add_category_mark_read(category_id);
            }
        } else {
            let last_sync = self.config.read().get_last_sync();
            self.api
                .read()
                .set_category_read(&categories, last_sync, client)
                .await
                .context(NewsFlashErrorKind::API)?;
        }
        self.db.set_category_read(&categories).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Mark all articles carrying the given tags as read (service + local DB).
///
/// While a sync is running the change is only queued in the action cache.
///
/// # Errors
/// `NotLoggedIn` without a valid session, `API`/`Database` on service or
/// local storage failures.
pub async fn set_tag_read(&self, tags: &[TagID], client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            for tag_id in tags {
                // `tag_id` is already `&TagID`; the original passed `&tag_id`
                // (a `&&TagID`) and relied on auto-deref — pass it directly,
                // consistent with the sibling setters.
                self.sync_cache.lock().add_tag_mark_read(tag_id);
            }
        } else {
            let last_sync = self.config.read().get_last_sync();
            self.api
                .read()
                .set_tag_read(tags, last_sync, client)
                .await
                .context(NewsFlashErrorKind::API)?;
        }
        self.db.set_tag_read(tags).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Mark every article as read (service + local DB).
///
/// While a sync is running, a mark-read action is queued per category
/// instead of calling the service directly.
pub async fn set_all_read(&self, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            let categories = self.db.read_categories().context(NewsFlashErrorKind::Database)?;
            for category in categories {
                self.sync_cache.lock().add_category_mark_read(&category.category_id);
            }
        } else {
            let last_sync = self.config.read().get_last_sync();
            self.api.read().set_all_read(last_sync, client).await.context(NewsFlashErrorKind::API)?;
        }
        self.db.set_all_read().context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Add a new feed at `url` to the service and the local database.
///
/// `title` overrides the feed title; `category_id` selects the parent
/// category. If the service reports back a category it is stored too and
/// used for the feed mapping when no category was explicitly requested.
///
/// # Errors
/// `Syncing` while a sync runs, `NotLoggedIn` without a valid session,
/// `API`/`Database` on service or local storage failures.
pub async fn add_feed(&self, url: &Url, title: Option<String>, category_id: Option<CategoryID>, client: &Client) -> NewsFlashResult<Feed> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let (feed, category) = self
            .api
            .read()
            .add_feed(url, title, category_id.clone(), client)
            .await
            .context(NewsFlashErrorKind::API)?;
        self.db.insert_feed(&feed).context(NewsFlashErrorKind::Database)?;
        if let Some(category) = &category {
            self.db.insert_category(&category).context(NewsFlashErrorKind::Database)?;
        }
        // Prefer the explicitly requested category, falling back to the one
        // reported by the service (replaces the original nested match).
        let category_id = category_id.or_else(|| category.map(|category| category.category_id));
        if let Some(category_id) = category_id {
            let mapping = FeedMapping {
                feed_id: feed.feed_id.clone(),
                category_id,
            };
            self.db.insert_mapping(&mapping).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(feed);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Remove `feed` from the service and the local database.
///
/// Fails with `Syncing` while a sync is in progress.
pub async fn remove_feed(&self, feed: &Feed, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api
            .read()
            .remove_feed(&feed.feed_id, client)
            .await
            .context(NewsFlashErrorKind::API)?;
        self.db.drop_feed(&feed.feed_id).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Move `feed` from category `from` to category `to` (service + local DB).
///
/// Locally this swaps the corresponding feed mapping.
/// Fails with `Syncing` while a sync is in progress.
pub async fn move_feed(&self, feed: &FeedID, from: &CategoryID, to: &CategoryID, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api.read().move_feed(feed, from, to, client).await.context(NewsFlashErrorKind::API)?;
        self.db
            .drop_mapping(&FeedMapping {
                feed_id: feed.clone(),
                category_id: from.clone(),
            })
            .context(NewsFlashErrorKind::Database)?;
        self.db
            .insert_mapping(&FeedMapping {
                feed_id: feed.clone(),
                category_id: to.clone(),
            })
            .context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Rename `feed` to `new_title` on the service and locally.
///
/// Some services change the feed id on rename; in that case the old feed
/// row, its mappings and its articles are migrated to the new id.
/// Fails with `Syncing` while a sync is in progress.
pub async fn rename_feed(&self, feed: &Feed, new_title: &str, client: &Client) -> NewsFlashResult<Feed> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let new_id = self
            .api
            .read()
            .rename_feed(&feed.feed_id, new_title, client)
            .await
            .context(NewsFlashErrorKind::API)?;
        let mut modified_feed = feed.clone();
        modified_feed.label = new_title.to_owned();
        modified_feed.feed_id = new_id.clone();
        self.db.insert_feed(&modified_feed).context(NewsFlashErrorKind::Database)?;
        // If the service changed the id, migrate mappings and articles.
        if new_id != feed.feed_id {
            self.db.drop_feed(&feed.feed_id).context(NewsFlashErrorKind::Database)?;
            let mappings = self.db.read_mappings(Some(&feed.feed_id), None).context(NewsFlashErrorKind::Database)?;
            let modified_mappings: Vec<FeedMapping> = mappings
                .into_iter()
                .map(|mut mapping| {
                    mapping.feed_id = new_id.clone();
                    mapping
                })
                .collect();
            self.db.drop_mapping_of_feed(&feed.feed_id).context(NewsFlashErrorKind::Database)?;
            self.db.write_mappings(&modified_mappings).context(NewsFlashErrorKind::Database)?;
            // Re-home all articles of the old feed id.
            let articles = self
                .db
                .read_articles(ArticleFilter {
                    limit: None,
                    offset: None,
                    order: None,
                    unread: None,
                    marked: None,
                    feed: Some(feed.feed_id.clone()),
                    feed_blacklist: None,
                    category: None,
                    category_blacklist: None,
                    tag: None,
                    ids: None,
                    newer_than: None,
                    older_than: None,
                    search_term: None,
                })
                .context(NewsFlashErrorKind::Database)?;
            let mut modified_ids: Vec<ArticleID> = Vec::new();
            let modified_articles: Vec<Article> = articles
                .into_iter()
                .map(|mut article| {
                    modified_ids.push(article.article_id.clone());
                    article.feed_id = new_id.clone();
                    article
                })
                .collect();
            self.db.drop_articles(&modified_ids).context(NewsFlashErrorKind::Database)?;
            self.db.write_articles(&modified_articles).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(modified_feed);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Create a new category on the service and mirror it in the local DB.
///
/// Without a `parent`, the category is attached to the virtual NewsFlash
/// top-level category.
///
/// # Errors
/// `Syncing` while a sync runs, `NotLoggedIn` without a valid session,
/// `API` when the service call fails, `Database` for local storage errors.
pub async fn add_category(
    &self,
    title: &str,
    parent: Option<&CategoryID>,
    sort_index: Option<i32>,
    client: &Client,
) -> NewsFlashResult<Category> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let category_id = self
            .api
            .read()
            .add_category(title, parent, client)
            .await
            // This is a service call: report failures as `API` errors
            // (the original mislabeled them as `Database`).
            .context(NewsFlashErrorKind::API)?;
        let category = Category {
            category_id,
            label: title.to_owned(),
            sort_index,
            parent_id: match parent {
                Some(parent) => parent.clone(),
                None => NEWSFLASH_TOPLEVEL.clone(),
            },
            category_type: CategoryType::Default,
        };
        self.db.insert_category(&category).context(NewsFlashErrorKind::Database)?;
        return Ok(category);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Remove `category` on the service and locally.
///
/// With `remove_children` the whole subtree (feeds included) is deleted;
/// otherwise child feeds/categories are re-attached to the removed
/// category's parent. Fails with `Syncing` while a sync is in progress.
pub async fn remove_category(&self, category: &Category, remove_children: bool, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api
            .read()
            .remove_category(&category.category_id, remove_children, client)
            .await
            .context(NewsFlashErrorKind::API)?;
    } else {
        return Err(NewsFlashErrorKind::NotLoggedIn)?;
    }
    // Mirror the remote change in the local database.
    if remove_children {
        self.remove_category_from_db_recurse(category)?;
    } else {
        self.remove_category_from_db_move_children_up(category)?;
    }
    Ok(())
}
/// Remove `category` from the local DB, re-attaching its feeds and child
/// categories to the removed category's parent.
fn remove_category_from_db_move_children_up(&self, category: &Category) -> NewsFlashResult<()> {
    let parent_id = category.parent_id.clone();

    // Re-point all feed mappings of the removed category to its parent.
    let mappings = self
        .db
        .read_mappings(None, Some(&category.category_id))
        .context(NewsFlashErrorKind::Database)?;
    for mut mapping in mappings {
        self.db.drop_mapping(&mapping).context(NewsFlashErrorKind::Database)?;
        mapping.category_id = parent_id.clone();
        self.db.insert_mapping(&mapping).context(NewsFlashErrorKind::Database)?;
    }

    // Move child categories one level up. The original closure shadowed
    // `parent_id` semantics and selected the removed category's *siblings*
    // (`c.parent_id == parent_id`) and re-set the same parent — a no-op.
    // The children of the removed category are the ones to re-home.
    let mutated_child_categories: Vec<Category> = self
        .db
        .read_categories()
        .context(NewsFlashErrorKind::Database)?
        .into_iter()
        .filter(|child| child.parent_id == category.category_id)
        .map(|mut child| {
            child.parent_id = parent_id.clone();
            child
        })
        .collect();
    self.db
        .insert_categories(&mutated_child_categories)
        .context(NewsFlashErrorKind::Database)?;

    // Drop the category itself; the original never removed it, leaving a
    // stale row behind (the recursive variant does drop it).
    self.db.drop_category(category).context(NewsFlashErrorKind::Database)?;
    Ok(())
}
/// Recursively remove `category`, its feeds and all child categories from
/// the local DB.
fn remove_category_from_db_recurse(&self, category: &Category) -> NewsFlashResult<()> {
    // Drop every feed mapped into this category.
    let mappings = self
        .db
        .read_mappings(None, Some(&category.category_id))
        .context(NewsFlashErrorKind::Database)?;
    for mapping in mappings {
        self.db.drop_feed(&mapping.feed_id).context(NewsFlashErrorKind::Database)?;
    }
    self.db.drop_category(category).context(NewsFlashErrorKind::Database)?;

    // Recurse into the children of *this* category. The original closure
    // shadowed `category`, comparing each candidate with itself
    // (`c.parent_id == c.category_id`), so real children were never found.
    let categories = self.db.read_categories().context(NewsFlashErrorKind::Database)?;
    let child_categories: Vec<Category> = categories
        .into_iter()
        .filter(|child| child.parent_id == category.category_id)
        .collect();
    for child_category in child_categories {
        self.remove_category_from_db_recurse(&child_category)?;
    }
    Ok(())
}
/// Rename `category` to `new_title` on the service and locally.
///
/// Some services change the category id on rename; in that case the old
/// row is dropped and its feed mappings migrated to the new id.
/// Fails with `Syncing` while a sync is in progress.
pub async fn rename_category(&self, category: &Category, new_title: &str, client: &Client) -> NewsFlashResult<Category> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let new_id = self
            .api
            .read()
            .rename_category(&category.category_id, new_title, client)
            .await
            .context(NewsFlashErrorKind::API)?;
        let mut modified_category = category.clone();
        modified_category.label = new_title.to_owned();
        // If the service changed the id, migrate the mappings as well.
        if new_id != category.category_id {
            self.db.drop_category(&category).context(NewsFlashErrorKind::Database)?;
            modified_category.category_id = new_id.clone();
            let mappings = self
                .db
                .read_mappings(None, Some(&category.category_id))
                .context(NewsFlashErrorKind::Database)?;
            let modified_mappings: Vec<FeedMapping> = mappings
                .into_iter()
                .map(|mut mapping| {
                    mapping.category_id = new_id.clone();
                    mapping
                })
                .collect();
            self.db
                .drop_mapping_of_category(&category.category_id)
                .context(NewsFlashErrorKind::Database)?;
            self.db.write_mappings(&modified_mappings).context(NewsFlashErrorKind::Database)?;
        }
        self.db.insert_category(&modified_category).context(NewsFlashErrorKind::Database)?;
        return Ok(modified_category);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Move `category_id` below a new `parent` category (service + local DB).
///
/// Fails with `Syncing` while a sync is in progress.
pub async fn move_category(&self, category_id: &CategoryID, parent: &CategoryID, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api
            .read()
            .move_category(category_id, parent, client)
            .await
            .context(NewsFlashErrorKind::API)?;
        // Mirror the new parent in the local row (upsert via insert).
        let categories = self.db.read_categories().context(NewsFlashErrorKind::Database)?;
        if let Some(mut mutated_category) = categories.into_iter().find(|c| &c.category_id == category_id) {
            mutated_category.parent_id = parent.clone();
            self.db.insert_category(&mutated_category).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Create a new tag on the service and mirror it in the local DB.
///
/// Fails with `Syncing` while a sync is in progress.
pub async fn add_tag(&self, title: &str, color: Option<String>, sort_index: Option<i32>, client: &Client) -> NewsFlashResult<Tag> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let tag_id = self.api.read().add_tag(title, client).await.context(NewsFlashErrorKind::API)?;
        let tag = Tag {
            tag_id,
            label: title.to_owned(),
            color,
            sort_index,
        };
        self.db.insert_tag(&tag).context(NewsFlashErrorKind::Database)?;
        return Ok(tag);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Remove `tag` from the service and the local database.
///
/// Fails with `Syncing` while a sync is in progress.
pub async fn remove_tag(&self, tag: &Tag, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api.read().remove_tag(&tag.tag_id, client).await.context(NewsFlashErrorKind::API)?;
        self.db.drop_tag(&tag).context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Rename `tag` to `new_title` on the service and locally.
///
/// Some services change the tag id on rename; in that case all taggings
/// are migrated to the new id. Fails with `Syncing` while a sync runs.
pub async fn rename_tag(&self, tag: &Tag, new_title: &str, client: &Client) -> NewsFlashResult<Tag> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        let new_id = self
            .api
            .read()
            .rename_tag(&tag.tag_id, new_title, client)
            .await
            .context(NewsFlashErrorKind::API)?;
        // Replace the old row with the renamed one, keeping color/order.
        self.db.drop_tag(&tag).context(NewsFlashErrorKind::Database)?;
        let mutated_tag = Tag {
            tag_id: new_id.clone(),
            label: new_title.to_owned(),
            color: tag.color.clone(),
            sort_index: tag.sort_index,
        };
        self.db.insert_tag(&mutated_tag).context(NewsFlashErrorKind::Database)?;
        // If the service changed the id, migrate the taggings as well.
        if new_id != tag.tag_id {
            let taggings = self.db.read_taggings(None, Some(&tag.tag_id)).context(NewsFlashErrorKind::Database)?;
            let mutated_taggings: Vec<Tagging> = taggings
                .into_iter()
                .map(|mut tagging| {
                    tagging.tag_id = new_id.clone();
                    tagging
                })
                .collect();
            self.db.drop_taggings_of_tag(tag).context(NewsFlashErrorKind::Database)?;
            self.db.insert_taggings(&mutated_taggings).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(mutated_tag);
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Attach `tag` to `article` (service + local DB).
///
/// While a sync is running the action is only queued in the action cache.
/// NOTE(review): in the queued path no local tagging row is inserted —
/// confirm `process_sync_result` covers it after the sync.
pub async fn tag_article(&self, article: &Article, tag: &Tag, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            self.sync_cache.lock().add_article_tagged(&article.article_id, &tag.tag_id);
        } else {
            self.api
                .read()
                .tag_article(&article.article_id, &tag.tag_id, client)
                .await
                .context(NewsFlashErrorKind::API)?;
            let tagging = Tagging {
                article_id: article.article_id.clone(),
                tag_id: tag.tag_id.clone(),
            };
            self.db.insert_tagging(&tagging).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Detach `tag` from `article` (service + local DB).
///
/// While a sync is running the action is only queued in the action cache.
/// NOTE(review): in the queued path no local tagging row is removed —
/// confirm `process_sync_result` covers it after the sync.
pub async fn untag_article(&self, article: &Article, tag: &Tag, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            self.sync_cache.lock().add_article_untagged(&article.article_id, &tag.tag_id);
        } else {
            self.api
                .read()
                .untag_article(&article.article_id, &tag.tag_id, client)
                .await
                .context(NewsFlashErrorKind::API)?;
            let tagging = Tagging {
                article_id: article.article_id.clone(),
                tag_id: tag.tag_id.clone(),
            };
            self.db.drop_tagging(&tagging).context(NewsFlashErrorKind::Database)?;
        }
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Import an OPML document: forward it to the service and mirror the
/// parsed categories/feeds/mappings in the local DB.
///
/// With `parse_feeds` the importer fetches each feed to fill in details.
/// Fails with `Syncing` while a sync is in progress.
pub async fn import_opml(&self, opml: &str, parse_feeds: bool, client: &Client) -> NewsFlashResult<()> {
    if self.api.read().is_logged_in(client).await.context(NewsFlashErrorKind::API)? {
        if *self.sync_ongoing.read() {
            return Err(NewsFlashErrorKind::Syncing)?;
        }
        self.api.read().import_opml(opml, client).await.context(NewsFlashErrorKind::API)?;
        let opml_result = opml::parse_opml(opml, parse_feeds, Some(client))
            .await
            .context(NewsFlashErrorKind::OPML)?;
        self.db.insert_categories(&opml_result.categories).context(NewsFlashErrorKind::Database)?;
        self.db.insert_feeds(&opml_result.feeds).context(NewsFlashErrorKind::Database)?;
        self.db
            .insert_mappings(&opml_result.feed_mappings)
            .context(NewsFlashErrorKind::Database)?;
        return Ok(());
    }
    Err(NewsFlashErrorKind::NotLoggedIn)?
}
/// Export all local categories, feeds and mappings as an OPML document.
///
/// Fails with `Syncing` while a sync is in progress.
pub fn export_opml(&self) -> NewsFlashResult<String> {
    if *self.sync_ongoing.read() {
        return Err(NewsFlashErrorKind::Syncing)?;
    }
    let categories = self.db.read_categories().context(NewsFlashErrorKind::Database)?;
    let feeds = self.db.read_feeds().context(NewsFlashErrorKind::Database)?;
    let mappings = self.db.read_mappings(None, None).context(NewsFlashErrorKind::Database)?;
    Ok(opml::generate_opml(&categories, &feeds, &mappings).context(NewsFlashErrorKind::OPML)?)
}
/// All categories stored in the local database.
pub fn get_categories(&self) -> NewsFlashResult<Vec<Category>> {
    let categories = self.db.read_categories().context(NewsFlashErrorKind::Database)?;
    Ok(categories)
}

/// Number of unread articles in `category`.
pub fn unread_count_category(&self, category: &CategoryID) -> NewsFlashResult<i64> {
    let count = self.db.unread_count_category(category).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Number of marked (starred) articles in `category`.
pub fn marked_count_category(&self, category: &CategoryID) -> NewsFlashResult<i64> {
    let count = self.db.marked_count_category(category).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// All feeds plus their category mappings from the local database.
pub fn get_feeds(&self) -> NewsFlashResult<(Vec<Feed>, Vec<FeedMapping>)> {
    let feeds = self.db.read_feeds().context(NewsFlashErrorKind::Database)?;
    let mappings = self.db.read_mappings(None, None).context(NewsFlashErrorKind::Database)?;
    Ok((feeds, mappings))
}

/// Number of unread articles in `feed`.
pub fn unread_count_feed(&self, feed: &FeedID) -> NewsFlashResult<i64> {
    let count = self.db.unread_count_feed(feed).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Number of marked (starred) articles in `feed`.
pub fn marked_count_feed(&self, feed: &FeedID) -> NewsFlashResult<i64> {
    let count = self.db.marked_count_feed(feed).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Unread counts for every feed, keyed by feed id.
pub fn unread_count_feed_map(&self) -> NewsFlashResult<HashMap<FeedID, i64>> {
    let mut count_vec = self.db.unread_count_feed_map().context(NewsFlashErrorKind::Database)?;
    let mut map: HashMap<FeedID, i64> = HashMap::new();
    count_vec.drain(..).for_each(|c| {
        map.insert(c.feed_id, c.count);
    });
    Ok(map)
}

/// Marked (starred) counts for every feed, keyed by feed id.
pub fn marked_count_feed_map(&self) -> NewsFlashResult<HashMap<FeedID, i64>> {
    let mut count_vec = self.db.marked_count_feed_map().context(NewsFlashErrorKind::Database)?;
    let mut map: HashMap<FeedID, i64> = HashMap::new();
    count_vec.drain(..).for_each(|c| {
        map.insert(c.feed_id, c.count);
    });
    Ok(map)
}

/// All tags stored in the local database.
pub fn get_tags(&self) -> NewsFlashResult<Vec<Tag>> {
    let tags = self.db.read_tags().context(NewsFlashErrorKind::Database)?;
    Ok(tags)
}

/// Tags attached to the article with `article_id`.
pub fn get_tags_of_article(&self, article_id: &ArticleID) -> NewsFlashResult<Vec<Tag>> {
    let tags = self.db.read_tags_for_article(article_id).context(NewsFlashErrorKind::Database)?;
    Ok(tags)
}

/// Number of unread articles carrying `tag`.
pub fn unread_count_tag(&self, tag: &TagID) -> NewsFlashResult<i64> {
    let count = self.db.unread_count_tag(tag).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Number of marked (starred) articles carrying `tag`.
pub fn marked_count_tag(&self, tag: &TagID) -> NewsFlashResult<i64> {
    let count = self.db.marked_count_tag(tag).context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Total number of unread articles.
pub fn unread_count_all(&self) -> NewsFlashResult<i64> {
    let count = self.db.unread_count_all().context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Total number of marked (starred) articles.
pub fn marked_count_all(&self) -> NewsFlashResult<i64> {
    let count = self.db.marked_count_all().context(NewsFlashErrorKind::Database)?;
    Ok(count)
}

/// Articles matching `filter` (without content bodies).
pub fn get_articles(&self, filter: ArticleFilter) -> NewsFlashResult<Vec<Article>> {
    let articles = self.db.read_articles(filter).context(NewsFlashErrorKind::Database)?;
    Ok(articles)
}

/// A single article by id (without content body).
pub fn get_article(&self, id: &ArticleID) -> NewsFlashResult<Article> {
    let article = self.db.read_article(id).context(NewsFlashErrorKind::Database)?;
    Ok(article)
}

/// Articles matching `filter`, including their content bodies.
pub fn get_fat_articles(&self, filter: ArticleFilter) -> NewsFlashResult<Vec<FatArticle>> {
    let articles = self.db.read_fat_articles(filter).context(NewsFlashErrorKind::Database)?;
    Ok(articles)
}

/// A single article by id, including its content body.
pub fn get_fat_article(&self, id: &ArticleID) -> NewsFlashResult<FatArticle> {
    let article = self.db.read_fat_article(id).context(NewsFlashErrorKind::Database)?;
    Ok(article)
}
/// Download and inline the images referenced by the article's content.
///
/// Prefers the scraped content; falls back to the original HTML when no
/// scraped content exists. The processed content is written back to the
/// database and the updated article is returned.
pub async fn article_download_images(&self, id: &ArticleID, client: &Client) -> NewsFlashResult<FatArticle> {
    let mut article = self.get_fat_article(id)?;
    if let Some(scraped_content) = article.scraped_content {
        let processed_scraped_content = self
            .scraper
            .image_downloader
            .download_images_from_string(&scraped_content, client)
            .await
            .context(NewsFlashErrorKind::ImageDownload)?;
        article.scraped_content = Some(processed_scraped_content);
    } else if let Some(html) = article.html {
        let processed_html = self
            .scraper
            .image_downloader
            .download_images_from_string(&html, client)
            .await
            .context(NewsFlashErrorKind::ImageDownload)?;
        article.html = Some(processed_html);
    }
    self.db.update_article_grabbed_content(&article).context(NewsFlashErrorKind::Database)?;
    Ok(article)
}
/// Fetch the full content of an article from its URL.
///
/// Tries three extractors in order, returning on the first success:
/// 1. the internal `ArticleScraper`,
/// 2. the Mercury parser web service,
/// 3. (only with the `readability-fallback` feature) the readability
///    extractor.
/// Each success updates the stored article content and returns the
/// article; if all fail (or the article has no URL) the result is a
/// `GrabContent` error.
pub async fn article_scrap_content(&self, id: &ArticleID, client: &Client) -> NewsFlashResult<FatArticle> {
    let mut article = self.get_fat_article(id)?;
    if let Some(url) = &article.url {
        // Stage 1: internal scraper.
        match self.scraper.parse(url.get(), false, client).await {
            Ok(processed_article) => {
                info!("Internal scraper: successfully scraped: '{}'", url);
                if let Some(html) = processed_article.html {
                    article.plain_text = Html2Text::process(&html);
                    article.scraped_content = Some(html);
                }
                // Only fill in title/author if the feed didn't provide them.
                if let Some(title) = processed_article.title {
                    if article.title.is_none() {
                        article.title = Some(title);
                    }
                }
                if let Some(author) = processed_article.author {
                    if article.author.is_none() {
                        article.author = Some(author);
                    }
                }
                self.db.update_article_grabbed_content(&article).context(NewsFlashErrorKind::Database)?;
                return Ok(article);
            }
            Err(error) => {
                error!("Internal scraper: '{}'", error);
                warn!("Internal scraper failed to process: '{}'", url);
            }
        }
        // Stage 2: Mercury parser.
        match MercuryParser::parse_url(url, client).await {
            Ok(processed_article) => {
                info!("Mercury parser: successfully scraped: '{}'", url);
                if let Some(html) = processed_article.content {
                    article.plain_text = Html2Text::process(&html);
                    article.scraped_content = Some(html);
                }
                if let Some(title) = processed_article.title {
                    if article.title.is_none() {
                        article.title = Some(title);
                    }
                }
                if let Some(author) = processed_article.author {
                    if article.author.is_none() {
                        article.author = Some(author);
                    }
                }
                if let Some(summary) = processed_article.excerpt {
                    if article.summary.is_none() {
                        article.summary = Some(summary);
                    }
                }
                self.db.update_article_grabbed_content(&article).context(NewsFlashErrorKind::Database)?;
                return Ok(article);
            }
            Err(error) => {
                error!("Mercury parser: '{}'", error);
                warn!("Mercury parser failed to process: '{}'", url);
            }
        }
        // Stage 3: readability fallback, compiled in only with the feature.
        // The runtime `cfg!` plus the `#[cfg]` attribute keeps the block
        // syntactically valid when the feature is disabled.
        if cfg!(feature = "readability-fallback") {
            #[cfg(feature = "readability-fallback")]
            match readability::scrape_with_client(url.get().as_str(), client).await {
                Ok(processed_article) => {
                    info!("Readability: successfully scraped: '{}'", url);
                    if processed_article.content.is_empty() {
                        warn!("Readability: content empty");
                    } else {
                        if !processed_article.text.is_empty() {
                            article.plain_text = Some(processed_article.text);
                        } else {
                            article.plain_text = Html2Text::process(&processed_article.content);
                        }
                        article.scraped_content = Some(processed_article.content);
                        if !processed_article.title.is_empty() {
                            if article.title.is_none() {
                                article.title = Some(processed_article.title);
                            }
                        }
                        self.db.update_article_grabbed_content(&article).context(NewsFlashErrorKind::Database)?;
                        return Ok(article);
                    }
                }
                Err(error) => {
                    error!("Readability: '{}'", error);
                    warn!("Readability failed to process: '{}'", url);
                }
            }
        }
    }
    Err(NewsFlashErrorKind::GrabContent)?
}
}