#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
mod action_cache;
mod builder;
mod config;
mod database;
mod default_portal;
pub mod error;
pub mod feed_api;
mod feed_api_implementations;
pub mod models;
mod password_encryption;
mod schema;
pub mod util;
#[cfg(test)]
mod tests;
use crate::action_cache::ActionCache;
use crate::builder::NewsFlashBuilder;
use crate::config::ConfigProxy;
use crate::database::DatabaseExt;
use crate::default_portal::DefaultPortal;
use crate::error::NewsFlashError;
use crate::feed_api::FeedApi;
use crate::feed_api_implementations::FeedApiImplementations;
use crate::models::{
Article, ArticleFilter, ArticleID, Category, CategoryID, DatabaseSize, Enclosure, FatArticle, FatFavIcon, FavIcon, Feed, FeedID, FeedMapping,
LoginData, Marked, NEWSFLASH_TOPLEVEL, PluginCapabilities, PluginID, PluginInfo, Read, Tag, TagID, Tagging, Url,
};
use crate::util::favicons::FavIconLoader;
pub use crate::util::feed_parser::{self, ParsedUrl};
use crate::util::opml;
use chrono::{DateTime, Duration, Utc};
use feed_api::FeedHeaderMap;
use models::{CategoryMapping, OfflineAction, OfflineActionType, Thumbnail};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::collections::hash_map::HashMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use tokio::sync::{Mutex, RwLock, Semaphore};
#[cfg(feature = "article-scraper")]
use article_scraper::ArticleScraper;
#[cfg(feature = "article-scraper")]
pub use article_scraper::DownloadProgress;
#[cfg(feature = "image-downloader")]
use crate::models::{Image, ImageMetadata};
#[cfg(feature = "image-downloader")]
use futures::channel::mpsc::Sender;
#[cfg(feature = "image-downloader")]
use std::collections::HashSet;
#[cfg(feature = "image-downloader")]
use tokio::io::AsyncReadExt;
#[cfg(any(feature = "article-scraper", feature = "image-downloader"))]
use std::path::PathBuf;
// Subdirectory name for the ftr site-config files used by the article scraper
// (presumably joined onto `NewsFlash::data_dir` — confirm at the use site).
#[cfg(feature = "article-scraper")]
static SCRAPER_DATA_DIR: &str = "ftr-site-config";
// Subdirectory name for downloaded article images
// (presumably joined onto `NewsFlash::data_dir` — confirm at the use site).
#[cfg(feature = "image-downloader")]
static IMAGE_DATA_DIR: &str = "pictures";
/// Crate-wide result alias: every fallible operation surfaces a `NewsFlashError`.
type NewsFlashResult<T> = Result<T, NewsFlashError>;
/// Central handle of the NewsFlash backend: owns the local database, the
/// currently configured feed API, configuration and the in-memory caches.
pub struct NewsFlash {
    /// Base directory for on-disk data (scraper config, images).
    #[cfg(any(feature = "article-scraper", feature = "image-downloader"))]
    data_dir: PathBuf,
    /// Local article/feed/category database.
    db: Arc<Box<dyn DatabaseExt>>,
    /// Backend API implementation; write-locked only for login/logout.
    api: RwLock<Box<dyn FeedApi>>,
    /// Persistent configuration (backend id, last sync, retention).
    config: Arc<RwLock<ConfigProxy>>,
    /// Limits the number of concurrent downloads; handed out via `get_semaphore`.
    download_semaphore: Arc<Semaphore>,
    /// Favicon loader/cache.
    icons: FavIconLoader,
    /// Lazily created article scraper instance.
    #[cfg(feature = "article-scraper")]
    scraper: RwLock<Option<Arc<ArticleScraper>>>,
    /// Actions recorded while a sync is running; replayed against the API afterwards.
    sync_cache: Mutex<ActionCache>,
    /// Set while a sync/feed-fetch is in progress; guards against concurrent syncs.
    is_sync_ongoing: Arc<AtomicBool>,
    /// Offline-mode flag; while set, actions are queued in the database instead.
    is_offline: Arc<AtomicBool>,
}
impl NewsFlash {
/// List all compiled-in backend implementations, keyed by plugin id.
pub fn list_backends() -> HashMap<PluginID, PluginInfo> {
    FeedApiImplementations::list()
        .into_iter()
        .map(|api_meta| (api_meta.id(), api_meta.info().unwrap()))
        .collect()
}
/// Entry point for constructing a [`NewsFlash`] instance via its builder.
pub fn builder() -> NewsFlashBuilder {
    NewsFlashBuilder::default()
}
/// Id of the currently configured backend plugin, if one is set.
pub async fn id(&self) -> Option<PluginID> {
    self.config.read().await.get_backend()
}
/// User name reported by the backend, if available.
pub async fn user_name(&self) -> Option<String> {
    self.api.read().await.user_name().await
}
/// Capabilities advertised by the configured backend.
pub async fn features(&self) -> NewsFlashResult<PluginCapabilities> {
    Ok(self.api.read().await.features()?)
}
/// Credentials/login data of the current backend session, if any.
pub async fn get_login_data(&self) -> Option<LoginData> {
    self.api.read().await.get_login_data().await
}
/// Timestamp of the last successful synchronization.
pub async fn last_sync(&self) -> DateTime<Utc> {
    self.config.read().await.get_last_sync()
}
/// Whether a synchronization is currently in progress.
pub fn is_sync_ongoing(&self) -> bool {
    self.is_sync_ongoing.load(Ordering::Acquire)
}
/// Whether offline mode is currently enabled.
pub fn is_offline(&self) -> bool {
    self.is_offline.load(Ordering::Acquire)
}
/// Hand out a handle to the shared download semaphore.
pub fn get_semaphore(&self) -> Arc<Semaphore> {
    // Explicit `Arc::clone` to make the cheap refcount bump obvious.
    Arc::clone(&self.download_semaphore)
}
/// Toggle offline mode.
///
/// When switching back online, all actions queued in the database while
/// offline are collected into an [`ActionCache`], replayed against the
/// remote API, and the regular sync cache is flushed afterwards.
pub async fn set_offline(&self, offline: bool, client: &Client) -> NewsFlashResult<()> {
    self.is_offline.store(offline, Ordering::Release);
    if !offline {
        // Load everything the user did while offline and clear the queue.
        let offline_actions = self.db.read_offline_actions()?;
        self.db.drop_offline_actions()?;
        let mut action_cache = ActionCache::new();
        for offline_action in offline_actions {
            match offline_action.action_type {
                OfflineActionType::Read => action_cache.add_article_marked_read(&offline_action.article_id),
                OfflineActionType::Unread => action_cache.add_article_marked_unread(&offline_action.article_id),
                OfflineActionType::Mark => action_cache.add_article_mark(&offline_action.article_id),
                OfflineActionType::Unmark => action_cache.add_article_unmark(&offline_action.article_id),
                OfflineActionType::Tag => {
                    if let Some(tag_id) = offline_action.tag_id {
                        action_cache.add_article_tagged(&offline_action.article_id, &tag_id);
                    }
                }
                OfflineActionType::Untag => {
                    if let Some(tag_id) = offline_action.tag_id {
                        action_cache.add_article_untagged(&offline_action.article_id, &tag_id);
                    }
                }
            }
        }
        // FIX: previously `action_cache` was populated but never executed,
        // silently dropping every action queued while offline (the queue was
        // already cleared above). Replay it before flushing the sync cache.
        action_cache.execute_api_actions(&self.api, client).await?;
        self.sync_cache.lock().await.execute_api_actions(&self.api, client).await?;
    }
    Ok(())
}
/// Whether the local database contains no data yet.
pub fn is_database_empty(&self) -> NewsFlashResult<bool> {
    Ok(self.db.is_empty()?)
}
/// Persist how long articles are kept before being purged.
///
/// `None` disables purging. When a duration is set, articles older than it
/// are dropped immediately and (with the `image-downloader` feature) images
/// no longer referenced by any article are deleted as well.
pub async fn set_keep_articles_duration(&self, keep_articles: Option<chrono::Duration>) -> NewsFlashResult<()> {
    self.config.write().await.set_keep_articles_duration(keep_articles)?;
    if let Some(keep_articles) = keep_articles {
        self.db.drop_old_articles(keep_articles)?;
        #[cfg(feature = "image-downloader")]
        self.delete_orphaned_images().await?;
    }
    Ok(())
}
/// Configured article retention duration; `None` means articles are kept forever.
pub async fn get_keep_articles_duration(&self) -> Option<chrono::Duration> {
    self.config.read().await.get_keep_articles_duration()
}
/// Whether `error` indicates a login/authentication problem
/// (backend failed to load, not logged in, or an API-level error).
pub fn error_login_related(error: &NewsFlashError) -> bool {
    matches!(error, NewsFlashError::LoadBackend | NewsFlashError::NotLoggedIn | NewsFlashError::API(_))
}
/// Ask the backend whether its remote service is currently reachable.
pub async fn is_reachable(&self, client: &Client) -> NewsFlashResult<bool> {
    let api = self.api.read().await;
    let reachable = api.is_reachable(client).await?;
    Ok(reachable)
}
/// Resolve the favicon for `feed_id`, downloading it if necessary.
pub async fn get_icon(&self, feed_id: &FeedID, client: &Client, header: HeaderMap<HeaderValue>) -> NewsFlashResult<FavIcon> {
    let favicon = self.icons.get_icon(feed_id, &self.api, client, header).await?;
    Ok(favicon)
}
/// Load a stored favicon (including image data) for `feed_id` from the database.
pub fn load_icon_from_db(&self, feed_id: &FeedID) -> NewsFlashResult<FatFavIcon> {
    Ok(self.db.read_fatfavicon(feed_id)?)
}
/// Fetch (or load from cache) the thumbnail for an article.
///
/// Returns `Ok(None)` when no thumbnail can be determined (article missing,
/// no thumbnail URL, or a recent failed attempt), `Ok(Some(_))` on success,
/// and `Err(NewsFlashError::Thumbnail)` when the download itself fails.
pub async fn get_article_thumbnail(&self, article_id: &ArticleID, client: &Client) -> NewsFlashResult<Option<Thumbnail>> {
    if let Ok(thumbnail) = self.db.read_thumbnail(article_id) {
        if thumbnail.data.is_some() {
            return Ok(Some(thumbnail));
        }
        // Back off for 4 days after a failed attempt before retrying.
        if thumbnail.last_try + Duration::try_days(4).expect("4 days is a valid chrono duration") > Utc::now() {
            // FIX: corrected typo "attemt" -> "attempt" in the log message.
            tracing::debug!(%article_id, "Tried to download thumbnail recently, will not attempt to download again.");
            return Ok(None);
        }
    }
    let Ok(article) = self.db.read_article(article_id) else {
        tracing::warn!(%article_id, "Couldn't download thumbnail: article not found");
        return Ok(None);
    };
    let Some(thumbnail_url) = article.thumbnail_url.as_deref() else {
        tracing::debug!(%article_id, "Couldn't download thumbnail: article doesn't specify thumbnail url");
        return Ok(None);
    };
    let thumbnail_result = Thumbnail::from_url(thumbnail_url, article_id, client).await;
    match thumbnail_result {
        Ok(thumbnail) => {
            self.db.insert_thumbnail(&thumbnail)?;
            Ok(Some(thumbnail))
        }
        Err(error) => {
            // Record the failed attempt so the 4-day backoff above applies.
            let empty_thumbnail = Thumbnail::empty(article_id);
            self.db.insert_thumbnail(&empty_thumbnail)?;
            tracing::warn!(%thumbnail_url, %error, "downloading thumbnail failed");
            Err(NewsFlashError::Thumbnail)
        }
    }
}
/// Current on-disk size of the database.
pub fn database_size(&self) -> NewsFlashResult<DatabaseSize> {
    Ok(self.db.size()?)
}
/// Log into the backend described by `data` and remember it as the
/// configured backend on success.
pub async fn login(&self, data: LoginData, client: &Client) -> NewsFlashResult<()> {
    let id = data.id();
    self.api.write().await.login(data, client).await?;
    // Persist the backend id only after the login itself succeeded.
    self.config.write().await.set_backend(Some(&id))?;
    Ok(())
}
/// Log out of the current backend: clear the configured backend id,
/// log out of the API and reset the local database.
pub async fn logout(&self, client: &Client) -> NewsFlashResult<()> {
    self.config.write().await.set_backend(None)?;
    self.api.write().await.logout(client).await?;
    self.db.reset()?;
    Ok(())
}
/// Run database maintenance/cleanup.
pub fn clean_db(&self) -> NewsFlashResult<()> {
    self.db.clean()?;
    Ok(())
}
/// Perform the first, full synchronization with the backend.
///
/// Pulls the complete remote state, applies retention and tag-color
/// generation, merges cached local actions, writes everything to the
/// database and records the sync timestamp. Returns the number of new
/// articles per feed.
///
/// Errors with `Offline`/`Syncing`/`NotLoggedIn` if the respective
/// precondition is not met.
pub async fn initial_sync(&self, client: &Client, header: FeedHeaderMap) -> NewsFlashResult<HashMap<FeedID, i64>> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        tracing::warn!("sync is already ongoing");
        return Err(NewsFlashError::Syncing);
    }
    if self.api.read().await.is_logged_in(client).await? {
        // Guard flag: every exit path below must reset it to `false`.
        self.is_sync_ongoing.store(true, Ordering::Release);
        let now = chrono::Utc::now();
        let keep_articles_duration = self.config.read().await.get_keep_articles_duration();
        let result = self.api.read().await.initial_sync(client, header).await;
        if result.is_err() {
            // FIX: this previously stored `true`, leaving `is_sync_ongoing`
            // permanently set after a failed initial sync and blocking all
            // further syncs (compare the error path in `sync()`).
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let sync_result = result?;
        let sync_result = sync_result.remove_old_articles(keep_articles_duration);
        // No pre-existing tags on an initial sync, hence the empty slice.
        let sync_result = sync_result.generate_tag_colors(&[]);
        let sync_result = self.sync_cache.lock().await.process_sync_result(sync_result);
        let result = self.sync_cache.lock().await.execute_api_actions(&self.api, client).await;
        if let Err(cache_execution_error) = result {
            self.is_sync_ongoing.store(false, Ordering::Release);
            return Err(cache_execution_error.into());
        }
        self.sync_cache.lock().await.reset();
        let result = self.db.write_sync_result(sync_result, keep_articles_duration);
        if result.is_err() {
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let new_article_count = result?;
        #[cfg(feature = "image-downloader")]
        self.delete_orphaned_images().await?;
        if let Err(write_config_error) = self.config.write().await.set_last_sync(now) {
            self.is_sync_ongoing.store(false, Ordering::Release);
            return Err(write_config_error.into());
        }
        self.is_sync_ongoing.store(false, Ordering::Release);
        return Ok(new_article_count);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Incremental synchronization with the backend.
///
/// Pulls remote changes, applies retention, generates colors for new tags,
/// merges cached local actions, writes the result to the database and
/// updates the last-sync timestamp. Returns the number of new articles per
/// feed. Errors with `Offline`/`Syncing`/`NotLoggedIn` if the respective
/// precondition is not met.
pub async fn sync(&self, client: &Client, header: FeedHeaderMap) -> NewsFlashResult<HashMap<FeedID, i64>> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        tracing::warn!("sync is already ongoing");
        return Err(NewsFlashError::Syncing);
    }
    if self.api.read().await.is_logged_in(client).await? {
        // Guard flag: every exit path below must reset it to `false`.
        self.is_sync_ongoing.store(true, Ordering::Release);
        let now = chrono::Utc::now();
        let keep_articles_duration = self.config.read().await.get_keep_articles_duration();
        let sync_result = self.api.read().await.sync(client, header).await;
        if sync_result.is_err() {
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let sync_result = sync_result?;
        let sync_result = sync_result.remove_old_articles(keep_articles_duration);
        // Existing tags are passed in — presumably so newly generated colors
        // take the already-assigned ones into account (TODO confirm).
        let tags = self.db.read_tags()?;
        let sync_result = sync_result.generate_tag_colors(&tags);
        let sync_result = self.sync_cache.lock().await.process_sync_result(sync_result);
        let result = self.sync_cache.lock().await.execute_api_actions(&self.api, client).await;
        if let Err(cache_execution_error) = result {
            self.is_sync_ongoing.store(false, Ordering::Release);
            return Err(cache_execution_error.into());
        }
        self.sync_cache.lock().await.reset();
        let result = self.db.write_sync_result(sync_result, keep_articles_duration);
        if result.is_err() {
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let new_article_count = result?;
        #[cfg(feature = "image-downloader")]
        self.delete_orphaned_images().await?;
        if let Err(write_config_error) = self.config.write().await.set_last_sync(now) {
            self.is_sync_ongoing.store(false, Ordering::Release);
            return Err(write_config_error.into());
        }
        self.is_sync_ongoing.store(false, Ordering::Release);
        return Ok(new_article_count);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Fetch new articles for a single feed.
///
/// Flushes the sync cache, writes the update result to the database and
/// returns the number of new articles. Errors with
/// `Offline`/`Syncing`/`NotLoggedIn` if the respective precondition fails.
pub async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, header: HeaderMap<HeaderValue>) -> NewsFlashResult<i64> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        tracing::warn!("sync is already ongoing");
        return Err(NewsFlashError::Syncing);
    }
    if self.api.read().await.is_logged_in(client).await? {
        // Guard flag: every exit path below must reset it to `false`.
        self.is_sync_ongoing.store(true, Ordering::Release);
        let keep_articles_duration = self.config.read().await.get_keep_articles_duration();
        let result = self.api.read().await.fetch_feed(feed_id, client, header).await;
        if result.is_err() {
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let update_result = result?;
        let update_result = update_result.remove_old_articles(keep_articles_duration);
        let result = self.sync_cache.lock().await.execute_api_actions(&self.api, client).await;
        if let Err(cache_execution_error) = result {
            self.is_sync_ongoing.store(false, Ordering::Release);
            return Err(cache_execution_error.into());
        }
        self.sync_cache.lock().await.reset();
        let result = self.db.write_feed_update_result(feed_id, update_result);
        if result.is_err() {
            self.is_sync_ongoing.store(false, Ordering::Release);
        }
        let new_article_count = result?;
        self.is_sync_ongoing.store(false, Ordering::Release);
        Ok(new_article_count)
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Mark the given articles as read/unread.
///
/// Offline: queue the change as `OfflineAction`s and apply it locally.
/// During an ongoing sync: record the change in the sync cache and apply
/// locally. Otherwise: apply locally first, then push to the API; on API
/// failure the local change is rolled back before the error is propagated.
pub async fn set_article_read(&self, articles: &[ArticleID], read: Read, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let action_type = if read == Read::Read {
            OfflineActionType::Read
        } else {
            OfflineActionType::Unread
        };
        let actions = articles
            .iter()
            .map(|article_id| OfflineAction {
                action_type,
                article_id: article_id.clone(),
                tag_id: None,
            })
            .collect::<Vec<_>>();
        self.db.insert_offline_actions(&actions)?;
        self.db.set_article_read(articles, read)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            for article_id in articles {
                match read {
                    Read::Read => self.sync_cache.lock().await.add_article_marked_read(article_id),
                    Read::Unread => self.sync_cache.lock().await.add_article_marked_unread(article_id),
                }
            }
            self.db.set_article_read(articles, read)?;
        } else {
            // Snapshot only the articles whose state actually changes so a
            // failed API call can be rolled back precisely.
            let articles_before = self.db.read_articles(ArticleFilter::read_ids(articles.into(), read.invert()))?;
            self.db.set_article_read(articles, read)?;
            // NOTE(review): the full `articles` list is sent to the API here,
            // while `set_article_marked` sends only the filtered ids — confirm
            // the asymmetry is intended.
            let api_result = self.api.read().await.set_article_read(articles, read, client).await;
            if api_result.is_err() {
                let ids_before = articles_before.iter().map(|a| a.article_id.clone()).collect::<Vec<_>>();
                self.db.set_article_read(&ids_before, read.invert())?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Star/unstar the given articles.
///
/// Offline: queue the change as `OfflineAction`s and apply it locally.
/// During an ongoing sync: record the change in the sync cache and apply
/// locally. Otherwise: apply locally first, then push to the API; on API
/// failure the local change is rolled back before the error is propagated.
pub async fn set_article_marked(&self, articles: &[ArticleID], marked: Marked, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let action_type = if marked == Marked::Marked {
            OfflineActionType::Mark
        } else {
            OfflineActionType::Unmark
        };
        let actions = articles
            .iter()
            .map(|article_id| OfflineAction {
                action_type,
                article_id: article_id.clone(),
                tag_id: None,
            })
            .collect::<Vec<_>>();
        self.db.insert_offline_actions(&actions)?;
        self.db.set_article_marked(articles, marked)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            for article_id in articles {
                match marked {
                    Marked::Marked => self.sync_cache.lock().await.add_article_mark(article_id),
                    Marked::Unmarked => self.sync_cache.lock().await.add_article_unmark(article_id),
                }
            }
            self.db.set_article_marked(articles, marked)?;
        } else {
            // Snapshot only the articles whose state actually changes so a
            // failed API call can be rolled back precisely.
            let articles_before = self.db.read_articles(ArticleFilter::marked_ids(articles.into(), marked.invert()))?;
            self.db.set_article_marked(articles, marked)?;
            // Only the ids that actually changed are sent to the API.
            let article_ids_to_update = articles_before.iter().map(|a| &a.article_id).cloned().collect::<Vec<_>>();
            let api_result = self.api.read().await.set_article_marked(&article_ids_to_update, marked, client).await;
            if api_result.is_err() {
                let ids_before = articles_before.iter().map(|a| a.article_id.clone()).collect::<Vec<_>>();
                self.db.set_article_marked(&ids_before, marked.invert())?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Mark every article of the given feeds as read.
///
/// Offline: queue a `Read` action per unread article and apply locally.
/// During an ongoing sync: record in the sync cache and apply locally.
/// Otherwise: apply locally, push to the API, and roll the affected
/// articles back to unread if the API call fails.
pub async fn set_feed_read(&self, feeds: &[FeedID], client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let mut actions = Vec::new();
        for feed in feeds {
            let filter = ArticleFilter::feed_unread(feed);
            let articles = self.db.read_articles(filter)?;
            actions.append(
                &mut articles
                    .into_iter()
                    .map(|article| OfflineAction {
                        action_type: OfflineActionType::Read,
                        article_id: article.article_id,
                        tag_id: None,
                    })
                    .collect(),
            );
        }
        self.db.insert_offline_actions(&actions)?;
        self.db.set_feed_read(feeds)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            for feed_id in feeds {
                self.sync_cache.lock().await.add_feed_mark_read(feed_id);
            }
            self.db.set_feed_read(feeds)?;
        } else {
            // Snapshot unread articles first so a failed API call can be rolled back.
            let mut unread_articles_before: Vec<ArticleID> = Vec::new();
            for feed_id in feeds {
                let articles_before = self.db.read_articles(ArticleFilter::feed_unread(feed_id))?;
                unread_articles_before.append(&mut articles_before.into_iter().map(|a| a.article_id).collect());
            }
            self.db.set_feed_read(feeds)?;
            let api_result = self.api.read().await.set_feed_read(feeds, &unread_articles_before, client).await;
            if api_result.is_err() {
                self.db.set_article_read(&unread_articles_before, Read::Unread)?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Mark every article of the given categories as read.
///
/// Same three-way strategy as `set_feed_read`: offline queueing, sync-cache
/// recording during a sync, or local-apply + API push with rollback.
pub async fn set_category_read(&self, categories: &[CategoryID], client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let mut actions = Vec::new();
        for category in categories {
            let filter = ArticleFilter::category_unread(category);
            let articles = self.db.read_articles(filter)?;
            actions.append(
                &mut articles
                    .into_iter()
                    .map(|article| OfflineAction {
                        action_type: OfflineActionType::Read,
                        article_id: article.article_id,
                        tag_id: None,
                    })
                    .collect(),
            );
        }
        self.db.insert_offline_actions(&actions)?;
        self.db.set_category_read(categories)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            for category_id in categories {
                self.sync_cache.lock().await.add_category_mark_read(category_id);
            }
            self.db.set_category_read(categories)?;
        } else {
            // Snapshot unread articles first so a failed API call can be rolled back.
            let mut unread_articles_before: Vec<ArticleID> = Vec::new();
            for category_id in categories {
                let articles_before = self.db.read_articles(ArticleFilter::category_unread(category_id))?;
                unread_articles_before.append(&mut articles_before.into_iter().map(|a| a.article_id).collect());
            }
            self.db.set_category_read(categories)?;
            let api_result = self.api.read().await.set_category_read(categories, &unread_articles_before, client).await;
            if api_result.is_err() {
                self.db.set_article_read(&unread_articles_before, Read::Unread)?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Mark every article carrying the given tags as read.
///
/// Same three-way strategy as `set_feed_read`: offline queueing, sync-cache
/// recording during a sync, or local-apply + API push with rollback.
pub async fn set_tag_read(&self, tags: &[TagID], client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let mut actions = Vec::new();
        for tag in tags {
            let filter = ArticleFilter::tag_unread(tag);
            let articles = self.db.read_articles(filter)?;
            actions.append(
                &mut articles
                    .into_iter()
                    .map(|article| OfflineAction {
                        action_type: OfflineActionType::Read,
                        article_id: article.article_id,
                        tag_id: None,
                    })
                    .collect(),
            );
        }
        self.db.insert_offline_actions(&actions)?;
        self.db.set_tag_read(tags)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            for tag_id in tags {
                self.sync_cache.lock().await.add_tag_mark_read(tag_id);
            }
            self.db.set_tag_read(tags)?;
        } else {
            // Snapshot unread articles first so a failed API call can be rolled back.
            let mut unread_articles_before: Vec<ArticleID> = Vec::new();
            for tag_id in tags {
                let articles_before = self.db.read_articles(ArticleFilter::tag_unread(tag_id))?;
                unread_articles_before.append(&mut articles_before.into_iter().map(|a| a.article_id).collect());
            }
            self.db.set_tag_read(tags)?;
            let api_result = self.api.read().await.set_tag_read(tags, &unread_articles_before, client).await;
            if api_result.is_err() {
                self.db.set_article_read(&unread_articles_before, Read::Unread)?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Mark all unread articles as read.
///
/// Offline: queue a `Read` action per unread article and apply locally.
/// During an ongoing sync: record a mark-read per category in the sync
/// cache and apply locally. Otherwise: apply locally, push to the API, and
/// roll back to unread if the API call fails.
pub async fn set_all_read(&self, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        let filter = ArticleFilter::all_unread();
        let articles = self.db.read_articles(filter)?;
        let actions = articles
            .into_iter()
            .map(|article| OfflineAction {
                action_type: OfflineActionType::Read,
                article_id: article.article_id,
                tag_id: None,
            })
            .collect::<Vec<_>>();
        self.db.insert_offline_actions(&actions)?;
        self.db.set_all_read()?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            // The sync cache has no "all read" action, so record one
            // mark-read entry per known category instead.
            let categories = self.db.read_categories()?;
            for category in categories {
                self.sync_cache.lock().await.add_category_mark_read(&category.category_id);
            }
            self.db.set_all_read()?;
        } else {
            // Snapshot unread articles first so a failed API call can be rolled back.
            let unread_articles_before = self
                .db
                .read_articles(ArticleFilter::all_unread())?
                .into_iter()
                .map(|a| a.article_id)
                .collect::<Vec<_>>();
            self.db.set_all_read()?;
            let api_result = self.api.read().await.set_all_read(&unread_articles_before, client).await;
            if api_result.is_err() {
                self.db.set_article_read(&unread_articles_before, Read::Unread)?;
            }
            api_result?;
        }
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Subscribe to a new feed.
///
/// Returns the created feed and its mapping, plus the category (and its
/// mapping) if the backend created a new one as a side effect. Errors with
/// `Offline`/`Syncing`/`NotLoggedIn` if the respective precondition fails.
pub async fn add_feed(
    &self,
    url: &Url,
    title: Option<String>,
    category_id: Option<CategoryID>,
    client: &Client,
) -> NewsFlashResult<(Feed, FeedMapping, Option<Category>, Option<CategoryMapping>)> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        let (feed, category) = self.api.read().await.add_feed(url, title, category_id.clone(), client).await?;
        self.db.insert_feed(&feed)?;
        // If the API created a category we don't know yet, persist it below the toplevel.
        let category_mapping = if let Some(category) = &category {
            if self.db.read_category(&category.category_id).is_ok() {
                None
            } else {
                let category_mapping = CategoryMapping {
                    parent_id: NEWSFLASH_TOPLEVEL.clone(),
                    category_id: category.category_id.clone(),
                    sort_index: Some(i32::MAX),
                };
                self.db.insert_category(category)?;
                self.db.insert_category_mapping(&category_mapping)?;
                Some(category_mapping)
            }
        } else {
            None
        };
        // Prefer the explicitly requested category, fall back to the one the API created.
        let category_id = category_id.or_else(|| category.as_ref().map(|c| c.category_id.clone()));
        let feed_mapping = FeedMapping {
            feed_id: feed.feed_id.clone(),
            // `unwrap_or_else` avoids cloning the toplevel id when a category is present.
            category_id: category_id.unwrap_or_else(|| NEWSFLASH_TOPLEVEL.clone()),
            sort_index: Some(i32::MAX),
        };
        self.db.insert_feed_mapping(&feed_mapping)?;
        return Ok((feed, feed_mapping, category, category_mapping));
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Unsubscribe from a feed: remove it remotely first, then locally, and
/// (with the `image-downloader` feature) delete images that are no longer
/// referenced by any article.
pub async fn remove_feed(&self, feed_id: &FeedID, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        self.api.read().await.remove_feed(feed_id, client).await?;
        self.db.drop_feed(feed_id)?;
        #[cfg(feature = "image-downloader")]
        self.delete_orphaned_images().await?;
        return Ok(());
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Move a feed between categories (or reorder it within one).
///
/// A move within the same category only changes the local mapping (sort
/// order) and skips the API entirely; a move across categories is pushed
/// to the API first and only persisted locally on success.
pub async fn move_feed(&self, from: &FeedMapping, to: &FeedMapping, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if from.category_id == to.category_id {
        self.db.drop_feed_mapping(from)?;
        self.db.insert_feed_mapping(to)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        self.api
            .read()
            .await
            .move_feed(&from.feed_id, &from.category_id, &to.category_id, client)
            .await?;
        self.db.drop_feed_mapping(from)?;
        self.db.insert_feed_mapping(to)?;
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Rename a feed.
///
/// Some backends change the feed id on rename; in that case the feed, its
/// mappings and all of its articles are re-keyed to the new id in the
/// local database. Returns the updated feed.
pub async fn rename_feed(&self, feed_id: &FeedID, new_title: &str, client: &Client) -> NewsFlashResult<Feed> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        let new_id = self.api.read().await.rename_feed(feed_id, new_title, client).await?;
        let mut modified_feed = self.db.read_feed(feed_id)?;
        new_title.clone_into(&mut modified_feed.label);
        modified_feed.feed_id = new_id.clone();
        self.db.insert_feed(&modified_feed)?;
        // If the backend changed the id, migrate mappings and articles to it.
        if &new_id != feed_id {
            self.db.drop_feed(feed_id)?;
            let mappings = self.db.read_feed_mappings(Some(feed_id), None)?;
            let modified_mappings: Vec<FeedMapping> = mappings
                .into_iter()
                .map(|mut mapping| {
                    mapping.feed_id = new_id.clone();
                    mapping
                })
                .collect();
            self.db.drop_mapping_of_feed(feed_id)?;
            self.db.insert_feed_mappings(&modified_mappings)?;
            let articles = self.db.read_articles(ArticleFilter {
                feeds: Some([feed_id.clone()].into()),
                ..ArticleFilter::default()
            })?;
            let mut modified_ids: Vec<ArticleID> = Vec::new();
            let modified_articles: Vec<Article> = articles
                .into_iter()
                .map(|mut article| {
                    modified_ids.push(article.article_id.clone());
                    article.feed_id = new_id.clone();
                    article
                })
                .collect();
            self.db.drop_articles(&modified_ids)?;
            self.db.write_articles(&modified_articles)?;
        }
        return Ok(modified_feed);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Change the source URL of a feed, remotely first and locally on success.
pub async fn edit_feed_url(&self, feed_id: &FeedID, new_url: &str, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if !self.api.read().await.is_logged_in(client).await? {
        return Err(NewsFlashError::NotLoggedIn);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        return Err(NewsFlashError::Syncing);
    }
    // Validate the URL before touching the API or the database.
    let parsed_url = Url::parse(new_url)?;
    self.api.read().await.edit_feed_url(feed_id, new_url, client).await?;
    let mut modified_feed = self.db.read_feed(feed_id)?;
    modified_feed.feed_url = Some(parsed_url);
    self.db.insert_feed(&modified_feed)?;
    Ok(())
}
/// Re-sort feeds and categories alphabetically in the local database.
/// (Local-only; nothing is pushed to the API.)
pub async fn sort_alphabetically(&self) -> NewsFlashResult<()> {
    self.db.sort_alphabetically()?;
    Ok(())
}
/// Create a new category, optionally below `parent` (toplevel otherwise).
///
/// The category is created remotely first and only stored locally on
/// success. `sort_index: Some(i32::MAX)` places it at the end of its level.
pub async fn add_category(&self, title: &str, parent: Option<&CategoryID>, client: &Client) -> NewsFlashResult<(Category, CategoryMapping)> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        let category_id = self.api.read().await.add_category(title, parent, client).await?;
        let category = Category {
            category_id: category_id.clone(),
            label: title.to_owned(),
        };
        let category_mapping = CategoryMapping {
            parent_id: match parent {
                Some(parent) => parent.clone(),
                None => NEWSFLASH_TOPLEVEL.clone(),
            },
            category_id,
            sort_index: Some(i32::MAX),
        };
        self.db.insert_category(&category)?;
        self.db.insert_category_mapping(&category_mapping)?;
        return Ok((category, category_mapping));
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Remove a category remotely, then locally.
///
/// `remove_children` decides whether contained feeds/categories are
/// deleted recursively or re-parented one level up. With the
/// `image-downloader` feature, orphaned images are cleaned up afterwards.
pub async fn remove_category(&self, category_id: &CategoryID, remove_children: bool, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        // Remote removal first; local cleanup only runs if it succeeded.
        self.api.read().await.remove_category(category_id, remove_children, client).await?;
    } else {
        return Err(NewsFlashError::NotLoggedIn);
    }
    if remove_children {
        self.remove_category_from_db_recurse(category_id)?;
    } else {
        self.remove_category_from_db_move_children_up(category_id)?;
    }
    #[cfg(feature = "image-downloader")]
    self.delete_orphaned_images().await?;
    Ok(())
}
/// Delete `category_id` from the local database and re-parent its direct
/// children (feeds and categories) to the removed category's own parent,
/// falling back to the toplevel if no parent mapping exists.
fn remove_category_from_db_move_children_up(&self, category_id: &CategoryID) -> NewsFlashResult<()> {
    let parent_id = self
        .db
        .read_category_mappings(None, Some(category_id))?
        .first()
        .map(|m| m.parent_id.clone())
        .unwrap_or_else(|| NEWSFLASH_TOPLEVEL.clone())
    tracing::trace!(%parent_id);
    let feed_mappings = self.db.read_feed_mappings(None, Some(category_id))?;
    tracing::trace!(%category_id, ?feed_mappings, "feeds of category");
    for mut mapping in feed_mappings {
        self.db.drop_feed_mapping(&mapping)?;
        mapping.category_id = parent_id.clone();
        self.db.insert_feed_mapping(&mapping)?;
    }
    let category_mappings = self.db.read_category_mappings(Some(category_id), None)?;
    tracing::trace!(%category_id, ?category_mappings, "child categories of category");
    for mut mapping in category_mappings {
        self.db.drop_category_mapping(&mapping)?;
        mapping.parent_id = parent_id.clone();
        self.db.insert_category_mapping(&mapping)?;
    }
    self.db.drop_category(category_id)?;
    Ok(())
}
/// Delete `category_id` and everything below it from the local database:
/// drops all feeds mapped into the category, the category itself, then
/// recurses into each child category.
fn remove_category_from_db_recurse(&self, category_id: &CategoryID) -> NewsFlashResult<()> {
    let mappings = self.db.read_feed_mappings(None, Some(category_id))?;
    tracing::trace!(%category_id, ?mappings, "drop feeds of category");
    for mapping in mappings {
        self.db.drop_feed(&mapping.feed_id)?;
    }
    self.db.drop_category(category_id)?;
    let mappings = self.db.read_category_mappings(Some(category_id), None)?;
    tracing::trace!(%category_id, ?mappings, "drop child categories of category");
    // Resolve the child mappings to full Category entries before recursing.
    let categories = self.db.read_categories()?;
    let child_categories: Vec<Category> = categories
        .into_iter()
        .filter(|category| mappings.iter().any(|mapping| mapping.category_id == category.category_id))
        .collect();
    for child_category in child_categories {
        self.remove_category_from_db_recurse(&child_category.category_id)?;
    }
    Ok(())
}
/// Rename a category.
///
/// Some backends change the category id on rename; in that case the
/// category, its parent mapping and all feed mappings pointing into it are
/// re-keyed to the new id. Returns the updated category.
pub async fn rename_category(&self, category_id: &CategoryID, new_title: &str, client: &Client) -> NewsFlashResult<Category> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        // Snapshot the mappings before the rename so they can be re-keyed below.
        let mut modified_category_mappings = self.db.read_category_mappings(None, Some(category_id))?;
        let mut modified_feed_mappings = self.db.read_feed_mappings(None, Some(category_id))?;
        let new_id = self.api.read().await.rename_category(category_id, new_title, client).await?;
        let mut modified_category = Category {
            category_id: category_id.clone(),
            label: new_title.to_owned(),
        };
        if &new_id != category_id {
            self.db.drop_category(category_id)?;
            self.db.drop_feed_mappings_of_category(category_id)?;
            modified_category.category_id = new_id.clone();
            modified_feed_mappings = modified_feed_mappings
                .into_iter()
                .map(|mut mapping| {
                    mapping.category_id = new_id.clone();
                    mapping
                })
                .collect();
            modified_category_mappings = modified_category_mappings
                .into_iter()
                .map(|mut m| {
                    m.category_id = new_id.clone();
                    m
                })
                .collect();
        }
        self.db.insert_category(&modified_category)?;
        self.db.insert_category_mappings(&modified_category_mappings)?;
        self.db.insert_feed_mappings(&modified_feed_mappings)?;
        return Ok(modified_category);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Move a category to a new parent.
///
/// Moving to the toplevel only updates the local mapping and skips the
/// API; any other move is pushed to the API first and persisted locally on
/// success.
pub async fn move_category(&self, mapping: &CategoryMapping, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if mapping.parent_id == *NEWSFLASH_TOPLEVEL {
        self.db.drop_mapping_of_category(&mapping.category_id)?;
        self.db.insert_category_mapping(mapping)?;
        Ok(())
    } else if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        self.api
            .read()
            .await
            .move_category(&mapping.category_id, &mapping.parent_id, client)
            .await?;
        self.db.drop_mapping_of_category(&mapping.category_id)?;
        self.db.insert_category_mapping(mapping)?;
        Ok(())
    } else {
        Err(NewsFlashError::NotLoggedIn)
    }
}
/// Create a new tag remotely and store it locally on success.
///
/// `color` is stored locally only (the API call receives just the title).
/// `sort_index: Some(i32::MAX)` places the tag at the end of the list.
pub async fn add_tag(&self, title: &str, color: Option<String>, client: &Client) -> NewsFlashResult<Tag> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        let tag_id = self.api.read().await.add_tag(title, client).await?;
        let tag = Tag {
            tag_id,
            label: title.to_owned(),
            color,
            sort_index: Some(i32::MAX),
        };
        self.db.insert_tag(&tag)?;
        return Ok(tag);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Delete a tag on the backend, then remove it from the local database.
///
/// Fails when offline, not logged in, or while a sync is running.
pub async fn remove_tag(&self, tag_id: &TagID, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if !self.api.read().await.is_logged_in(client).await? {
        return Err(NewsFlashError::NotLoggedIn);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        return Err(NewsFlashError::Syncing);
    }

    self.api.read().await.remove_tag(tag_id, client).await?;
    self.db.drop_tag(tag_id)?;
    Ok(())
}
/// Rename a tag (and optionally change its color) on the backend and in
/// the local database.
///
/// Some backends assign a new [`TagID`] on rename; in that case all
/// taggings referencing the old id are rewritten to point at the new one.
/// Fails when offline, not logged in, or while a sync is running.
pub async fn edit_tag(&self, tag_id: &TagID, new_title: &str, new_color: &Option<String>, client: &Client) -> NewsFlashResult<Tag> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if self.api.read().await.is_logged_in(client).await? {
        if self.is_sync_ongoing.load(Ordering::Acquire) {
            return Err(NewsFlashError::Syncing);
        }
        let new_id = self.api.read().await.rename_tag(tag_id, new_title, client).await?;
        let taggings = self.db.read_taggings(None, Some(tag_id))?;

        // Look up the old sort index BEFORE dropping the tag: reading it
        // after `drop_tag` always yields `None` and loses the sort order.
        let sort_index = self
            .db
            .read_tags()?
            .into_iter()
            .find(|tag| &tag.tag_id == tag_id)
            .and_then(|tag| tag.sort_index);

        self.db.drop_tag(tag_id)?;

        let mutated_tag = Tag {
            tag_id: new_id.clone(),
            label: new_title.to_owned(),
            color: new_color.clone(),
            sort_index,
        };
        self.db.insert_tag(&mutated_tag)?;

        // Rewrite the taggings to the new id if the backend changed it,
        // then insert them exactly once (previously they were inserted
        // twice: once with the stale id and once remapped).
        let taggings = if &new_id != tag_id {
            taggings
                .into_iter()
                .map(|mut tagging| {
                    tagging.tag_id = new_id.clone();
                    tagging
                })
                .collect()
        } else {
            taggings
        };
        self.db.insert_taggings(&taggings)?;
        return Ok(mutated_tag);
    }
    Err(NewsFlashError::NotLoggedIn)
}
/// Attach a tag to an article.
///
/// Offline: queues an [`OfflineAction`] and updates only the local
/// database. During a sync: records the change in the sync cache instead
/// of contacting the backend. Otherwise the backend is updated first.
pub async fn tag_article(&self, article_id: &ArticleID, tag_id: &TagID, client: &Client) -> NewsFlashResult<()> {
    let tagging = Tagging {
        article_id: article_id.clone(),
        tag_id: tag_id.clone(),
    };

    if self.is_offline.load(Ordering::Acquire) {
        self.db.insert_offline_actions(&[OfflineAction {
            action_type: OfflineActionType::Tag,
            article_id: article_id.clone(),
            tag_id: Some(tag_id.clone()),
        }])?;
        self.db.insert_tagging(&tagging)?;
        return Ok(());
    }

    if !self.api.read().await.is_logged_in(client).await? {
        return Err(NewsFlashError::NotLoggedIn);
    }

    if self.is_sync_ongoing.load(Ordering::Acquire) {
        self.sync_cache.lock().await.add_article_tagged(article_id, tag_id);
    } else {
        self.api.read().await.tag_article(article_id, tag_id, client).await?;
        self.db.insert_tagging(&tagging)?;
    }
    Ok(())
}
/// Remove a tag from an article.
///
/// Offline: queues an [`OfflineAction`] and updates only the local
/// database. During a sync: records the change in the sync cache instead
/// of contacting the backend. Otherwise the backend is updated first.
pub async fn untag_article(&self, article_id: &ArticleID, tag_id: &TagID, client: &Client) -> NewsFlashResult<()> {
    let tagging = Tagging {
        article_id: article_id.clone(),
        tag_id: tag_id.clone(),
    };

    if self.is_offline.load(Ordering::Acquire) {
        self.db.insert_offline_actions(&[OfflineAction {
            action_type: OfflineActionType::Untag,
            article_id: article_id.clone(),
            tag_id: Some(tag_id.clone()),
        }])?;
        self.db.drop_tagging(&tagging)?;
        return Ok(());
    }

    if !self.api.read().await.is_logged_in(client).await? {
        return Err(NewsFlashError::NotLoggedIn);
    }

    if self.is_sync_ongoing.load(Ordering::Acquire) {
        self.sync_cache.lock().await.add_article_untagged(article_id, tag_id);
    } else {
        self.api.read().await.untag_article(article_id, tag_id, client).await?;
        self.db.drop_tagging(&tagging)?;
    }
    Ok(())
}
/// Import an OPML document: upload it to the backend, then mirror the
/// parsed categories, feeds and mappings into the local database.
///
/// Fails when offline, not logged in, or while a sync is running.
pub async fn import_opml(&self, opml: &str, parse_all_feeds: bool, client: &Client) -> NewsFlashResult<()> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    if !self.api.read().await.is_logged_in(client).await? {
        return Err(NewsFlashError::NotLoggedIn);
    }
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        return Err(NewsFlashError::Syncing);
    }

    self.api.read().await.import_opml(opml, client).await?;
    let parsed = opml::parse_opml(opml, parse_all_feeds, self.download_semaphore.clone(), client).await?;
    self.db.insert_categories(&parsed.categories)?;
    self.db.insert_feeds(&parsed.feeds)?;
    self.db.insert_feed_mappings(&parsed.feed_mappings)?;
    self.db.insert_category_mappings(&parsed.category_mappings)?;
    Ok(())
}
/// Export the local subscription tree as an OPML document.
///
/// Purely local operation, but refused while offline.
pub async fn export_opml(&self) -> NewsFlashResult<String> {
    if self.is_offline.load(Ordering::Acquire) {
        return Err(NewsFlashError::Offline);
    }
    Ok(opml::generate_opml(
        &self.db.read_categories()?,
        &self.db.read_category_mappings(None, None)?,
        &self.db.read_feeds()?,
        &self.db.read_feed_mappings(None, None)?,
    )?)
}
/// All categories together with their parent mappings.
pub fn get_categories(&self) -> NewsFlashResult<(Vec<Category>, Vec<CategoryMapping>)> {
    Ok((self.db.read_categories()?, self.db.read_category_mappings(None, None)?))
}
/// All feeds together with their category mappings.
pub fn get_feeds(&self) -> NewsFlashResult<(Vec<Feed>, Vec<FeedMapping>)> {
    Ok((self.db.read_feeds()?, self.db.read_feed_mappings(None, None)?))
}
/// Map each feed id to its number of unread articles.
///
/// `exclude_future` skips articles dated in the future.
pub fn unread_count_feed_map(&self, exclude_future: bool) -> NewsFlashResult<HashMap<FeedID, i64>> {
    Ok(self.db.unread_count_feed_map(exclude_future)?)
}
/// Map each feed id to its number of starred (marked) articles.
pub fn marked_count_feed_map(&self) -> NewsFlashResult<HashMap<FeedID, i64>> {
    // Collect straight into the map instead of draining a mutable Vec
    // into a pre-built HashMap.
    let map = self
        .db
        .marked_count_feed_map()?
        .into_iter()
        .map(|count| (count.feed_id, count.count))
        .collect();
    Ok(map)
}
/// Number of unread articles published today.
///
/// `exclude_future` skips articles dated in the future.
pub fn today_unread_count(&self, exclude_future: bool) -> NewsFlashResult<i64> {
    Ok(self.db.today_unread_count(exclude_future)?)
}
/// Number of starred (marked) articles published today.
pub fn today_marked_count(&self) -> NewsFlashResult<i64> {
    Ok(self.db.today_marked_count()?)
}
/// All tags together with all article/tag relations.
pub fn get_tags(&self) -> NewsFlashResult<(Vec<Tag>, Vec<Tagging>)> {
    Ok((self.db.read_tags()?, self.db.read_taggings(None, None)?))
}
/// All tags attached to the given article.
pub fn get_tags_of_article(&self, article_id: &ArticleID) -> NewsFlashResult<Vec<Tag>> {
    Ok(self.db.read_tags_for_article(article_id)?)
}
/// Total number of unread articles across all feeds.
pub fn unread_count_all(&self) -> NewsFlashResult<i64> {
    Ok(self.db.unread_count_all()?)
}
/// Articles matching `filter` (without their full content).
pub fn get_articles(&self, filter: ArticleFilter) -> NewsFlashResult<Vec<Article>> {
    Ok(self.db.read_articles(filter)?)
}
/// A single article by id (without its full content).
pub fn get_article(&self, id: &ArticleID) -> NewsFlashResult<Article> {
    Ok(self.db.read_article(id)?)
}
/// Articles matching `filter`, including their full content.
pub fn get_fat_articles(&self, filter: ArticleFilter) -> NewsFlashResult<Vec<FatArticle>> {
    Ok(self.db.read_fat_articles(filter)?)
}
/// A single article by id, including its full content.
pub fn get_fat_article(&self, id: &ArticleID) -> NewsFlashResult<FatArticle> {
    Ok(self.db.read_fat_article(id)?)
}
/// All enclosures (attachments) of the given article.
pub fn get_enclosures(&self, id: &ArticleID) -> NewsFlashResult<Vec<Enclosure>> {
    Ok(self.db.read_enclosures(id)?)
}
/// Persist changes to a single enclosure.
pub fn update_enclosure(&self, enclosure: &Enclosure) -> NewsFlashResult<()> {
    // Reuse the bulk write path via a one-element slice without cloning.
    let single = std::slice::from_ref(enclosure);
    self.db.write_enclosures(single)?;
    Ok(())
}
/// Scrape the full content of every article of the given `feeds` that was
/// synced after `synced_after` and does not yet have scraped content, and
/// store the results in the database.
///
/// Downloads run as parallel tokio tasks, throttled by the shared
/// download semaphore inside `article_scrap_content_impl`.
/// Refused while a sync is ongoing.
#[cfg(feature = "article-scraper")]
pub async fn scrap_content_feeds(&self, synced_after: DateTime<Utc>, feeds: &[FeedID], client: &Client) -> NewsFlashResult<()> {
    if self.is_sync_ongoing.load(Ordering::Acquire) {
        return Err(NewsFlashError::Syncing);
    }
    // Lazily create (or reuse) the shared scraper instance.
    let article_scraper = self.init_scraper().await;
    // Only articles that still lack scraped content are considered.
    let articles = self
        .db
        .read_fat_articles(ArticleFilter {
            feeds: Some(feeds.to_owned()),
            synced_after: Some(synced_after),
            ..Default::default()
        })?
        .into_iter()
        .filter(|article| article.scraped_content.is_none())
        .collect::<Vec<_>>();
    // One spawned task per article; each clones its own handles.
    let mut task_handles = Vec::new();
    for article in articles.into_iter() {
        let semaphore = self.download_semaphore.clone();
        let client = client.clone();
        let article_scraper = article_scraper.clone();
        task_handles.push(tokio::spawn(async move {
            Self::article_scrap_content_impl(article, article_scraper, semaphore, &client).await
        }));
    }
    // The double `flatten` discards both task join errors and per-article
    // scrape errors: only successfully scraped articles are written back.
    let result_vec = futures::future::join_all(task_handles)
        .await
        .into_iter()
        .flatten()
        .flatten()
        .collect::<Vec<_>>();
    self.db.update_articles_grabbed_content(&result_vec)?;
    Ok(())
}
/// Scrape the full content of a single article and write the result back
/// to the database.
#[cfg(feature = "article-scraper")]
pub async fn scrap_content_article(&self, id: &ArticleID, client: &Client) -> NewsFlashResult<FatArticle> {
    let scraper = self.init_scraper().await;
    let fat_article = self.db.read_fat_article(id)?;
    let semaphore = self.download_semaphore.clone();
    let scraped = Self::article_scrap_content_impl(fat_article, scraper, semaphore, client).await?;
    self.db.update_article_grabbed_content(&scraped)?;
    Ok(scraped)
}
/// Scrape the content of a single article using the shared scraper.
///
/// Acquires a download-semaphore permit for the duration of the network
/// request. On success, fills in the article's scraped HTML and a
/// plain-text rendition, and — only where the article lacked them —
/// title, author and thumbnail URL.
///
/// # Errors
/// Returns [`NewsFlashError::GrabContent`] when the article has no source
/// URL or the scraper fails.
#[cfg(feature = "article-scraper")]
async fn article_scrap_content_impl(
    mut article: FatArticle,
    article_scraper: Arc<ArticleScraper>,
    download_semaphore: Arc<Semaphore>,
    client: &Client,
) -> NewsFlashResult<FatArticle> {
    let Some(url) = &article.url else {
        tracing::error!("Article doesn't contain source URL");
        return Err(NewsFlashError::GrabContent);
    };
    // Throttle concurrent downloads across all tasks.
    let permit = download_semaphore.acquire().await?;
    // `ArticleScraper::parse` takes extra arguments when the image
    // downloader feature is compiled in.
    #[cfg(feature = "image-downloader")]
    let result = article_scraper.parse(url, client, false, None);
    #[cfg(not(feature = "image-downloader"))]
    let result = article_scraper.parse(url, client);
    let processed_article = result.await.map_err(|error| {
        tracing::error!(%url, %error, "Internal scraper failed");
        NewsFlashError::GrabContent
    })?;
    // Release the permit as soon as the download is done.
    drop(permit);
    tracing::info!("Internal scraper: successfully scraped: '{url}'");
    if let Some(html) = processed_article.html {
        // Keep a plain-text rendition alongside the scraped HTML.
        article.plain_text = Some(util::html2text::html2text(&html));
        article.scraped_content = Some(html);
    }
    // Scraped metadata only fills gaps; existing values win.
    if let Some(title) = processed_article.title
        && article.title.is_none()
    {
        article.title = Some(title);
    }
    if let Some(author) = processed_article.author
        && article.author.is_none()
    {
        article.author = Some(author);
    }
    if let Some(thumbnail_url) = processed_article.thumbnail_url
        && article.thumbnail_url.is_none()
    {
        article.thumbnail_url = Some(thumbnail_url);
    }
    Ok(article)
}
/// Lazily initialize and return the shared [`ArticleScraper`] instance.
///
/// Uses a double-checked read/write-lock pattern: the previous version
/// checked under the read lock and initialized under a separately
/// acquired write lock, so two concurrent callers could both pass the
/// `is_none` check and build the (expensive) scraper twice. Holding the
/// write lock across the re-check and construction closes that window,
/// and returning the freshly built `Arc` directly removes the
/// `unreachable!()` re-read.
#[cfg(feature = "article-scraper")]
async fn init_scraper(&self) -> Arc<ArticleScraper> {
    // Fast path: already initialized.
    if let Some(scraper) = self.scraper.read().await.as_ref() {
        return scraper.clone();
    }

    let mut guard = self.scraper.write().await;
    // Re-check: another task may have initialized while we waited.
    if let Some(scraper) = guard.as_ref() {
        return scraper.clone();
    }

    tracing::info!("Initialize ArticleScraper");
    let scraper_data_dir = self.data_dir.join(SCRAPER_DATA_DIR);
    // Only hand the ftr-site-config dir to the scraper if we could create it.
    let scraper_data_dir = std::fs::DirBuilder::new()
        .recursive(true)
        .create(&scraper_data_dir)
        .is_ok()
        .then_some(scraper_data_dir);
    let scraper = Arc::new(ArticleScraper::new(scraper_data_dir.as_deref()).await);
    guard.replace(scraper.clone());
    scraper
}
/// Fetch an image referenced by an article, serving from the on-disk
/// cache when possible and downloading (then caching) it otherwise.
///
/// Also back-fills the measured image dimensions into a matching
/// enclosure row when one exists. `progress` receives download progress
/// updates when provided.
#[cfg(feature = "image-downloader")]
pub async fn get_image(
    &self,
    article_id: &ArticleID,
    url: &str,
    client: &Client,
    progress: Option<Sender<DownloadProgress>>,
) -> NewsFlashResult<Image> {
    let image_dir = self.data_dir.join(IMAGE_DATA_DIR);
    std::fs::DirBuilder::new().recursive(true).create(&image_dir)?;
    let parsed_url = Url::parse(url)?;
    // Cache hit: metadata in the DB and the file still readable on disk.
    if let Ok(image) = self.db.read_image(&parsed_url) {
        if let Ok(mut file) = tokio::fs::File::open(&image.file_path).await {
            let mut contents = vec![];
            file.read_to_end(&mut contents).await?;
            return Ok(Image::from_metadata(image, contents));
        } else {
            // Stale DB entry; fall through and re-download.
            tracing::warn!(%image.file_path, "Failed to open file");
        }
    }
    // Throttle the download via the shared semaphore.
    let permit = self.download_semaphore.acquire().await?;
    let res = article_scraper::images::ImageDownloader::single_from_url(url, client, progress).await?;
    drop(permit);
    // Derive a filesystem-safe cache file name from article id + URL.
    let file_name = sanitize_filename::sanitize(format!("{article_id}_{url}"));
    let path = image_dir.join(file_name);
    tracing::debug!(?path, "writing image");
    tokio::fs::write(&path, &res)
        .await
        .inspect_err(|error| tracing::error!(%error, "Failed to write image"))?;
    // Dimensions are best-effort; unreadable images simply get None.
    let (width, height) = if let Ok((width, height)) = image::image_dimensions(&path) {
        (Some(width as i32), Some(height as i32))
    } else {
        (None, None)
    };
    // Back-fill dimensions into a matching enclosure, if any.
    if let Ok(mut enclosure) = self.db.read_enclosure(article_id, &parsed_url) {
        enclosure.width = width;
        enclosure.height = height;
        self.db.write_enclosures(&[enclosure])?;
    }
    let image = ImageMetadata {
        image_url: parsed_url,
        article_id: article_id.clone(),
        file_path: path.to_string_lossy().into(),
        width,
        height,
    };
    self.db.write_image(&image)?;
    Ok(Image::from_metadata(image, res))
}
/// Drop every cached image from the database and delete the image cache
/// directory from disk.
#[cfg(feature = "image-downloader")]
pub fn delete_all_images(&self) -> NewsFlashResult<()> {
    self.db.drop_all_images()?;
    let image_dir = self.data_dir.join(IMAGE_DATA_DIR);
    std::fs::remove_dir_all(image_dir)?;
    Ok(())
}
/// Delete image files on disk that no longer have a database entry.
///
/// Errors while reading the image directory or deleting individual files
/// are logged and ignored; only database read errors propagate.
#[cfg(feature = "image-downloader")]
async fn delete_orphaned_images(&self) -> NewsFlashResult<()> {
    // Paths the database still knows about. `PathBuf::from` replaces the
    // previous `PathBuf::new()` + `push` two-step.
    let db_images = self
        .db
        .read_images()?
        .into_iter()
        .map(|image| PathBuf::from(image.file_path))
        .collect::<HashSet<PathBuf>>();
    let dir = match std::fs::read_dir(self.data_dir.join(IMAGE_DATA_DIR)) {
        Ok(dir) => dir,
        Err(error) => {
            tracing::warn!(%error, "failed to read image directory");
            return Ok(());
        }
    };
    // Files actually present on disk. `ReadDir` is already an iterator,
    // so no `into_iter()` is needed.
    let existing_images = dir
        .filter_map(|item| item.ok().map(|entry| entry.path()))
        .collect::<HashSet<PathBuf>>();
    // Anything on disk but not in the database is an orphan.
    for file_path in existing_images.difference(&db_images) {
        if let Err(error) = std::fs::remove_file(file_path) {
            tracing::warn!(?file_path, %error, "Failed to delete file");
        }
    }
    Ok(())
}
}