mod connection_options;
mod error;
mod ext;
use self::connection_options::ConnectionOptions;
pub use self::error::DatabaseError;
pub use crate::database::ext::DatabaseExt;
#[cfg(test)]
pub use crate::database::ext::MockDatabaseExt;
use crate::diesel_migrations::MigrationHarness;
use crate::models::{
Article, ArticleFilter, ArticleID, ArticleOrder, Category, CategoryID, CategoryMapping, DatabaseSize, Enclosure, FatArticle, FatFavIcon, FavIcon,
Feed, FeedCount, FeedID, FeedMapping, Headline, Marked, NEWSFLASH_TOPLEVEL, OfflineAction, OrderBy, Read, SyncResult, Tag, TagID, Tagging,
Thumbnail, UnifiedMapping, Url,
};
use crate::models::{FeedUpdateResult, ImageMetadata};
use crate::schema::{
articles, categories, category_mapping, enclosures, fav_icons, feed_mapping, feeds, images, offline_actions, taggings, tags, thumbnails,
};
use crate::util;
use chrono::{Duration, Local, NaiveTime, TimeZone, Utc};
use diesel::dsl::*;
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};
use diesel::sql_types::*;
use diesel::sqlite::SqliteConnection;
use diesel::upsert::excluded;
use diesel_migrations::EmbeddedMigrations;
use itertools::Itertools;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
/// Convenience alias: every database operation reports failure as `DatabaseError`.
type DatabaseResult<T> = Result<T, DatabaseError>;
/// SQLite-backed store for feeds, articles, tags and related metadata.
///
/// Wraps an r2d2 connection pool plus the directory the database file lives
/// in (the directory is kept for on-disk size reporting, see `size()`).
pub struct Database {
// Pool of SQLite connections (capped at 5, see `Database::new`).
connection_pool: Pool<ConnectionManager<SqliteConnection>>,
// Directory containing `database.sqlite` and its `-wal`/`-shm` side files.
path: PathBuf,
}
/// Schema migrations embedded at compile time from `./migrations`.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations");
/// File name of the SQLite database inside the data directory.
pub const FILE_NAME: &str = "database.sqlite";
// SQL IFNULL(x, y): lets queries fall back to `articles::date` whenever
// `articles::updated` is NULL (used for the "Updated" sort order).
define_sql_function! { fn ifnull(x: diesel::sql_types::Nullable<TimestamptzSqlite>, y: TimestamptzSqlite) -> TimestamptzSqlite; }
// Builds a boxed `articles` query from an `ArticleFilter`.
//
// Implemented as a macro rather than a function because the boxed query type
// is awkward to name and the filter's fields are moved out of `$filter`.
// `$feed_mappings`/`$category_mappings` are used to resolve category filters
// down to concrete feed ids.
macro_rules! prepare_article_query {
(
$filter:expr,
$feed_mappings:expr,
$category_mappings:expr
) => {{
let mut query = articles::table.into_boxed();
// Default sort key is the published date.
let order_by = $filter.order_by.unwrap_or(OrderBy::Published);
// SQLite's OFFSET only takes effect together with LIMIT.
if $filter.offset.is_some() && $filter.limit.is_none() {
tracing::warn!("querying article with offset but no limit doesn't work");
}
if let Some(offset) = $filter.offset {
query = query.offset(offset);
}
if let Some(limit) = $filter.limit {
query = query.limit(limit);
}
// Ordering: "Updated" falls back to the published date via IFNULL when
// `articles::updated` is NULL.
if let Some(order) = $filter.order {
match order_by {
OrderBy::Published => match order {
ArticleOrder::NewestFirst => query = query.order(articles::date.desc()),
ArticleOrder::OldestFirst => query = query.order(articles::date.asc()),
},
OrderBy::Updated => match order {
ArticleOrder::NewestFirst => query = query.order(ifnull(articles::updated, articles::date).desc()),
ArticleOrder::OldestFirst => query = query.order(ifnull(articles::updated, articles::date).asc()),
},
}
}
if let Some(unread) = $filter.unread {
query = query.filter(articles::unread.eq(unread.to_int()));
}
if let Some(marked) = $filter.marked {
query = query.filter(articles::marked.eq(marked.to_int()));
}
if $filter.feeds.is_some() && $filter.categories.is_some() {
tracing::warn!("querying article by feed AND category might not be very useful");
}
// Feed/category whitelist: categories are resolved to feed ids through the
// supplied mappings, then matched with a single IN clause.
if $filter.feeds.is_some() || $filter.categories.is_some() {
let feeds_to_load = $filter.feeds_to_load(&$category_mappings, &$feed_mappings);
let feed_id_strings = feeds_to_load.into_iter().map(|id| id.to_string()).collect::<Vec<String>>();
query = query.filter(articles::feed_id.eq_any(feed_id_strings));
}
// Blacklist: one `!=` filter per excluded feed.
if $filter.feed_blacklist.is_some() || $filter.category_blacklist.is_some() {
let feeds_to_blacklist = $filter.feeds_to_blacklist(&$category_mappings, &$feed_mappings);
for blacklisted_feed in feeds_to_blacklist {
query = query.filter(articles::feed_id.ne(blacklisted_feed.to_string()));
}
}
// Each tag adds its own subquery filter, so an article must carry EVERY
// listed tag (intersection, not union).
if let Some(tags) = $filter.tags {
for tag in tags {
let tagged_article_ids = taggings::table.select(taggings::article_id).filter(taggings::tag_id.eq(tag));
query = query.filter(articles::article_id.eq_any(tagged_article_ids));
}
}
if let Some(ids) = $filter.ids {
query = query.filter(articles::article_id.eq_any(ids));
}
// Sanity check only — contradictory bounds still produce a (empty) query.
if let (Some(newer_than), Some(older_than)) = ($filter.newer_than, $filter.older_than) {
if newer_than > older_than {
tracing::warn!(%older_than, %newer_than, "Impossible constraint");
}
}
if let Some(newer_than) = $filter.newer_than {
query = match order_by {
OrderBy::Published => query.filter(articles::date.gt(newer_than)),
OrderBy::Updated => query.filter(ifnull(articles::updated, articles::date).gt(newer_than)),
};
}
if let Some(older_than) = $filter.older_than {
query = match order_by {
OrderBy::Published => query.filter(articles::date.lt(older_than)),
OrderBy::Updated => query.filter(ifnull(articles::updated, articles::date).lt(older_than)),
};
}
if let (Some(synced_after), Some(synced_before)) = ($filter.synced_after, $filter.synced_before) {
if synced_after > synced_before {
tracing::warn!(%synced_before, %synced_after, "Impossible constraint");
}
}
if let Some(synced_after) = $filter.synced_after {
query = query.filter(articles::synced.gt(synced_after));
}
if let Some(synced_before) = $filter.synced_before {
query = query.filter(articles::synced.lt(synced_before));
}
// Full-text search against the FTS shadow table.
// NOTE(review): the term is interpolated into raw SQL; this assumes
// `util::prepare_search_term` escapes single quotes / MATCH syntax —
// confirm, otherwise this is an SQL-injection vector.
if let Some(search_term) = $filter.search_term {
let search_term = util::prepare_search_term(&search_term);
query = query.filter(sql::<Bool>(&format!(
"article_id IN (SELECT article_id FROM fts_table WHERE fts_table MATCH '{}')",
search_term
)));
}
query
}};
}
impl Database {
    /// Opens (or creates) the SQLite database inside `data_dir` and runs any
    /// pending embedded migrations.
    ///
    /// The r2d2 pool is capped at 5 connections with a 120 s idle timeout and
    /// a 30 s connection timeout.
    ///
    /// # Errors
    /// `DatabaseError::InvalidPath` if the path is not valid UTF-8,
    /// `DatabaseError::Migration` if a migration fails, I/O or pool errors
    /// otherwise.
    pub fn new<P: AsRef<Path>>(data_dir: P) -> DatabaseResult<Database> {
        let data_dir = data_dir.as_ref();
        // Ensure the data directory (and all parents) exists. The previous
        // code created the directory twice (DirBuilder + create_dir_all) with
        // inconsistent error mapping; one call is sufficient.
        fs::create_dir_all(data_dir)?;
        let database_url = data_dir.join(FILE_NAME);
        let database_url = match database_url.to_str() {
            Some(url) => url.to_owned(),
            // The connection URL must be valid UTF-8.
            None => return Err(DatabaseError::InvalidPath),
        };
        let manager = ConnectionManager::<SqliteConnection>::new(database_url);
        let connection_pool = Pool::builder()
            .connection_customizer(Box::<ConnectionOptions>::default())
            .max_size(5)
            .idle_timeout(Some(std::time::Duration::from_secs(120)))
            .connection_timeout(std::time::Duration::from_secs(30))
            .build(manager)?;
        let db = Database {
            connection_pool,
            path: data_dir.into(),
        };
        db.init()?;
        Ok(db)
    }

    /// Runs all pending embedded migrations, logging each one by name.
    fn init(&self) -> DatabaseResult<()> {
        let mut connection = self.connection_pool.get()?;
        let pending_migrations = connection.pending_migrations(MIGRATIONS).map_err(|_| DatabaseError::Migration)?;
        if !pending_migrations.is_empty() {
            tracing::info!("running pending migrations");
            for migration in pending_migrations {
                let name = migration.name();
                // typo fix: was "runing migration"
                tracing::info!(%name, "running migration");
                connection.run_migration(&migration).map_err(|error| {
                    tracing::error!(%error, "Database migration failed");
                    DatabaseError::Migration
                })?;
            }
            tracing::info!("all migrations done");
        }
        Ok(())
    }

    /// Makes `tags` the complete set of tags: removes tags not in the slice,
    /// then upserts the given ones.
    fn write_tags(&self, tags: &[Tag], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::delete(tags::table)
            .filter(tags::tag_id.ne_all(tags.iter().map(|tag| tag.tag_id.clone())))
            .execute(connection)?;
        diesel::replace_into(tags::table).values(tags).execute(connection)?;
        Ok(())
    }

    /// Upserts article/tag associations on an existing connection.
    fn insert_taggings_impl(&self, taggings: &[Tagging], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::replace_into(taggings::table).values(taggings).execute(connection)?;
        Ok(())
    }

    /// Makes `categories` the complete set of categories (delete + upsert),
    /// mirroring `write_tags`.
    fn write_categories(&self, categories: &[Category], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::delete(categories::table)
            .filter(categories::category_id.ne_all(categories.iter().map(|category| category.category_id.clone())))
            .execute(connection)?;
        diesel::replace_into(categories::table).values(categories).execute(connection)?;
        Ok(())
    }

    /// Makes `feeds` the complete set of feeds (delete + upsert).
    fn write_feeds(&self, feeds: &[Feed], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::delete(feeds::table)
            .filter(feeds::feed_id.ne_all(feeds.iter().map(|feed| feed.feed_id.clone())))
            .execute(connection)?;
        diesel::replace_into(feeds::table).values(feeds).execute(connection)?;
        Ok(())
    }

    /// Replaces all feed→category mappings, preserving any locally stored
    /// sort index for mappings that already existed.
    fn write_feed_mappings(&self, mut mappings: Vec<FeedMapping>, connection: &mut SqliteConnection) -> DatabaseResult<()> {
        let old_mappings: Vec<FeedMapping> = feed_mapping::table.load(&mut *connection)?;
        for old_mapping in old_mappings {
            // Carry over the old sort index when the same mapping reappears.
            if let Some(m) = mappings
                .iter_mut()
                .find(|m| m.feed_id == old_mapping.feed_id && m.category_id == old_mapping.category_id)
                && old_mapping.sort_index.is_some()
            {
                m.sort_index = old_mapping.sort_index;
            }
        }
        diesel::delete(feed_mapping::table).execute(&mut *connection)?;
        diesel::replace_into(feed_mapping::table).values(mappings).execute(&mut *connection)?;
        Ok(())
    }

    /// Replaces all category→parent mappings, preserving existing sort
    /// indices (same strategy as `write_feed_mappings`).
    fn write_category_mappings(&self, mut mappings: Vec<CategoryMapping>, connection: &mut SqliteConnection) -> DatabaseResult<()> {
        let old_mappings: Vec<CategoryMapping> = category_mapping::table.load(&mut *connection)?;
        for old_mapping in old_mappings {
            if let Some(m) = mappings
                .iter_mut()
                .find(|m| m.category_id == old_mapping.category_id && m.parent_id == old_mapping.parent_id)
                && old_mapping.sort_index.is_some()
            {
                m.sort_index = old_mapping.sort_index;
            }
        }
        diesel::delete(category_mapping::table).execute(&mut *connection)?;
        diesel::replace_into(category_mapping::table).values(mappings).execute(&mut *connection)?;
        Ok(())
    }

    /// Re-assigns odd sort indices (1, 3, 5, …) to every feed/category
    /// mapping under `parent_id`, optionally inserting one new feed mapping
    /// and/or one new category mapping in the process.
    ///
    /// Note: the optional mappings are persisted here via the final
    /// `replace_into` calls — callers rely on this for insertion.
    fn normalize_sort_index(
        &self,
        parent_id: &CategoryID,
        insert_feed_mapping: Option<&FeedMapping>,
        insert_category_mapping: Option<&CategoryMapping>,
        connection: &mut SqliteConnection,
    ) -> DatabaseResult<()> {
        // Load siblings, excluding the entries we are about to (re-)insert so
        // they are not counted twice.
        let mut feed_mappings_of_parent: Vec<FeedMapping> = feed_mapping::table
            .filter(feed_mapping::category_id.eq(parent_id.as_str()))
            .filter(feed_mapping::feed_id.ne(insert_feed_mapping.map(|m| m.feed_id.as_str()).unwrap_or_default()))
            .load(&mut *connection)?;
        let mut category_mappings_of_parent: Vec<CategoryMapping> = category_mapping::table
            .filter(category_mapping::parent_id.eq(parent_id.as_str()))
            .filter(category_mapping::category_id.ne(insert_category_mapping.map(|m| m.category_id.as_str()).unwrap_or_default()))
            .load(&mut *connection)?;
        if let Some(insert_feed_mapping) = insert_feed_mapping {
            feed_mappings_of_parent.push(insert_feed_mapping.clone());
        }
        if let Some(insert_category_mapping) = insert_category_mapping {
            category_mappings_of_parent.push(insert_category_mapping.clone());
        }
        // Merge feeds and categories into one list so they share a single
        // ordering, then hand out fresh odd indices by current sort order.
        let mut unified_mappings = Vec::new();
        unified_mappings.append(&mut feed_mappings_of_parent.into_iter().map(UnifiedMapping::Feed).collect());
        unified_mappings.append(&mut category_mappings_of_parent.into_iter().map(UnifiedMapping::Category).collect());
        let mut feed_mappings_of_parent = Vec::new();
        let mut category_mappings_of_parent = Vec::new();
        for (i, mut m) in unified_mappings
            .into_iter()
            .sorted_by(|a, b| a.sort_index().cmp(&b.sort_index()))
            .enumerate()
        {
            // Odd indices (1, 3, 5, …) leave even gaps for later insertions.
            m.set_sort_index(1 + i as i32 * 2);
            match m {
                UnifiedMapping::Feed(f) => feed_mappings_of_parent.push(f),
                UnifiedMapping::Category(c) => category_mappings_of_parent.push(c),
            }
        }
        diesel::replace_into(feed_mapping::table)
            .values(feed_mappings_of_parent)
            .execute(&mut *connection)?;
        diesel::replace_into(category_mapping::table)
            .values(category_mappings_of_parent)
            .execute(&mut *connection)?;
        Ok(())
    }

    /// Upserts full articles (updating date/unread/marked/html on conflict)
    /// and rebuilds the full-text-search index.
    fn write_fat_articles(&self, articles: &[FatArticle], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::insert_into(articles::table)
            .values(articles)
            .on_conflict(articles::article_id)
            .do_update()
            .set((
                articles::date.eq(excluded(articles::date)),
                articles::unread.eq(excluded(articles::unread)),
                articles::marked.eq(excluded(articles::marked)),
                articles::html.eq(excluded(articles::html)),
            ))
            .execute(connection)?;
        // Rebuild the FTS shadow table so new content becomes searchable.
        sql_query("INSERT INTO fts_table(fts_table) VALUES('rebuild')").execute(connection)?;
        Ok(())
    }

    /// Deletes read, unmarked articles synced before `now - older_than`.
    fn drop_old_articles_impl(&self, older_than: Duration, connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::delete(articles::table)
            .filter(articles::synced.lt(Utc::now() - older_than))
            .filter(articles::unread.eq(Read::Read))
            .filter(articles::marked.eq(Marked::Unmarked))
            .execute(connection)?;
        Ok(())
    }

    /// Deletes unmarked articles whose feed no longer exists.
    fn drop_orphaned_articles(&self, connection: &mut SqliteConnection) -> DatabaseResult<()> {
        let feed_ids = feeds::table.select(feeds::feed_id);
        diesel::delete(articles::table)
            .filter(articles::feed_id.ne_all(feed_ids))
            .filter(articles::marked.eq(Marked::Unmarked))
            .execute(connection)?;
        Ok(())
    }

    /// Upserts enclosures on an existing connection.
    fn write_enclosures_impl(&self, enclosures: &[Enclosure], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        diesel::replace_into(enclosures::table).values(enclosures).execute(connection)?;
        Ok(())
    }

    /// Applies unread/marked state from synced headlines, one UPDATE per
    /// headline (each targets a single article id).
    fn insert_headlines(&self, headlines: &[Headline], connection: &mut SqliteConnection) -> DatabaseResult<()> {
        for headline in headlines {
            diesel::update(articles::table)
                .filter(articles::article_id.eq(&headline.article_id))
                .set((
                    articles::unread.eq(headline.unread.to_int()),
                    articles::marked.eq(headline.marked.to_int()),
                ))
                .execute(connection)?;
        }
        Ok(())
    }

    /// Counts the articles belonging to a single feed.
    fn article_count(&self, connection: &mut SqliteConnection, feed_id: &FeedID) -> DatabaseResult<i64> {
        let count = articles::table
            .filter(articles::feed_id.eq(feed_id))
            .select(count_star())
            .first(connection)?;
        Ok(count)
    }

    /// Maps every feed id to its total article count (one grouped query).
    fn article_count_feed_map(&self, connection: &mut SqliteConnection) -> DatabaseResult<HashMap<FeedID, i64>> {
        let vec: Vec<FeedCount> = articles::table
            .group_by(articles::feed_id)
            .select((articles::feed_id, count_star()))
            .load(connection)?;
        let map: HashMap<FeedID, i64> = vec.into_iter().map(|count| (count.feed_id, count.count)).collect();
        Ok(map)
    }
}
impl DatabaseExt for Database {
/// Deletes all synced content (articles, categories, enclosures, favicons,
/// feed mappings, feeds, taggings, tags) in one transaction, then VACUUMs
/// to reclaim disk space.
// NOTE(review): the thumbnails, images and offline_actions tables are not
// cleared here — confirm that is intentional.
fn reset(&self) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
connection.transaction::<(), DatabaseError, _>(|con| {
diesel::delete(articles::table).execute(con)?;
diesel::delete(categories::table).execute(con)?;
diesel::delete(enclosures::table).execute(con)?;
diesel::delete(fav_icons::table).execute(con)?;
diesel::delete(feed_mapping::table).execute(con)?;
diesel::delete(feeds::table).execute(con)?;
diesel::delete(taggings::table).execute(con)?;
diesel::delete(tags::table).execute(con)?;
Ok(())
})?;
// VACUUM cannot run inside the transaction above.
diesel::sql_query("VACUUM").execute(&mut connection)?;
Ok(())
}
/// Compacts the database: VACUUMs and truncates the write-ahead log.
fn clean(&self) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::sql_query("VACUUM").execute(&mut connection)?;
diesel::sql_query("PRAGMA wal_checkpoint(TRUNCATE);").execute(&mut connection)?;
Ok(())
}
/// Returns `true` when the database holds neither feeds nor articles.
fn is_empty(&self) -> DatabaseResult<bool> {
let mut connection = self.connection_pool.get()?;
connection.transaction::<bool, DatabaseError, _>(|con| {
let article_count: i64 = articles::table.select(count_star()).first(con)?;
let feed_count: i64 = feeds::table.select(count_star()).first(con)?;
Ok(feed_count == 0 && article_count == 0)
})
}
/// Reports the allocated size (page size × page count) and the on-disk
/// size (main file + `-shm` + `-wal`, missing side files count as 0).
fn size(&self) -> DatabaseResult<DatabaseSize> {
let mut connection = self.connection_pool.get()?;
let page_size = sql::<BigInt>("PRAGMA PAGE_SIZE").get_result::<i64>(&mut connection)? as u64;
let page_count = sql::<BigInt>("PRAGMA PAGE_COUNT").get_result::<i64>(&mut connection)? as u64;
let main_file = util::file_size(&self.path.join(FILE_NAME))?;
let shm_file = util::file_size(&self.path.join(format!("{FILE_NAME}-shm"))).unwrap_or(0);
let wal_file = util::file_size(&self.path.join(format!("{FILE_NAME}-wal"))).unwrap_or(0);
Ok(DatabaseSize {
allocated: page_size * page_count,
on_disk: main_file + shm_file + wal_file,
})
}
/// Upserts a single tag.
fn insert_tag(&self, tag: &Tag) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(tags::table).values(tag).execute(&mut connection)?;
Ok(())
}
/// Upserts multiple tags (existing tags not in the slice are kept).
fn insert_tags(&self, tags: &[Tag]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(tags::table).values(tags).execute(&mut *connection)?;
Ok(())
}
/// Loads all tags.
fn read_tags(&self) -> DatabaseResult<Vec<Tag>> {
let mut connection = self.connection_pool.get()?;
let tags = tags::table.load(&mut connection)?;
Ok(tags)
}
/// Loads the tags attached to one article (via the taggings table).
fn read_tags_for_article(&self, article_id: &ArticleID) -> DatabaseResult<Vec<Tag>> {
let mut connection = self.connection_pool.get()?;
let tag_ids_of_article = taggings::table.select(taggings::tag_id).filter(taggings::article_id.eq(article_id));
let tags = tags::table.filter(tags::tag_id.eq_any(tag_ids_of_article)).load(&mut connection)?;
Ok(tags)
}
/// Deletes a tag by id (its taggings are not removed here).
fn drop_tag(&self, tag_id: &TagID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(tags::table).filter(tags::tag_id.eq(tag_id)).execute(&mut connection)?;
Ok(())
}
/// Marks every article tagged with any of the given tags as read.
fn set_tag_read(&self, tags: &[TagID]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::update(articles::table)
.filter(articles::article_id.eq_any(taggings::table.filter(taggings::tag_id.eq_any(tags)).select(taggings::article_id)))
.set(articles::unread.eq(Read::Read))
.execute(&mut connection)?;
Ok(())
}
/// Upserts a single article/tag association.
fn insert_tagging(&self, tagging: &Tagging) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(taggings::table).values(tagging).execute(&mut connection)?;
Ok(())
}
/// Upserts multiple article/tag associations.
fn insert_taggings(&self, taggings: &[Tagging]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
self.insert_taggings_impl(taggings, &mut connection)
}
/// Loads taggings, optionally filtered by article and/or tag.
fn read_taggings(&self, article: Option<&ArticleID>, tag: Option<&TagID>) -> DatabaseResult<Vec<Tagging>> {
let mut connection = self.connection_pool.get()?;
let mut query = taggings::table.into_boxed();
if article.is_some() && tag.is_some() {
tracing::warn!("querying tagging by article AND tag might not be very useful");
}
if let Some(article) = article {
query = query.filter(taggings::article_id.eq(article.to_string()));
}
if let Some(tag) = tag {
query = query.filter(taggings::tag_id.eq(tag.to_string()));
}
let taggings = query.load(&mut connection)?;
Ok(taggings)
}
/// Deletes one specific article/tag association.
fn drop_tagging(&self, tagging: &Tagging) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(taggings::table)
.filter(taggings::tag_id.eq(&tagging.tag_id))
.filter(taggings::article_id.eq(&tagging.article_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes all associations of the given tag.
fn drop_taggings_of_tag(&self, tag: &Tag) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(taggings::table)
.filter(taggings::tag_id.eq(&tag.tag_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes all associations of the given article.
fn drop_taggings_of_article(&self, article: &ArticleID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(taggings::table)
.filter(taggings::article_id.eq(article))
.execute(&mut connection)?;
Ok(())
}
/// Upserts a single category.
fn insert_category(&self, category: &Category) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(categories::table).values(category).execute(&mut connection)?;
Ok(())
}
/// Upserts multiple categories.
fn insert_categories(&self, categories: &[Category]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(categories::table).values(categories).execute(&mut *connection)?;
Ok(())
}
/// Loads a single category by id.
fn read_category(&self, category_id: &CategoryID) -> DatabaseResult<Category> {
let mut connection = self.connection_pool.get()?;
let category = categories::table.find(category_id).first(&mut connection)?;
Ok(category)
}
/// Loads all categories.
fn read_categories(&self) -> DatabaseResult<Vec<Category>> {
let mut connection = self.connection_pool.get()?;
let categories = categories::table.load(&mut connection)?;
Ok(categories)
}
/// Deletes a category by id (mappings/feeds are handled elsewhere).
fn drop_category(&self, category_id: &CategoryID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(categories::table)
.filter(categories::category_id.eq(category_id))
.execute(&mut connection)?;
Ok(())
}
/// Marks every article of every feed mapped to the given categories as read.
fn set_category_read(&self, categories: &[CategoryID]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::update(articles::table)
.filter(
articles::feed_id.eq_any(
feed_mapping::table
.filter(feed_mapping::category_id.eq_any(categories))
.select(feed_mapping::feed_id),
),
)
.set(articles::unread.eq(Read::Read))
.execute(&mut connection)?;
Ok(())
}
/// Upserts a single feed.
fn insert_feed(&self, feed: &Feed) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(feeds::table).values(feed).execute(&mut connection)?;
Ok(())
}
/// Upserts multiple feeds.
fn insert_feeds(&self, feeds: &[Feed]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(feeds::table).values(feeds).execute(&mut *connection)?;
Ok(())
}
/// Loads all feeds.
fn read_feeds(&self) -> DatabaseResult<Vec<Feed>> {
let mut connection = self.connection_pool.get()?;
let feeds = feeds::table.load(&mut connection)?;
Ok(feeds)
}
/// Loads a single feed by id.
fn read_feed(&self, feed_id: &FeedID) -> DatabaseResult<Feed> {
let mut connection = self.connection_pool.get()?;
let feed = feeds::table.find(feed_id).first(&mut connection)?;
Ok(feed)
}
/// Deletes a feed by id (its articles are cleaned up separately).
fn drop_feed(&self, feed_id: &FeedID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(feeds::table).filter(feeds::feed_id.eq(feed_id)).execute(&mut connection)?;
Ok(())
}
/// Marks every article of the given feeds as read.
fn set_feed_read(&self, feeds: &[FeedID]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::update(articles::table)
.filter(articles::feed_id.eq_any(feeds))
.set(articles::unread.eq(Read::Read.to_int()))
.execute(&mut connection)?;
Ok(())
}
/// Loads the full favicon row (all columns) for a feed.
fn read_fatfavicon(&self, feed_id: &FeedID) -> DatabaseResult<FatFavIcon> {
let mut connection = self.connection_pool.get()?;
let favicon = fav_icons::table.find(feed_id).first(&mut connection)?;
Ok(favicon)
}
/// Loads the slim favicon projection for a feed — only the columns listed
/// below (presumably omitting the larger high-res payload that
/// `FatFavIcon` carries; confirm against the schema).
fn read_favicon(&self, feed_id: &FeedID) -> DatabaseResult<FavIcon> {
let mut connection = self.connection_pool.get()?;
let favicon = fav_icons::table
.find(feed_id)
.select((
fav_icons::feed_id,
fav_icons::date,
fav_icons::format,
fav_icons::etag,
fav_icons::lowres_source_url,
fav_icons::lowres,
))
.first::<FavIcon>(&mut connection)?;
Ok(favicon)
}
/// Upserts a single favicon.
fn insert_favicon(&self, favicon: &FatFavIcon) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(fav_icons::table).values(favicon).execute(&mut connection)?;
Ok(())
}
/// Upserts multiple favicons.
fn insert_favicons(&self, favicons: &[FatFavIcon]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(fav_icons::table).values(favicons).execute(&mut *connection)?;
Ok(())
}
/// Deletes the favicon of a feed.
fn drop_favicon(&self, feed_id: &FeedID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(fav_icons::table)
.filter(fav_icons::feed_id.eq(feed_id))
.execute(&mut connection)?;
Ok(())
}
/// Loads the thumbnail of an article.
fn read_thumbnail(&self, article_id: &ArticleID) -> DatabaseResult<Thumbnail> {
let mut connection = self.connection_pool.get()?;
let thumbnail = thumbnails::table.find(article_id).first(&mut connection)?;
Ok(thumbnail)
}
/// Upserts an article thumbnail.
fn insert_thumbnail(&self, thumbnail: &Thumbnail) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(thumbnails::table).values(thumbnail).execute(&mut connection)?;
Ok(())
}
/// Inserts a feed→category mapping. The actual insert happens inside
/// `normalize_sort_index`, which appends the mapping to its parent's
/// siblings and rewrites all sort indices.
fn insert_feed_mapping(&self, mapping: &FeedMapping) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
self.normalize_sort_index(&mapping.category_id, Some(mapping), None, &mut connection)?;
Ok(())
}
/// Upserts multiple feed→category mappings without touching sort indices.
fn insert_feed_mappings(&self, mappings: &[FeedMapping]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(feed_mapping::table).values(mappings).execute(&mut *connection)?;
Ok(())
}
/// Inserts a category→parent mapping via `normalize_sort_index`
/// (same mechanism as `insert_feed_mapping`).
fn insert_category_mapping(&self, mapping: &CategoryMapping) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
self.normalize_sort_index(&mapping.parent_id, None, Some(mapping), &mut connection)?;
Ok(())
}
/// Upserts multiple category→parent mappings without touching sort indices.
fn insert_category_mappings(&self, mappings: &[CategoryMapping]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::replace_into(category_mapping::table).values(mappings).execute(&mut *connection)?;
Ok(())
}
/// Loads feed mappings, optionally filtered by feed and/or category.
fn read_feed_mappings(&self, feed: Option<&FeedID>, category: Option<&CategoryID>) -> DatabaseResult<Vec<FeedMapping>> {
let mut connection = self.connection_pool.get()?;
let mut query = feed_mapping::table.into_boxed();
if feed.is_some() && category.is_some() {
tracing::warn!("querying mapping by feed AND category might not be very useful");
}
if let Some(feed) = feed {
query = query.filter(feed_mapping::feed_id.eq(feed.to_string()));
}
if let Some(category) = category {
query = query.filter(feed_mapping::category_id.eq(category.to_string()));
}
let mappings = query.load(&mut connection)?;
Ok(mappings)
}
/// Loads category mappings, optionally filtered by parent and/or category.
fn read_category_mappings(&self, parent: Option<&CategoryID>, category: Option<&CategoryID>) -> DatabaseResult<Vec<CategoryMapping>> {
let mut connection = self.connection_pool.get()?;
let mut query = category_mapping::table.into_boxed();
if parent.is_some() && category.is_some() {
tracing::warn!("querying mapping by feed AND category might not be very useful");
}
if let Some(parent) = parent {
query = query.filter(category_mapping::parent_id.eq(parent.to_string()));
}
if let Some(category) = category {
query = query.filter(category_mapping::category_id.eq(category.to_string()));
}
let mappings = query.load(&mut connection)?;
Ok(mappings)
}
/// Deletes one specific feed→category mapping.
fn drop_feed_mapping(&self, mapping: &FeedMapping) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(feed_mapping::table)
.filter(feed_mapping::feed_id.eq(&mapping.feed_id))
.filter(feed_mapping::category_id.eq(&mapping.category_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes one specific category→parent mapping.
fn drop_category_mapping(&self, mapping: &CategoryMapping) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(category_mapping::table)
.filter(category_mapping::parent_id.eq(&mapping.parent_id))
.filter(category_mapping::category_id.eq(&mapping.category_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes every mapping of the given feed (all categories).
fn drop_mapping_of_feed(&self, feed_id: &FeedID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(feed_mapping::table)
.filter(feed_mapping::feed_id.eq(feed_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes every mapping where the given category is the child.
fn drop_mapping_of_category(&self, category_id: &CategoryID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(category_mapping::table)
.filter(category_mapping::category_id.eq(category_id))
.execute(&mut connection)?;
Ok(())
}
/// Deletes every feed mapping that points at the given category.
fn drop_feed_mappings_of_category(&self, category_id: &CategoryID) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(feed_mapping::table)
.filter(feed_mapping::category_id.eq(category_id))
.execute(&mut connection)?;
Ok(())
}
fn update_articles_grabbed_content(&self, articles: &[FatArticle]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
connection.transaction::<(), DatabaseError, _>(|con| {
for article in articles {
diesel::update(articles::table)
.filter(articles::article_id.eq(&article.article_id))
.set((
articles::scraped_content.eq(&article.scraped_content),
articles::plain_text.eq(&article.plain_text),
articles::html.eq(&article.html),
articles::title.eq(&article.title),
articles::author.eq(&article.author),
articles::summary.eq(&article.summary),
articles::thumbnail_url.eq(&article.thumbnail_url),
))
.execute(con)?;
}
sql_query("INSERT INTO fts_table(fts_table) VALUES('rebuild')").execute(con)?;
Ok(())
})
}
fn update_article_grabbed_content(&self, article: &FatArticle) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
connection.transaction::<(), DatabaseError, _>(|con| {
diesel::update(articles::table)
.filter(articles::article_id.eq(&article.article_id))
.set((
articles::scraped_content.eq(&article.scraped_content),
articles::plain_text.eq(&article.plain_text),
articles::html.eq(&article.html),
articles::title.eq(&article.title),
articles::author.eq(&article.author),
articles::summary.eq(&article.summary),
articles::thumbnail_url.eq(&article.thumbnail_url),
))
.execute(con)?;
sql_query("INSERT INTO fts_table(fts_table) VALUES('rebuild')").execute(con)?;
Ok(())
})
}
/// Inserts the given articles inside a transaction; on an id conflict the
/// existing row's date/unread/marked columns are overwritten with the
/// incoming values.
fn write_articles(&self, articles: &[Article]) -> DatabaseResult<()> {
    let mut conn = self.connection_pool.get()?;
    conn.transaction::<(), DatabaseError, _>(|tx| {
        let upsert = diesel::insert_into(articles::table)
            .values(articles)
            .on_conflict(articles::article_id)
            .do_update()
            .set((
                articles::date.eq(excluded(articles::date)),
                articles::unread.eq(excluded(articles::unread)),
                articles::marked.eq(excluded(articles::marked)),
            ));
        upsert.execute(tx)?;
        Ok(())
    })
}
/// Loads one full article (all columns) by id.
fn read_fat_article(&self, id: &ArticleID) -> DatabaseResult<FatArticle> {
let mut connection = self.connection_pool.get()?;
let article = articles::table.filter(articles::article_id.eq(id)).first::<FatArticle>(&mut connection)?;
Ok(article)
}
/// Loads full articles matching `filter` (see `prepare_article_query!`).
fn read_fat_articles(&self, filter: ArticleFilter) -> DatabaseResult<Vec<FatArticle>> {
// Mappings are needed to resolve category filters to feed ids.
let feed_mappings = self.read_feed_mappings(None, None)?;
let category_mappings = self.read_category_mappings(None, None)?;
let mut connection = self.connection_pool.get()?;
let query = prepare_article_query!(filter, feed_mappings, category_mappings);
let articles = query.load::<FatArticle>(&mut connection)?;
Ok(articles)
}
/// Loads one slim article by id — only the listed columns, leaving out the
/// heavier content columns that `FatArticle` carries.
fn read_article(&self, id: &ArticleID) -> DatabaseResult<Article> {
let mut connection = self.connection_pool.get()?;
let article = articles::table
.filter(articles::article_id.eq(id))
.select((
articles::article_id,
articles::title,
articles::author,
articles::feed_id,
articles::url,
articles::date,
articles::synced,
articles::summary,
articles::direction,
articles::unread,
articles::marked,
articles::thumbnail_url,
articles::updated,
))
.first::<Article>(&mut connection)?;
Ok(article)
}
/// Loads slim articles matching `filter`, using the same projection as
/// `read_article`. The generated SQL is traced for debugging.
fn read_articles(&self, filter: ArticleFilter) -> DatabaseResult<Vec<Article>> {
let feed_mappings = self.read_feed_mappings(None, None)?;
let category_mappings = self.read_category_mappings(None, None)?;
let mut connection = self.connection_pool.get()?;
let query = prepare_article_query!(filter, feed_mappings, category_mappings);
let dbg_query = diesel::debug_query(&query);
tracing::trace!(?dbg_query, "read articles query");
let articles: Vec<Article> = query
.select((
articles::article_id,
articles::title,
articles::author,
articles::feed_id,
articles::url,
articles::date,
articles::synced,
articles::summary,
articles::direction,
articles::unread,
articles::marked,
articles::thumbnail_url,
articles::updated,
))
.load::<Article>(&mut connection)?;
Ok(articles)
}
/// Returns `true` when an article with the given id is present.
fn article_exists(&self, article_id: &ArticleID) -> DatabaseResult<bool> {
    let mut conn = self.connection_pool.get()?;
    let matches: i64 = articles::table
        .filter(articles::article_id.eq(article_id))
        .select(count_star())
        .first(&mut conn)?;
    Ok(matches > 0)
}
/// Deletes the articles with the given ids.
fn drop_articles(&self, articles: &[ArticleID]) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::delete(articles::table)
.filter(articles::article_id.eq_any(articles))
.execute(&mut connection)?;
Ok(())
}
/// Deletes read, unmarked articles synced earlier than `now - older_than`.
fn drop_old_articles(&self, older_than: Duration) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
self.drop_old_articles_impl(older_than, &mut connection)?;
Ok(())
}
/// Sets the read state of the given articles.
fn set_article_read(&self, articles: &[ArticleID], read: Read) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::update(articles::table)
.filter(articles::article_id.eq_any(articles))
.set(articles::unread.eq(read.to_int()))
.execute(&mut connection)?;
Ok(())
}
/// Marks every article in the database as read.
fn set_all_read(&self) -> DatabaseResult<()> {
    let mut conn = self.connection_pool.get()?;
    let all_articles = diesel::update(articles::table);
    all_articles
        .set(articles::unread.eq(Read::Read.to_int()))
        .execute(&mut conn)?;
    Ok(())
}
fn set_article_marked(&self, articles: &[ArticleID], marked: Marked) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
diesel::update(articles::table)
.filter(articles::article_id.eq_any(articles))
.set(articles::marked.eq(marked.to_int()))
.execute(&mut connection)?;
Ok(())
}
fn read_enclosures(&self, article: &ArticleID) -> DatabaseResult<Vec<Enclosure>> {
let mut connection = self.connection_pool.get()?;
let enclosures = enclosures::table.filter(enclosures::article_id.eq(article)).load(&mut connection)?;
Ok(enclosures)
}
fn read_enclosure(&self, article: &ArticleID, url: &Url) -> DatabaseResult<Enclosure> {
let mut connection = self.connection_pool.get()?;
let enclosure = enclosures::table
.filter(enclosures::article_id.eq(article))
.filter(enclosures::url.eq(url))
.first(&mut connection)?;
Ok(enclosure)
}
fn write_enclosures(&self, enclosures: &[Enclosure]) -> DatabaseResult<()> {
    // Public wrapper: acquire a pooled connection and delegate to the
    // shared implementation.
    let mut conn = self.connection_pool.get()?;
    self.write_enclosures_impl(enclosures, &mut conn)?;
    Ok(())
}
fn drop_enclosures_of_article(&self, article: &ArticleID) -> DatabaseResult<()> {
    // Delete all enclosures tied to one article.
    // `article` is already a reference, so pass it straight to `eq`:
    // the previous `eq(&article)` took an extra borrow (`&&ArticleID`),
    // inconsistent with every sibling query in this impl.
    let mut connection = self.connection_pool.get()?;
    diesel::delete(enclosures::table)
        .filter(enclosures::article_id.eq(article))
        .execute(&mut connection)?;
    Ok(())
}
fn drop_enclosures_of_articles(&self, articles: &[ArticleID]) -> DatabaseResult<()> {
    // Bulk variant: delete the enclosures of every listed article.
    let mut conn = self.connection_pool.get()?;
    let target = enclosures::table.filter(enclosures::article_id.eq_any(articles));
    diesel::delete(target).execute(&mut conn)?;
    Ok(())
}
fn unread_count_feed_map(&self, exclude_future: bool) -> DatabaseResult<HashMap<FeedID, i64>> {
    // Per-feed unread counts, keyed by feed id. With `exclude_future`,
    // articles dated after "now" are left out of the tally.
    let mut conn = self.connection_pool.get()?;
    let mut query = articles::table
        .filter(articles::unread.eq(Read::Unread))
        .group_by(articles::feed_id)
        .select((articles::feed_id, count_star()))
        .into_boxed();
    if exclude_future {
        query = query.filter(articles::date.lt(Utc::now()));
    }
    let counts: Vec<FeedCount> = query.load(&mut conn)?;
    Ok(counts.into_iter().map(|c| (c.feed_id, c.count)).collect())
}
fn marked_count_feed_map(&self) -> DatabaseResult<Vec<FeedCount>> {
let mut connection = self.connection_pool.get()?;
let marked_map = articles::table
.filter(articles::marked.eq(Marked::Marked))
.select((articles::feed_id, diesel::dsl::sql::<diesel::sql_types::BigInt>("count(*)")))
.group_by(articles::feed_id)
.load(&mut connection)?;
Ok(marked_map)
}
fn today_unread_count(&self, exclude_future: bool) -> DatabaseResult<i64> {
    // Number of unread articles dated "today" in the user's local timezone.
    // Day boundaries are computed locally and converted to UTC for the
    // comparison against `articles::date`. With `exclude_future` the upper
    // bound is "now" instead of local 23:59:59.
    let mut conn = self.connection_pool.get()?;
    let now_local = Local::now();
    let day_start = now_local
        .with_time(NaiveTime::from_hms_opt(0, 0, 0).unwrap())
        .unwrap()
        .with_timezone(&Utc);
    let day_end = now_local
        .with_time(NaiveTime::from_hms_opt(23, 59, 59).unwrap())
        .unwrap()
        .with_timezone(&Utc);
    // Both branches share the same query shape, so no boxing is needed:
    // only the upper bound differs.
    let upper_bound = if exclude_future { Utc::now() } else { day_end };
    let count = articles::table
        .filter(articles::unread.eq(Read::Unread))
        .filter(articles::date.gt(day_start))
        .filter(articles::date.lt(upper_bound))
        .select(count_star())
        .first(&mut conn)?;
    Ok(count)
}
fn today_marked_count(&self) -> DatabaseResult<i64> {
let mut connection = self.connection_pool.get()?;
let start = Utc::now().with_time(NaiveTime::from_hms_opt(0, 0, 0).unwrap()).unwrap();
let end = Utc::now().with_time(NaiveTime::from_hms_opt(23, 59, 59).unwrap()).unwrap();
let unread_map = articles::table
.filter(articles::marked.eq(Marked::Marked))
.filter(articles::date.gt(start))
.filter(articles::date.lt(end))
.select(count_star())
.first(&mut connection)?;
Ok(unread_map)
}
fn unread_count_all(&self) -> DatabaseResult<i64> {
    // Total number of unread articles across every feed.
    let mut conn = self.connection_pool.get()?;
    let total: i64 = articles::table
        .filter(articles::unread.eq(Read::Unread))
        .select(count_star())
        .first(&mut conn)?;
    Ok(total)
}
fn write_sync_result(&self, result: SyncResult, delete_articles_older_than: Option<Duration>) -> DatabaseResult<HashMap<FeedID, i64>> {
    // Persist a complete sync result (categories, feeds, tags, articles,
    // headlines, taggings, enclosures) in one transaction, then return a
    // per-feed delta of article counts (count after minus count before).
    //
    // Articles are written in chunks — presumably to keep each insert's
    // bind-parameter count within SQLite's limits (TODO confirm).
    const CHUNK_SIZE: usize = 1000;
    let mut connection = self.connection_pool.get()?;
    let before_map = self.article_count_feed_map(&mut connection)?;
    connection.transaction::<(), DatabaseError, _>(|con| {
        // Collect every category touched by this sync (plus the virtual
        // top-level) so their sort indices can be normalized afterwards.
        let mut category_ids = vec![NEWSFLASH_TOPLEVEL.clone()];
        if let Some(categories) = &result.categories {
            category_ids.append(&mut categories.iter().map(|c| c.category_id.clone()).collect());
            self.write_categories(categories, con)?;
        }
        if let Some(category_mappings) = result.category_mappings {
            self.write_category_mappings(category_mappings, con)?;
        }
        if let Some(feeds) = &result.feeds {
            self.write_feeds(feeds, con)?;
        }
        if let Some(feed_mappings) = result.feed_mappings {
            self.write_feed_mappings(feed_mappings, con)?;
        }
        if let Some(tags) = &result.tags {
            self.write_tags(tags, con)?;
        }
        if let Some(articles) = &result.articles {
            for chunk in articles.chunks(CHUNK_SIZE) {
                self.write_fat_articles(chunk, con)?;
            }
        }
        if let Some(headlines) = &result.headlines {
            self.insert_headlines(headlines, con)?;
        }
        if let Some(taggings) = result.taggings {
            self.insert_taggings_impl(&taggings, con)?;
        }
        if let Some(enclosures) = &result.enclosures {
            self.write_enclosures_impl(enclosures, con)?;
        }
        for category_id in &category_ids {
            self.normalize_sort_index(category_id, None, None, con)?;
        }
        Ok(())
    })?;
    let after_map = self.article_count_feed_map(&mut connection)?;
    // Per-feed delta: feeds unseen before the sync report their full
    // count; existing feeds report (after - before).
    let mut diff_map = HashMap::new();
    for (key, value) in after_map.into_iter() {
        if !before_map.contains_key(&key) {
            diff_map.insert(key, value);
            continue;
        }
        *diff_map.entry(key).or_insert(0) = value - before_map[&key];
    }
    // Cleanup runs in its own transaction so a failure here cannot roll
    // back the sync data committed above.
    connection.transaction::<(), DatabaseError, _>(|con| {
        if let Some(older_than) = delete_articles_older_than {
            self.drop_old_articles_impl(older_than, con)?;
        }
        self.drop_orphaned_articles(con)?;
        Ok(())
    })?;
    Ok(diff_map)
}
fn write_feed_update_result(&self, feed_id: &FeedID, result: FeedUpdateResult) -> DatabaseResult<i64> {
    // Persist the outcome of a single-feed update inside one transaction
    // and report how many articles the feed gained (negative if rows
    // disappeared between the two counts).
    let mut connection = self.connection_pool.get()?;
    let count_before = self.article_count(&mut connection, feed_id)?;
    connection.transaction::<(), DatabaseError, _>(|tx| {
        // Feed metadata (label, error state, …) is replaced wholesale.
        if let Some(feed) = &result.feed {
            diesel::replace_into(feeds::table).values(feed).execute(tx)?;
        }
        if let Some(articles) = &result.articles {
            self.write_fat_articles(articles, tx)?;
        }
        if let Some(taggings) = result.taggings {
            self.insert_taggings_impl(&taggings, tx)?;
        }
        if let Some(enclosures) = &result.enclosures {
            self.write_enclosures_impl(enclosures, tx)?;
        }
        Ok(())
    })?;
    let count_after = self.article_count(&mut connection, feed_id)?;
    Ok(count_after - count_before)
}
fn sort_alphabetically(&self) -> DatabaseResult<()> {
let mut connection = self.connection_pool.get()?;
let mut category_mappings: Vec<CategoryMapping> = category_mapping::table.load(&mut connection)?;
let mut feed_mappings: Vec<FeedMapping> = feed_mapping::table.load(&mut connection)?;
let feed_names: HashMap<FeedID, String> = feeds::table
.load::<Feed>(&mut connection)?
.into_iter()
.map(|f| (f.feed_id, f.label.to_lowercase()))
.collect();
let category_names: HashMap<CategoryID, String> = categories::table
.load::<Category>(&mut connection)?
.into_iter()
.map(|c| (c.category_id, c.label.to_lowercase()))
.collect();
for (_key, group) in &feed_mappings
.iter_mut()
.sorted_by(|a, b| a.category_id.as_str().cmp(b.category_id.as_str()))
.chunk_by(|m| m.category_id.clone())
{
for (i, m) in group
.sorted_by(|a, b| {
let name_a = feed_names.get(&a.feed_id);
let name_b = feed_names.get(&b.feed_id);
name_a.cmp(&name_b)
})
.enumerate()
{
m.sort_index = Some(1 + i as i32 * 2);
}
}
for (_key, group) in &category_mappings
.iter_mut()
.sorted_by(|a, b| a.parent_id.as_str().cmp(b.parent_id.as_str()))
.chunk_by(|m| m.parent_id.clone())
{
for (i, m) in group
.sorted_by(|a, b| {
let name_a = category_names.get(&a.category_id);
let name_b = category_names.get(&b.category_id);
name_a.cmp(&name_b)
})
.enumerate()
{
m.sort_index = Some(1 + i as i32 * 2);
}
}
diesel::delete(category_mapping::table).execute(&mut *connection)?;
diesel::delete(feed_mapping::table).execute(&mut *connection)?;
diesel::replace_into(category_mapping::table)
.values(category_mappings)
.execute(&mut *connection)?;
diesel::replace_into(feed_mapping::table)
.values(feed_mappings)
.execute(&mut *connection)?;
Ok(())
}
fn insert_offline_actions(&self, offline_actions: &[OfflineAction]) -> DatabaseResult<()> {
    // Queue actions performed while offline; conflicting rows are replaced.
    let mut conn = self.connection_pool.get()?;
    diesel::replace_into(offline_actions::table)
        .values(offline_actions)
        .execute(&mut conn)?;
    Ok(())
}
fn read_offline_actions(&self) -> DatabaseResult<Vec<OfflineAction>> {
    // Return every queued offline action.
    let mut conn = self.connection_pool.get()?;
    Ok(offline_actions::table.load(&mut conn)?)
}
fn drop_offline_actions(&self) -> DatabaseResult<()> {
    // Clear the offline-action queue entirely.
    let mut conn = self.connection_pool.get()?;
    diesel::delete(offline_actions::table).execute(&mut conn)?;
    Ok(())
}
fn read_image(&self, url: &Url) -> DatabaseResult<ImageMetadata> {
    // Fetch the metadata row of a single cached image, keyed by URL;
    // fails if no row matches.
    let mut conn = self.connection_pool.get()?;
    let row = images::table.filter(images::image_url.eq(url)).first(&mut conn)?;
    Ok(row)
}
fn read_images(&self) -> DatabaseResult<Vec<ImageMetadata>> {
    // Metadata of every cached image.
    let mut conn = self.connection_pool.get()?;
    Ok(images::table.load(&mut conn)?)
}
fn drop_all_images(&self) -> DatabaseResult<()> {
    // Remove every image-metadata row.
    let mut conn = self.connection_pool.get()?;
    diesel::delete(images::table).execute(&mut conn)?;
    Ok(())
}
fn write_image(&self, image: &ImageMetadata) -> DatabaseResult<()> {
    // Record image metadata; duplicate rows are ignored rather than replaced.
    let mut conn = self.connection_pool.get()?;
    diesel::insert_or_ignore_into(images::table)
        .values(image)
        .execute(&mut conn)?;
    Ok(())
}
}
#[cfg(test)]
mod tests {
use crate::database::{Database, DatabaseExt};
use crate::models::{
ArticleFilter, ArticleID, Category, CategoryID, CategoryMapping, Direction, FatArticle, FatFavIcon, Feed, FeedID, FeedMapping, Marked,
NEWSFLASH_TOPLEVEL, Read, SyncResult, Tag, TagID, Tagging, Url,
};
use chrono::{Duration, Utc};
use diesel::sqlite::SqliteConnection;
use serial_test::serial;
use test_log::test;
fn setup_db(name: &str) -> Database {
    // Create a brand-new test database under ./test-output/<name>,
    // wiping any directory left over from a previous run first.
    let path = format!("./test-output/{name}");
    let leftover = std::fs::exists(&path).unwrap();
    if leftover {
        std::fs::remove_dir_all(&path).unwrap();
    }
    Database::new(&path).unwrap()
}
fn get_tags() -> Vec<Tag> {
    // Two fixture tags sharing the same color and no sort index.
    ["tag_1", "tag_2"]
        .into_iter()
        .map(|id| Tag {
            tag_id: TagID::new(id),
            label: format!("{id}_label"),
            color: Some("#FF00FF".to_string()),
            sort_index: None,
        })
        .collect()
}
fn get_taggings() -> Vec<Tagging> {
    // Three fixture taggings: article_1 carries two tags, article_2 one.
    [
        ("article_1", "tag_1"),
        ("article_2", "tag_2"),
        ("article_1", "tag_2"),
    ]
    .into_iter()
    .map(|(article, tag)| Tagging {
        article_id: ArticleID::new(article),
        tag_id: TagID::new(tag),
    })
    .collect()
}
fn get_categories() -> Vec<Category> {
    // Two plain fixture categories.
    ["category_1", "category_2"]
        .into_iter()
        .map(|id| Category {
            category_id: CategoryID::new(id),
            label: format!("{id}_label"),
        })
        .collect()
}
fn get_feeds() -> Vec<Feed> {
    // Two fixture feeds whose website / rss / favicon URLs are derived
    // from the feed id ("feed_1" -> "http://feed-1.com").
    ["feed_1", "feed_2"]
        .into_iter()
        .map(|id| {
            let host = id.replace('_', "-");
            Feed {
                feed_id: FeedID::new(id),
                label: format!("{id}_label"),
                website: Some(Url::parse(&format!("http://{host}.com")).unwrap()),
                feed_url: Some(Url::parse(&format!("http://{host}.com/rss")).unwrap()),
                icon_url: Some(Url::parse(&format!("http://{host}.com/fav.ico")).unwrap()),
                error_count: 0,
                error_message: None,
            }
        })
        .collect()
}
fn get_category_mappings() -> Vec<CategoryMapping> {
    // Both fixture categories sit directly below the NewsFlash top level,
    // indexed in declaration order.
    ["category_1", "category_2"]
        .into_iter()
        .enumerate()
        .map(|(i, category)| CategoryMapping {
            parent_id: NEWSFLASH_TOPLEVEL.clone(),
            category_id: CategoryID::new(category),
            sort_index: Some(i as i32),
        })
        .collect()
}
fn get_feed_mappings() -> Vec<FeedMapping> {
    // (feed, category, sort index) triples; feed_1 deliberately appears
    // in two categories.
    [
        ("feed_1", "category_1", 0),
        ("feed_2", "category_2", 1),
        ("feed_1", "category_2", 0),
    ]
    .into_iter()
    .map(|(feed, category, index)| FeedMapping {
        feed_id: FeedID::new(feed),
        category_id: CategoryID::new(category),
        sort_index: Some(index),
    })
    .collect()
}
fn get_articles() -> Vec<FatArticle> {
    // Two fixture articles built from a shared template: article_1 has an
    // HTML body, article_2 has none but is dated two hours in the future.
    let template = |num: u32| FatArticle {
        article_id: ArticleID::new(&format!("article_{num}")),
        title: Some(format!("article_{num}_title")),
        author: Some(format!("article_{num}_author")),
        feed_id: FeedID::new(&format!("feed_{num}")),
        url: None,
        date: Utc::now(),
        synced: Utc::now(),
        updated: None,
        html: None,
        summary: None,
        direction: Some(Direction::LeftToRight),
        unread: Read::Unread,
        marked: Marked::Unmarked,
        scraped_content: None,
        plain_text: None,
        thumbnail_url: None,
    };
    let mut first = template(1);
    first.html = Some("test html".to_owned());
    let mut second = template(2);
    second.date = Utc::now() + Duration::try_hours(2).unwrap();
    vec![first, second]
}
fn get_favicons() -> Vec<FatFavIcon> {
    // One PNG favicon per fixture feed, expiring ten days from now.
    ["feed_1", "feed_2"]
        .into_iter()
        .map(|feed| FatFavIcon {
            feed_id: FeedID::new(feed),
            expires: Utc::now() + Duration::try_days(10).unwrap(),
            format: Some(String::from("image/png")),
            etag: None,
            lowres_source_url: None,
            lowres: None,
            highres: None,
            highres_source_url: None,
        })
        .collect()
}
fn setup_full_db(db: &Database, connection: &mut SqliteConnection) {
    // Populate the database with the full fixture set and sanity-check
    // that inserting the articles changed the per-feed counts as expected.
    let feeds = get_feeds();
    db.write_feeds(&feeds, connection).unwrap();
    let before = db.article_count_feed_map(connection).unwrap();
    db.write_fat_articles(&get_articles(), connection).unwrap();
    let after = db.article_count_feed_map(connection).unwrap();
    let first_feed = &feeds[0].feed_id;
    assert!(!before.contains_key(first_feed));
    assert_eq!(after[first_feed], 1);
    db.write_tags(&get_tags(), connection).unwrap();
    db.insert_taggings_impl(&get_taggings(), connection).unwrap();
    db.insert_favicons(&get_favicons()).unwrap();
}
#[test]
#[serial]
fn size() {
    // The size query must succeed even on a freshly created database.
    let db = setup_db("size");
    let _size = db.size().unwrap();
}
#[test]
#[serial]
fn write_read_tags() {
    // Tags round-trip unchanged through write_tags / read_tags.
    let db = setup_db("write_read_tags");
    let mut connection = db.connection_pool.get().unwrap();
    let tags = get_tags();
    db.write_tags(&tags, &mut connection).unwrap();
    assert_eq!(tags, db.read_tags().unwrap());
}
#[test]
#[serial]
fn drop_tag() {
    // Dropping the first fixture tag leaves only the remaining ones.
    let db = setup_db("drop_tag");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let tags = get_tags();
    db.drop_tag(&tags[0].tag_id).unwrap();
    assert_eq!(&tags[1..], db.read_tags().unwrap().as_slice());
}
#[test]
#[serial]
fn update_tags() {
    // Writing a tag set containing one new and one already-known tag
    // replaces the stored set entirely.
    let db = setup_db("update_tags");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let tags: Vec<Tag> = ["tag_3", "tag_2"]
        .into_iter()
        .map(|id| Tag {
            tag_id: TagID::new(id),
            label: format!("{id}_label"),
            color: Some("#FF00FF".to_string()),
            sort_index: None,
        })
        .collect();
    db.write_tags(&tags, &mut connection).unwrap();
    assert_eq!(tags, db.read_tags().unwrap());
}
#[test]
#[serial]
fn write_read_taggings() {
    // Taggings round-trip once their feeds, articles and tags are present.
    let db = setup_db("write_read_taggings");
    let mut connection = db.connection_pool.get().unwrap();
    db.write_feeds(&get_feeds(), &mut connection).unwrap();
    db.write_fat_articles(&get_articles(), &mut connection).unwrap();
    db.write_tags(&get_tags(), &mut connection).unwrap();
    let taggings = get_taggings();
    db.insert_taggings_impl(&taggings, &mut connection).unwrap();
    assert_eq!(taggings, db.read_taggings(None, None).unwrap());
}
#[test]
#[serial]
fn write_read_categories() {
    // Categories round-trip unchanged through write / read.
    let db = setup_db("write_read_categories");
    let mut connection = db.connection_pool.get().unwrap();
    let categories = get_categories();
    db.write_categories(&categories, &mut connection).unwrap();
    assert_eq!(categories, db.read_categories().unwrap());
}
#[test]
#[serial]
fn update_categories() {
    // Writing a category set with one new and one known entry replaces
    // the stored set entirely.
    let db = setup_db("update_categories");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let categories: Vec<Category> = ["category_3", "category_2"]
        .into_iter()
        .map(|id| Category {
            category_id: CategoryID::new(id),
            label: format!("{id}_label"),
        })
        .collect();
    db.write_categories(&categories, &mut connection).unwrap();
    assert_eq!(categories, db.read_categories().unwrap());
}
#[test]
#[serial]
fn write_read_feeds() {
    // Feeds round-trip unchanged through write_feeds / read_feeds.
    let db = setup_db("write_read_feeds");
    let mut connection = db.connection_pool.get().unwrap();
    let feeds = get_feeds();
    db.write_feeds(&feeds, &mut connection).unwrap();
    assert_eq!(feeds, db.read_feeds().unwrap());
}
#[test]
#[serial]
fn update_feeds() {
    // Writing a feed set with one new and one known feed replaces the
    // stored set entirely.
    //
    // Fix: the database directory name was "write_read_feeds" (copy-paste
    // from the test above), so this test silently shared its on-disk
    // location with that test; it now uses its own directory.
    let db = setup_db("update_feeds");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let feeds = vec![
        Feed {
            feed_id: FeedID::new("feed_3"),
            label: String::from("feed_3_label"),
            website: Some(Url::parse("http://feed-3.com").unwrap()),
            feed_url: Some(Url::parse("http://feed-3.com/rss").unwrap()),
            icon_url: Some(Url::parse("http://feed-3.com/fav.ico").unwrap()),
            error_count: 0,
            error_message: None,
        },
        Feed {
            feed_id: FeedID::new("feed_2"),
            label: String::from("feed_2_label"),
            website: Some(Url::parse("http://feed-2.com").unwrap()),
            feed_url: Some(Url::parse("http://feed-2.com/rss").unwrap()),
            icon_url: Some(Url::parse("http://feed-2.com/fav.ico").unwrap()),
            error_count: 0,
            error_message: None,
        },
    ];
    db.write_feeds(&feeds, &mut connection).unwrap();
    let read_feeds = db.read_feeds().unwrap();
    assert_eq!(feeds, read_feeds);
}
#[test]
#[serial]
fn write_read_mappings() {
    // Feed mappings round-trip once their categories and feeds exist.
    let db = setup_db("write_read_mappings");
    let mut connection = db.connection_pool.get().unwrap();
    db.write_categories(&get_categories(), &mut connection).unwrap();
    db.write_category_mappings(get_category_mappings(), &mut connection).unwrap();
    db.write_feeds(&get_feeds(), &mut connection).unwrap();
    let feed_mappings = get_feed_mappings();
    db.write_feed_mappings(feed_mappings.clone(), &mut connection).unwrap();
    assert_eq!(feed_mappings, db.read_feed_mappings(None, None).unwrap());
}
#[test]
#[serial]
fn write_read_articles() {
    // Fat articles round-trip through write + an unfiltered read.
    let db = setup_db("write_read_articles");
    let mut connection = db.connection_pool.get().unwrap();
    db.write_feeds(&get_feeds(), &mut connection).unwrap();
    let articles = get_articles();
    db.write_fat_articles(&articles, &mut connection).unwrap();
    assert_eq!(articles, db.read_fat_articles(ArticleFilter::default()).unwrap());
}
#[test]
#[serial]
fn delete_triggers() {
    // Dropping a tag must cascade to its taggings: of the three fixture
    // taggings, exactly one references tag_1 and should vanish with it.
    let db = setup_db("delete_triggers");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let doomed = get_tags().remove(0).tag_id;
    db.drop_tag(&doomed).unwrap();
    assert_eq!(2, db.read_taggings(None, None).unwrap().len());
}
#[test]
#[serial]
fn unread_count_map() {
    // Each fixture feed holds exactly one unread article.
    let db = setup_db("unread_count_map");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let count_map = db.unread_count_feed_map(false).unwrap();
    for feed in ["feed_1", "feed_2"] {
        assert_eq!(count_map[&FeedID::new(feed)], 1);
    }
}
#[test]
#[serial]
fn fat_articles() {
    // Writing a slim (headline-only) article back must not clobber the
    // stored HTML body of the fat article.
    let db = setup_db("fat_articles");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let fat_articles = get_articles();
    let fat_article = fat_articles.first().unwrap();
    let id = fat_article.article_id.clone();
    let slim_articles = db.read_articles(ArticleFilter::ids(vec![id.clone()])).unwrap();
    db.write_articles(&slim_articles).unwrap();
    let filter = ArticleFilter {
        ids: Some(vec![id]),
        ..ArticleFilter::default()
    };
    let read_back = db.read_fat_articles(filter).unwrap();
    assert_eq!(fat_article.html, read_back.first().unwrap().html);
}
#[test]
#[serial]
fn large_sync_result() {
    // Syncing 10k articles exercises the chunked insert path of
    // write_sync_result end to end.
    let db = setup_db("large_sync_result");
    let mut connection = db.connection_pool.get().unwrap();
    setup_full_db(&db, &mut connection);
    let articles: Vec<FatArticle> = (0..10000_i32)
        .map(|i| FatArticle {
            article_id: ArticleID::new(&format!("article_{i}")),
            title: Some(format!("article_{i}_title")),
            author: Some(format!("article_{i}_author")),
            feed_id: FeedID::new("feed_1"),
            url: None,
            date: Utc::now(),
            synced: Utc::now(),
            updated: None,
            html: Some("test html".to_owned()),
            summary: None,
            direction: Some(Direction::LeftToRight),
            unread: Read::Unread,
            marked: Marked::Unmarked,
            scraped_content: None,
            plain_text: None,
            thumbnail_url: None,
        })
        .collect();
    let sync_result = SyncResult {
        feeds: Some(get_feeds()),
        categories: Some(get_categories()),
        feed_mappings: Some(get_feed_mappings()),
        category_mappings: Some(get_category_mappings()),
        tags: Some(get_tags()),
        headlines: None,
        articles: Some(articles),
        enclosures: None,
        taggings: Some(get_taggings()),
    };
    let _diff_map = db.write_sync_result(sync_result, None).unwrap();
}
}