pub mod config;
pub mod metadata;
use self::config::AccountConfig;
use self::metadata::FeedbinMetadata;
use crate::feed_api::{FeedApi, FeedApiError, FeedApiResult, FeedHeaderMap, Portal};
use crate::models::{self, CategoryMapping, DirectLogin, FeedUpdateResult, StreamConversionResult};
use crate::models::{
ArticleID, Category, CategoryID, Enclosure, FatArticle, FavIcon, Feed, FeedID, FeedMapping, Headline, LoginData, Marked, NEWSFLASH_TOPLEVEL,
PasswordLogin, PluginCapabilities, Read, SyncResult, TagID, Url,
};
use crate::util;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use feedbin_api::ApiError as FeedbinError;
use feedbin_api::models::{
CacheRequestResponse, CacheResult, CreateSubscriptionResult, Entry, Icon as FeedbinIcon, Subscription, Tagging as FeedbinTagging,
};
use feedbin_api::{EntryID, FeedbinApi};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use tokio::sync::RwLock;
use url::Host;
impl From<FeedbinError> for FeedApiError {
    /// Translate a feedbin-api error into the generic feed API error type.
    fn from(error: FeedbinError) -> FeedApiError {
        match error {
            FeedbinError::Url(e) => FeedApiError::Url(e),
            FeedbinError::Json { source, json } => FeedApiError::Json { source, json },
            FeedbinError::Network(e) => FeedApiError::Network(e),
            // Bad credentials and missing permissions both surface as auth failures.
            FeedbinError::InvalidLogin | FeedbinError::AccessDenied => FeedApiError::Auth,
            // The remaining variants carry no payload: forward their Display text.
            e @ (FeedbinError::ServerIsBroken | FeedbinError::InputSize | FeedbinError::InvalidCaching) => FeedApiError::Api {
                message: e.to_string(),
            },
        }
    }
}
/// Feedbin backend: implements `FeedApi` against the feedbin REST API.
pub struct Feedbin {
/// `None` until `login` succeeded; every API call checks this first.
api: Option<FeedbinApi>,
/// Access to the host application's locally stored data.
portal: Box<dyn Portal>,
/// Persisted account settings (url, credentials, HTTP cache headers).
config: Arc<RwLock<AccountConfig>>,
}
impl Feedbin {
/// Derive an `api.`-prefixed variant of `url` (e.g. `https://example.com`
/// -> `https://api.example.com`).
///
/// Returns `None` when the host is not a domain or already carries the
/// `api.` prefix.
fn api_subdomain_url(url: &Url) -> Option<Url> {
    match url.host().to_owned() {
        Some(Host::Domain(host_string)) if !host_string.starts_with("api.") => {
            let mut api_url = url.clone();
            // `set_host` only fails for cannot-be-a-base URLs; ignore that case.
            api_url.set_host(Some(&format!("api.{host_string}"))).ok();
            Some(api_url)
        }
        _ => None,
    }
}
/// Parse an itunes duration of the exact form `HH:MM:SS` into total seconds.
///
/// Returns `None` when the input is absent, has more or fewer than three
/// `:`-separated components, or any component is not a valid integer.
fn parse_itunes_duration(itunes_duration: Option<String>) -> Option<i32> {
    let duration_str = itunes_duration?;
    let mut components = duration_str.split(':');
    let hours = components.next()?.parse::<i32>().ok()?;
    let minutes = components.next()?.parse::<i32>().ok()?;
    let seconds = components.next()?.parse::<i32>().ok()?;
    // Reject inputs with more than three components (e.g. "1:2:3:4").
    if components.next().is_some() {
        return None;
    }
    // FIX: an hour is 3600 seconds — the previous factor of 360 undercounted
    // every duration with a non-zero hour component.
    Some(hours * 3600 + minutes * 60 + seconds)
}
/// Convert a feedbin taggings response into NewsFlash categories plus the
/// mappings that place each category directly below the top level.
///
/// On a `NotModified` cache response the categories/mappings already known
/// to the portal are returned unchanged.
fn taggings_to_categories(&self, taggings: &CacheRequestResponse<Vec<FeedbinTagging>>) -> FeedApiResult<(Vec<Category>, Vec<CategoryMapping>)> {
let taggings = match taggings {
CacheRequestResponse::NotModified => {
// Nothing changed server-side: reuse the locally stored data.
let categories = self.portal.get_categories()?;
let category_mappings = self.portal.get_category_mappings()?;
return Ok((categories, category_mappings));
}
CacheRequestResponse::Modified(CacheResult {
value: taggings,
cache: _cache,
}) => taggings,
};
// Taggings are (feed, tag-name) pairs; dedup the names to get the categories.
let category_names: HashSet<String> = taggings.iter().map(|t| t.name.clone()).collect();
Ok(category_names
.into_iter()
.enumerate()
.map(|(i, n)| {
// The category id is derived from the tag name itself.
let category_id = CategoryID::new(&n);
let category = Category {
category_id: category_id.clone(),
label: n,
};
// Feedbin tags are flat, so every category sits below the top level.
let category_mapping = CategoryMapping {
parent_id: NEWSFLASH_TOPLEVEL.clone(),
category_id,
// NOTE(review): HashSet iteration order is unspecified, so this
// sort_index is not stable across syncs — confirm that is intended.
sort_index: Some(i as i32),
};
(category, category_mapping)
})
.unzip())
}
/// Convert feedbin subscriptions into NewsFlash feeds plus the mappings
/// that sort each feed into its category (or the top level when untagged).
///
/// `icons` are matched against the feed's website host to pick an icon url.
/// On a `NotModified` taggings response the portal's stored feeds and
/// mappings are returned unchanged.
fn subscriptions_to_feeds(
&self,
subscriptions: Vec<Subscription>,
icons: Vec<FeedbinIcon>,
taggings: &CacheRequestResponse<Vec<FeedbinTagging>>,
) -> FeedApiResult<(Vec<Feed>, Vec<FeedMapping>)> {
// host -> icon-url lookup table.
let icon_map: HashMap<String, String> = icons.into_iter().map(|i| (i.host, i.url)).collect();
let taggings = match taggings {
CacheRequestResponse::NotModified => {
// Nothing changed server-side: reuse the locally stored data.
let feeds = self.portal.get_feeds()?;
let feed_mappings = self.portal.get_feed_mappings()?;
return Ok((feeds, feed_mappings));
}
CacheRequestResponse::Modified(CacheResult {
value: taggings,
cache: _cache,
}) => taggings,
};
// feed-id -> tag-name; if a feed carries several tags, the last one in the
// response wins (HashMap collect overwrites duplicates).
let taggings: HashMap<u64, String> = taggings.iter().map(|t| (t.feed_id, t.name.clone())).collect();
Ok(subscriptions
.into_iter()
.enumerate()
.filter_map(move |(i, s)| {
// Subscriptions without a title are skipped entirely.
let title = s.title?;
let feed_id_u64 = s.feed_id;
let feed_id = FeedID::new(&feed_id_u64.to_string());
let website = Url::parse(&s.site_url).ok();
let feed_url = Url::parse(&s.feed_url).ok();
// Icon lookup goes via the website host, not the feed url.
let icon_url = website
.clone()
.and_then(|url| url.host_str().map(|s| s.to_owned()))
.and_then(|host| icon_map.get(&host))
.and_then(|icon_url| Url::parse(icon_url).ok());
let feed = Feed {
feed_id: feed_id.clone(),
label: title,
website,
feed_url,
icon_url,
error_count: 0,
error_message: None,
};
// Untagged feeds land directly below the top level.
let feed_mapping = FeedMapping {
feed_id,
category_id: taggings
.get(&feed_id_u64)
.map(|name| CategoryID::new(name))
.unwrap_or_else(|| NEWSFLASH_TOPLEVEL.clone()),
sort_index: Some(i as i32),
};
Some((feed, feed_mapping))
})
.unzip())
}
/// Build a NewsFlash `Feed` from a single feedbin subscription.
///
/// Returns `None` when the subscription carries no title. The icon is
/// looked up by the website host.
fn subscription_to_feed(&self, subscription: Subscription, icons: Vec<FeedbinIcon>) -> Option<Feed> {
    let title = subscription.title?;
    // host -> icon-url lookup table.
    let icon_map: HashMap<String, String> = icons.into_iter().map(|i| (i.host, i.url)).collect();
    let website = Url::parse(&subscription.site_url).ok();
    let feed_url = Url::parse(&subscription.feed_url).ok();
    let icon_url = match website.as_ref().and_then(|url| url.host_str()) {
        Some(host) => icon_map.get(host).and_then(|icon_url| Url::parse(icon_url).ok()),
        None => None,
    };
    Some(Feed {
        feed_id: FeedID::new(&subscription.feed_id.to_string()),
        label: title,
        website,
        feed_url,
        icon_url,
        error_count: 0,
        error_message: None,
    })
}
/// Convert raw feedbin entries into `FatArticle`s and collect their
/// enclosures, skipping entries that belong to none of `feed_ids` unless
/// they are starred.
///
/// `unread_entry_ids` / `starred_entry_ids` determine the read and marked
/// state of each converted article.
fn entries_to_articles(
entries: Vec<Entry>,
unread_entry_ids: &HashSet<EntryID>,
starred_entry_ids: &HashSet<EntryID>,
feed_ids: &HashSet<FeedID>,
portal: &dyn Portal,
) -> StreamConversionResult {
let mut enclosures: Vec<Enclosure> = Vec::new();
let articles = entries
.into_iter()
.filter_map(|e| {
// Destructure once so unused fields are explicitly discarded.
let Entry {
id,
feed_id,
title,
url,
extracted_content_url: _,
author,
content,
summary,
published,
created_at: _,
original,
images,
enclosure,
extracted_articles: _,
} = e;
let feed_id = FeedID::new(&feed_id.to_string());
// Keep starred entries even when their feed is not (or no longer) subscribed.
if !feed_ids.contains(&feed_id) && !starred_entry_ids.contains(&id) {
return None;
}
// Side effect: collect podcast/media enclosures with a parsable url.
if let Some(enclosure) = enclosure
&& let Ok(url) = Url::parse(&enclosure.enclosure_url)
{
enclosures.push(Enclosure {
article_id: ArticleID::new(&id.to_string()),
url,
mime_type: Some(enclosure.enclosure_type),
title: None,
position: None,
summary: None,
thumbnail_url: enclosure.itunes_image,
filesize: enclosure.enclosure_length.and_then(|length| length.parse::<i32>().ok()),
width: None,
height: None,
duration: Self::parse_itunes_duration(enclosure.itunes_duration),
framerate: None,
alternative: None,
is_default: false,
});
}
let article_id = ArticleID::new(&id.to_string());
let article_exists_locally = portal.get_article_exists(&article_id).unwrap_or(false);
// Plain text is derived from the html content, falling back to the summary.
let plain_text = match &content {
Some(content) => Some(util::html2text::html2text(content)),
None => summary.as_ref().cloned(),
};
// Shadowed: drop the summary for known articles, presumably to avoid
// overwriting locally stored data — TODO confirm.
let summary = if article_exists_locally { None } else { summary.as_ref().cloned() };
let thumbnail_url = images.map(|img| img.original_url);
Some(FatArticle {
article_id,
// Decode html entities in the title; keep the raw title on failure.
title: title.map(|t| match escaper::decode_html(&t) {
Ok(title) => title,
Err(_error) => {
t
}
}),
author,
feed_id,
url: url.and_then(|url| Url::parse(&url).ok()),
// "%+" parses an ISO 8601 timestamp; fall back to "now" on failure.
date: match DateTime::parse_from_str(&published, "%+") {
Ok(date) => date.with_timezone(&Utc),
Err(_) => Utc::now(),
},
synced: Utc::now(),
updated: None,
// Prefer the original content, then the entry content, then the summary.
// NOTE(review): `summary` was set to `None` above for locally known
// articles, so this last fallback is `None` in that case — confirm.
html: match original.and_then(|original| original.content) {
Some(original_content) => Some(original_content),
None => match content {
Some(content) => Some(content),
None => summary.as_ref().cloned(),
},
},
summary: summary.as_deref().map(util::html2text::text2summary),
direction: None,
unread: if unread_entry_ids.contains(&id) { Read::Unread } else { Read::Read },
marked: if starred_entry_ids.contains(&id) {
Marked::Marked
} else {
Marked::Unmarked
},
scraped_content: None,
plain_text,
thumbnail_url,
})
})
.collect();
// Headlines and taggings are never produced by this conversion.
StreamConversionResult {
articles,
headlines: Vec::new(),
taggings: Vec::new(),
enclosures,
}
}
/// Convert article ids to feedbin entry ids, silently dropping any id that
/// does not parse as an integer.
fn article_ids_to_entry_ids(article_ids: &[ArticleID]) -> Vec<EntryID> {
    let mut entry_ids = Vec::with_capacity(article_ids.len());
    for id in article_ids {
        if let Ok(entry_id) = Self::article_id_to_entry_id(id) {
            entry_ids.push(entry_id);
        }
    }
    entry_ids
}
/// Parse an `ArticleID` (a stringified u64) back into a feedbin `EntryID`.
///
/// # Errors
/// `FeedApiError::Api` when the id is not a valid unsigned integer.
fn article_id_to_entry_id(id: &ArticleID) -> Result<EntryID, FeedApiError> {
let parsed_id = id.as_str().parse::<u64>().map_err(|_| FeedApiError::Api {
message: format!("Failed to parse id {id}"),
})?;
Ok(parsed_id)
}
/// Parse a `FeedID` (a stringified u64) into the numeric feedbin feed id.
///
/// NOTE(review): the return type reuses the `EntryID` alias even though
/// this is a feed id — consider a dedicated alias for clarity.
///
/// # Errors
/// `FeedApiError::Api` when the id is not a valid unsigned integer.
fn feed_id_to_u64(id: &FeedID) -> Result<EntryID, FeedApiError> {
let parsed_id = id.as_str().parse::<u64>().map_err(|_| FeedApiError::Api {
message: format!("Failed to parse id {id}"),
})?;
Ok(parsed_id)
}
/// First full synchronization: fetch subscriptions, taggings, icons and
/// every entry the server reports as unread or starred.
///
/// # Errors
/// `FeedApiError::Login` when no account is configured; network and API
/// errors are forwarded.
async fn initial_sync_impl(&self, client: &Client) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
// HTTP cache state (ETag/Last-Modified) from the previous sync.
let subscription_cache = self.config.read().await.get_subscription_cache();
let taggings_cache = self.config.read().await.get_taggins_cache();
let subscriptions = api.get_subscriptions(client, None, None, subscription_cache).await?;
let taggings = api.get_taggings(client, taggings_cache).await?;
// Persist the fresh cache headers before converting the payloads.
self.config.write().await.set_subscription_cache(&subscriptions);
self.config.write().await.set_taggins_cache(&taggings);
self.config.read().await.save()?;
let (feeds, feed_mappings) = match subscriptions {
CacheRequestResponse::NotModified => (self.portal.get_feeds()?, self.portal.get_feed_mappings()?),
CacheRequestResponse::Modified(CacheResult {
value: subscriptions,
cache: _cache,
}) => self.subscriptions_to_feeds(subscriptions, api.get_icons(client).await?, &taggings)?,
};
let mut articles: Vec<FatArticle> = Vec::new();
let mut enclosures: Vec<Enclosure> = Vec::new();
let unread_entry_ids = api.get_unread_entry_ids(client).await?;
let starred_entry_ids = api.get_starred_entry_ids(client).await?;
let unread_entry_id_set: HashSet<EntryID> = unread_entry_ids.iter().copied().collect();
let starred_entry_id_set: HashSet<EntryID> = starred_entry_ids.iter().copied().collect();
let feed_id_set: HashSet<FeedID> = feeds.iter().map(|f| f.feed_id.clone()).collect();
// Only unread and starred entries are downloaded on the initial sync.
let entry_ids_total: Vec<EntryID> = unread_entry_id_set.union(&starred_entry_id_set).copied().collect();
// The entries endpoint is queried in chunks of 100 ids per request.
for entry_ids_total_chunk in entry_ids_total.chunks(100) {
let entries_total_chunk = api
.get_entries(client, None, None, Some(entry_ids_total_chunk), None, Some(true), true)
.await?;
let mut total = Self::entries_to_articles(
entries_total_chunk,
&unread_entry_id_set,
&starred_entry_id_set,
&feed_id_set,
self.portal.as_ref(),
);
articles.append(&mut total.articles);
enclosures.append(&mut total.enclosures);
}
let (categories, category_mappings) = self.taggings_to_categories(&taggings)?;
return Ok(SyncResult {
feeds: util::vec_to_option(feeds),
categories: util::vec_to_option(categories),
feed_mappings: util::vec_to_option(feed_mappings),
category_mappings: util::vec_to_option(category_mappings),
tags: None,
taggings: None,
headlines: None,
articles: util::vec_to_option(articles),
enclosures: util::vec_to_option(enclosures),
});
}
Err(FeedApiError::Login)
}
/// Incremental synchronization since `last_sync`.
///
/// Fetches the server's unread/starred id lists plus all entries published
/// after `last_sync`, downloads entries missing locally, and emits
/// headlines to reconcile local read/marked state with the server.
async fn sync_impl(&self, last_sync: DateTime<Utc>, client: &Client) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
let subscription_cache = self.config.read().await.get_subscription_cache();
let taggings_cache = self.config.read().await.get_taggins_cache();
// Fire the four independent requests concurrently.
let subscriptions = api.get_subscriptions(client, None, None, subscription_cache);
let taggings = api.get_taggings(client, taggings_cache);
let unread_entry_ids = api.get_unread_entry_ids(client);
let starred_entry_ids = api.get_starred_entry_ids(client);
let (subscriptions, taggings, unread_entry_ids, starred_entry_ids) =
futures::try_join!(subscriptions, taggings, unread_entry_ids, starred_entry_ids)?;
// Persist the fresh HTTP cache headers.
self.config.write().await.set_subscription_cache(&subscriptions);
self.config.write().await.set_taggins_cache(&taggings);
self.config.read().await.save()?;
let (feeds, feed_mappings) = match subscriptions {
CacheRequestResponse::NotModified => (self.portal.get_feeds()?, self.portal.get_feed_mappings()?),
CacheRequestResponse::Modified(CacheResult {
value: subscriptions,
cache: _cache,
}) => self.subscriptions_to_feeds(subscriptions, api.get_icons(client).await?, &taggings)?,
};
let unread_entry_id_set: HashSet<EntryID> = unread_entry_ids.iter().copied().collect();
let starred_entry_id_set: HashSet<EntryID> = starred_entry_ids.iter().copied().collect();
// Ids the server reports as unread/starred that are missing locally.
let local_unread_ids = self.portal.get_article_ids_unread_all()?;
let local_unread_ids = Self::article_ids_to_entry_ids(&local_unread_ids);
let local_unread_ids = local_unread_ids.into_iter().collect();
let local_marked_ids = self.portal.get_article_ids_marked_all()?;
let local_marked_ids = Self::article_ids_to_entry_ids(&local_marked_ids);
let local_marked_ids = local_marked_ids.into_iter().collect();
let missing_unread_ids: HashSet<EntryID> = unread_entry_id_set.difference(&local_unread_ids).cloned().collect();
let missing_marked_ids: HashSet<EntryID> = starred_entry_id_set.difference(&local_marked_ids).cloned().collect();
let feed_id_set: HashSet<FeedID> = feeds.iter().map(|f| f.feed_id.clone()).collect();
let missing_ids: Vec<EntryID> = missing_marked_ids.union(&missing_unread_ids).copied().collect();
let mut result = StreamConversionResult::new();
// Download missing entries in chunks of 100 ids plus everything newer
// than `last_sync`; all requests run concurrently.
let mut futures = Vec::new();
for missing_ids_chunk in missing_ids.chunks(100) {
futures.push(api.get_entries(client, None, None, Some(missing_ids_chunk), None, Some(true), true));
}
futures.push(api.get_entries(client, None, Some(last_sync), None, None, Some(true), true));
let futures_results = futures::future::try_join_all(futures).await?;
for missing_entry_chunk in futures_results {
let converted_missing_chunk = Self::entries_to_articles(
missing_entry_chunk,
&unread_entry_id_set,
&starred_entry_id_set,
&feed_id_set,
self.portal.as_ref(),
);
result.add(converted_missing_chunk);
}
// Read on the server but still unread locally -> emit "read" headlines.
let mut should_mark_read_headlines = local_unread_ids
.difference(&unread_entry_id_set)
.copied()
.map(|id| Headline {
article_id: ArticleID::new(&id.to_string()),
unread: Read::Read,
marked: if starred_entry_id_set.contains(&id) {
Marked::Marked
} else {
Marked::Unmarked
},
})
.collect();
result.headlines.append(&mut should_mark_read_headlines);
// Unstarred on the server but still marked locally -> emit "unmarked" headlines.
let mut missing_unmarked_headlines = local_marked_ids
.difference(&starred_entry_id_set)
.copied()
.map(|id| Headline {
article_id: ArticleID::new(&id.to_string()),
marked: Marked::Unmarked,
unread: if unread_entry_id_set.contains(&id) { Read::Unread } else { Read::Read },
})
.collect();
result.headlines.append(&mut missing_unmarked_headlines);
let (categories, category_mappings) = self.taggings_to_categories(&taggings)?;
Ok(SyncResult {
feeds: util::vec_to_option(feeds),
categories: util::vec_to_option(categories),
feed_mappings: util::vec_to_option(feed_mappings),
category_mappings: util::vec_to_option(category_mappings),
tags: None,
taggings: None,
headlines: util::vec_to_option(result.headlines),
articles: util::vec_to_option(result.articles),
enclosures: util::vec_to_option(result.enclosures),
})
} else {
Err(FeedApiError::Login)
}
}
}
#[async_trait]
impl FeedApi for Feedbin {
/// Capabilities of the feedbin backend: feeds can be added/removed and
/// categories are supported and mutable. Tags are not supported.
fn features(&self) -> FeedApiResult<PluginCapabilities> {
Ok(PluginCapabilities::ADD_REMOVE_FEEDS | PluginCapabilities::SUPPORT_CATEGORIES | PluginCapabilities::MODIFY_CATEGORIES)
}
/// An account counts as configured once `login` succeeded and `self.api`
/// was populated.
fn has_user_configured(&self) -> FeedApiResult<bool> {
Ok(self.api.is_some())
}
/// Probe the configured account url with a HEAD request and report whether
/// it answered with a success status.
///
/// # Errors
/// `FeedApiError::Login` when no url is configured yet.
async fn is_reachable(&self, client: &Client) -> FeedApiResult<bool> {
    let Some(url) = self.config.read().await.get_url() else {
        return Err(FeedApiError::Login);
    };
    let response = client.head(&url).send().await?;
    Ok(response.status().is_success())
}
/// Ask the server whether the stored credentials are still accepted.
/// Reports `Ok(false)` (not an error) when no account is configured yet.
async fn is_logged_in(&self, client: &Client) -> FeedApiResult<bool> {
    if let Some(api) = &self.api {
        Ok(api.is_authenticated(client).await?)
    } else {
        Ok(false)
    }
}
/// The user name stored in the account config, if any.
async fn user_name(&self) -> Option<String> {
self.config.read().await.get_user_name()
}
/// Assemble the stored credentials into `LoginData`, or `None` when the
/// account is not fully configured (missing user name or password).
async fn get_login_data(&self) -> Option<LoginData> {
    if !self.has_user_configured().unwrap_or(false) {
        return None;
    }
    let config = self.config.read().await;
    let user = config.get_user_name()?;
    let password = config.get_password()?;
    Some(LoginData::Direct(DirectLogin::Password(PasswordLogin {
        id: FeedbinMetadata::get_id(),
        url: config.get_url(),
        user,
        password,
        basic_auth: None,
    })))
}
/// Authenticate with username/password against the url from `data`.
///
/// When authentication against the given url fails, a second attempt is
/// made against the same host with an `api.` prefix. On success url, user
/// and password are persisted and `self.api` is initialized.
///
/// # Errors
/// `Auth` when both attempts fail, `Login` for any other unusable input.
async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
// Drop any previous session before attempting a new one.
self.api = None;
if let LoginData::Direct(DirectLogin::Password(data)) = data
&& let Some(mut url_string) = data.url.clone()
{
let url = Url::parse(&url_string)?;
let mut api = FeedbinApi::new(&url, data.user.clone(), data.password.clone());
let mut auth_req = api.is_authenticated(client).await;
if auth_req.is_err() {
// Retry against "api.<host>" before giving up.
if let Some(api_url) = Self::api_subdomain_url(&url) {
tracing::info!(%api_url, "Trying to authenticate with base url");
api = FeedbinApi::new(&api_url, data.user.clone(), data.password.clone());
auth_req = api.is_authenticated(client).await;
if auth_req.is_err() {
return Err(FeedApiError::Auth);
} else {
// Remember the working subdomain url for future sessions.
url_string = api_url.to_string();
}
} else {
return Err(FeedApiError::Auth);
}
}
if let Ok(true) = auth_req {
let mut config_guard = self.config.write().await;
config_guard.set_url(&url_string);
config_guard.set_password(&data.password);
config_guard.set_user_name(&data.user);
config_guard.save()?;
self.api = Some(api);
return Ok(());
}
}
// Wrong LoginData variant, missing url, or server answered Ok(false).
Err(FeedApiError::Login)
}
/// Delete the persisted account configuration.
///
/// NOTE(review): `self.api` is left populated, so the session stays usable
/// until the plugin is dropped — confirm whether it should be cleared here.
async fn logout(&mut self, _client: &Client) -> FeedApiResult<()> {
self.config.read().await.delete()?;
Ok(())
}
/// Run the first full synchronization. On failure the stored HTTP caches
/// are reset so the next attempt fetches everything fresh.
async fn initial_sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
    match self.initial_sync_impl(client).await {
        Ok(sync_result) => Ok(sync_result),
        Err(error) => {
            let mut config = self.config.write().await;
            config.reset_subscription_cache();
            config.reset_taggings_cache();
            Err(error)
        }
    }
}
/// Run an incremental sync starting at the portal's last-sync timestamp.
/// On failure the stored HTTP caches are reset so the next attempt fetches
/// everything fresh.
async fn sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
    let last_sync = self.portal.get_config().read().await.get_last_sync();
    match self.sync_impl(last_sync, client).await {
        Ok(sync_result) => Ok(sync_result),
        Err(error) => {
            let mut config = self.config.write().await;
            config.reset_subscription_cache();
            config.reset_taggings_cache();
            Err(error)
        }
    }
}
/// Fetch the current state of a single feed plus its latest entries.
///
/// # Errors
/// `Login` without an account, `Unknown` when the feed id is not among the
/// account's subscriptions.
async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FeedUpdateResult> {
if let Some(api) = &self.api {
let subscription_id = Self::feed_id_to_u64(feed_id)?;
let subscription_cache = self.config.read().await.get_subscription_cache();
let subscriptions = api.get_subscriptions(client, None, None, subscription_cache).await?;
// NOTE(review): on a `NotModified` cache hit this resolves to `None` and
// errors out even though the feed may well exist — confirm intended.
let subscription = match subscriptions {
CacheRequestResponse::NotModified => None,
CacheRequestResponse::Modified(result) => result.value.into_iter().find(|s| s.feed_id == subscription_id),
}
.ok_or(FeedApiError::Unknown)?;
let icons = api.get_icons(client).await?;
let feed = self.subscription_to_feed(subscription, icons);
let unread_entry_ids = api.get_unread_entry_ids(client).await?;
let starred_entry_ids = api.get_starred_entry_ids(client).await?;
let unread_entry_id_set: HashSet<EntryID> = unread_entry_ids.iter().copied().collect();
let starred_entry_id_set: HashSet<EntryID> = starred_entry_ids.iter().copied().collect();
let entries = api.get_entries_for_feed(client, subscription_id, None).await?;
// No cache was passed in; treat `NotModified` as "no entries".
let entries = match entries {
CacheRequestResponse::Modified(result) => result.value,
CacheRequestResponse::NotModified => Vec::new(),
};
// Restrict the conversion to exactly this feed.
let mut feed_id_set = HashSet::new();
feed_id_set.insert(feed_id.clone());
let result = Self::entries_to_articles(entries, &unread_entry_id_set, &starred_entry_id_set, &feed_id_set, self.portal.as_ref());
Ok(FeedUpdateResult {
feed,
taggings: None,
articles: util::vec_to_option(result.articles),
enclosures: util::vec_to_option(result.enclosures),
})
} else {
Err(FeedApiError::Login)
}
}
/// Mark the given articles as read or unread on the server.
async fn set_article_read(&self, articles: &[ArticleID], read: models::Read, client: &Client) -> FeedApiResult<()> {
    let Some(api) = &self.api else {
        return Err(FeedApiError::Login);
    };
    let entry_ids = Self::article_ids_to_entry_ids(articles);
    match read {
        Read::Read => api.set_entries_read(client, &entry_ids).await?,
        Read::Unread => api.set_entries_unread(client, &entry_ids).await?,
    }
    Ok(())
}
/// Star or unstar the given articles on the server.
async fn set_article_marked(&self, articles: &[ArticleID], marked: models::Marked, client: &Client) -> FeedApiResult<()> {
    let Some(api) = &self.api else {
        return Err(FeedApiError::Login);
    };
    let entry_ids = Self::article_ids_to_entry_ids(articles);
    match marked {
        Marked::Marked => api.set_entries_starred(client, &entry_ids).await?,
        Marked::Unmarked => api.set_entries_unstarred(client, &entry_ids).await?,
    }
    Ok(())
}
/// Marking a feed read is resolved client-side: the caller passes the
/// affected article ids, which are marked read individually.
async fn set_feed_read(&self, _feeds: &[FeedID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
/// Same delegation as `set_feed_read`: the caller resolves categories to
/// article ids.
async fn set_category_read(&self, _categories: &[CategoryID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
/// Feedbin has no article tags, so this cannot be supported.
async fn set_tag_read(&self, _tags: &[TagID], _articles: &[ArticleID], _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
/// "Mark all read" also works on the caller-provided article id list.
async fn set_all_read(&self, articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
/// Subscribe to the feed at `url`, optionally renaming it and placing it
/// in a category.
///
/// # Errors
/// `Unsupported` when the url yields no feed or multiple candidates,
/// `Api` when the feed is already subscribed or lacks a title,
/// `Login` without an account.
async fn add_feed(
&self,
url: &Url,
title: Option<String>,
category: Option<CategoryID>,
client: &Client,
) -> FeedApiResult<(Feed, Option<Category>)> {
if let Some(api) = &self.api {
let res = api.create_subscription(client, url.to_string()).await?;
match res {
// Multiple feed candidates would need a UI to choose from.
CreateSubscriptionResult::NotFound | CreateSubscriptionResult::MultipleOptions(_) => return Err(FeedApiError::Unsupported),
CreateSubscriptionResult::Found(url) => {
return Err(FeedApiError::Api {
message: format!("Feed already present: {url}"),
});
}
CreateSubscriptionResult::Created(mut subscription) => {
let icons = api.get_icons(client).await?;
// Apply the custom title server-side if it differs.
if let Some(title) = title
&& subscription.title.as_ref() != Some(&title)
{
api.update_subscription(client, subscription.id, &title).await?;
subscription.title = Some(title);
}
// Tag the feed to place it in the requested category.
let mut res_category: Option<Category> = None;
if let Some(category_id) = category {
let title = category_id.to_string();
api.create_tagging(client, subscription.feed_id, &title).await?;
res_category = Some(Category { category_id, label: title });
}
if let Some(feed) = self.subscription_to_feed(subscription, icons) {
return Ok((feed, res_category));
} else {
let message = "Subscription is missing a title";
tracing::error!(%message);
return Err(FeedApiError::Api {
message: message.to_string(),
});
}
}
}
}
Err(FeedApiError::Login)
}
/// Delete the subscription that belongs to `feed_id`.
///
/// Succeeds silently when the feed is not found in the subscription list.
///
/// # Errors
/// `Login` without an account, `Unknown` when the (uncached) subscription
/// request unexpectedly reports `NotModified`.
async fn remove_feed(&self, feed_id: &FeedID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let feed_id = Self::feed_id_to_u64(feed_id)?;
// Bypass the HTTP cache: we need the current subscription list.
let subscriptions = match api.get_subscriptions(client, None, None, None).await? {
CacheRequestResponse::Modified(CacheResult {
value: subscriptions,
cache: _cache,
}) => subscriptions,
CacheRequestResponse::NotModified => return Err(FeedApiError::Unknown),
};
// The delete endpoint needs the subscription id, not the feed id.
let subscription_id = subscriptions.iter().find(|s| s.feed_id == feed_id).map(|s| s.id);
if let Some(subscription_id) = subscription_id {
api.delete_subscription(client, subscription_id).await?;
}
return Ok(());
}
Err(FeedApiError::Login)
}
/// Move a feed between categories by deleting its old tagging (unless it
/// was at the top level) and creating a new one.
///
/// # Errors
/// `Login` without an account, `Unknown` when the (uncached) taggings
/// request unexpectedly reports `NotModified`.
async fn move_feed(&self, feed_id: &FeedID, from: &CategoryID, to: &CategoryID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let feed_id = Self::feed_id_to_u64(feed_id)?;
// Top-level feeds have no tagging that could be removed.
if from != &*NEWSFLASH_TOPLEVEL {
let taggings = match api.get_taggings(client, None).await? {
CacheRequestResponse::Modified(CacheResult {
value: taggings,
cache: _cache,
}) => taggings,
CacheRequestResponse::NotModified => return Err(FeedApiError::Unknown),
};
// Category ids are tag names, so match on name + feed.
let tagging_id = taggings.iter().find(|t| t.name == from.as_str() && t.feed_id == feed_id).map(|t| t.id);
if let Some(tagging_id) = tagging_id {
api.delete_tagging(client, tagging_id).await?;
}
}
api.create_tagging(client, feed_id, to.as_str()).await?;
return Ok(());
}
Err(FeedApiError::Login)
}
/// Rename a feed (subscription) on the server; the feed id is unchanged.
///
/// # Errors
/// `Login` without an account, `Unknown` when the subscription list cannot
/// be fetched or the feed is not part of it.
async fn rename_feed(&self, feed_id: &FeedID, new_title: &str, client: &Client) -> FeedApiResult<FeedID> {
    if let Some(api) = &self.api {
        // Bypass the HTTP cache: we need the current subscription list.
        let subscriptions = match api.get_subscriptions(client, None, None, None).await? {
            CacheRequestResponse::Modified(CacheResult {
                value: subscriptions,
                cache: _cache,
            }) => subscriptions,
            CacheRequestResponse::NotModified => return Err(FeedApiError::Unknown),
        };
        // FIX: don't panic (`expect`) when the feed is unknown — return an
        // error like the sibling methods do.
        let subscription_id = subscriptions
            .iter()
            .find(|s| s.feed_id.to_string() == feed_id.as_str())
            .map(|s| s.id)
            .ok_or(FeedApiError::Unknown)?;
        api.update_subscription(client, subscription_id, new_title).await?;
        return Ok(feed_id.clone());
    }
    Err(FeedApiError::Login)
}
/// Feedbin offers no endpoint to change a subscription's feed url.
async fn edit_feed_url(&self, _feed_id: &FeedID, _new_url: &str, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
/// Categories are just tag names: no server call is needed here — the tag
/// materializes once a feed is tagged with it (see `move_feed`/`add_feed`).
/// Nesting (`_parent`) is not supported by feedbin's flat tags.
async fn add_category<'a>(&self, title: &str, _parent: Option<&'a CategoryID>, _client: &Client) -> FeedApiResult<CategoryID> {
Ok(CategoryID::new(title))
}
/// Delete a tag server-side; with `remove_children` also unsubscribe every
/// feed the portal maps into this category.
async fn remove_category(&self, id: &CategoryID, remove_children: bool, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
api.delete_tag(client, id.as_str()).await?;
if remove_children {
// Child feeds are resolved from the locally stored mappings.
let mappings = self.portal.get_feed_mappings()?;
for mapping in mappings {
if &mapping.category_id == id {
self.remove_feed(&mapping.feed_id, client).await?;
}
}
}
return Ok(());
}
Err(FeedApiError::Login)
}
/// Feedbin tags are flat, so categories cannot be nested or moved.
async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
/// Rename a tag on the server. The original category id is returned
/// unchanged, matching the trait's contract as used by the original code.
async fn rename_category(&self, id: &CategoryID, new_title: &str, client: &Client) -> FeedApiResult<CategoryID> {
    match &self.api {
        Some(api) => {
            api.rename_tag(client, id.as_str(), new_title).await?;
            Ok(id.clone())
        }
        None => Err(FeedApiError::Login),
    }
}
/// Upload an OPML document so the server imports its subscriptions.
async fn import_opml(&self, opml: &str, client: &Client) -> FeedApiResult<()> {
    match &self.api {
        Some(api) => {
            api.import_opml(client, opml).await?;
            Ok(())
        }
        None => Err(FeedApiError::Login),
    }
}
/// Feedbin has no free-form article tags: all tag operations are unsupported.
async fn add_tag(&self, _title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn remove_tag(&self, _id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn rename_tag(&self, _id: &TagID, _new_title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn tag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn untag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
/// Favicons come from the icons endpoint during sync (see
/// `subscriptions_to_feeds`); there is no per-feed favicon fetch here.
async fn get_favicon(&self, _feed_id: &FeedID, _client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FavIcon> {
Err(FeedApiError::Unsupported)
}
}