pub mod config;
pub mod feedly_secrets;
pub mod metadata;
use self::config::AccountConfig;
use self::feedly_secrets::FeedlySecrets;
use self::metadata::FeedlyMetadata;
use crate::ParsedUrl;
use crate::feed_api::{FeedApi, FeedApiError, FeedApiResult, FeedHeaderMap, Portal};
use crate::models::{self, CategoryMapping, FeedConversionResult, FeedUpdateResult, StreamConversionResult};
use crate::models::{
ArticleID, Category, CategoryID, Direction, Enclosure, FatArticle, FavIcon, Feed, FeedID, FeedMapping, Headline, LoginData, Marked,
NEWSFLASH_TOPLEVEL, OAuthData, PluginCapabilities, Read, SyncResult, Tag, TagID, Tagging, Url,
};
use crate::util::{self, feed_parser};
use async_trait::async_trait;
use chrono::{Duration, Utc};
use feedly_api::models::{
Category as FeedlyCategory, Collection as FeedlyCollection, Content as FeedlyContent, Entry, Link, Subscription, SubscriptionInput,
Tag as FeedlyTag,
};
use feedly_api::{ApiError as FeedlyApiError, FeedlyApi};
use futures::future;
use regex::Regex;
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use std::collections::HashSet;
use std::sync::Arc;
use tokio::sync::RwLock;
impl From<FeedlyApiError> for FeedApiError {
    /// Map low-level Feedly client errors onto the generic feed-API error type.
    ///
    /// Payload-carrying variants are forwarded; access problems become `Auth`;
    /// every remaining unit variant is surfaced verbatim as an API error message.
    fn from(error: FeedlyApiError) -> FeedApiError {
        match error {
            FeedlyApiError::Url(e) => FeedApiError::Url(e),
            FeedlyApiError::Json { source, json } => FeedApiError::Json { source, json },
            FeedlyApiError::Http(e) => FeedApiError::Network(e),
            FeedlyApiError::Feedly(feedly_error) => FeedApiError::Api {
                message: format!("Feedly Error (code {})\nMessage: {}", feedly_error.error_code, feedly_error.error_message),
            },
            FeedlyApiError::AccessDenied => FeedApiError::Auth,
            FeedlyApiError::Unknown => FeedApiError::Unknown,
            // Bind the matched value instead of re-constructing each unit
            // variant just to call `to_string()` on it (previous code built
            // e.g. `FeedlyApiError::Token` a second time for the message).
            e @ (FeedlyApiError::ManualJson
            | FeedlyApiError::Input
            | FeedlyApiError::Token
            | FeedlyApiError::InternalMutabilty
            | FeedlyApiError::TokenExpired) => FeedApiError::Api { message: e.to_string() },
        }
    }
}
/// Parameters for one paged Feedly stream request (see `Feedly::get_articles`).
pub struct ArticleQuery<'a> {
    /// Stream to fetch: a feed, category or tag id.
    pub stream_id: &'a str,
    /// Page size; backend default when `None`.
    pub count: Option<u32>,
    /// Optional ranking parameter, passed through to the API untouched.
    pub ranked: Option<&'a str>,
    /// When `Some(true)`, only unread entries are requested.
    pub unread_only: Option<bool>,
    /// Only entries newer than this timestamp (passed through verbatim;
    /// callers supply `DateTime::timestamp()` seconds — TODO confirm the API
    /// expects seconds here).
    pub newer_than: Option<u64>,
    /// Ids of the locally subscribed feeds; entries from other feeds are
    /// dropped during conversion unless they are marked.
    pub feed_ids: &'a HashSet<FeedID>,
}
/// NewsFlash feed-API implementation backed by feedly.com.
pub struct Feedly {
    // `None` until `login` succeeded; every API call checks this first.
    api: Option<FeedlyApi>,
    // Host-application callbacks: local article state, config, feed mappings,
    // download semaphore.
    portal: Arc<Box<dyn Portal>>,
    // Cached login state; set/reset by `login`.
    logged_in: bool,
    // Persisted account settings (tokens, expiry, user name).
    config: Arc<RwLock<AccountConfig>>,
}
impl Feedly {
fn convert_tag_vec(mut tags: Vec<FeedlyTag>) -> Vec<Tag> {
tags.drain(..)
.enumerate()
.filter_map(|(i, t)| {
let FeedlyTag { id, label, description: _ } = t;
if id.contains("global") {
return None;
}
Some(Tag {
tag_id: TagID::new(&id),
color: None,
label: match label {
Some(label) => label,
None => {
let mut tag_label = "Unknown".to_string();
if let Some(l) = label {
tag_label = l;
} else if let Ok(regex) = Regex::new(r"user/\S*tag/(.*)")
&& let Some(captures) = regex.captures(&id)
&& let Some(regex_match) = captures.get(1)
{
regex_match.as_str().clone_into(&mut tag_label);
}
tag_label
}
},
sort_index: Some(i as i32),
})
})
.collect()
}
    /// Convert Feedly collections into NewsFlash feeds, categories and the
    /// mappings that tie them together.
    ///
    /// Every collection becomes a `Category` mapped below the toplevel; each
    /// of its subscriptions becomes a `Feed` plus a `FeedMapping` into that
    /// category. Subscriptions without a title are dropped.
    fn convert_collection_vec(collections: Vec<FeedlyCollection>) -> FeedConversionResult {
        let mut feed_mappings = Vec::new();
        let mut categories = Vec::new();
        let mut category_mappings = Vec::new();
        let feeds = collections
            .into_iter()
            .enumerate()
            .flat_map(|(index, collection)| {
                let FeedlyCollection {
                    id,
                    label,
                    description: _,
                    feeds,
                } = collection;
                let category_id = CategoryID::new(&id);
                let collection_category = Category {
                    category_id: category_id.clone(),
                    label: {
                        // Prefer the explicit label; otherwise try to extract a
                        // readable name from the "user/.../category/<name>" id,
                        // falling back to "Unknown".
                        let mut category_label = "Unknown".to_string();
                        if let Some(l) = label {
                            category_label = l;
                        } else if let Ok(regex) = Regex::new(r"user/\S*category/(.*)")
                            && let Some(captures) = regex.captures(&id)
                            && let Some(regex_match) = captures.get(1)
                        {
                            regex_match.as_str().clone_into(&mut category_label);
                        }
                        category_label
                    },
                };
                categories.push(collection_category);
                // Feedly categories are flat: every one hangs off the toplevel,
                // and the collection's position doubles as the sort index.
                let category_mapping = CategoryMapping {
                    parent_id: NEWSFLASH_TOPLEVEL.clone(),
                    category_id: category_id.clone(),
                    sort_index: Some(index as i32),
                };
                category_mappings.push(category_mapping);
                match feeds {
                    Some(subscriptions) => subscriptions
                        .into_iter()
                        .filter_map(|feed| {
                            let Subscription {
                                id,
                                title,
                                categories: _,
                                website,
                                updated: _,
                                subscribers: _,
                                velocity: _,
                                topics: _,
                                content_type: _,
                                icon_url,
                                partial: _,
                                sort_id: _,
                                added: _,
                                visual_url,
                            } = feed;
                            // A feed without a title cannot be displayed: skip it.
                            let title = match title {
                                Some(title) => title,
                                None => return None,
                            };
                            let feed_id = FeedID::new(&id);
                            feed_mappings.push(FeedMapping {
                                feed_id: feed_id.clone(),
                                category_id: category_id.clone(),
                                sort_index: Some(index as i32),
                            });
                            Some(Feed {
                                feed_id,
                                label: title,
                                website: match website {
                                    Some(url) => Url::parse(&url).ok(),
                                    None => None,
                                },
                                feed_url: None,
                                // Prefer the dedicated icon; fall back to the
                                // "visual" artwork when no icon is set.
                                icon_url: match icon_url {
                                    Some(url) => Url::parse(&url).ok(),
                                    None => match visual_url {
                                        Some(url) => Url::parse(&url).ok(),
                                        None => None,
                                    },
                                },
                                error_count: 0,
                                error_message: None,
                            })
                        })
                        .collect(),
                    None => Vec::new(),
                }
            })
            .collect();
        FeedConversionResult {
            feeds,
            feed_mappings,
            categories,
            category_mappings,
        }
    }
    /// Convert a page of Feedly entries into NewsFlash articles, headlines,
    /// taggings and enclosures.
    ///
    /// One tokio task is spawned per entry; the accumulator vectors are shared
    /// behind `Arc<RwLock<_>>` and unwrapped after all tasks have joined.
    ///
    /// * Entries from feeds not in `feed_ids` are dropped unless they are
    ///   marked (saved for later).
    /// * Entries that already exist locally and carry no `updated` timestamp
    ///   only produce a `Headline` (read/marked state update), not an article.
    async fn convert_entry_vec(
        entries: Vec<Entry>,
        marked_tag: &str,
        feed_ids: &HashSet<FeedID>,
        portal: Arc<Box<dyn Portal>>,
    ) -> StreamConversionResult {
        let enclosures: Arc<RwLock<Vec<Enclosure>>> = Arc::new(RwLock::new(Vec::new()));
        let taggings: Arc<RwLock<Vec<Tagging>>> = Arc::new(RwLock::new(Vec::new()));
        let headlines: Arc<RwLock<Vec<Headline>>> = Arc::new(RwLock::new(Vec::new()));
        let tasks = entries
            .into_iter()
            .map(|e| {
                // Clone shared state and borrowed params into the spawned task.
                let enclosures = enclosures.clone();
                let taggings = taggings.clone();
                let headlines = headlines.clone();
                let marked_tag = marked_tag.to_owned();
                let feed_ids = feed_ids.clone();
                let portal = portal.clone();
                tokio::spawn(async move {
                    let Entry {
                        id,
                        title,
                        content,
                        summary,
                        author,
                        crawled: _,
                        recrawled: _,
                        published,
                        updated,
                        alternate,
                        origin,
                        keywords: _,
                        visual,
                        unread,
                        tags,
                        categories: _,
                        engagement: _,
                        action_timestamp: _,
                        enclosure,
                        fingerprint: _,
                        origin_id: _,
                        sid: _,
                    } = e;
                    let article_id = ArticleID::new(&id);
                    let article_exists_locally = portal.get_article_exists(&article_id).unwrap_or(false);
                    // Entries without an origin stream end up on a "None" feed id.
                    let feed_id = match origin {
                        Some(origin) => match origin.stream_id {
                            Some(stream_id) => FeedID::new(&stream_id),
                            None => FeedID::new("None"),
                        },
                        None => FeedID::new("None"),
                    };
                    let unread = if unread { models::Read::Unread } else { models::Read::Read };
                    // An entry is "marked" when it carries the account's saved-tag.
                    let marked = match tags {
                        Some(ref tags) => match tags.iter().find(|t| t.id.contains(&marked_tag)) {
                            Some(_) => models::Marked::Marked,
                            None => models::Marked::Unmarked,
                        },
                        None => models::Marked::Unmarked,
                    };
                    // Drop articles of unsubscribed feeds unless saved for later.
                    if !feed_ids.contains(&feed_id) && marked == models::Marked::Unmarked {
                        return None;
                    }
                    // Known article without a remote update: just sync its state.
                    if article_exists_locally && updated.is_none() {
                        headlines.write().await.push(Headline { article_id, unread, marked });
                        return None;
                    }
                    if let Some(ref mut article_enclosures) = Feedly::convert_enclosures(&enclosure, ArticleID::new(&id)) {
                        enclosures.write().await.append(article_enclosures);
                    }
                    if let Some(tag_vec) = &tags {
                        // "global." tags are Feedly-internal pseudo streams.
                        let mut article_taggings: Vec<Tagging> = tag_vec
                            .iter()
                            .filter(|t| !t.id.contains("global."))
                            .map(|t| Tagging {
                                article_id: ArticleID::new(&id),
                                tag_id: TagID::new(&t.id),
                            })
                            .collect();
                        taggings.write().await.append(&mut article_taggings);
                    }
                    // Prefer the full content; fall back to the summary.
                    let (html, direction) = match Feedly::convert_content(&content) {
                        Some((html, direction)) => (Some(html), Some(direction)),
                        None => match Feedly::convert_content(&summary) {
                            Some((html, direction)) => (Some(html), Some(direction)),
                            None => (None, None),
                        },
                    };
                    let plain_text = if article_exists_locally {
                        None
                    } else {
                        html.as_deref().map(util::html2text::html2text)
                    };
                    // NOTE(review): this runs html2text over the already-converted
                    // plain text (and is `None` whenever the article exists locally) —
                    // possibly meant to take `html` as input; confirm.
                    let summary = plain_text.as_deref().map(util::html2text::html2text);
                    // Feedly uses the literal string "none" for a missing visual.
                    let thumbnail_url = visual.and_then(|vis| if vis.url == "none" { None } else { Some(vis.url) });
                    Some(FatArticle {
                        article_id,
                        title,
                        author,
                        feed_id,
                        url: match alternate {
                            Some(alternates) => match alternates.first() {
                                Some(link_obj) => Url::parse(&link_obj.href).ok(),
                                None => None,
                            },
                            None => None,
                        },
                        // Feedly timestamps are in milliseconds; convert to seconds.
                        date: util::timestamp_to_datetime(published / 1000),
                        synced: Utc::now(),
                        updated: updated.map(|timestamp| util::timestamp_to_datetime(timestamp / 1000)),
                        html,
                        summary,
                        direction,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text,
                        thumbnail_url,
                    })
                })
            })
            .collect::<Vec<_>>();
        // Join all tasks; results of panicked tasks are silently dropped (`res.ok()`).
        let articles = future::join_all(tasks).await.into_iter().filter_map(|res| res.ok().flatten()).collect();
        StreamConversionResult {
            articles,
            // All task handles are gone here, so `Arc::into_inner` should always
            // succeed; fall back to empty vectors defensively.
            headlines: Arc::into_inner(headlines).map(|e| e.into_inner()).unwrap_or_default(),
            taggings: Arc::into_inner(taggings).map(|e| e.into_inner()).unwrap_or_default(),
            enclosures: Arc::into_inner(enclosures).map(|e| e.into_inner()).unwrap_or_default(),
        }
    }
fn convert_content(content: &Option<FeedlyContent>) -> Option<(String, Direction)> {
match content {
Some(c) => {
let direction = match c.direction {
Some(ref direction) => {
if direction == "rtl" {
Direction::RightToLeft
} else {
Direction::LeftToRight
}
}
None => Direction::LeftToRight,
};
Some((c.content.clone(), direction))
}
None => None,
}
}
fn convert_enclosures(enclosures: &Option<Vec<Link>>, article_id: ArticleID) -> Option<Vec<Enclosure>> {
match enclosures {
Some(enclosure_vec) => {
let res = enclosure_vec
.iter()
.map(|enc| Feedly::convert_enclosure(enc, &article_id))
.collect::<Result<Vec<Enclosure>, _>>();
res.ok()
}
None => None,
}
}
fn convert_enclosure(enc: &Link, article_id: &ArticleID) -> FeedApiResult<Enclosure> {
let url = Url::parse(&enc.href)?;
Ok(Enclosure {
article_id: article_id.clone(),
url,
mime_type: enc._type.clone(),
title: None,
position: None,
summary: None,
thumbnail_url: None,
filesize: None,
width: None,
height: None,
duration: None,
framerate: None,
alternative: None,
is_default: false,
})
}
    /// Fetch an entire stream, following Feedly's continuation-based paging,
    /// and convert every page via `convert_entry_vec`.
    async fn get_articles(&self, api: &FeedlyApi, query: ArticleQuery<'_>, client: &Client) -> FeedApiResult<StreamConversionResult> {
        let mut continuation: Option<String> = None;
        let mut articles: Vec<FatArticle> = Vec::new();
        let mut enclosures: Vec<Enclosure> = Vec::new();
        let mut taggings: Vec<Tagging> = Vec::new();
        let mut headlines: Vec<Headline> = Vec::new();
        // Saved-tag id, needed to detect marked entries during conversion.
        let tag_marked = api.tag_marked(client).await?;
        loop {
            let stream = api
                .get_stream(
                    query.stream_id,
                    continuation,
                    query.count,
                    query.ranked,
                    query.unread_only,
                    query.newer_than,
                    client,
                )
                .await?;
            let mut converted = Feedly::convert_entry_vec(stream.items, &tag_marked, query.feed_ids, self.portal.clone()).await;
            articles.append(&mut converted.articles);
            enclosures.append(&mut converted.enclosures);
            taggings.append(&mut converted.taggings);
            headlines.append(&mut converted.headlines);
            // A missing continuation token signals the last page.
            continuation = stream.continuation;
            if continuation.is_none() {
                break;
            }
        }
        Ok(StreamConversionResult {
            articles,
            enclosures,
            taggings,
            headlines,
        })
    }
async fn is_token_expired(&self) -> Result<bool, FeedlyApiError> {
let timestamp = self.config.write().await.get_token_expires().ok_or(FeedlyApiError::TokenExpired)?;
let timestamp = timestamp.parse::<i64>().map_err(|_| FeedlyApiError::TokenExpired)?;
let expires_at = util::timestamp_to_datetime(timestamp);
let expires_in = expires_at.signed_duration_since(Utc::now());
Ok(expires_in.num_seconds() <= 60)
}
async fn refresh_token(&self, api: &FeedlyApi, client: &Client) -> FeedApiResult<()> {
let response = api.refresh_auth_token(client).await?;
let token_expires = Utc::now() + Duration::try_seconds(i64::from(response.expires_in)).unwrap();
self.config.write().await.set_access_token(&response.access_token);
self.config.write().await.set_token_expires(&token_expires.timestamp().to_string());
self.config.write().await.write()?;
Ok(())
}
async fn check_and_update_token(&self, api: &FeedlyApi, client: &Client) -> FeedApiResult<()> {
if self.is_token_expired().await? {
self.refresh_token(api, client).await?;
}
Ok(())
}
}
#[async_trait]
impl FeedApi for Feedly {
fn features(&self) -> FeedApiResult<PluginCapabilities> {
Ok(PluginCapabilities::ADD_REMOVE_FEEDS
| PluginCapabilities::SUPPORT_CATEGORIES
| PluginCapabilities::MODIFY_CATEGORIES
| PluginCapabilities::SUPPORT_TAGS)
}
    /// A user counts as configured once an API instance exists (set by `login`).
    fn has_user_configured(&self) -> FeedApiResult<bool> {
        Ok(self.api.is_some())
    }
async fn is_reachable(&self, client: &Client) -> FeedApiResult<bool> {
let res = client.head("https://cloud.feedly.com").send().await?;
Ok(res.status().is_success())
}
    /// Report the cached login state; no network request is made.
    async fn is_logged_in(&self, _client: &Client) -> FeedApiResult<bool> {
        Ok(self.logged_in)
    }
    /// User name as stored in the account config (persisted during `login`).
    async fn user_name(&self) -> Option<String> {
        self.config.read().await.get_user_name()
    }
async fn get_login_data(&self) -> Option<LoginData> {
if let Ok(true) = self.has_user_configured() {
return Some(LoginData::OAuth(OAuthData {
id: FeedlyMetadata::get_id(),
url: String::new(),
custom_api_secret: None,
}));
}
None
}
async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
if let LoginData::OAuth(data) = data {
let url = Url::parse(&data.url)?;
let secret_struct = FeedlySecrets::new();
match FeedlyApi::parse_redirected_url(&url) {
Ok(auth_code) => match FeedlyApi::request_auth_token(&secret_struct.id(), &secret_struct.secret(), auth_code, client).await {
Ok(response) => {
let now = Utc::now();
let token_expires = now + Duration::try_seconds(i64::from(response.expires_in)).unwrap();
let api = FeedlyApi::new(
secret_struct.id(),
secret_struct.secret(),
response.access_token.clone(),
response.refresh_token.clone(),
token_expires,
)?;
api.initialize_user_id(client).await?;
let profile = api.get_profile(client).await?;
self.config.write().await.set_access_token(&response.access_token);
self.config.write().await.set_refresh_token(&response.refresh_token);
self.config.write().await.set_token_expires(&token_expires.timestamp().to_string());
if let Some(user_name) = profile.given_name {
self.config.write().await.set_user_name(&user_name);
}
self.config.read().await.write()?;
self.api = Some(api);
self.logged_in = true;
return Ok(());
}
Err(_e) => {
self.api = None;
self.logged_in = false;
return Err(FeedApiError::Login);
}
},
Err(_e) => {
self.api = None;
self.logged_in = false;
return Err(FeedApiError::Login);
}
}
}
self.api = None;
self.logged_in = false;
Err(FeedApiError::Login)
}
    /// Delete the locally stored account configuration.
    ///
    /// NOTE(review): `self.api` and `self.logged_in` are left untouched, so
    /// the instance still reports as logged in afterwards — confirm this is
    /// intentional (e.g. the instance is dropped right after logout).
    async fn logout(&mut self, _client: &Client) -> FeedApiResult<()> {
        self.config.read().await.delete()?;
        Ok(())
    }
    /// First sync after login: download collections, tags, all saved articles,
    /// the articles of every user tag, and every unread article.
    async fn initial_sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            // Well-known stream ids of this account.
            let tag_marked = api.tag_marked(client).await?;
            let tag_all = api.category_all(client).await?;
            // Collections and tags are fetched concurrently.
            let collections = api.get_collections(client);
            let tags = api.get_tags(client);
            let (collections, tags) = futures::try_join!(collections, tags)?;
            let conversion_result = Feedly::convert_collection_vec(collections);
            let feed_ids: HashSet<FeedID> = conversion_result.feeds.iter().map(|f| f.feed_id.clone()).collect();
            let tags = Feedly::convert_tag_vec(tags);
            let mut result = StreamConversionResult::new();
            let mut futures = Vec::new();
            // Saved/marked articles.
            let query = ArticleQuery {
                stream_id: &tag_marked,
                count: Some(200),
                ranked: None,
                unread_only: None,
                newer_than: None,
                feed_ids: &feed_ids,
            };
            futures.push(self.get_articles(api, query, client));
            // Articles of every user tag.
            for tag in &tags {
                let query = ArticleQuery {
                    stream_id: tag.tag_id.as_str(),
                    count: Some(200),
                    ranked: None,
                    unread_only: None,
                    newer_than: None,
                    feed_ids: &feed_ids,
                };
                futures.push(self.get_articles(api, query, client));
            }
            // All currently unread articles.
            let query = ArticleQuery {
                stream_id: &tag_all,
                count: Some(200),
                ranked: None,
                unread_only: Some(true),
                newer_than: None,
                feed_ids: &feed_ids,
            };
            futures.push(self.get_articles(api, query, client));
            // Run all stream downloads concurrently and merge their results.
            let article_results = futures::future::try_join_all(futures).await?;
            for articles in article_results {
                result.add(articles);
            }
            Ok(SyncResult {
                feeds: util::vec_to_option(conversion_result.feeds),
                categories: util::vec_to_option(conversion_result.categories),
                feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
                category_mappings: util::vec_to_option(conversion_result.category_mappings),
                tags: util::vec_to_option(tags),
                taggings: util::vec_to_option(result.taggings),
                headlines: util::vec_to_option(result.headlines),
                articles: util::vec_to_option(result.articles),
                enclosures: util::vec_to_option(result.enclosures),
            })
        } else {
            Err(FeedApiError::Login)
        }
    }
    /// Incremental sync: fetch everything since the last sync plus the current
    /// remote unread/marked state, then reconcile it with the local state.
    async fn sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            let last_sync = self.portal.get_config().read().await.get_last_sync();
            // Well-known stream ids of this account.
            let tag_all = api.category_all(client).await?;
            let tag_marked = api.tag_marked(client).await?;
            // Collections and tags are fetched concurrently.
            let collections = api.get_collections(client);
            let tags = api.get_tags(client);
            let (collections, tags) = futures::try_join!(collections, tags)?;
            let conversion_result = Feedly::convert_collection_vec(collections);
            let feed_ids: HashSet<FeedID> = conversion_result.feeds.iter().map(|f| f.feed_id.clone()).collect();
            let tags = Feedly::convert_tag_vec(tags);
            let mut result = StreamConversionResult::new();
            // Three streams, fetched concurrently:
            // 1) everything published since the last sync,
            let recent = self.get_articles(
                api,
                ArticleQuery {
                    stream_id: &tag_all,
                    count: Some(200),
                    ranked: None,
                    unread_only: None,
                    newer_than: Some(last_sync.timestamp() as u64),
                    feed_ids: &feed_ids,
                },
                client,
            );
            // 2) the most recent saved/marked entries,
            let marked = self.get_articles(
                api,
                ArticleQuery {
                    stream_id: &tag_marked,
                    count: Some(50),
                    ranked: None,
                    unread_only: None,
                    newer_than: None,
                    feed_ids: &feed_ids,
                },
                client,
            );
            // 3) all currently unread entries.
            let unread = self.get_articles(
                api,
                ArticleQuery {
                    stream_id: &tag_all,
                    count: None,
                    ranked: None,
                    unread_only: Some(true),
                    newer_than: None,
                    feed_ids: &feed_ids,
                },
                client,
            );
            let (recent, marked, unread) = futures::try_join!(recent, marked, unread)?;
            // Snapshot the remote marked/unread ids before the streams are
            // consumed by `result.add` below.
            let remote_marked_ids: HashSet<ArticleID> = marked
                .articles
                .iter()
                .map(|a| &a.article_id)
                .cloned()
                .chain(marked.headlines.iter().map(|h| &h.article_id).cloned())
                .collect();
            let remote_unread_ids: HashSet<ArticleID> = unread
                .articles
                .iter()
                .map(|a| &a.article_id)
                .cloned()
                .chain(unread.headlines.iter().map(|h| &h.article_id).cloned())
                .collect();
            result.add(recent);
            result.add(marked);
            result.add(unread);
            let local_unread_ids = self.portal.get_article_ids_unread_all()?;
            let local_marked_ids = self.portal.get_article_ids_marked_all()?;
            let local_unread_ids: HashSet<ArticleID> = local_unread_ids.into_iter().collect();
            let local_marked_ids: HashSet<ArticleID> = local_marked_ids.into_iter().collect();
            // Articles unread locally but no longer unread remotely: flip to read.
            let mut should_mark_read_headlines = local_unread_ids
                .difference(&remote_unread_ids)
                .map(|id| Headline {
                    article_id: ArticleID::new(&id.to_string()),
                    unread: Read::Read,
                    marked: if remote_marked_ids.contains(id) {
                        Marked::Marked
                    } else {
                        Marked::Unmarked
                    },
                })
                .collect();
            result.headlines.append(&mut should_mark_read_headlines);
            // Articles marked locally but no longer marked remotely: unmark.
            let mut missing_unmarked_headlines = local_marked_ids
                .difference(&remote_marked_ids)
                .map(|id| Headline {
                    article_id: ArticleID::new(&id.to_string()),
                    marked: Marked::Unmarked,
                    unread: if remote_unread_ids.contains(id) { Read::Unread } else { Read::Read },
                })
                .collect();
            result.headlines.append(&mut missing_unmarked_headlines);
            Ok(SyncResult {
                feeds: util::vec_to_option(conversion_result.feeds),
                categories: util::vec_to_option(conversion_result.categories),
                feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
                category_mappings: util::vec_to_option(conversion_result.category_mappings),
                tags: util::vec_to_option(tags),
                taggings: util::vec_to_option(result.taggings),
                headlines: util::vec_to_option(result.headlines),
                articles: util::vec_to_option(result.articles),
                enclosures: util::vec_to_option(result.enclosures),
            })
        } else {
            Err(FeedApiError::Login)
        }
    }
    /// Refresh a single feed: look up its metadata in the remote collections
    /// and pull the latest articles of its stream.
    async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FeedUpdateResult> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            let collections = api.get_collections(client).await?;
            let conversion_result = Feedly::convert_collection_vec(collections);
            // `feed` stays `None` when the id is no longer part of any collection.
            let feed = conversion_result.feeds.iter().find(|feed| &feed.feed_id == feed_id).cloned();
            // Restrict the article conversion to exactly this feed.
            let mut feed_ids: HashSet<FeedID> = HashSet::new();
            feed_ids.insert(feed_id.clone());
            let query = ArticleQuery {
                stream_id: feed_id.as_str(),
                count: Some(200),
                ranked: None,
                unread_only: None,
                newer_than: None,
                feed_ids: &feed_ids,
            };
            let result = self.get_articles(api, query, client).await?;
            Ok(FeedUpdateResult {
                feed,
                taggings: util::vec_to_option(result.taggings),
                articles: util::vec_to_option(result.articles),
                enclosures: util::vec_to_option(result.enclosures),
            })
        } else {
            Err(FeedApiError::Login)
        }
    }
async fn set_article_read(&self, articles: &[ArticleID], read: models::Read, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let string_vec: Vec<&str> = articles.iter().map(|x| x.as_str()).collect();
match read {
models::Read::Read => api.mark_entries_read(string_vec.clone(), client).await?,
models::Read::Unread => api.mark_entries_unread(string_vec.clone(), client).await?,
};
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn set_article_marked(&self, articles: &[ArticleID], marked: models::Marked, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let string_vec: Vec<&str> = articles.iter().map(|x| x.as_str()).collect();
match marked {
models::Marked::Marked => api.mark_entries_saved(string_vec.clone(), client).await?,
models::Marked::Unmarked => api.mark_entries_unsaved(string_vec.clone(), client).await?,
};
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn set_feed_read(&self, feeds: &[FeedID], _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let string_vec: Vec<&str> = feeds.iter().map(|x| x.as_str()).collect();
api.mark_feeds_read(string_vec.clone(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn set_category_read(&self, categories: &[CategoryID], _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let string_vec: Vec<&str> = categories.iter().map(|x| x.as_str()).collect();
api.mark_categories_read(string_vec.clone(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn set_tag_read(&self, tags: &[TagID], _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let string_vec: Vec<&str> = tags.iter().map(|x| x.as_str()).collect();
api.mark_tags_read(string_vec.clone(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn set_all_read(&self, _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let all = api.category_all(client).await?;
let vec: Vec<&str> = vec![&all];
api.mark_categories_read(vec.clone(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
    /// Subscribe to `url` on the Feedly account, then download and parse the
    /// feed locally to build the returned `Feed` model. If the local
    /// download/parse fails, the remote subscription is removed again.
    ///
    /// NOTE(review): both the not-logged-in case and a failed or ambiguous
    /// local parse surface as `FeedApiError::Login` via the shared
    /// fall-through — confirm this is intended.
    async fn add_feed(
        &self,
        url: &Url,
        title: Option<String>,
        category: Option<CategoryID>,
        client: &Client,
    ) -> FeedApiResult<(Feed, Option<Category>)> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            // (sic) "gernerate" is the upstream crate's API name.
            let feed_id = FeedlyApi::gernerate_feed_id(url);
            let feed = SubscriptionInput {
                id: feed_id.clone(),
                title: title.as_ref().cloned(),
                categories: match category {
                    Some(category_id) => {
                        let category = FeedlyCategory {
                            id: category_id.to_string(),
                            label: None,
                            description: None,
                        };
                        Some(vec![category])
                    }
                    None => None,
                },
            };
            api.add_subscription(feed.clone(), client).await?;
            let feed_id = FeedID::new(&feed_id);
            let semaphore = self.portal.get_download_semaphore();
            let feed = feed_parser::download_and_parse_feed(url, &feed_id, title, semaphore, client).await;
            // Roll the remote subscription back when we can't parse the feed.
            if feed.is_err() {
                self.remove_feed(&feed_id, client).await?;
            }
            if let Ok(ParsedUrl::SingleFeed(feed)) = feed {
                return Ok((*feed, None));
            }
        }
        Err(FeedApiError::Login)
    }
async fn remove_feed(&self, id: &FeedID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
api.delete_subscription(id.as_str(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn move_feed(&self, feed_id: &FeedID, from: &CategoryID, to: &CategoryID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let mappings = self.portal.get_feed_mappings()?;
let mut categories: Vec<FeedlyCategory> = mappings
.into_iter()
.filter(|mapping| &mapping.feed_id == feed_id && &mapping.category_id != from)
.map(|mapping| FeedlyCategory {
id: mapping.category_id.to_string(),
label: None,
description: None,
})
.collect();
categories.push(FeedlyCategory {
id: to.to_string(),
label: None,
description: None,
});
let feed = SubscriptionInput {
id: feed_id.to_string(),
title: None,
categories: Some(categories),
};
api.add_subscription(feed.clone(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn rename_feed(&self, feed_id: &FeedID, new_title: &str, client: &Client) -> FeedApiResult<FeedID> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let feed = SubscriptionInput {
id: feed_id.to_string(),
title: Some(new_title.to_owned()),
categories: None,
};
api.add_subscription(feed.clone(), client).await?;
Ok(feed_id.clone())
} else {
Err(FeedApiError::Login)
}
}
    /// Changing a subscription's URL is not supported by this backend.
    async fn edit_feed_url(&self, _feed_id: &FeedID, _new_url: &str, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }
async fn add_category<'a>(&self, title: &str, parent: Option<&'a CategoryID>, client: &Client) -> FeedApiResult<CategoryID> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
if parent.is_some() {
return Err(FeedApiError::Unsupported);
}
let category_id = api.generate_category_id(title, client).await?;
Ok(CategoryID::new(&category_id))
} else {
Err(FeedApiError::Login)
}
}
async fn remove_category(&self, id: &CategoryID, remove_children: bool, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
if remove_children {
let mappings = self.portal.get_feed_mappings()?;
let updated_subscriptions = mappings
.iter()
.filter(|m| &m.category_id == id)
.map(|m| SubscriptionInput {
id: m.feed_id.to_string(),
title: None,
categories: {
let categories = mappings
.iter()
.filter(|m2| m2.feed_id == m.feed_id && &m2.category_id != id)
.map(|m3| FeedlyCategory {
id: m3.category_id.to_string(),
label: None,
description: None,
})
.collect::<Vec<FeedlyCategory>>();
Some(categories)
},
})
.collect::<Vec<SubscriptionInput>>();
api.update_subscriptions(updated_subscriptions.clone(), client).await?;
}
api.delete_category(id.as_str(), client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
    /// Rename a category on the Feedly backend.
    ///
    /// NOTE(review): the returned id is generated from `new_title` while the
    /// update is issued against the old id — this assumes the backend re-keys
    /// the category stream on rename; confirm against the API docs.
    async fn rename_category(&self, id: &CategoryID, new_title: &str, client: &Client) -> FeedApiResult<CategoryID> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            let new_id = api.generate_category_id(new_title, client).await?;
            api.update_category(id.as_str(), new_title, client).await?;
            Ok(CategoryID::new(&new_id))
        } else {
            Err(FeedApiError::Login)
        }
    }
    /// Feedly categories cannot be nested, so moving one is unsupported
    /// (see `add_category`, which rejects parents for the same reason).
    async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }
async fn import_opml(&self, opml: &str, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
api.import_opml(opml, client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn add_tag(&self, title: &str, client: &Client) -> FeedApiResult<TagID> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
let id = api.generate_tag_id(title, client).await?;
Ok(TagID::new(&id))
} else {
Err(FeedApiError::Login)
}
}
async fn remove_tag(&self, id: &TagID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
api.delete_tags(vec![id.as_str()], client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
    /// Rename a tag on the Feedly backend.
    ///
    /// NOTE(review): like `rename_category`, the returned id is generated from
    /// `new_title` while the update targets the old id — assumes the backend
    /// re-keys the tag stream on rename; confirm.
    async fn rename_tag(&self, id: &TagID, new_title: &str, client: &Client) -> FeedApiResult<TagID> {
        if let Some(api) = &self.api {
            self.check_and_update_token(api, client).await?;
            let new_id = api.generate_tag_id(new_title, client).await?;
            api.update_tag(id.as_str(), new_title, client).await?;
            Ok(TagID::new(&new_id))
        } else {
            Err(FeedApiError::Login)
        }
    }
async fn tag_article(&self, article_id: &ArticleID, tag_id: &TagID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
api.tag_entry(article_id.as_str(), vec![tag_id.as_str()], client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
async fn untag_article(&self, article_id: &ArticleID, tag_id: &TagID, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
self.check_and_update_token(api, client).await?;
api.untag_entries(vec![article_id.as_str()], vec![tag_id.as_str()], client).await?;
Ok(())
} else {
Err(FeedApiError::Login)
}
}
    /// Not implemented: icon URLs are already delivered with the subscription
    /// data (see `convert_collection_vec`), so no separate favicon lookup exists.
    async fn get_favicon(&self, _feed_id: &FeedID, _client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FavIcon> {
        Err(FeedApiError::Unsupported)
    }
}