pub mod config;
pub mod metadata;
use std::collections::HashSet;
use self::config::AccountConfig;
use crate::FatArticle;
use crate::FeedMapping;
use crate::feed_api::{FeedApi, FeedApiError, FeedApiResult, FeedHeaderMap, Portal};
use crate::feed_api_implementations::NewsBlurMetadata;
use crate::models::{
ArticleID, Category, CategoryID, CategoryMapping, DirectLogin, FavIcon, Feed, FeedConversionResult, FeedID, FeedUpdateResult, Headline,
LoginData, Marked, NEWSFLASH_TOPLEVEL, PasswordLogin, PluginCapabilities, Read, SyncResult, TagID, Url,
};
use crate::util;
use crate::util::favicons::EXPIRES_AFTER_DAYS;
use async_trait::async_trait;
use base64::Engine;
use base64::engine::general_purpose::STANDARD as base64_std;
use chrono::{Duration, NaiveDateTime, Utc};
use newsblur_api_updated::{ApiError as NewsBlurError, NewsBlurApi};
use reqwest::Client;
use reqwest::header::{HeaderMap, HeaderValue};
use serde_json::{Map, Value};
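// Map errors of the NewsBlur API crate onto NewsFlash feed API errors.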
impl From<NewsBlurError> for FeedApiError {
fn from(error: NewsBlurError) -> FeedApiError {
match error {
NewsBlurError::Url(e) => FeedApiError::Url(e),
NewsBlurError::Http(e) => FeedApiError::Network(e),
NewsBlurError::AccessDenied => FeedApiError::Auth,
}
}
}
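/// NewsFlash backend for a NewsBlur account, backed by a [`NewsBlurApi`] instance and the account-specific [`AccountConfig`].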
pub struct NewsBlurService {
api: Option<NewsBlurApi>,
portal: Box<dyn Portal>,
logged_in: bool,
config: AccountConfig,
}
impl NewsBlurService {
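/// Convert the raw `feeds` map and `folders` array of a NewsBlur feed listing into NewsFlash feeds, categories and their mappings.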
fn convert_feed_vec(raw_feeds: &Map<String, Value>, raw_folders: &[Value]) -> FeedApiResult<FeedConversionResult> {
let mut feed_mappings: Vec<FeedMapping> = Vec::new();
let mut folder_mappings: Vec<CategoryMapping> = Vec::new();
let mut feeds: Vec<Feed> = Vec::new();
let mut folders: Vec<Category> = Vec::new();
for feed in raw_feeds {
let website = feed.1["feed_link"].as_str().ok_or(FeedApiError::Unknown)?;
let feed_address = feed.1["feed_address"].as_str().ok_or(FeedApiError::Unknown)?;
let icon_url = feed.1["favicon_url"].as_str().ok_or(FeedApiError::Unknown)?;
feeds.push(Feed {
feed_id: FeedID::new(&feed.0.to_string()),
label: feed.1["feed_title"].as_str().unwrap_or("Unkown Feed").into(),
website: Url::parse(website).ok(),
feed_url: Url::parse(feed_address).ok(),
icon_url: Url::parse(&format!("https://newsblur.com/{icon_url}")).ok(),
error_count: 0,
error_message: None,
});
}
let feed_ids: HashSet<&FeedID> = feeds.iter().map(|f| &f.feed_id).collect();
for (i, folder) in raw_folders.iter().enumerate() {
if folder.is_object() {
let folder_object = folder.as_object().ok_or(FeedApiError::Unknown)?;
for (j, fol) in folder_object.iter().enumerate() {
Self::parse_folder_value(
fol,
&NEWSFLASH_TOPLEVEL,
(i + j) as i32,
&feed_ids,
&mut feed_mappings,
&mut folders,
&mut folder_mappings,
)?;
}
} else {
let id_num = folder.as_u64().ok_or(FeedApiError::Unknown)?;
let id_string = format!("{id_num}");
feed_mappings.push(FeedMapping {
feed_id: FeedID::new(&id_string),
category_id: NEWSFLASH_TOPLEVEL.clone(),
sort_index: Some(i as i32),
});
}
}
for mapping in &feed_mappings {
let feed_exists = feeds.iter().any(|f| f.feed_id == mapping.feed_id);
let folder_exists = folders.iter().any(|f| f.category_id == mapping.category_id);
if !feed_exists {
tracing::error!(%mapping.feed_id, "feed does not exist");
}
if !folder_exists && mapping.category_id != *NEWSFLASH_TOPLEVEL {
tracing::error!(%mapping.category_id, "folder does not exist");
}
}
Ok(FeedConversionResult {
feeds,
feed_mappings,
categories: folders,
category_mappings: folder_mappings,
})
}
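/// Recursively convert a single NewsBlur folder entry (name plus contained feed ids and nested folders) into a category, its mapping and the feed mappings below it.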
fn parse_folder_value(
fol: (&String, &Value),
parent: &CategoryID,
sort_index: i32,
feed_ids: &HashSet<&FeedID>,
feed_mappings: &mut Vec<FeedMapping>,
folders: &mut Vec<Category>,
folder_mappings: &mut Vec<CategoryMapping>,
) -> FeedApiResult<()> {
let category_id = CategoryID::new(fol.0);
folders.push(Category {
category_id: category_id.clone(),
label: fol.0.to_string(),
});
folder_mappings.push(CategoryMapping {
parent_id: parent.clone(),
category_id: category_id.clone(),
sort_index: Some(sort_index),
});
for (i, id) in fol.1.as_array().ok_or(FeedApiError::Unknown)?.iter().enumerate() {
if let Some(subfolder) = id.as_object() {
for (i, subfol) in subfolder.iter().enumerate() {
Self::parse_folder_value(subfol, &category_id, i as i32, feed_ids, feed_mappings, folders, folder_mappings)?;
}
} else {
let id_num = id.as_u64().ok_or(FeedApiError::Unknown)?;
let id_string = format!("{id_num}");
let feed_id = FeedID::new(&id_string);
if !feed_ids.contains(&feed_id) {
tracing::error!(%feed_id, folder = fol.0, "feed in folder does not exist");
continue;
}
feed_mappings.push(FeedMapping {
feed_id,
category_id: CategoryID::new(fol.0),
sort_index: Some(i as i32),
});
}
}
Ok(())
}
}
#[async_trait]
impl FeedApi for NewsBlurService {
fn features(&self) -> FeedApiResult<PluginCapabilities> {
Ok(PluginCapabilities::NONE)
}
fn has_user_configured(&self) -> FeedApiResult<bool> {
Ok(self.api.is_some())
}
async fn is_reachable(&self, client: &Client) -> FeedApiResult<bool> {
if let Some(url) = self.config.get_url() {
let res = client.head(&url).send().await?;
Ok(res.status().is_success())
} else {
Err(FeedApiError::Login)
}
}
async fn is_logged_in(&self, _client: &Client) -> FeedApiResult<bool> {
Ok(self.logged_in)
}
async fn user_name(&self) -> Option<String> {
self.config.get_user_name()
}
async fn get_login_data(&self) -> Option<LoginData> {
if let Ok(true) = self.has_user_configured()
&& let Some(username) = self.config.get_user_name()
&& let Some(password) = self.config.get_password()
{
return Some(LoginData::Direct(DirectLogin::Password(PasswordLogin {
id: NewsBlurMetadata::get_id(),
url: self.config.get_url(),
user: username,
password,
basic_auth: None,
})));
}
None
}
async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
if let LoginData::Direct(DirectLogin::Password(data)) = data
&& let Some(url_string) = data.url.clone()
{
let url = Url::parse(&url_string)?;
let mut api = NewsBlurApi::new(&url, &data.user, &data.password, None);
let cookie_string = api.login(client).await?;
self.api = Some(api);
self.logged_in = true;
self.config.set_url(&url_string);
self.config.set_password(&data.password);
self.config.set_user_name(&data.user);
self.config.set_cookie_string(&cookie_string);
self.config.write()?;
return Ok(());
}
self.logged_in = false;
self.api = None;
Err(FeedApiError::Login)
}
async fn logout(&mut self, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
api.logout(client).await?;
self.config.delete()?;
self.logged_in = false;
return Ok(());
}
Err(FeedApiError::Login)
}
async fn initial_sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
let response = api.get_feeds(client).await?;
let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;
let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
let mut articles: Vec<FatArticle> = Vec::new();
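// First pass: fetch the most recent page of stories for every subscribed feed (only page 1 is requested).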
for feed in &conversion_result.feeds {
for page in 1..2 {
let response = api.get_stories(client, feed.feed_id.as_str(), false, page).await?;
let stories_array = &response["stories"].as_array();
let stories = match stories_array {
Some(stry) => stry,
None => break,
};
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!("{}:{}", feed.feed_id, story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?);
let marked = if story["starred"].as_bool().unwrap_or(false) {
Marked::Marked
} else {
Marked::Unmarked
};
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: feed.feed_id.clone(),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
summary: None,
direction: None,
unread,
marked,
scraped_content: None,
plain_text: None,
thumbnail_url: None,
});
}
}
}
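// Second pass: collect the hashes of all unread stories and download them in batches.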
let response = api.get_unread_story_hashes(client).await?;
let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiError::Unknown)?;
let mut unread_story_hashes = Vec::new();
for hash in hashes.iter() {
for story in hash.1.as_array().ok_or(FeedApiError::Unknown)? {
unread_story_hashes.push(story.as_str().ok_or(FeedApiError::Unknown)?);
}
}
for chunk in unread_story_hashes.chunks(99) {
let response = api.get_river_stories(client, chunk).await?;
let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!(
"{}:{}",
story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
);
let marked = if story["starred"].as_bool().unwrap_or(false) {
Marked::Marked
} else {
Marked::Unmarked
};
let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(html),
summary: None,
direction: None,
unread,
marked,
scraped_content: None,
plain_text: None,
thumbnail_url,
});
}
}
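// Third pass: collect the hashes of all starred stories and add any that were not already fetched as unread.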
let response = api.get_stared_story_hashes(client).await?;
let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiError::Unknown)?;
let mut stared_story_hashes = Vec::new();
for hash in hashes.iter() {
stared_story_hashes.push(hash.as_str().ok_or(FeedApiError::Unknown)?);
}
for chunk in stared_story_hashes.chunks(99) {
let response = api.get_river_stories(client, chunk).await?;
let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!(
"{}:{}",
story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
);
let already_known = articles.iter().any(|article| article.article_id == ArticleID::new(&article_id));
let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
if !already_known {
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(html),
summary: None,
direction: None,
unread,
marked: Marked::Marked,
scraped_content: None,
plain_text: None,
thumbnail_url,
});
}
}
}
return Ok(SyncResult {
feeds: util::vec_to_option(conversion_result.feeds),
categories: util::vec_to_option(conversion_result.categories),
feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
category_mappings: util::vec_to_option(conversion_result.category_mappings),
tags: None,
taggings: None,
headlines: None,
articles: util::vec_to_option(articles),
enclosures: None,
});
}
Err(FeedApiError::Login)
}
async fn sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
if let Some(api) = &self.api {
let max_count = self.portal.get_config().read().await.get_sync_amount();
let response = api.get_feeds(client).await?;
let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;
let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
let feed_ids = conversion_result.feeds.iter().map(|f| f.feed_id.clone()).collect::<Vec<_>>();
let mut articles: Vec<FatArticle> = Vec::new();
let mut headlines: Vec<Headline> = Vec::new();
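// Collect the hashes of all unread and starred stories known to the server.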
let response = api.get_unread_story_hashes(client).await?;
let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiError::Unknown)?;
let mut unread_story_hashes = Vec::new();
for hash in hashes.iter() {
for story in hash.1.as_array().ok_or(FeedApiError::Unknown)? {
unread_story_hashes.push(story.as_str().ok_or(FeedApiError::Unknown)?);
}
}
let response = api.get_stared_story_hashes(client).await?;
let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiError::Unknown)?;
let mut stared_story_hashes = Vec::new();
for hash in hashes.iter() {
stared_story_hashes.push(hash.as_str().ok_or(FeedApiError::Unknown)?);
}
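// Only download unread stories that are not already known as unread locally.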
let local_unread_ids = self.portal.get_article_ids_unread_all()?;
let mut unread_story_hashes_to_fetch = Vec::new();
for id in &unread_story_hashes {
if local_unread_ids.contains(&ArticleID::new(id)) {
continue;
}
unread_story_hashes_to_fetch.push(*id);
}
for chunk in unread_story_hashes_to_fetch.chunks(99) {
let response = api.get_river_stories(client, chunk).await?;
let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!(
"{}:{}",
story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
);
let marked = if story["starred"].as_bool().unwrap_or(false) {
Marked::Marked
} else {
Marked::Unmarked
};
let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(html),
summary: None,
direction: None,
unread,
marked,
scraped_content: None,
plain_text: None,
thumbnail_url,
});
}
}
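// Articles that are unread locally but no longer reported as unread by the server get a headline marking them read (and starred, if the server lists them as starred).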
let local_unread_ids = self.portal.get_article_ids_unread_all()?;
for local_id in local_unread_ids {
if unread_story_hashes.contains(&local_id.as_str()) {
continue;
}
let should_mark_read_headlines = Headline {
article_id: ArticleID::new(&local_id.to_string()),
unread: Read::Read,
marked: if stared_story_hashes.contains(&local_id.as_str()) {
Marked::Marked
} else {
Marked::Unmarked
},
};
headlines.push(should_mark_read_headlines);
}
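// Estimate how many pages of already-read stories to fetch with the remaining sync budget.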
let read_sync_num = if max_count > unread_story_hashes.len() as u32 {
(max_count - unread_story_hashes.len() as u32) / 6
} else {
1
};
for page in 1..read_sync_num {
let response = api.get_read_stories(client, page).await?;
let stories_array = &response["stories"].as_array();
let mut page_articles: Vec<ArticleID> = Vec::new();
let stories = match stories_array {
Some(stry) => stry,
None => break,
};
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!(
"{}:{}",
story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
);
let marked = if story["starred"].as_bool().unwrap_or(false) {
Marked::Marked
} else {
Marked::Unmarked
};
let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(html),
summary: None,
direction: None,
unread,
marked,
scraped_content: None,
plain_text: None,
thumbnail_url,
});
if unread == Read::Read {
page_articles.push(ArticleID::new(&article_id));
}
}
if self.portal.get_articles(&page_articles).is_ok() {
break;
}
}
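// Download starred stories that are not marked locally yet and append any that are still missing.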
let local_marked_ids = self.portal.get_article_ids_marked_all()?;
let mut marked_story_hashes_to_fetch = Vec::new();
for id in stared_story_hashes {
if local_marked_ids.contains(&ArticleID::new(id)) {
continue;
}
marked_story_hashes_to_fetch.push(id);
}
if !marked_story_hashes_to_fetch.is_empty() {
for chunk in marked_story_hashes_to_fetch.chunks(99) {
let response = api.get_river_stories(client, chunk).await?;
let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!(
"{}:{}",
story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
);
let already_known = articles.iter().any(|article| article.article_id == ArticleID::new(&article_id));
let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
if !already_known {
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(html),
summary: None,
direction: None,
unread,
marked: Marked::Marked,
scraped_content: None,
plain_text: None,
thumbnail_url,
});
}
}
}
}
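// Keep only articles that belong to a currently subscribed feed or are starred.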
let articles = articles
.into_iter()
.filter(|a| feed_ids.contains(&a.feed_id) || a.marked == Marked::Marked)
.collect();
return Ok(SyncResult {
feeds: util::vec_to_option(conversion_result.feeds),
categories: util::vec_to_option(conversion_result.categories),
feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
category_mappings: util::vec_to_option(conversion_result.category_mappings),
tags: None,
taggings: None,
headlines: util::vec_to_option(headlines),
articles: util::vec_to_option(articles),
enclosures: None,
});
}
Err(FeedApiError::Login)
}
async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FeedUpdateResult> {
if let Some(api) = &self.api {
let response = api.get_feeds(client).await?;
let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;
let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
let feed = conversion_result.feeds.iter().find(|feed| &feed.feed_id == feed_id).cloned();
let mut articles: Vec<FatArticle> = Vec::new();
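// NewsBlur stories are paged; only the first page for this feed is requested here.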
for page in 1..2 {
let response = api.get_stories(client, feed_id.as_str(), false, page).await?;
let stories_array = &response["stories"].as_array();
let stories = match stories_array {
Some(stry) => stry,
None => break,
};
for story in stories.iter() {
let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
.or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
.map_err(|_| {
tracing::error!(%date_string, "failed to parse story date");
FeedApiError::Unknown
})?
.and_utc();
let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
let unread = match article_read {
0 => Read::Unread,
_ => Read::Read,
};
let article_id = format!("{}:{}", feed_id, story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?);
let marked = if story["starred"].as_bool().unwrap_or(false) {
Marked::Marked
} else {
Marked::Unmarked
};
articles.push(FatArticle {
article_id: ArticleID::new(&article_id),
title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
feed_id: feed_id.clone(),
url: Url::parse(url).ok(),
date,
synced: Utc::now(),
updated: None,
html: Some(story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
summary: None,
direction: None,
unread,
marked,
scraped_content: None,
plain_text: None,
thumbnail_url: None,
});
}
}
Ok(FeedUpdateResult {
feed,
taggings: None,
articles: util::vec_to_option(articles),
enclosures: None,
})
} else {
Err(FeedApiError::Login)
}
}
async fn set_article_read(&self, articles: &[ArticleID], read: Read, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
let story_hashes = articles.iter().map(ArticleID::as_str).collect::<Vec<_>>();
if read == Read::Read {
api.mark_stories_read(client, &story_hashes).await?;
} else {
api.mark_story_unread(client, &story_hashes).await?;
}
return Ok(());
}
Err(FeedApiError::Login)
}
async fn set_article_marked(&self, articles: &[ArticleID], marked: Marked, client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
for article in articles {
if marked == Marked::Marked {
api.mark_story_hash_as_starred(client, article.as_str()).await?;
} else {
api.mark_story_hash_as_unstarred(client, article.as_str()).await?;
}
}
return Ok(());
}
Err(FeedApiError::Login)
}
async fn set_feed_read(&self, feeds: &[FeedID], _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
for feed in feeds {
api.mark_feed_read(client, feed.as_str()).await?;
}
return Ok(());
}
Err(FeedApiError::Login)
}
async fn set_category_read(&self, _categories: &[CategoryID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
self.set_article_read(articles, Read::Read, client).await
}
async fn set_tag_read(&self, _tags: &[TagID], _articles: &[ArticleID], _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn set_all_read(&self, _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
if let Some(api) = &self.api {
api.mark_all_read(client).await?;
return Ok(());
}
Err(FeedApiError::Login)
}
async fn add_feed(
&self,
_url: &Url,
_title: Option<String>,
_category_id: Option<CategoryID>,
_client: &Client,
) -> FeedApiResult<(Feed, Option<Category>)> {
Err(FeedApiError::Unsupported)
}
async fn remove_feed(&self, _id: &FeedID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn move_feed(&self, _feed_id: &FeedID, _from: &CategoryID, _to: &CategoryID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn rename_feed(&self, _feed_id: &FeedID, _new_title: &str, _client: &Client) -> FeedApiResult<FeedID> {
Err(FeedApiError::Unsupported)
}
async fn edit_feed_url(&self, _feed_id: &FeedID, _new_url: &str, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn add_category<'a>(&self, _title: &str, _parent: Option<&'a CategoryID>, _client: &Client) -> FeedApiResult<CategoryID> {
Err(FeedApiError::Unsupported)
}
async fn remove_category(&self, _id: &CategoryID, _remove_children: bool, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn rename_category(&self, _id: &CategoryID, _new_title: &str, _client: &Client) -> FeedApiResult<CategoryID> {
Err(FeedApiError::Unsupported)
}
async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn import_opml(&self, _opml: &str, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn add_tag(&self, _title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn remove_tag(&self, _id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn rename_tag(&self, _id: &TagID, _new_title: &str, _client: &Client) -> FeedApiResult<TagID> {
Err(FeedApiError::Unsupported)
}
async fn tag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn untag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
Err(FeedApiError::Unsupported)
}
async fn get_favicon(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FavIcon> {
if let Some(api) = &self.api {
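// NewsBlur delivers favicons as base64-encoded image data keyed by feed id.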
let response = api.favicons(client, feed_id.as_str()).await?;
let favicon_data = &response[feed_id.as_str()];
let data = match favicon_data.as_str() {
Some(string) => Some(base64_std.decode(string).map_err(|_| FeedApiError::Encryption)?),
None => None,
};
let favicon = FavIcon {
feed_id: feed_id.clone(),
expires: Utc::now() + Duration::try_days(EXPIRES_AFTER_DAYS).unwrap(),
format: None,
etag: None,
source_url: None,
data,
};
return Ok(favicon);
}
Err(FeedApiError::Login)
}
}