1pub mod config;
2pub mod metadata;
3
4use std::collections::HashSet;
5
6use self::config::AccountConfig;
7use crate::FatArticle;
8use crate::FeedMapping;
9use crate::feed_api::{FeedApi, FeedApiError, FeedApiResult, FeedHeaderMap, Portal};
10use crate::feed_api_implementations::NewsBlurMetadata;
11use crate::models::{
12 ArticleID, Category, CategoryID, CategoryMapping, DirectLogin, FavIcon, Feed, FeedConversionResult, FeedID, FeedUpdateResult, Headline,
13 LoginData, Marked, NEWSFLASH_TOPLEVEL, PasswordLogin, PluginCapabilities, Read, SyncResult, TagID, Url,
14};
15use crate::util;
16use crate::util::favicons::EXPIRES_AFTER_DAYS;
17use async_trait::async_trait;
18use base64::Engine;
19use base64::engine::general_purpose::STANDARD as base64_std;
20use chrono::{Duration, NaiveDateTime, Utc};
21use newsblur_api_updated::{ApiError as NewsBlurError, NewsBlurApi};
22use reqwest::Client;
23use reqwest::header::{HeaderMap, HeaderValue};
24use serde_json::{Map, Value};
25
26impl From<NewsBlurError> for FeedApiError {
27 fn from(error: NewsBlurError) -> FeedApiError {
28 match error {
29 NewsBlurError::Url(e) => FeedApiError::Url(e),
30 NewsBlurError::Http(e) => FeedApiError::Network(e),
31 NewsBlurError::AccessDenied => FeedApiError::Auth,
32 }
33 }
34}
35
/// NewsFlash backend service for a NewsBlur account.
pub struct NewsBlurService {
    // `None` until a successful login creates an API session.
    api: Option<NewsBlurApi>,
    // Access back into the NewsFlash core: locally stored article ids/articles
    // and the runtime configuration (see usage in `sync`).
    portal: Box<dyn Portal>,
    // Cached login state reported by `is_logged_in`.
    logged_in: bool,
    // Persisted account settings: URL, user name, password, session cookie.
    config: AccountConfig,
}
42
43impl NewsBlurService {
44 fn convert_feed_vec(raw_feeds: &Map<String, Value>, raw_folders: &[Value]) -> FeedApiResult<FeedConversionResult> {
45 let mut feed_mappings: Vec<FeedMapping> = Vec::new();
46 let mut folder_mappings: Vec<CategoryMapping> = Vec::new();
47 let mut feeds: Vec<Feed> = Vec::new();
48 let mut folders: Vec<Category> = Vec::new();
49
50 for feed in raw_feeds {
51 let website = feed.1["feed_link"].as_str().ok_or(FeedApiError::Unknown)?;
52 let feed_address = feed.1["feed_address"].as_str().ok_or(FeedApiError::Unknown)?;
53 let icon_url = feed.1["favicon_url"].as_str().ok_or(FeedApiError::Unknown)?;
54
55 feeds.push(Feed {
56 feed_id: FeedID::new(&feed.0.to_string()),
57 label: feed.1["feed_title"].as_str().unwrap_or("Unkown Feed").into(),
58 website: Url::parse(website).ok(),
59 feed_url: Url::parse(feed_address).ok(),
60 icon_url: Url::parse(&format!("https://newsblur.com/{icon_url}")).ok(),
61 error_count: 0,
62 error_message: None,
63 });
64 }
65
66 let feed_ids: HashSet<&FeedID> = feeds.iter().map(|f| &f.feed_id).collect();
67
68 for (i, folder) in raw_folders.iter().enumerate() {
69 if folder.is_object() {
70 let folder_object = folder.as_object().ok_or(FeedApiError::Unknown)?;
71
72 for (j, fol) in folder_object.iter().enumerate() {
73 Self::parse_folder_value(
74 fol,
75 &NEWSFLASH_TOPLEVEL,
76 (i + j) as i32,
77 &feed_ids,
78 &mut feed_mappings,
79 &mut folders,
80 &mut folder_mappings,
81 )?;
82 }
83 } else {
84 let id_num = folder.as_u64().ok_or(FeedApiError::Unknown)?;
85 let id_string = format!("{id_num}");
86 feed_mappings.push(FeedMapping {
87 feed_id: FeedID::new(&id_string),
88 category_id: NEWSFLASH_TOPLEVEL.clone(),
89 sort_index: Some(i as i32),
90 });
91 }
92 }
93
94 for mapping in &feed_mappings {
95 let feed_exists = feeds.iter().any(|f| f.feed_id == mapping.feed_id);
96 let folder_exits = folders.iter().any(|f| f.category_id == mapping.category_id);
97
98 if !feed_exists {
99 tracing::error!(%mapping.feed_id, "feed does not exist");
100 }
101 if !folder_exits && mapping.category_id != *NEWSFLASH_TOPLEVEL {
102 tracing::error!(%mapping.category_id, "folder does not exist");
103 }
104 }
105
106 Ok(FeedConversionResult {
107 feeds,
108 feed_mappings,
109 categories: folders,
110 category_mappings: folder_mappings,
111 })
112 }
113
114 fn parse_folder_value(
115 fol: (&String, &Value),
116 parent: &CategoryID,
117 sort_index: i32,
118 feed_ids: &HashSet<&FeedID>,
119 feed_mappings: &mut Vec<FeedMapping>,
120 folders: &mut Vec<Category>,
121 folder_mappings: &mut Vec<CategoryMapping>,
122 ) -> FeedApiResult<()> {
123 let category_id = CategoryID::new(fol.0);
124
125 folders.push(Category {
126 category_id: category_id.clone(),
127 label: fol.0.to_string(),
128 });
129
130 folder_mappings.push(CategoryMapping {
131 parent_id: parent.clone(),
132 category_id: category_id.clone(),
133 sort_index: Some(sort_index),
134 });
135
136 for (i, id) in fol.1.as_array().ok_or(FeedApiError::Unknown)?.iter().enumerate() {
137 if let Some(subfolder) = id.as_object() {
138 for (i, subfol) in subfolder.iter().enumerate() {
139 Self::parse_folder_value(subfol, &category_id, i as i32, feed_ids, feed_mappings, folders, folder_mappings)?;
140 }
141 } else {
142 let id_num = id.as_u64().ok_or(FeedApiError::Unknown)?;
143 let id_string = format!("{id_num}");
144 let feed_id = FeedID::new(&id_string);
145
146 if !feed_ids.contains(&feed_id) {
147 tracing::error!(%feed_id, folder = fol.0, "feed in folder does not exist");
148 continue;
149 }
150
151 feed_mappings.push(FeedMapping {
152 feed_id,
153 category_id: CategoryID::new(fol.0),
154 sort_index: Some(i as i32),
155 });
156 }
157 }
158
159 Ok(())
160 }
161}
162
163#[async_trait]
164impl FeedApi for NewsBlurService {
    /// This backend advertises no optional capabilities (no feed/category/tag
    /// management, no OPML import — see the `Unsupported` stubs below).
    fn features(&self) -> FeedApiResult<PluginCapabilities> {
        Ok(PluginCapabilities::NONE)
    }
168
    /// The account counts as configured once an API session handle exists.
    fn has_user_configured(&self) -> FeedApiResult<bool> {
        Ok(self.api.is_some())
    }
172
173 async fn is_reachable(&self, client: &Client) -> FeedApiResult<bool> {
174 if let Some(url) = self.config.get_url() {
175 let res = client.head(&url).send().await?;
176 Ok(res.status().is_success())
177 } else {
178 Err(FeedApiError::Login)
179 }
180 }
181
    /// Report the cached login state; no network round-trip is performed.
    async fn is_logged_in(&self, _client: &Client) -> FeedApiResult<bool> {
        Ok(self.logged_in)
    }
185
    /// User name as stored in the persisted account configuration.
    async fn user_name(&self) -> Option<String> {
        self.config.get_user_name()
    }
189
190 async fn get_login_data(&self) -> Option<LoginData> {
191 if let Ok(true) = self.has_user_configured()
192 && let Some(username) = self.config.get_user_name()
193 && let Some(password) = self.config.get_password()
194 {
195 return Some(LoginData::Direct(DirectLogin::Password(PasswordLogin {
196 id: NewsBlurMetadata::get_id(),
197 url: self.config.get_url(),
198 user: username,
199 password,
200 basic_auth: None,
201 })));
202 }
203
204 None
205 }
206
207 async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
208 if let LoginData::Direct(DirectLogin::Password(data)) = data
209 && let Some(url_string) = data.url.clone()
210 {
211 let url = Url::parse(&url_string)?;
212 let mut api = NewsBlurApi::new(&url, &data.user, &data.password, None);
213 let cookie_string = api.login(client).await?;
214 self.api = Some(api);
215 self.logged_in = true;
216 self.config.set_url(&url_string);
217 self.config.set_password(&data.password);
218 self.config.set_user_name(&data.user);
219 self.config.set_cookie_string(&cookie_string);
220 self.config.write()?;
221 return Ok(());
222 }
223
224 self.logged_in = false;
225 self.api = None;
226 Err(FeedApiError::Login)
227 }
228
229 async fn logout(&mut self, client: &Client) -> FeedApiResult<()> {
230 if let Some(api) = &self.api {
231 api.logout(client).await?;
232 self.config.delete()?;
233 self.logged_in = false;
234 return Ok(());
235 }
236 Err(FeedApiError::Login)
237 }
238
239 async fn initial_sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
240 if let Some(api) = &self.api {
241 let response = api.get_feeds(client).await?;
242
243 let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
244 let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;
245
246 let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
247
248 let mut articles: Vec<FatArticle> = Vec::new();
249
250 for feed in &conversion_result.feeds {
252 for page in 1..2 {
253 let response = api.get_stories(client, feed.feed_id.as_str(), false, page).await?;
254
255 let stories_array = &response["stories"].as_array();
256 let stories = match stories_array {
257 Some(stry) => stry,
258 None => break,
259 };
260
261 for story in stories.iter() {
262 let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
263 let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
264
265 let error_msg = format!("date_string: {date_string:?}");
266
267 let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
268 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
269 } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
270 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
271 } else {
272 panic!("Can't parse data: {date_string:?}");
273 }
274 .and_utc();
275
276 let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
277
278 let unread = match article_read {
279 0 => Read::Unread,
280 _ => Read::Read,
281 };
282
283 let article_id = format!("{}:{}", feed.feed_id, story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?);
284
285 let marked = match story["starred"].as_bool() {
286 Some(starred) => {
287 if starred {
288 Marked::Marked
289 } else {
290 Marked::Unmarked
291 }
292 }
293 None => Marked::Unmarked,
294 };
295
296 articles.push(FatArticle {
297 article_id: ArticleID::new(&article_id),
298 title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
299 author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
300 feed_id: feed.feed_id.clone(),
301 url: Url::parse(url).ok(),
302 date,
303 synced: Utc::now(),
304 updated: None,
305 html: Some(story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
306 summary: None,
307 direction: None,
308 unread,
309 marked,
310 scraped_content: None,
311 plain_text: None,
312 thumbnail_url: None,
313 });
314 }
315 }
316 }
317
318 let response = api.get_unread_story_hashes(client).await?;
320 let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiError::Unknown)?;
321
322 let mut unread_story_hashes = Vec::new();
323
324 for hash in hashes.iter() {
325 for story in hash.1.as_array().ok_or(FeedApiError::Unknown)? {
326 unread_story_hashes.push(story.as_str().ok_or(FeedApiError::Unknown)?);
327 }
328 }
329
330 for offset in 0..(unread_story_hashes.len() / 99) {
331 let start = offset * 99;
332 let end = (offset + 1) * 99;
333 let response = if end < unread_story_hashes.len() {
334 api.get_river_stories(client, &unread_story_hashes[start..end]).await?
335 } else {
336 api.get_river_stories(client, &unread_story_hashes[start..unread_story_hashes.len()])
337 .await?
338 };
339 let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
340
341 for story in stories.iter() {
342 let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
343 let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
344
345 let error_msg = format!("date_string: {date_string:?}");
346
347 let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
348 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
349 } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
350 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
351 } else {
352 panic!("Can't parse data: {date_string:?}");
353 }
354 .and_utc();
355
356 let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
357
358 let unread = match article_read {
359 0 => Read::Unread,
360 _ => Read::Read,
361 };
362
363 let article_id = format!(
364 "{}:{}",
365 story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
366 story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
367 );
368
369 let marked = match story["starred"].as_bool() {
370 Some(starred) => {
371 if starred {
372 Marked::Marked
373 } else {
374 Marked::Unmarked
375 }
376 }
377 None => Marked::Unmarked,
378 };
379
380 let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
381 let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
382
383 articles.push(FatArticle {
384 article_id: ArticleID::new(&article_id),
385 title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
386 author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
387 feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
388 url: Url::parse(url).ok(),
389 date,
390 synced: Utc::now(),
391 updated: None,
392 html: Some(html),
393 summary: None,
394 direction: None,
395 unread,
396 marked,
397 scraped_content: None,
398 plain_text: None,
399 thumbnail_url,
400 });
401 }
402 }
403
404 let response = api.get_stared_story_hashes(client).await?;
408 let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiError::Unknown)?;
409
410 let mut stared_story_hashes = Vec::new();
411
412 for hash in hashes.iter() {
413 stared_story_hashes.push(hash.as_str().ok_or(FeedApiError::Unknown)?);
414 }
415
416 for offset in 0..(stared_story_hashes.len() / 99) {
417 let start = offset * 99;
418 let end = (offset + 1) * 99;
419 let response = if end < stared_story_hashes.len() {
420 api.get_river_stories(client, &stared_story_hashes[start..end]).await?
421 } else {
422 api.get_river_stories(client, &stared_story_hashes[start..stared_story_hashes.len()])
423 .await?
424 };
425 let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;
426
427 for story in stories.iter() {
428 let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
429 let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;
430
431 let error_msg = format!("date_string: {date_string:?}");
432
433 let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
434 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
435 } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
436 NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
437 } else {
438 panic!("Can't parse data: {date_string:?}");
439 }
440 .and_utc();
441
442 let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;
443
444 let unread = match article_read {
445 0 => Read::Unread,
446 _ => Read::Read,
447 };
448
449 let article_id = format!(
450 "{}:{}",
451 story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
452 story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
453 );
454
455 let mut article_offset = None;
456 for (i, article) in articles.iter().enumerate() {
457 if article.article_id == ArticleID::new(&article_id) {
458 article_offset = Some(i);
459 }
460 }
461
462 let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
463 let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);
464
465 if article_offset.is_none() {
466 articles.push(FatArticle {
467 article_id: ArticleID::new(&article_id),
468 title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
469 author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
470 feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
471 url: Url::parse(url).ok(),
472 date,
473 synced: Utc::now(),
474 updated: None,
475 html: Some(html),
476 summary: None,
477 direction: None,
478 unread,
479 marked: Marked::Marked,
480 scraped_content: None,
481 plain_text: None,
482 thumbnail_url,
483 });
484 }
485 }
486 }
487
488 return Ok(SyncResult {
489 feeds: util::vec_to_option(conversion_result.feeds),
490 categories: util::vec_to_option(conversion_result.categories),
491 feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
492 category_mappings: util::vec_to_option(conversion_result.category_mappings),
493 tags: None,
494 taggings: None,
495 headlines: None,
496 articles: util::vec_to_option(articles),
497 enclosures: None,
498 });
499 }
500 Err(FeedApiError::Login)
501 }
502
    /// Incremental synchronization.
    ///
    /// Refreshes feeds/folders, downloads unread and starred stories missing
    /// from the local database, pulls a window of recently read stories, and
    /// emits `Headline` updates that flip locally-unread articles to read when
    /// the server no longer reports them as unread.
    ///
    /// # Errors
    /// `FeedApiError::Login` without an active session; `FeedApiError::Unknown`
    /// when the server response is missing mandatory fields.
    async fn sync(&self, client: &Client, _custom_header: FeedHeaderMap) -> FeedApiResult<SyncResult> {
        if let Some(api) = &self.api {
            // User-configured upper bound of articles to sync.
            let max_count = self.portal.get_config().read().await.get_sync_amount();

            let response = api.get_feeds(client).await?;

            let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
            let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;

            let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
            let feed_ids = conversion_result.feeds.iter().map(|f| f.feed_id.clone()).collect::<Vec<_>>();

            let mut articles: Vec<FatArticle> = Vec::new();
            let mut headlines: Vec<Headline> = Vec::new();

            // Every story hash the server currently lists as unread.
            let response = api.get_unread_story_hashes(client).await?;
            let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiError::Unknown)?;
            let mut unread_story_hashes = Vec::new();

            for hash in hashes.iter() {
                for story in hash.1.as_array().ok_or(FeedApiError::Unknown)? {
                    unread_story_hashes.push(story.as_str().ok_or(FeedApiError::Unknown)?);
                }
            }

            // Every story hash the server lists as starred.
            let response = api.get_stared_story_hashes(client).await?;
            let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiError::Unknown)?;

            let mut stared_story_hashes = Vec::new();

            for hash in hashes.iter() {
                stared_story_hashes.push(hash.as_str().ok_or(FeedApiError::Unknown)?);
            }

            // Only fetch unread stories not yet present in the local database.
            let local_unread_ids = self.portal.get_article_ids_unread_all()?;
            let mut unread_story_hashes_to_fetch = Vec::new();
            for id in &unread_story_hashes {
                if local_unread_ids.contains(&ArticleID::new(id)) {
                    continue;
                }
                unread_story_hashes_to_fetch.push(*id);
            }

            // Download missing unread stories in chunks of at most 99 hashes.
            for offset in 0..((unread_story_hashes_to_fetch.len() / 99) + 1) {
                let start = offset * 99;
                let end = (offset + 1) * 99;
                let response = if end < unread_story_hashes_to_fetch.len() {
                    api.get_river_stories(client, &unread_story_hashes_to_fetch[start..end]).await?
                } else {
                    api.get_river_stories(client, &unread_story_hashes_to_fetch[start..unread_story_hashes_to_fetch.len()])
                        .await?
                };
                let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;

                    let error_msg = format!("date_string: {date_string:?}");

                    // Dates arrive with or without fractional seconds.
                    // NOTE(review): an unparseable date panics here instead of
                    // returning an error — consider mapping to FeedApiError.
                    let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
                    } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
                    } else {
                        panic!("Can't parse data: {date_string:?}");
                    }
                    .and_utc();

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;

                    // read_status 0 == unread; everything else counts as read.
                    let unread = match article_read {
                        0 => Read::Unread,
                        _ => Read::Read,
                    };

                    // Article ids are "<feed id>:<guid hash>".
                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
                        story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
                    );

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
                    let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now(),
                        updated: None,
                        html: Some(html),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url,
                    });
                }
            }

            // Articles still unread locally but absent from the server's unread
            // list were read elsewhere: emit headlines flipping them to read
            // while preserving their star state.
            let local_unread_ids = self.portal.get_article_ids_unread_all()?;
            for local_id in local_unread_ids {
                if unread_story_hashes.contains(&local_id.as_str()) {
                    continue;
                }
                let should_mark_read_headlines = Headline {
                    article_id: ArticleID::new(&local_id.to_string()),
                    unread: Read::Read,
                    marked: if stared_story_hashes.contains(&local_id.as_str()) {
                        Marked::Marked
                    } else {
                        Marked::Unmarked
                    },
                };
                headlines.push(should_mark_read_headlines);
            }

            // Pages of recently read stories to pull: spend the remaining sync
            // budget. NOTE(review): the divisor 6 looks like a hand-tuned
            // stories-per-page estimate — confirm.
            let read_sync_num = if max_count > unread_story_hashes.len() as u32 {
                (max_count - unread_story_hashes.len() as u32) / 6
            } else {
                1
            };

            for page in 1..read_sync_num {
                let response = api.get_read_stories(client, page).await?;
                let stories_array = &response["stories"].as_array();
                let mut page_articles: Vec<ArticleID> = Vec::new();
                let stories = match stories_array {
                    Some(stry) => stry,
                    None => break,
                };

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;

                    let error_msg = format!("date_string: {date_string:?}");

                    let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
                    } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
                    } else {
                        panic!("Can't parse data: {date_string:?}");
                    }
                    .and_utc();

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;

                    let unread = match article_read {
                        0 => Read::Unread,
                        _ => Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
                        story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
                    );

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
                    let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now(),
                        updated: None,
                        html: Some(html),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url,
                    });

                    // Remember read articles of this page to probe the local DB below.
                    if unread == Read::Read {
                        page_articles.push(ArticleID::new(&article_id));
                    }
                }

                // Stop paging once the local database already knows this page.
                // NOTE(review): `get_articles(..).is_ok()` is used as "already
                // present" — confirm it errors on unknown ids.
                if self.portal.get_articles(&page_articles).is_ok() {
                    break;
                }
            }

            // Download starred stories missing locally; chunked like above.
            let local_marked_ids = self.portal.get_article_ids_marked_all()?;
            let mut marked_story_hashes_to_fetch = Vec::new();
            for id in stared_story_hashes {
                if local_marked_ids.contains(&ArticleID::new(id)) {
                    continue;
                }
                marked_story_hashes_to_fetch.push(id);
            }

            if !marked_story_hashes_to_fetch.is_empty() {
                for offset in 0..((marked_story_hashes_to_fetch.len() / 99) + 1) {
                    let start = offset * 99;
                    let end = (offset + 1) * 99;
                    let response = if end < marked_story_hashes_to_fetch.len() {
                        api.get_river_stories(client, &marked_story_hashes_to_fetch[start..end]).await?
                    } else {
                        api.get_river_stories(client, &marked_story_hashes_to_fetch[start..marked_story_hashes_to_fetch.len()])
                            .await?
                    };
                    let stories = &response["stories"].as_array().ok_or(FeedApiError::Unknown)?;

                    for story in stories.iter() {
                        let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
                        let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;

                        let error_msg = format!("date_string: {date_string:?}");

                        let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
                            NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
                        } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
                            NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
                        } else {
                            panic!("Can't parse data: {date_string:?}");
                        }
                        .and_utc();

                        let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;

                        let unread = match article_read {
                            0 => Read::Unread,
                            _ => Read::Read,
                        };

                        let article_id = format!(
                            "{}:{}",
                            story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?,
                            story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?
                        );

                        // Check whether the article was already collected above.
                        let mut article_offset = None;
                        for (i, article) in articles.iter().enumerate() {
                            if article.article_id == ArticleID::new(&article_id) {
                                article_offset = Some(i);
                            }
                        }
                        let html = story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string();
                        let thumbnail_url = crate::util::thumbnail::extract_thumbnail(&html);

                        // Only push new articles; these are force-marked as starred.
                        if article_offset.is_none() {
                            articles.push(FatArticle {
                                article_id: ArticleID::new(&article_id),
                                title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                                author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                                feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiError::Unknown)?)),
                                url: Url::parse(url).ok(),
                                date,
                                synced: Utc::now(),
                                updated: None,
                                html: Some(html),
                                summary: None,
                                direction: None,
                                unread,
                                marked: Marked::Marked,
                                scraped_content: None,
                                plain_text: None,
                                thumbnail_url,
                            });
                        }
                    }
                }
            }

            // Drop articles of feeds we no longer subscribe to, unless starred.
            let articles = articles
                .into_iter()
                .filter(|a| feed_ids.contains(&a.feed_id) || a.marked == Marked::Marked)
                .collect();

            return Ok(SyncResult {
                feeds: util::vec_to_option(conversion_result.feeds),
                categories: util::vec_to_option(conversion_result.categories),
                feed_mappings: util::vec_to_option(conversion_result.feed_mappings),
                category_mappings: util::vec_to_option(conversion_result.category_mappings),
                tags: None,
                taggings: None,
                headlines: util::vec_to_option(headlines),
                articles: util::vec_to_option(articles),
                enclosures: None,
            });
        }
        Err(FeedApiError::Login)
    }
839
    /// Refresh a single feed: re-read its definition from the subscription
    /// list and download the first page of its stories.
    ///
    /// # Errors
    /// `FeedApiError::Login` without an active session; `FeedApiError::Unknown`
    /// when the server response is missing mandatory fields.
    async fn fetch_feed(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FeedUpdateResult> {
        if let Some(api) = &self.api {
            let response = api.get_feeds(client).await?;

            let feeds = &response["feeds"].as_object().ok_or(FeedApiError::Unknown)?;
            let folders = &response["folders"].as_array().ok_or(FeedApiError::Unknown)?;

            // Locate the up-to-date definition of the requested feed.
            let conversion_result = NewsBlurService::convert_feed_vec(feeds, folders)?;
            let feed = conversion_result.feeds.iter().find(|feed| &feed.feed_id == feed_id).cloned();

            let mut articles: Vec<FatArticle> = Vec::new();

            // `1..2` fetches page 1 only — NOTE(review): presumably deliberate
            // to keep single-feed refreshes cheap; confirm.
            for page in 1..2 {
                let response = api.get_stories(client, feed_id.as_str(), false, page).await?;

                let stories_array = &response["stories"].as_array();
                let stories = match stories_array {
                    Some(stry) => stry,
                    None => break,
                };

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiError::Unknown)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiError::Unknown)?;

                    let error_msg = format!("date_string: {date_string:?}");

                    // Dates arrive with or without fractional seconds.
                    // NOTE(review): an unparseable date panics here instead of
                    // returning an error — consider mapping to FeedApiError.
                    let date = if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f").expect(&error_msg)
                    } else if NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").is_ok() {
                        NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S").expect(&error_msg)
                    } else {
                        panic!("Can't parse data: {date_string:?}");
                    }
                    .and_utc();

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiError::Unknown)?;

                    // read_status 0 == unread; everything else counts as read.
                    let unread = match article_read {
                        0 => Read::Unread,
                        _ => Read::Read,
                    };

                    // Article ids are "<feed id>:<guid hash>".
                    let article_id = format!("{}:{}", feed_id, story["guid_hash"].as_str().ok_or(FeedApiError::Unknown)?);

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        feed_id: feed_id.clone(),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now(),
                        updated: None,
                        html: Some(story["story_content"].as_str().ok_or(FeedApiError::Unknown)?.to_string()),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url: None,
                    });
                }
            }

            Ok(FeedUpdateResult {
                feed,
                taggings: None,
                articles: util::vec_to_option(articles),
                enclosures: None,
            })
        } else {
            Err(FeedApiError::Login)
        }
    }
928
929 async fn set_article_read(&self, articles: &[ArticleID], read: Read, client: &Client) -> FeedApiResult<()> {
930 if let Some(api) = &self.api {
931 let story_hashes = articles.iter().map(ArticleID::as_str).collect::<Vec<_>>();
932 if read == Read::Read {
933 api.mark_stories_read(client, &story_hashes).await?;
934 } else {
935 api.mark_story_unread(client, &story_hashes).await?;
936 }
937
938 return Ok(());
939 }
940 Err(FeedApiError::Login)
941 }
942
943 async fn set_article_marked(&self, articles: &[ArticleID], marked: Marked, client: &Client) -> FeedApiResult<()> {
944 if let Some(api) = &self.api {
945 for article in articles {
946 if marked == Marked::Marked {
947 api.mark_story_hash_as_starred(client, article.as_str()).await?;
948 } else {
949 api.mark_story_hash_as_unstarred(client, article.as_str()).await?;
950 }
951 }
952
953 return Ok(());
954 }
955 Err(FeedApiError::Login)
956 }
957
958 async fn set_feed_read(&self, feeds: &[FeedID], _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
959 if let Some(api) = &self.api {
960 for feed in feeds {
961 api.mark_feed_read(client, feed.as_str()).await?;
962 }
963
964 return Ok(());
965 }
966 Err(FeedApiError::Unsupported)
967 }
968
    /// NewsBlur has no per-category read endpoint: fall back to marking the
    /// contained articles read individually.
    async fn set_category_read(&self, _categories: &[CategoryID], articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
        self.set_article_read(articles, Read::Read, client).await
    }
972
    /// Tags are not supported by this backend.
    async fn set_tag_read(&self, _tags: &[TagID], _articles: &[ArticleID], _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }
976
977 async fn set_all_read(&self, _articles: &[ArticleID], client: &Client) -> FeedApiResult<()> {
978 if let Some(api) = &self.api {
979 api.mark_all_read(client).await?;
980
981 return Ok(());
982 }
983 Err(FeedApiError::Unsupported)
984 }
985
    // --- Unsupported operations -------------------------------------------
    // This integration is read/sync only: feed, category and tag management
    // as well as OPML import are not implemented and uniformly report
    // `FeedApiError::Unsupported` (matching `PluginCapabilities::NONE` above).

    async fn add_feed(
        &self,
        _url: &Url,
        _title: Option<String>,
        _category_id: Option<CategoryID>,
        _client: &Client,
    ) -> FeedApiResult<(Feed, Option<Category>)> {
        Err(FeedApiError::Unsupported)
    }

    async fn remove_feed(&self, _id: &FeedID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn move_feed(&self, _feed_id: &FeedID, _from: &CategoryID, _to: &CategoryID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn rename_feed(&self, _feed_id: &FeedID, _new_title: &str, _client: &Client) -> FeedApiResult<FeedID> {
        Err(FeedApiError::Unsupported)
    }

    async fn edit_feed_url(&self, _feed_id: &FeedID, _new_url: &str, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn add_category<'a>(&self, _title: &str, _parent: Option<&'a CategoryID>, _client: &Client) -> FeedApiResult<CategoryID> {
        Err(FeedApiError::Unsupported)
    }

    async fn remove_category(&self, _id: &CategoryID, _remove_children: bool, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn rename_category(&self, _id: &CategoryID, _new_title: &str, _client: &Client) -> FeedApiResult<CategoryID> {
        Err(FeedApiError::Unsupported)
    }

    async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn import_opml(&self, _opml: &str, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn add_tag(&self, _title: &str, _client: &Client) -> FeedApiResult<TagID> {
        Err(FeedApiError::Unsupported)
    }

    async fn remove_tag(&self, _id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn rename_tag(&self, _id: &TagID, _new_title: &str, _client: &Client) -> FeedApiResult<TagID> {
        Err(FeedApiError::Unsupported)
    }

    async fn tag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }

    async fn untag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiError::Unsupported)
    }
1051
1052 async fn get_favicon(&self, feed_id: &FeedID, client: &Client, _custom_header: HeaderMap<HeaderValue>) -> FeedApiResult<FavIcon> {
1053 if let Some(api) = &self.api {
1054 let response = api.favicons(client, feed_id.as_str()).await?;
1055 let favicon_data = &response[feed_id.as_str()];
1056
1057 let data = match favicon_data.as_str() {
1058 Some(string) => Some(base64_std.decode(string).map_err(|_| FeedApiError::Encryption)?),
1059 None => None,
1060 };
1061
1062 let favicon = FavIcon {
1063 feed_id: feed_id.clone(),
1064 expires: Utc::now() + Duration::try_days(EXPIRES_AFTER_DAYS).unwrap(),
1065 format: None,
1066 etag: None,
1067 source_url: None,
1068 data,
1069 };
1070
1071 return Ok(favicon);
1072 }
1073 Err(FeedApiError::Login)
1074 }
1075}