pub mod config;
pub mod metadata;

use std::collections::HashSet;

use self::config::AccountConfig;
use crate::feed_api::{FeedApi, FeedApiErrorKind, FeedApiResult, Portal};
use crate::feed_api_implementations::NewsBlurMetadata;
use crate::models::{
    self, article, ArticleID, Category, CategoryID, CategoryMapping, CategoryType, DirectLogin, FavIcon, Feed, FeedID, Headline, LoginData, Marked,
    PasswordLogin, PluginCapabilities, SyncResult, TagID, Url, NEWSFLASH_TOPLEVEL,
};
use crate::util;
use crate::util::favicon_cache::EXPIRES_AFTER_DAYS;
use crate::FatArticle;
use crate::FeedMapping;
use article::Read;
use async_trait::async_trait;
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use failure::ResultExt;
use newsblur_api::NewsBlurApi;
use reqwest::Client;
use serde_json::{Map, Value};

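/// NewsFlash backend for the NewsBlur API.
///
/// `api` stays `None` until `login` succeeds; the server URL, credentials and
/// session cookie are persisted through `config`.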
pub struct NewsBlurService {
    api: Option<NewsBlurApi>,
    portal: Box<dyn Portal>,
    logged_in: bool,
    config: AccountConfig,
}

impl NewsBlurService {
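    /// Converts NewsBlur's `feeds` map and `folders` array into NewsFlash feeds,
    /// categories and their mappings. Bare feed ids at the top level of `folders`
    /// are mapped to `NEWSFLASH_TOPLEVEL`; folder objects are handled recursively
    /// by `parse_folder_value`.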
    fn convert_feed_vec(
        raw_feeds: &Map<String, Value>,
        raw_folders: &[Value],
    ) -> FeedApiResult<(Vec<Feed>, Vec<FeedMapping>, Vec<Category>, Vec<CategoryMapping>)> {
        let mut feed_mappings: Vec<FeedMapping> = Vec::new();
        let mut folder_mappings: Vec<CategoryMapping> = Vec::new();
        let mut feeds: Vec<Feed> = Vec::new();
        let mut folders: Vec<Category> = Vec::new();

        for feed in raw_feeds {
            let website = feed.1["feed_link"].as_str().ok_or(FeedApiErrorKind::Api)?;
            let feed_address = feed.1["feed_address"].as_str().ok_or(FeedApiErrorKind::Api)?;
            let icon_url = feed.1["favicon_url"].as_str().ok_or(FeedApiErrorKind::Api)?;

            feeds.push(Feed {
                feed_id: FeedID::new(&feed.0.to_string()),
                label: feed.1["feed_title"].as_str().unwrap_or("Unknown Feed").into(),
                website: Url::parse(website).ok(),
                feed_url: Url::parse(feed_address).ok(),
                icon_url: Url::parse(&format!("https://newsblur.com/{}", icon_url)).ok(),
            });
        }

        let feed_ids: HashSet<&FeedID> = feeds.iter().map(|f| &f.feed_id).collect();

        for (i, folder) in raw_folders.iter().enumerate() {
            if folder.is_object() {
                let folder_object = folder.as_object().ok_or(FeedApiErrorKind::Api)?;

                for (j, fol) in folder_object.iter().enumerate() {
                    Self::parse_folder_value(
                        fol,
                        &NEWSFLASH_TOPLEVEL,
                        (i + j) as i32,
                        &feed_ids,
                        &mut feed_mappings,
                        &mut folders,
                        &mut folder_mappings,
                    )?;
                }
            } else {
                let id_num = folder.as_u64().ok_or(FeedApiErrorKind::Api)?;
                let id_string = format!("{}", id_num);
                feed_mappings.push(FeedMapping {
                    feed_id: FeedID::new(&id_string),
                    category_id: NEWSFLASH_TOPLEVEL.clone(),
                    sort_index: Some(i as i32),
                });
            }
        }

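        // Sanity check: every mapping should reference a feed and folder that were
        // actually parsed above; anything else is only logged.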
        for mapping in &feed_mappings {
            let feed_exists = feeds.iter().any(|f| f.feed_id == mapping.feed_id);
            let folder_exists = folders.iter().any(|f| f.category_id == mapping.category_id);

            if !feed_exists {
                log::error!("feed does not exist: {}", mapping.feed_id);
            }
            if !folder_exists {
                log::error!("folder does not exist: {}", mapping.category_id);
            }
        }

        Ok((feeds, feed_mappings, folders, folder_mappings))
    }

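    /// Recursively converts one NewsBlur folder entry (`name -> [feed ids / nested
    /// folders]`) into a `Category` plus the feed and category mappings below it.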
    fn parse_folder_value(
        fol: (&String, &Value),
        parent: &CategoryID,
        sort_index: i32,
        feed_ids: &HashSet<&FeedID>,
        feed_mappings: &mut Vec<FeedMapping>,
        folders: &mut Vec<Category>,
        folder_mappings: &mut Vec<CategoryMapping>,
    ) -> FeedApiResult<()> {
        let category_id = CategoryID::new(fol.0);

        folders.push(Category {
            category_id: category_id.clone(),
            label: fol.0.to_string(),
            category_type: CategoryType::Default,
        });

        folder_mappings.push(CategoryMapping {
            parent_id: parent.clone(),
            category_id: category_id.clone(),
            sort_index: Some(sort_index),
        });

        for (i, id) in fol.1.as_array().ok_or(FeedApiErrorKind::Api)?.iter().enumerate() {
            if let Some(subfolder) = id.as_object() {
                for (i, subfol) in subfolder.iter().enumerate() {
                    Self::parse_folder_value(subfol, &category_id, i as i32, feed_ids, feed_mappings, folders, folder_mappings)?;
                }
            } else {
                let id_num = id.as_u64().ok_or(FeedApiErrorKind::Api)?;
                let id_string = format!("{}", id_num);
                let feed_id = FeedID::new(&id_string);

                if !feed_ids.contains(&feed_id) {
                    log::error!("feed with id {} in folder {} does not exist", feed_id, fol.0);
                    continue;
                }

                feed_mappings.push(FeedMapping {
                    feed_id,
                    category_id: CategoryID::new(fol.0),
                    sort_index: Some(i as i32),
                });
            }
        }

        Ok(())
    }
}

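// Read/unread and star state are synced back to NewsBlur; feed, category and
// tag management all return `Unsupported`.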
#[async_trait]
impl FeedApi for NewsBlurService {
    fn features(&self) -> FeedApiResult<PluginCapabilities> {
        Ok(PluginCapabilities::NONE)
    }

    fn has_user_configured(&self) -> FeedApiResult<bool> {
        Ok(self.api.is_some())
    }

    async fn is_logged_in(&self, _client: &Client) -> FeedApiResult<bool> {
        Ok(self.logged_in)
    }

    fn user_name(&self) -> Option<String> {
        self.config.get_user_name()
    }

    fn get_login_data(&self) -> Option<LoginData> {
        if let Ok(true) = self.has_user_configured() {
            if let Some(username) = self.config.get_user_name() {
                if let Some(password) = self.config.get_password() {
                    return Some(LoginData::Direct(DirectLogin::Password(PasswordLogin {
                        id: NewsBlurMetadata::get_id(),
                        url: self.config.get_url(),
                        user: username,
                        password,
                        basic_auth: None,
                    })));
                }
            }
        }

        None
    }

    async fn login(&mut self, data: LoginData, client: &Client) -> FeedApiResult<()> {
        if let LoginData::Direct(DirectLogin::Password(data)) = data {
            let url_string = data.url.clone().ok_or(FeedApiErrorKind::Url)?;
            let url = Url::parse(&url_string).context(FeedApiErrorKind::Url)?;
            let mut api = NewsBlurApi::new(&url, &data.user, &data.password, None);
            let cookie_string = api.login(client).await.context(FeedApiErrorKind::Api)?;
            self.api = Some(api);
            self.logged_in = true;
            self.config.set_url(&url_string);
            self.config.set_password(&data.password);
            self.config.set_user_name(&data.user);
            self.config.set_cookie_string(&cookie_string);
            self.config.write()?;
            return Ok(());
        }

        self.logged_in = false;
        self.api = None;
        Err(FeedApiErrorKind::Login.into())
    }

    async fn logout(&mut self, client: &Client) -> FeedApiResult<()> {
        if let Some(api) = &self.api {
            api.logout(client).await.context(FeedApiErrorKind::Api)?;
            self.config.delete()?;
            self.logged_in = false;
            return Ok(());
        }
        Err(FeedApiErrorKind::Login.into())
    }

    async fn initial_sync(&self, client: &Client) -> FeedApiResult<SyncResult> {
        if let Some(api) = &self.api {
            let response = api.get_feeds(client).await.context(FeedApiErrorKind::Api)?;

            let feeds = &response["feeds"].as_object().ok_or(FeedApiErrorKind::Api)?;
            let folders = &response["folders"].as_array().ok_or(FeedApiErrorKind::Api)?;

            let (feeds, feed_mappings, folders, folder_mappings) = NewsBlurService::convert_feed_vec(feeds, folders)?;

            let mut articles: Vec<FatArticle> = Vec::new();

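            // Fetch the first page of stories for every subscribed feed.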
            for feed in &feeds {
                for page in 1..2 {
                    let response = api
                        .get_stories(client, feed.feed_id.as_str(), false, page)
                        .await
                        .context(FeedApiErrorKind::Api)?;

                    let stories_array = &response["stories"].as_array();
                    let stories = match stories_array {
                        Some(stry) => stry,
                        None => break,
                    };

                    for story in stories.iter() {
                        let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                        let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                        let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                            .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                            .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                        let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                        let unread = match article_read {
                            0 => article::Read::Unread,
                            _ => article::Read::Read,
                        };

                        let article_id = format!("{}:{}", feed.feed_id, story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?);

                        let marked = match story["starred"].as_bool() {
                            Some(starred) => {
                                if starred {
                                    Marked::Marked
                                } else {
                                    Marked::Unmarked
                                }
                            }
                            None => Marked::Unmarked,
                        };

                        articles.push(FatArticle {
                            article_id: ArticleID::new(&article_id),
                            title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            feed_id: feed.feed_id.clone(),
                            url: Url::parse(url).ok(),
                            date,
                            synced: Utc::now().naive_utc(),
                            html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            summary: None,
                            direction: None,
                            unread,
                            marked,
                            scraped_content: None,
                            plain_text: None,
                            thumbnail_url: None,
                        });
                    }
                }
            }

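            // Fetch every remaining unread story by hash, in batches of 99 hashes
            // per river_stories request.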
            let response = api.get_unread_story_hashes(client).await.context(FeedApiErrorKind::Api)?;
            let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiErrorKind::Api)?;

            let mut unread_story_hashes = Vec::new();

            for hash in hashes.iter() {
                for story in hash.1.as_array().ok_or(FeedApiErrorKind::Api)? {
                    unread_story_hashes.push(story.as_str().ok_or(FeedApiErrorKind::Api)?);
                }
            }

            for chunk in unread_story_hashes.chunks(99) {
                let response = api.get_river_stories(client, chunk).await.context(FeedApiErrorKind::Api)?;
                let stories = &response["stories"].as_array().ok_or(FeedApiErrorKind::Api)?;

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                    let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                        .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                        .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                    let unread = match article_read {
                        0 => article::Read::Unread,
                        _ => article::Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?,
                        story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?
                    );

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?)),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now().naive_utc(),
                        html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url: None,
                    });
                }
            }

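            // Starred stories are fetched the same way; stories already collected as
            // unread are skipped, everything else is added with `Marked::Marked`.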
            let response = api.get_stared_story_hashes(client).await.context(FeedApiErrorKind::Api)?;
            let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiErrorKind::Api)?;

            let mut stared_story_hashes = Vec::new();

            for hash in hashes.iter() {
                stared_story_hashes.push(hash.as_str().ok_or(FeedApiErrorKind::Api)?);
            }

            for chunk in stared_story_hashes.chunks(99) {
                let response = api.get_river_stories(client, chunk).await.context(FeedApiErrorKind::Api)?;
                let stories = &response["stories"].as_array().ok_or(FeedApiErrorKind::Api)?;

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                    let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                        .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                        .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                    let unread = match article_read {
                        0 => article::Read::Unread,
                        _ => article::Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?,
                        story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?
                    );

                    let already_synced = articles.iter().any(|article| article.article_id == ArticleID::new(&article_id));

                    if !already_synced {
                        articles.push(FatArticle {
                            article_id: ArticleID::new(&article_id),
                            title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?)),
                            url: Url::parse(url).ok(),
                            date,
                            synced: Utc::now().naive_utc(),
                            html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            summary: None,
                            direction: None,
                            unread,
                            marked: Marked::Marked,
                            scraped_content: None,
                            plain_text: None,
                            thumbnail_url: None,
                        });
                    }
                }
            }

            return Ok(SyncResult {
                feeds: util::vec_to_option(feeds),
                categories: util::vec_to_option(folders),
                feed_mappings: util::vec_to_option(feed_mappings),
                category_mappings: util::vec_to_option(folder_mappings),
                tags: None,
                taggings: None,
                headlines: None,
                articles: util::vec_to_option(articles),
                enclosures: None,
            });
        }
        Err(FeedApiErrorKind::Login.into())
    }

    async fn sync(&self, max_count: u32, _last_sync: DateTime<Utc>, client: &Client) -> FeedApiResult<SyncResult> {
        if let Some(api) = &self.api {
            let response = api.get_feeds(client).await.context(FeedApiErrorKind::Api)?;

            let feeds = &response["feeds"].as_object().ok_or(FeedApiErrorKind::Api)?;
            let folders = &response["folders"].as_array().ok_or(FeedApiErrorKind::Api)?;

            let (feeds, feed_mappings, folders, folder_mappings) = NewsBlurService::convert_feed_vec(feeds, folders)?;
            let feed_ids = feeds.iter().map(|f| f.feed_id.clone()).collect::<Vec<_>>();

            let mut articles: Vec<FatArticle> = Vec::new();
            let mut headlines: Vec<Headline> = Vec::new();

            let response = api.get_unread_story_hashes(client).await.context(FeedApiErrorKind::Api)?;
            let hashes = &response["unread_feed_story_hashes"].as_object().ok_or(FeedApiErrorKind::Api)?;
            let mut unread_story_hashes = Vec::new();

            for hash in hashes.iter() {
                for story in hash.1.as_array().ok_or(FeedApiErrorKind::Api)? {
                    unread_story_hashes.push(story.as_str().ok_or(FeedApiErrorKind::Api)?);
                }
            }

            let response = api.get_stared_story_hashes(client).await.context(FeedApiErrorKind::Api)?;
            let hashes = &response["starred_story_hashes"].as_array().ok_or(FeedApiErrorKind::Api)?;

            let mut stared_story_hashes = Vec::new();

            for hash in hashes.iter() {
                stared_story_hashes.push(hash.as_str().ok_or(FeedApiErrorKind::Api)?);
            }

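            // Only fetch unread stories that the local database does not already contain.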
            let local_unread_ids = self.portal.get_article_ids_unread_all().context(FeedApiErrorKind::Portal)?;
            let mut unread_story_hashes_to_fetch = Vec::new();
            for id in &unread_story_hashes {
                if local_unread_ids.contains(&ArticleID::new(id)) {
                    continue;
                }
                unread_story_hashes_to_fetch.push(*id);
            }

            for chunk in unread_story_hashes_to_fetch.chunks(99) {
                let response = api.get_river_stories(client, chunk).await.context(FeedApiErrorKind::Api)?;
                let stories = &response["stories"].as_array().ok_or(FeedApiErrorKind::Api)?;

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                    let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                        .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                        .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                    let unread = match article_read {
                        0 => article::Read::Unread,
                        _ => article::Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?,
                        story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?
                    );

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?)),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now().naive_utc(),
                        html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url: None,
                    });
                }
            }

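            // Anything that is unread locally but no longer in the server's unread list
            // gets a headline update: mark it read, and starred if the server says so.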
            let local_unread_ids = self.portal.get_article_ids_unread_all().context(FeedApiErrorKind::Portal)?;
            for local_id in local_unread_ids {
                if unread_story_hashes.contains(&local_id.as_str()) {
                    continue;
                }
                let should_mark_read_headline = Headline {
                    article_id: ArticleID::new(&local_id.to_string()),
                    unread: article::Read::Read,
                    marked: if stared_story_hashes.contains(&local_id.as_str()) {
                        Marked::Marked
                    } else {
                        Marked::Unmarked
                    },
                };
                headlines.push(should_mark_read_headline);
            }

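            // Rough budget for how many pages of already-read stories to pull, based on
            // how much of `max_count` is left after the unread stories.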
            let read_sync_num = if max_count > unread_story_hashes.len() as u32 {
                (max_count - unread_story_hashes.len() as u32) / 6
            } else {
                1
            };

            for page in 1..read_sync_num {
                let response = api.get_read_stories(client, page).await.context(FeedApiErrorKind::Api)?;
                let stories_array = &response["stories"].as_array();
                let mut page_articles: Vec<ArticleID> = Vec::new();
                let stories = match stories_array {
                    Some(stry) => stry,
                    None => break,
                };

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                    let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                        .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                        .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                    let unread = match article_read {
                        0 => article::Read::Unread,
                        _ => article::Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?,
                        story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?
                    );

                    let marked = match story["starred"].as_bool() {
                        Some(starred) => {
                            if starred {
                                Marked::Marked
                            } else {
                                Marked::Unmarked
                            }
                        }
                        None => Marked::Unmarked,
                    };

                    articles.push(FatArticle {
                        article_id: ArticleID::new(&article_id),
                        title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?)),
                        url: Url::parse(url).ok(),
                        date,
                        synced: Utc::now().naive_utc(),
                        html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                        summary: None,
                        direction: None,
                        unread,
                        marked,
                        scraped_content: None,
                        plain_text: None,
                        thumbnail_url: None,
                    });

                    if unread == article::Read::Read {
                        page_articles.push(ArticleID::new(&article_id));
                    }
                }

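                // Stop paging once the portal already knows all read articles on this page.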
                if self.portal.get_articles(&page_articles).is_ok() {
                    break;
                }
            }

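            // Fetch any starred stories that are not marked locally yet.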
            let local_marked_ids = self.portal.get_article_ids_marked_all().context(FeedApiErrorKind::Portal)?;
            let mut marked_story_hashes_to_fetch = Vec::new();
            for id in stared_story_hashes {
                if local_marked_ids.contains(&ArticleID::new(id)) {
                    continue;
                }
                marked_story_hashes_to_fetch.push(id);
            }

            for chunk in marked_story_hashes_to_fetch.chunks(99) {
                let response = api.get_river_stories(client, chunk).await.context(FeedApiErrorKind::Api)?;
                let stories = &response["stories"].as_array().ok_or(FeedApiErrorKind::Api)?;

                for story in stories.iter() {
                    let url = story["story_permalink"].as_str().ok_or(FeedApiErrorKind::Api)?;
                    let date_string = story["story_date"].as_str().ok_or(FeedApiErrorKind::Api)?;

                    let date = NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S.%f")
                        .or_else(|_| NaiveDateTime::parse_from_str(date_string, "%Y-%m-%d %H:%M:%S"))
                        .unwrap_or_else(|_| panic!("Can't parse date: {:?}", date_string));

                    let article_read = story["read_status"].as_i64().ok_or(FeedApiErrorKind::Api)?;

                    let unread = match article_read {
                        0 => article::Read::Unread,
                        _ => article::Read::Read,
                    };

                    let article_id = format!(
                        "{}:{}",
                        story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?,
                        story["guid_hash"].as_str().ok_or(FeedApiErrorKind::Api)?
                    );

                    let already_synced = articles.iter().any(|article| article.article_id == ArticleID::new(&article_id));

                    if !already_synced {
                        articles.push(FatArticle {
                            article_id: ArticleID::new(&article_id),
                            title: Some(story["story_title"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            author: Some(story["story_authors"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            feed_id: FeedID::new(&format!("{}", story["story_feed_id"].as_u64().ok_or(FeedApiErrorKind::Api)?)),
                            url: Url::parse(url).ok(),
                            date,
                            synced: Utc::now().naive_utc(),
                            html: Some(story["story_content"].as_str().ok_or(FeedApiErrorKind::Api)?.to_string()),
                            summary: None,
                            direction: None,
                            unread,
                            marked: Marked::Marked,
                            scraped_content: None,
                            plain_text: None,
                            thumbnail_url: None,
                        });
                    }
                }
            }

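            // Drop articles whose feed is no longer subscribed, unless they are starred.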
            let articles = articles
                .into_iter()
                .filter(|a| feed_ids.contains(&a.feed_id) || a.marked == Marked::Marked)
                .collect();

            return Ok(SyncResult {
                feeds: util::vec_to_option(feeds),
                categories: util::vec_to_option(folders),
                feed_mappings: util::vec_to_option(feed_mappings),
                category_mappings: util::vec_to_option(folder_mappings),
                tags: None,
                taggings: None,
                headlines: util::vec_to_option(headlines),
                articles: util::vec_to_option(articles),
                enclosures: None,
            });
        }
        Err(FeedApiErrorKind::Login.into())
    }

    async fn set_article_read(&self, articles: &[ArticleID], read: models::Read, client: &Client) -> FeedApiResult<()> {
        if let Some(api) = &self.api {
            for article in articles {
                if read == models::Read::Read {
                    api.mark_stories_read(client, article.as_str()).await.context(FeedApiErrorKind::Api)?;
                } else {
                    api.mark_story_unread(client, article.as_str()).await.context(FeedApiErrorKind::Api)?;
                }
            }

            return Ok(());
        }
        Err(FeedApiErrorKind::Login.into())
    }

    async fn set_article_marked(&self, articles: &[ArticleID], marked: models::Marked, client: &Client) -> FeedApiResult<()> {
        if let Some(api) = &self.api {
            for article in articles {
                if marked == models::Marked::Marked {
                    api.mark_story_hash_as_starred(client, article.as_str())
                        .await
                        .context(FeedApiErrorKind::Api)?;
                } else {
                    api.mark_story_hash_as_unstarred(client, article.as_str())
                        .await
                        .context(FeedApiErrorKind::Api)?;
                }
            }

            return Ok(());
        }
        Err(FeedApiErrorKind::Login.into())
    }

    async fn set_feed_read(&self, feeds: &[FeedID], _articles: &[ArticleID], _last_sync: DateTime<Utc>, client: &Client) -> FeedApiResult<()> {
        if let Some(api) = &self.api {
            for feed in feeds {
                api.mark_feed_read(client, feed.as_str()).await.context(FeedApiErrorKind::Api)?;
            }

            return Ok(());
        }
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn set_category_read(
        &self,
        _categories: &[CategoryID],
        articles: &[ArticleID],
        _last_sync: DateTime<Utc>,
        client: &Client,
    ) -> FeedApiResult<()> {
        self.set_article_read(articles, Read::Read, client).await
    }

    async fn set_tag_read(&self, _tags: &[TagID], _articles: &[ArticleID], _last_sync: DateTime<Utc>, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn set_all_read(&self, _articles: &[ArticleID], _last_sync: DateTime<Utc>, client: &Client) -> FeedApiResult<()> {
        if let Some(api) = &self.api {
            api.mark_all_read(client).await.context(FeedApiErrorKind::Api)?;

            return Ok(());
        }
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn add_feed(
        &self,
        _url: &Url,
        _title: Option<String>,
        _category_id: Option<CategoryID>,
        _client: &Client,
    ) -> FeedApiResult<(Feed, Option<Category>)> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn remove_feed(&self, _id: &FeedID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn move_feed(&self, _feed_id: &FeedID, _from: &CategoryID, _to: &CategoryID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn rename_feed(&self, _feed_id: &FeedID, _new_title: &str, _client: &Client) -> FeedApiResult<FeedID> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn add_category(&self, _title: &str, _parent: Option<&CategoryID>, _client: &Client) -> FeedApiResult<CategoryID> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn remove_category(&self, _id: &CategoryID, _remove_children: bool, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn rename_category(&self, _id: &CategoryID, _new_title: &str, _client: &Client) -> FeedApiResult<CategoryID> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn move_category(&self, _id: &CategoryID, _parent: &CategoryID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn import_opml(&self, _opml: &str, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn add_tag(&self, _title: &str, _client: &Client) -> FeedApiResult<TagID> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn remove_tag(&self, _id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn rename_tag(&self, _id: &TagID, _new_title: &str, _client: &Client) -> FeedApiResult<TagID> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn tag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn untag_article(&self, _article_id: &ArticleID, _tag_id: &TagID, _client: &Client) -> FeedApiResult<()> {
        Err(FeedApiErrorKind::Unsupported.into())
    }

    async fn get_favicon(&self, feed_id: &FeedID, client: &Client) -> FeedApiResult<FavIcon> {
        if let Some(api) = &self.api {
            let response = api.favicons(client, feed_id.as_str()).await.context(FeedApiErrorKind::Api)?;
            let favicon_data = &response[feed_id.as_str()];

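            // NewsBlur returns the icon as a base64-encoded string keyed by feed id.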
            let data = match favicon_data.as_str() {
                Some(string) => Some(base64::decode(string).context(FeedApiErrorKind::Encryption)?),
                None => None,
            };

            let favicon = FavIcon {
                feed_id: feed_id.clone(),
                expires: Utc::now().naive_utc() + Duration::days(EXPIRES_AFTER_DAYS),
                format: None,
                etag: None,
                source_url: None,
                data,
            };

            return Ok(favicon);
        }
        Err(FeedApiErrorKind::Login.into())
    }
}