1pub mod fs;
2pub mod validation;
3pub mod linkleaf_proto {
4 include!(concat!(env!("OUT_DIR"), "/linkleaf.v1.rs"));
5}
6
7use crate::fs::{read_feed, write_feed};
8use crate::linkleaf_proto::{DateTime, Feed, Link, Summary, Via};
9use anyhow::Result;
10use chrono::{FixedOffset, TimeZone};
11use rss::{CategoryBuilder, ChannelBuilder, GuidBuilder, Item, ItemBuilder};
12use std::path::Path;
13use time::Month;
14use time::OffsetDateTime;
15use uuid::Uuid;
16
17fn is_not_found(err: &anyhow::Error) -> bool {
18 err.downcast_ref::<std::io::Error>()
19 .map(|e| e.kind() == std::io::ErrorKind::NotFound)
20 .unwrap_or(false)
21}
22
23fn update_link_in_place(
24 feed: &mut Feed,
25 pos: usize,
26 title: String,
27 url: String,
28 date: Option<DateTime>,
29 summary: Option<Summary>,
30 tags: Vec<String>,
31 via: Option<Via>,
32) -> Link {
33 let mut item = feed.links.remove(pos);
35 item.title = title;
36 item.url = url;
37 item.datetime = date;
38 item.summary = summary;
39 item.tags = tags;
40 item.via = via;
41
42 feed.links.insert(0, item.clone());
43 item
44}
45
46fn insert_new_link_front(
47 feed: &mut Feed,
48 id: String,
49 title: String,
50 url: String,
51 datetime: Option<DateTime>,
52 summary: Option<Summary>,
53 tags: Vec<String>,
54 via: Option<Via>,
55) -> Link {
56 let link = Link {
57 summary: summary,
58 tags, via: via,
60 id,
61 title,
62 url,
63 datetime,
64 };
65 feed.links.insert(0, link.clone());
66 link
67}
68
69fn from_month(value: Month) -> i32 {
70 match value {
71 Month::January => 1,
72 Month::February => 2,
73 Month::March => 3,
74 Month::April => 4,
75 Month::May => 5,
76 Month::June => 6,
77 Month::July => 7,
78 Month::August => 8,
79 Month::September => 9,
80 Month::October => 10,
81 Month::November => 11,
82 Month::December => 12,
83 }
84}
85
86pub fn add<P, S, T>(
164 file: P,
165 title: S,
166 url: S,
167 summary: Option<Summary>,
168 tags: T,
169 via: Option<Via>,
170 id: Option<Uuid>,
171) -> Result<Link>
172where
173 P: AsRef<Path>,
174 S: Into<String>,
175 T: IntoIterator<Item = S>,
176{
177 let file = file.as_ref();
178 let local_now = OffsetDateTime::now_local()
180 .map_err(|e| anyhow::anyhow!("failed to get local time offset: {e}"))?;
181
182 let datetime = DateTime {
183 year: local_now.year() as i32,
184 month: from_month(local_now.month()),
185 day: local_now.day() as i32,
186 hours: local_now.hour() as i32,
187 minutes: local_now.minute() as i32,
188 seconds: local_now.second() as i32,
189 nanos: local_now.nanosecond() as i32,
190 };
191
192 let mut feed = match read_feed(file) {
194 Ok(f) => f,
195 Err(err) if is_not_found(&err) => {
196 let mut f = Feed::default();
197 f.version = 1;
198 f
199 }
200 Err(err) => return Err(err),
201 };
202
203 let title = title.into();
204 let url = url.into();
205 let summary = summary.map(Into::into);
206 let via = via.map(Into::into);
207 let tags: Vec<String> = tags.into_iter().map(Into::into).collect();
208 let id_opt: Option<String> = id.map(|u| u.to_string());
209
210 let updated_or_new = match id_opt {
214 Some(uid) => {
215 if let Some(pos) = feed.links.iter().position(|l| l.id == uid) {
216 let item = update_link_in_place(
217 &mut feed,
218 pos,
219 title,
220 url,
221 Some(datetime),
222 summary,
223 tags,
224 via,
225 );
226 #[cfg(feature = "logs")]
227 tracing::info!(id = %item.id, "updated existing link by id");
228 item
229 } else {
230 let item = insert_new_link_front(
231 &mut feed,
232 uid,
233 title,
234 url,
235 Some(datetime),
236 summary,
237 tags,
238 via,
239 );
240 #[cfg(feature = "logs")]
241 tracing::info!(id = %item.id, "inserted new link with explicit id");
242 item
243 }
244 }
245 None => {
246 if let Some(pos) = feed.links.iter().position(|l| l.url == url) {
247 let item = update_link_in_place(
248 &mut feed,
249 pos,
250 title,
251 url,
252 Some(datetime),
253 summary,
254 tags,
255 via,
256 );
257 #[cfg(feature = "logs")]
258 tracing::info!(id = %item.id, "inserted new link with explicit id");
259 item
260 } else {
261 let uid = Uuid::new_v4().to_string();
262 let item = insert_new_link_front(
263 &mut feed,
264 uid,
265 title,
266 url,
267 Some(datetime),
268 summary,
269 tags,
270 via,
271 );
272 #[cfg(feature = "logs")]
273 tracing::info!(id = %item.id, "inserted new link with explicit id");
274 item
275 }
276 }
277 };
278
279 let _modified_feed = write_feed(&file, feed)?;
280 #[cfg(feature = "logs")]
281 tracing::debug!(links = _modified_feed.links.len(), path = %file.display(), "feed written");
282
283 Ok(updated_or_new)
284}
285
286pub fn list<P: AsRef<Path>>(
313 file: P,
314 tags: Option<Vec<String>>,
315 datetime: Option<DateTime>,
316) -> Result<Feed> {
317 let file = file.as_ref();
318 let mut feed = read_feed(file)?;
319
320 let tag_norms: Option<Vec<String>> = tags.map(|ts| {
321 ts.iter()
322 .map(|t| t.trim().to_ascii_lowercase())
323 .filter(|t| !t.is_empty())
324 .collect()
325 });
326
327 let date_filter: Option<&DateTime> = datetime.as_ref();
328
329 feed.links.retain(|l| {
330 let tag_ok = match &tag_norms {
331 Some(needles) => l
332 .tags
333 .iter()
334 .any(|t| needles.iter().any(|n| t.eq_ignore_ascii_case(n))),
335 None => true,
336 };
337
338 let date_ok = match date_filter {
339 Some(p) => l.datetime.as_ref().map(|dt| dt == p).unwrap_or(false),
340 None => true,
341 };
342
343 tag_ok && date_ok
344 });
345
346 Ok(feed)
347}
348
349impl DateTime {
350 #[allow(deprecated)]
354 pub fn to_rfc2822(&self) -> Option<String> {
355 let month = u32::try_from(self.month).ok()?; let day = u32::try_from(self.day).ok()?; let hours = u32::try_from(self.hours).ok()?; let minutes = u32::try_from(self.minutes).ok()?; let seconds = u32::try_from(self.seconds).ok()?; let dt = FixedOffset::east_opt(0) .map(|d| {
364 d.ymd(self.year, month, day)
365 .and_hms(hours, minutes, seconds)
366 })?;
367
368 Some(dt.to_rfc2822())
369 }
370}
371
372fn to_datetime(proto_datetime: &Option<DateTime>) -> Option<String> {
373 proto_datetime.as_ref().and_then(|dt| dt.to_rfc2822())
374}
375
376pub fn feed_to_rss_xml(feed: &Feed, site_title: &str, site_link: &str) -> Result<String> {
422 let items: Vec<Item> = feed.links.iter().map(|l| link_to_rss_item(l)).collect();
423 let description = format!("Feed about {} generated through Linkleaf", &feed.title);
424
425 let channel = ChannelBuilder::default()
426 .title(if feed.title.is_empty() {
427 site_title.to_string()
428 } else {
429 feed.title.clone()
430 })
431 .link(site_link.to_string())
432 .description(description) .items(items)
434 .build();
435
436 let mut buf = Vec::new();
437 channel.pretty_write_to(&mut buf, b' ', 2)?;
438 Ok(String::from_utf8(buf)?)
439}
440
441fn link_to_rss_item(l: &Link) -> Item {
442 let cats = l
443 .tags
444 .iter()
445 .map(|t| CategoryBuilder::default().name(t.clone()).build())
446 .collect::<Vec<_>>();
447
448 ItemBuilder::default()
449 .title(Some(l.title.clone()))
450 .link(Some(l.url.clone()))
451 .description(l.summary.as_ref().map(|c| c.content.clone()))
452 .categories(cats)
453 .guid(Some(
454 GuidBuilder::default()
455 .value(format!("urn:uuid:{}", l.id))
456 .permalink(false)
457 .build(),
458 ))
459 .pub_date(to_datetime(&l.datetime))
460 .build()
461}
462
463impl Summary {
464 pub fn new(content: &str) -> Self {
483 Summary {
484 content: content.into(),
485 }
486 }
487}
488
489impl Via {
490 pub fn new(url: &str) -> Self {
509 Via { url: url.into() }
510 }
511}
512
#[cfg(test)]
mod tests {
    use super::{add, feed_to_rss_xml, link_to_rss_item, list};
    use crate::fs::{read_feed, write_feed};
    use crate::linkleaf_proto::{DateTime, Feed, Link, Summary, Via};
    use anyhow::Result;
    use tempfile::tempdir;
    use uuid::Uuid;

    /// Test helper: build a fully-populated `Link`.
    ///
    /// Note: `summary` and `via` are always wrapped in `Some(..)`, even when
    /// the caller passes empty strings.
    fn mk_link(
        id: &str,
        title: &str,
        url: &str,
        date_s: DateTime,
        tags: &[&str],
        summary: &str,
        via: &str,
    ) -> Link {
        let _summary = Some(Summary::new(summary));

        let _via = Some(Via::new(via));

        Link {
            id: id.to_string(),
            title: title.to_string(),
            url: url.to_string(),
            datetime: Some(date_s),
            summary: _summary,
            tags: tags.iter().map(|s| s.to_string()).collect(),
            via: _via,
        }
    }

    /// Test helper: wrap the given links in a version-1 `Feed`.
    fn mk_feed(links: Vec<Link>) -> Feed {
        let mut f = Feed::default();
        f.version = 1;
        f.links = links;
        f
    }

    /// Fixed single link used by the RSS-rendering tests.
    fn sample_link() -> Link {
        Link {
            id: "1234".to_string(),
            title: "Example Post".to_string(),
            url: "https://example.com/post".to_string(),
            summary: Some(Summary::new("This is a summary")),
            tags: vec!["rust".to_string(), "rss".to_string()],
            via: None,
            datetime: Some(DateTime {
                year: 2025,
                month: 10,
                day: 1,
                hours: 14,
                minutes: 30,
                seconds: 45,
                nanos: 00,
            }),
        }
    }

    /// One-link feed fixture for the RSS-rendering tests.
    fn sample_feed() -> Feed {
        Feed {
            title: "Test Feed".to_string(),
            links: vec![sample_link()],
            version: 1,
        }
    }

    // `add` on a nonexistent path must create the file and initialize a
    // version-1 feed containing just the new link, with a generated UUID id.
    #[test]
    fn add_creates_file_and_initializes_feed() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let created = add(
            file.clone(),
            "Tokio",
            "https://tokio.rs/".into(),
            None,
            vec!["rust", "async", "tokio"],
            None,
            None::<Uuid>,
        )?;

        let feed = read_feed(&file)?;
        assert_eq!(feed.version, 1);
        assert_eq!(feed.links.len(), 1);
        let l = &feed.links[0];
        assert_eq!(l.id, created.id);
        assert_eq!(l.title, "Tokio");
        assert_eq!(l.url, "https://tokio.rs/");
        assert_eq!(l.summary, None);
        assert_eq!(l.via, None);
        assert_eq!(l.tags, vec!["rust", "async", "tokio"]);

        // The auto-generated id must be a well-formed UUID.
        let _ = Uuid::parse_str(&created.id).expect("id should be a valid UUID");
        Ok(())
    }

    // An explicit id is stored verbatim rather than being replaced by a
    // generated one. (`Some("x,y".into())` exercises Option-as-iterator for
    // the tags parameter: one tag, not two.)
    #[test]
    fn add_with_explicit_id_inserts_with_given_id() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");
        let wanted = Uuid::new_v4();

        let created = add(
            file.clone(),
            "A",
            "https://a.example/".into(),
            Some(Summary::new("hi")),
            Some("x,y".into()),
            Some(Via::new("via")),
            Some(wanted),
        )?;

        assert_eq!(created.id, wanted.to_string());

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 1);
        assert_eq!(feed.links[0].id, wanted.to_string());
        Ok(())
    }

    // Re-adding with an existing id updates that link's fields and moves it
    // to the front of the feed instead of inserting a duplicate.
    #[test]
    fn add_update_by_id_moves_to_front_and_updates_fields() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");
        let tags = ["alpha"];
        let a = add(
            file.clone(),
            "First",
            "https://one/".into(),
            None,
            tags,
            None,
            None::<Uuid>,
        )?;
        let _b = add(
            file.clone(),
            "Second",
            "https://two/".into(),
            None,
            Some("beta".into()),
            None,
            None,
        )?;

        let updated = add(
            file.clone(),
            "First (updated)",
            "https://one-new/".into(),
            Some(Summary::new("note")),
            ["rust", "updated"],
            Some(Via::new("HN")),
            Some(Uuid::parse_str(&a.id)?),
        )?;
        assert_eq!(updated.id, a.id);
        assert_eq!(updated.title, "First (updated)");
        assert_eq!(updated.url, "https://one-new/");
        assert_eq!(updated.summary, Some(Summary::new("note")));
        assert_eq!(updated.via, Some(Via::new("HN")));
        assert_eq!(updated.tags, vec!["rust", "updated"]);

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, a.id, "updated item should be at index 0");
        assert_eq!(feed.links[0].title, "First (updated)");
        Ok(())
    }

    // Without an explicit id, a matching URL triggers an in-place update
    // (same id is kept) rather than a second insert.
    #[test]
    fn add_update_by_url_when_id_absent() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let first = add(
            file.clone(),
            "Original",
            "https://same.url/".into(),
            None,
            None,
            None,
            None,
        )?;

        let updated = add(
            file.clone(),
            "Original (updated)",
            "https://same.url/".into(),
            Some(Summary::new("s")),
            ["t1", "t2"],
            None,
            None,
        )?;
        assert_eq!(updated.id, first.id);

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 1);
        assert_eq!(feed.links[0].title, "Original (updated)");
        assert_eq!(feed.links[0].tags, vec!["t1", "t2"]);
        Ok(())
    }

    // A different URL (and no id) inserts a new link; the newest link sits
    // at the front of the feed.
    #[test]
    fn add_inserts_new_when_url_diff_and_id_absent() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let _a = add(
            file.clone(),
            "A",
            "https://a/".into(),
            None,
            None,
            None,
            None,
        )?;
        let b = add(
            file.clone(),
            "B",
            "https://b/".into(),
            None,
            None,
            None,
            None,
        )?;

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, b.id, "new item should be at front");
        Ok(())
    }

    // A file that exists but cannot be decoded must surface an error (it is
    // NOT treated like a missing file).
    #[test]
    fn add_returns_error_on_corrupt_feed() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        // Garbage bytes: not a valid protobuf-encoded Feed.
        std::fs::write(&file, b"not a protobuf")?;

        let err = add(
            file.clone(),
            "X",
            "https://x/".into(),
            None,
            None,
            None,
            None,
        )
        .unwrap_err();

        assert!(!err.to_string().is_empty());
        Ok(())
    }

    // With no filters, `list` returns all links in stored order.
    #[test]
    fn list_without_filters_returns_all() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let dt1 = DateTime {
            year: 2025,
            month: 1,
            day: 2,
            hours: 12,
            minutes: 0,
            seconds: 0,
            nanos: 0,
        };

        let dt2 = DateTime {
            year: 2025,
            month: 1,
            day: 3,
            hours: 9,
            minutes: 30,
            seconds: 15,
            nanos: 0,
        };

        let l1 = mk_link("1", "One", "https://1/", dt1, &["rust", "async"], "", "");
        let l2 = mk_link("2", "Two", "https://2/", dt2, &["tokio"], "", "");
        write_feed(&file, mk_feed(vec![l2.clone(), l1.clone()]))?;

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, l2.id);
        assert_eq!(feed.links[1].id, l1.id);
        Ok(())
    }

    // Tag filtering is case-insensitive and keeps a link when ANY needle
    // matches ANY of its tags.
    #[test]
    fn list_filters_by_tag_case_insensitive_any_match() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let dt1 = DateTime {
            year: 2025,
            month: 1,
            day: 2,
            hours: 12,
            minutes: 0,
            seconds: 0,
            nanos: 0,
        };

        let dt2 = DateTime {
            year: 2025,
            month: 1,
            day: 3,
            hours: 9,
            minutes: 30,
            seconds: 15,
            nanos: 0,
        };

        let l1 = mk_link("1", "One", "https://1/", dt1, &["rust", "async"], "", "");
        let l2 = mk_link(
            "2",
            "Two",
            "https://2/",
            dt2,
            &["Tokio"],
            "",
            "",
        );
        write_feed(&file, mk_feed(vec![l1.clone(), l2.clone()]))?;

        // Lowercase needle matches the stored "Tokio" tag.
        let feed_tokio = list(&file, Some(vec!["tokio".into()]), None)?;
        assert_eq!(feed_tokio.links.len(), 1);
        assert_eq!(feed_tokio.links[0].id, l2.id);

        // Uppercase needle matches the stored "async" tag.
        let feed_async = list(&file, Some(vec!["ASYNC".into()]), None)?;
        assert_eq!(feed_async.links.len(), 1);
        assert_eq!(feed_async.links[0].id, l1.id);

        // One unmatched needle plus one matched needle still keeps the link.
        let feed_multi = list(&file, Some(vec!["zzz".into(), "rust".into()]), None)?;
        assert_eq!(feed_multi.links.len(), 1);
        assert_eq!(feed_multi.links[0].id, l1.id);

        Ok(())
    }

    // Date filtering compares the FULL DateTime (down to seconds/nanos):
    // same day but different time does not match.
    #[test]
    fn list_filters_by_exact_date_component() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let dt1 = DateTime {
            year: 2025,
            month: 1,
            day: 3,
            hours: 12,
            minutes: 0,
            seconds: 0,
            nanos: 0,
        };

        let dt2 = DateTime {
            year: 2025,
            month: 1,
            day: 3,
            hours: 23,
            minutes: 59,
            seconds: 59,
            nanos: 0,
        };

        let l1 = mk_link("1", "Jan02", "https://1/", dt1, &[], "", "");
        let l2 = mk_link("2", "Jan03", "https://2/", dt2, &[], "", "");
        write_feed(&file, mk_feed(vec![l1.clone(), l2.clone()]))?;

        let filtered = list(&file, None, Some(dt2))?;
        assert_eq!(filtered.links.len(), 1);
        assert_eq!(filtered.links[0].id, l2.id);

        let filtered2 = list(&file, None, Some(dt1))?;
        assert_eq!(filtered2.links.len(), 1);
        assert_eq!(filtered2.links[0].id, l1.id);

        Ok(())
    }

    // A fully-populated link maps onto an RSS item with title, link,
    // description, categories, GUID, and pubDate all set.
    #[test]
    fn test_link_to_rss_item() {
        let link = sample_link();
        let item = link_to_rss_item(&link);

        assert_eq!(item.title.unwrap(), link.title);
        assert_eq!(item.link.unwrap(), link.url);
        assert_eq!(item.description.unwrap(), link.summary.unwrap().content);
        assert_eq!(item.categories.len(), link.tags.len());
        assert!(item.guid.is_some());
        assert!(item.pub_date.is_some());
    }

    // The rendered XML contains the channel metadata and the sample link's
    // title, summary, tags, and urn:uuid GUID.
    #[test]
    fn test_feed_to_rss_xml_basic() {
        let feed = sample_feed();
        let site_title = "Default Site";
        let site_link = "https://example.com";

        let rss_xml =
            feed_to_rss_xml(&feed, site_title, site_link).expect("Failed to generate RSS XML");

        assert!(rss_xml.contains("<title>Test Feed</title>"));
        assert!(rss_xml.contains("<link>https://example.com</link>"));
        assert!(rss_xml.contains("Example Post"));
        assert!(rss_xml.contains("This is a summary"));
        assert!(rss_xml.contains("rust"));
        assert!(rss_xml.contains("rss"));
        assert!(rss_xml.contains("urn:uuid:1234"));
    }

    // An empty feed title falls back to the supplied site title.
    #[test]
    fn test_feed_to_rss_xml_empty_feed_title() {
        let mut feed = sample_feed();
        feed.title = "".to_string();

        let rss_xml = feed_to_rss_xml(&feed, "Default Site", "https://example.com")
            .expect("Failed to generate RSS XML");

        assert!(rss_xml.contains("<title>Default Site</title>"));
    }

    // Absent summary/tags/datetime yield an item with no description, no
    // categories, and no pubDate.
    #[test]
    fn test_link_without_summary_or_tags() {
        let link = Link {
            id: "5678".to_string(),
            title: "No Summary Post".to_string(),
            url: "https://example.com/nosummary".to_string(),
            via: None,
            summary: None,
            tags: vec![],
            datetime: None,
        };

        let item = link_to_rss_item(&link);

        assert!(item.description.is_none());
        assert!(item.categories.is_empty());
        assert!(item.pub_date.is_none());
    }
}