pub mod fs;
pub mod validation;
pub mod linkleaf_proto {
    include!(concat!(env!("OUT_DIR"), "/linkleaf.v1.rs"));
}

use crate::fs::{read_feed, write_feed};
use crate::linkleaf_proto::{Feed, Link};
use anyhow::Result;
use std::path::Path;
use time::{Date, OffsetDateTime, PrimitiveDateTime, macros::format_description};
use uuid::Uuid;

const TS_FMT: &[time::format_description::FormatItem<'static>] =
    format_description!("[year]-[month]-[day] [hour]:[minute]:[second]");

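/// Returns `true` if `err` wraps an [`std::io::Error`] with kind `NotFound`.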
fn is_not_found(err: &anyhow::Error) -> bool {
    err.downcast_ref::<std::io::Error>()
        .map(|e| e.kind() == std::io::ErrorKind::NotFound)
        .unwrap_or(false)
}

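/// Overwrites the link at `pos` with the given fields, moves it to the front
/// of `feed.links`, and returns a clone of the updated link.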
fn update_link_in_place(
    feed: &mut Feed,
    pos: usize,
    title: String,
    url: String,
    date: String,
    summary: Option<String>,
    tags: Vec<String>,
    via: Option<String>,
) -> Link {
    let mut item = feed.links.remove(pos);
    item.title = title;
    item.url = url;
    item.date = date;
    item.summary = summary.unwrap_or_default();
    item.tags = tags;
    item.via = via.unwrap_or_default();

    feed.links.insert(0, item.clone());
    item
}

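/// Builds a new [`Link`] from the given fields, inserts it at the front of
/// `feed.links`, and returns a clone of it.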
fn insert_new_link_front(
    feed: &mut Feed,
    id: String,
    title: String,
    url: String,
    date: String,
    summary: Option<String>,
    tags: Vec<String>,
    via: Option<String>,
) -> Link {
    let link = Link {
        id,
        title,
        url,
        date,
        summary: summary.unwrap_or_default(),
        tags,
        via: via.unwrap_or_default(),
    };
    feed.links.insert(0, link.clone());
    link
}

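/// Adds or updates a link in the feed stored at `file`.
///
/// The feed is created (with `version = 1`) if the file does not exist yet.
/// When `id` matches an existing link, that link is updated and moved to the
/// front; when `id` is `None`, a link with the same `url` is updated instead.
/// Otherwise a new link is inserted at the front, using the provided `id` or
/// a freshly generated v4 UUID. The link's `date` is set to the current local
/// time in `YYYY-MM-DD HH:MM:SS` format.
///
/// Returns the inserted or updated [`Link`].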
pub fn add<P, S, T>(
    file: P,
    title: S,
    url: S,
    summary: Option<S>,
    tags: T,
    via: Option<S>,
    id: Option<Uuid>,
) -> Result<Link>
where
    P: AsRef<Path>,
    S: Into<String>,
    T: IntoIterator<Item = S>,
{
    let file = file.as_ref();
    let local_now = OffsetDateTime::now_local()
        .map_err(|e| anyhow::anyhow!("failed to get local time offset: {e}"))?;
    let date = local_now
        .format(TS_FMT)
        .map_err(|e| anyhow::anyhow!("failed to format timestamp: {e}"))?;

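    // A missing feed file is not an error: start a fresh, empty feed instead.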
    let mut feed = match read_feed(file) {
        Ok(f) => f,
        Err(err) if is_not_found(&err) => {
            let mut f = Feed::default();
            f.version = 1;
            f
        }
        Err(err) => return Err(err),
    };

    let tags: Vec<String> = tags.into_iter().map(Into::into).collect();

    // Prefer updating an existing link: match by explicit id when one is
    // given, otherwise by URL; fall back to inserting a new link at the front.
    let updated_or_new = match id {
        Some(uid) => {
            let uid_str = uid.to_string();
            if let Some(pos) = feed.links.iter().position(|l| l.id == uid_str) {
                let item = update_link_in_place(
                    &mut feed,
                    pos,
                    title.into(),
                    url.into(),
                    date,
                    summary.map(Into::into),
                    tags,
                    via.map(Into::into),
                );
                #[cfg(feature = "logs")]
                tracing::info!(id = %item.id, "updated existing link by id");
                item
            } else {
                let item = insert_new_link_front(
                    &mut feed,
                    uid_str,
                    title.into(),
                    url.into(),
                    date,
                    summary.map(Into::into),
                    tags,
                    via.map(Into::into),
                );
                #[cfg(feature = "logs")]
                tracing::info!(id = %item.id, "inserted new link with explicit id");
                item
            }
        }
        None => {
            let url = url.into();
            if let Some(pos) = feed.links.iter().position(|l| l.url == url) {
                let item = update_link_in_place(
                    &mut feed,
                    pos,
                    title.into(),
                    url,
                    date,
                    summary.map(Into::into),
                    tags,
                    via.map(Into::into),
                );
                #[cfg(feature = "logs")]
                tracing::info!(id = %item.id, "updated existing link by url");
                item
            } else {
                let uid = Uuid::new_v4().to_string();
                let item = insert_new_link_front(
                    &mut feed,
                    uid,
                    title.into(),
                    url,
                    date,
                    summary.map(Into::into),
                    tags,
                    via.map(Into::into),
                );
                #[cfg(feature = "logs")]
                tracing::info!(id = %item.id, "inserted new link with generated id");
                item
            }
        }
    };

    let _modified_feed = write_feed(&file, feed)?;
    #[cfg(feature = "logs")]
    tracing::debug!(links = _modified_feed.links.len(), path = %file.display(), "feed written");

    Ok(updated_or_new)
}

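/// Reads the feed at `file` and returns a copy whose links are filtered by
/// the optional `tags` (case-insensitive, any match) and `date` (compared
/// against the date component of each link's timestamp).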
pub fn list<P: AsRef<Path>>(
    file: P,
    tags: Option<Vec<String>>,
    date: Option<Date>,
) -> Result<Feed> {
    let file = file.as_ref();
    let mut feed = read_feed(file)?;

    // Normalize the requested tags once: trim, lowercase, and drop empties.
    let tag_norms: Option<Vec<String>> = tags.map(|ts| {
        ts.iter()
            .map(|t| t.trim().to_ascii_lowercase())
            .filter(|t| !t.is_empty())
            .collect()
    });

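    // Keep a link only if it passes every provided filter; absent filters
    // always match.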
    feed.links.retain(|l| {
        let tag_ok = match &tag_norms {
            Some(needles) => l
                .tags
                .iter()
                .any(|t| needles.iter().any(|n| t.eq_ignore_ascii_case(n))),
            None => true,
        };

        let date_ok = match date {
            Some(p) => PrimitiveDateTime::parse(&l.date, TS_FMT)
                .map(|dt| dt.date() == p)
                .unwrap_or(false),
            None => true,
        };

        tag_ok && date_ok
    });

    Ok(feed)
}

#[cfg(test)]
mod tests {
    use super::{add, list};
    use crate::fs::{read_feed, write_feed};
    use crate::linkleaf_proto::{Feed, Link};
    use anyhow::Result;
    use tempfile::tempdir;
    use time::macros::date;
    use uuid::Uuid;

    fn mk_link(
        id: &str,
        title: &str,
        url: &str,
        date_s: &str,
        tags: &[&str],
        summary: &str,
        via: &str,
    ) -> Link {
        Link {
            id: id.to_string(),
            title: title.to_string(),
            url: url.to_string(),
            date: date_s.to_string(),
            summary: summary.to_string(),
            tags: tags.iter().map(|s| s.to_string()).collect(),
            via: via.to_string(),
        }
    }

    fn mk_feed(links: Vec<Link>) -> Feed {
        let mut f = Feed::default();
        f.version = 1;
        f.links = links;
        f
    }

    #[test]
    fn add_creates_file_and_initializes_feed() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let created = add(
            file.clone(),
            "Tokio",
            "https://tokio.rs/",
            None,
            vec!["rust", "async", "tokio"],
            None,
            None::<Uuid>,
        )?;

        let feed = read_feed(&file)?;
        assert_eq!(feed.version, 1);
        assert_eq!(feed.links.len(), 1);
        let l = &feed.links[0];
        assert_eq!(l.id, created.id);
        assert_eq!(l.title, "Tokio");
        assert_eq!(l.url, "https://tokio.rs/");
        assert_eq!(l.summary, "");
        assert_eq!(l.via, "");
        assert_eq!(l.tags, vec!["rust", "async", "tokio"]);

        let _ = Uuid::parse_str(&created.id).expect("id should be a valid UUID");
        Ok(())
    }

    #[test]
    fn add_with_explicit_id_inserts_with_given_id() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");
        let wanted = Uuid::new_v4();

        let created = add(
            file.clone(),
            "A",
            "https://a.example/",
            Some("hi"),
            Some("x,y"),
            Some("via"),
            Some(wanted),
        )?;

        assert_eq!(created.id, wanted.to_string());

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 1);
        assert_eq!(feed.links[0].id, wanted.to_string());
        Ok(())
    }

    #[test]
    fn add_update_by_id_moves_to_front_and_updates_fields() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");
        let tags = ["alpha"];
        let a = add(
            file.clone(),
            "First",
            "https://one/",
            None,
            tags,
            None,
            None::<Uuid>,
        )?;
        let _b = add(
            file.clone(),
            "Second",
            "https://two/",
            None,
            Some("beta"),
            None,
            None,
        )?;

        let updated = add(
            file.clone(),
            "First (updated)",
            "https://one-new/",
            Some("note"),
            ["rust", "updated"],
            Some("HN"),
            Some(Uuid::parse_str(&a.id)?),
        )?;
        assert_eq!(updated.id, a.id);
        assert_eq!(updated.title, "First (updated)");
        assert_eq!(updated.url, "https://one-new/");
        assert_eq!(updated.summary, "note");
        assert_eq!(updated.via, "HN");
        assert_eq!(updated.tags, vec!["rust", "updated"]);

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, a.id, "updated item should be at index 0");
        assert_eq!(feed.links[0].title, "First (updated)");
        Ok(())
    }

    #[test]
    fn add_update_by_url_when_id_absent() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let first = add(
            file.clone(),
            "Original",
            "https://same.url/",
            None,
            None,
            None,
            None,
        )?;

        let updated = add(
            file.clone(),
            "Original (updated)",
            "https://same.url/",
            Some("s"),
            ["t1", "t2"],
            None,
            None,
        )?;
        assert_eq!(updated.id, first.id);

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 1);
        assert_eq!(feed.links[0].title, "Original (updated)");
        assert_eq!(feed.links[0].tags, vec!["t1", "t2"]);
        Ok(())
    }

    #[test]
    fn add_inserts_new_when_url_diff_and_id_absent() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let _a = add(
            file.clone(),
            "A",
            "https://a/",
            None,
            None,
            None,
            None,
        )?;
        let b = add(
            file.clone(),
            "B",
            "https://b/",
            None,
            None,
            None,
            None,
        )?;

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, b.id, "new item should be at front");
        Ok(())
    }

    #[test]
    fn add_returns_error_on_corrupt_feed() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        std::fs::write(&file, b"not a protobuf")?;

        let err = add(
            file.clone(),
            "X",
            "https://x/",
            None,
            None,
            None,
            None,
        )
        .unwrap_err();

        assert!(!err.to_string().is_empty());
        Ok(())
    }

    #[test]
    fn list_without_filters_returns_all() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let l1 = mk_link(
            "1",
            "One",
            "https://1/",
            "2025-01-02 12:00:00",
            &["rust", "async"],
            "",
            "",
        );
        let l2 = mk_link(
            "2",
            "Two",
            "https://2/",
            "2025-01-03 09:30:15",
            &["tokio"],
            "",
            "",
        );
        write_feed(&file, mk_feed(vec![l2.clone(), l1.clone()]))?;

        let feed = list(&file, None, None)?;
        assert_eq!(feed.links.len(), 2);
        assert_eq!(feed.links[0].id, l2.id);
        assert_eq!(feed.links[1].id, l1.id);
        Ok(())
    }

    #[test]
    fn list_filters_by_tag_case_insensitive_any_match() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let l1 = mk_link(
            "1",
            "One",
            "https://1/",
            "2025-01-02 12:00:00",
            &["rust", "async"],
            "",
            "",
        );
        let l2 = mk_link(
            "2",
            "Two",
            "https://2/",
            "2025-01-03 09:30:15",
            &["Tokio"],
            "",
            "",
        );
        write_feed(&file, mk_feed(vec![l1.clone(), l2.clone()]))?;

        let feed_tokio = list(&file, Some(vec!["tokio".into()]), None)?;
        assert_eq!(feed_tokio.links.len(), 1);
        assert_eq!(feed_tokio.links[0].id, l2.id);

        let feed_async = list(&file, Some(vec!["ASYNC".into()]), None)?;
        assert_eq!(feed_async.links.len(), 1);
        assert_eq!(feed_async.links[0].id, l1.id);

        let feed_multi = list(&file, Some(vec!["zzz".into(), "rust".into()]), None)?;
        assert_eq!(feed_multi.links.len(), 1);
        assert_eq!(feed_multi.links[0].id, l1.id);

        Ok(())
    }

    #[test]
    fn list_filters_by_exact_date_component() -> Result<()> {
        let dir = tempdir()?;
        let file = dir.path().join("feed.pb");

        let l1 = mk_link(
            "1",
            "Jan02",
            "https://1/",
            "2025-01-02 00:00:00",
            &[],
            "",
            "",
        );
        let l2 = mk_link(
            "2",
            "Jan03",
            "https://2/",
            "2025-01-03 23:59:59",
            &[],
            "",
            "",
        );
        write_feed(&file, mk_feed(vec![l1.clone(), l2.clone()]))?;

        let filtered = list(&file, None, Some(date!(2025 - 01 - 03)))?;
        assert_eq!(filtered.links.len(), 1);
        assert_eq!(filtered.links[0].id, l2.id);

        let filtered2 = list(&file, None, Some(date!(2025 - 01 - 02)))?;
        assert_eq!(filtered2.links.len(), 1);
        assert_eq!(filtered2.links[0].id, l1.id);

        Ok(())
    }
}