1use std::{
2 collections::{HashMap, HashSet},
3 path::PathBuf,
4 time::Duration,
5};
6
7use log::{debug, error, info, warn};
8use mecomp_analysis::{
9 clustering::{ClusteringHelper, KOptimal, NotInitialized},
10 decoder::{Decoder, MecompDecoder},
11};
12use mecomp_core::{
13 config::ReclusterSettings,
14 state::library::{LibraryBrief, LibraryFull, LibraryHealth},
15};
16use one_or_many::OneOrMany;
17use surrealdb::{Connection, Surreal};
18use tap::TapFallible;
19use tracing::{Instrument, instrument};
20use walkdir::WalkDir;
21
22use mecomp_storage::{
23 db::{
24 health::{
25 count_albums, count_artists, count_collections, count_dynamic_playlists,
26 count_orphaned_albums, count_orphaned_artists, count_orphaned_collections,
27 count_orphaned_playlists, count_playlists, count_songs, count_unanalyzed_songs,
28 },
29 schemas::{
30 album::Album,
31 analysis::Analysis,
32 artist::Artist,
33 collection::Collection,
34 dynamic::DynamicPlaylist,
35 playlist::Playlist,
36 song::{Song, SongMetadata},
37 },
38 },
39 errors::Error,
40 util::MetadataConflictResolution,
41};
42
/// Rescan the music library, reconciling the database with the filesystem.
///
/// Runs in three phases, in order:
/// 1. For every song already in the database: delete it if its file no longer
///    exists, and reconcile on-disk vs. indexed metadata per
///    `conflict_resolution_mode`.
/// 2. Walk every directory in `paths` and index files not already handled in
///    phase 1.
/// 3. Repair albums, artists, collections, and playlists, deleting any that
///    are left orphaned.
///
/// The separator/protected-name arguments are forwarded to
/// `SongMetadata::load_from_path` to control how artist/genre tags are split.
///
/// # Errors
///
/// Returns an error if any database read/write fails; per-file metadata and
/// walk errors are logged and skipped instead of aborting the scan.
#[instrument]
pub async fn rescan<C: Connection>(
    db: &Surreal<C>,
    paths: &[PathBuf],
    artist_name_separator: &OneOrMany<String>,
    protected_artist_names: &OneOrMany<String>,
    genre_separator: Option<&str>,
    conflict_resolution_mode: MetadataConflictResolution,
) -> Result<(), Error> {
    let songs = Song::read_all(db).await?;
    // Paths fully handled in phase 1; phase 2 skips these so existing songs
    // aren't re-indexed.
    let mut paths_to_skip = HashSet::new(); async {
        for song in songs {
            let path = song.path.clone();
            if !path.exists() {
                // File is gone: drop the song from the index. The path is not
                // added to `paths_to_skip` since the walker won't see it anyway.
                warn!("Song {} no longer exists, deleting", path.to_string_lossy());
                Song::delete(db, song.id).await?;
                continue;
            }

            debug!("loading metadata for {}", path.to_string_lossy());
            match SongMetadata::load_from_path(path.clone(), artist_name_separator,protected_artist_names, genre_separator) {
                // On-disk metadata disagrees with what's indexed.
                Ok(metadata) if metadata != SongMetadata::from(&song) => {
                    let log_postfix = if conflict_resolution_mode == MetadataConflictResolution::Skip {
                        "but conflict resolution mode is \"skip\", so we do nothing"
                    } else {
                        "resolving conflict"
                    };
                    info!(
                        "{} has conflicting metadata with index, {log_postfix}",
                        path.to_string_lossy(),
                    );

                    match conflict_resolution_mode {
                        MetadataConflictResolution::Overwrite => {
                            // Merge the fresh metadata into the stored song.
                            Song::update(db, song.id.clone(), metadata.merge_with_song(&song)).await?;
                        }
                        MetadataConflictResolution::Skip => {
                            // NOTE(review): this `continue` also skips the
                            // `paths_to_skip.insert(path)` below, so phase 2
                            // will re-visit this file and attempt
                            // `try_load_into_db` on a path that is already in
                            // the database — confirm that is intended.
                            continue;
                        }
                    }
                }
                Err(e) => {
                    // Metadata can no longer be read: treat the file as no
                    // longer being a (readable) song and remove it.
                    warn!(
                        "Error reading metadata for {}: {}",
                        path.to_string_lossy(),
                        e
                    );
                    info!("assuming the file isn't a song or doesn't exist anymore, removing from library");
                    Song::delete(db, song.id).await?;
                }
                // Metadata matches the index: nothing to do.
                _ => {}
            }

            paths_to_skip.insert(path);
        }

        <Result<(), Error>>::Ok(())
    }.instrument(tracing::info_span!("Checking library for missing or outdated songs")).await?;

    // Phase 2: everything already visited in phase 1 is carried over so the
    // walk below skips it.
    let mut visited_paths = paths_to_skip;

    debug!("Indexing paths: {paths:?}");
    async {
        for path in paths
            .iter()
            .filter_map(|p| {
                // Canonicalize so `visited_paths` comparisons are stable;
                // unresolvable roots are logged and dropped.
                p.canonicalize()
                    .tap_err(|e| warn!("Error canonicalizing path: {e}"))
                    .ok()
            })
            .flat_map(|x| WalkDir::new(x).into_iter())
            .filter_map(|x| x.tap_err(|e| warn!("Error reading path: {e}")).ok())
            // Only regular files can be songs.
            .filter_map(|x| x.file_type().is_file().then_some(x))
        {
            if visited_paths.contains(path.path()) {
                continue;
            }

            visited_paths.insert(path.path().to_owned());

            match SongMetadata::load_from_path(
                path.path().to_owned(),
                artist_name_separator,
                protected_artist_names,
                genre_separator,
            ) {
                // Indexing failures are logged, not fatal.
                Ok(metadata) => Song::try_load_into_db(db, metadata).await.map_or_else(
                    |e| warn!("Error indexing {}: {}", path.path().to_string_lossy(), e),
                    |_| debug!("Indexed {}", path.path().to_string_lossy()),
                ),
                Err(e) => warn!(
                    "Error reading metadata for {}: {}",
                    path.path().to_string_lossy(),
                    e
                ),
            }
        }

        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Indexing new songs"))
    .await?;

    // Phase 3: repair each entity type; `repair` returning `true` marks the
    // record as orphaned, and orphans are deleted.
    async {
        for album in Album::read_all(db).await? {
            if Album::repair(db, album.id.clone()).await? {
                info!("Deleted orphaned album {}", album.id.clone());
                Album::delete(db, album.id.clone()).await?;
            }
        }
        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Repairing albums"))
    .await?;
    async {
        for artist in Artist::read_all(db).await? {
            if Artist::repair(db, artist.id.clone()).await? {
                info!("Deleted orphaned artist {}", artist.id.clone());
                Artist::delete(db, artist.id.clone()).await?;
            }
        }
        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Repairing artists"))
    .await?;
    async {
        for collection in Collection::read_all(db).await? {
            if Collection::repair(db, collection.id.clone()).await? {
                info!("Deleted orphaned collection {}", collection.id.clone());
                Collection::delete(db, collection.id.clone()).await?;
            }
        }
        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Repairing collections"))
    .await?;
    async {
        for playlist in Playlist::read_all(db).await? {
            if Playlist::repair(db, playlist.id.clone()).await? {
                info!("Deleted orphaned playlist {}", playlist.id.clone());
                Playlist::delete(db, playlist.id.clone()).await?;
            }
        }
        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Repairing playlists"))
    .await?;

    info!("Library rescan complete");
    info!("Library brief: {:?}", brief(db).await?);

    Ok(())
}
221
/// Analyze every song in the library that does not yet have an analysis.
///
/// When `overwrite` is true, all existing analyses are deleted first so the
/// entire library is (re)analyzed.
///
/// Decoding/feature extraction runs on a dedicated OS thread; results are
/// streamed back over an mpsc channel and written to the database here.
///
/// # Errors
///
/// Returns an error if a database read/write fails. Failure to create the
/// decoder is logged and results in an early `Ok(())` — it is deliberately
/// not propagated.
#[instrument]
pub async fn analyze<C: Connection>(db: &Surreal<C>, overwrite: bool) -> Result<(), Error> {
    if overwrite {
        async {
            for analysis in Analysis::read_all(db).await? {
                Analysis::delete(db, analysis.id.clone()).await?;
            }
            <Result<(), Error>>::Ok(())
        }
        .instrument(tracing::info_span!("Deleting existing analyses"))
        .await?;
    }

    let songs_to_analyze: Vec<Song> = Analysis::read_songs_without_analysis(db).await?;
    // Map each song's path back to its id so results coming off the channel
    // (keyed by path) can be matched to database records.
    let paths = songs_to_analyze
        .iter()
        .map(|song| (song.path.clone(), song.id.clone()))
        .collect::<HashMap<_, _>>();

    let keys = paths.keys().cloned().collect::<Vec<_>>();

    let (tx, rx) = std::sync::mpsc::channel();

    let Ok(decoder) = MecompDecoder::new() else {
        error!("Error creating decoder");
        return Ok(());
    };

    // Analysis is CPU-bound, so it runs on its own thread; `tx` is moved in,
    // and the `rx` loop below ends when the thread drops it.
    let handle = std::thread::spawn(move || {
        decoder.analyze_paths_with_callback(keys, tx);
    });

    async {
        // NOTE(review): iterating `rx` blocks the current executor thread
        // between results — confirm that is acceptable for this runtime.
        for (song_path, maybe_analysis) in rx {
            let Some(song_id) = paths.get(&song_path) else {
                error!("No song id found for path: {}", song_path.to_string_lossy());
                continue;
            };

            match maybe_analysis {
                // `Analysis::create` returns `None` when the song is missing
                // or already analyzed; that is logged, not treated as fatal.
                Ok(analysis) => Analysis::create(
                    db,
                    song_id.clone(),
                    Analysis {
                        id: Analysis::generate_id(),
                        features: *analysis.inner(),
                    },
                )
                .await?
                .map_or_else(
                    || {
                        warn!(
                            "Error analyzing {}: song either wasn't found or already has an analysis",
                            song_path.to_string_lossy()
                        );
                    },
                    |_| debug!("Analyzed {}", song_path.to_string_lossy()),
                ),
                Err(e) => {
                    error!("Error analyzing {}: {}", song_path.to_string_lossy(), e);
                }
            }
        }

        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Adding analyses to database"))
    .await?;

    // NOTE(review): if the block above exits early via `?`, this join is
    // skipped and the analysis thread is left detached — confirm intended.
    handle.join().expect("Couldn't join thread");

    info!("Library analysis complete");
    info!("Library brief: {:?}", brief(db).await?);

    Ok(())
}
317
/// Recluster the library: group all analyzed songs into collections.
///
/// Builds a clustering model over every stored analysis (per `settings`),
/// deletes all existing collections, then creates one collection per
/// non-empty cluster (named "Collection {i}") and attaches its songs.
///
/// # Errors
///
/// Returns an error if a database read/write fails. Errors creating or
/// initializing the clustering model are logged and swallowed (early
/// `Ok(())`), leaving existing collections untouched.
#[instrument]
pub async fn recluster<C: Connection>(
    db: &Surreal<C>,
    settings: &ReclusterSettings,
) -> Result<(), Error> {
    let samples = Analysis::read_all(db).await?;

    // Span is entered manually (not via `.instrument`) because the clustering
    // below is synchronous CPU work, not a future.
    let entered = tracing::info_span!("Clustering library").entered();
    let model: ClusteringHelper<NotInitialized> = match ClusteringHelper::new(
        samples
            .iter()
            .map(Into::into)
            .collect::<Vec<mecomp_analysis::Analysis>>()
            .into(),
        settings.max_clusters,
        KOptimal::GapStatistic {
            b: settings.gap_statistic_reference_datasets,
        },
        settings.algorithm.into(),
        settings.projection_method.into(),
    ) {
        Err(e) => {
            error!("There was an error creating the clustering helper: {e}",);
            return Ok(());
        }
        Ok(kmeans) => kmeans,
    };

    // Typestate transition: NotInitialized -> initialized -> clustered.
    let model = match model.initialize() {
        Err(e) => {
            error!("There was an error initializing the clustering helper: {e}",);
            return Ok(());
        }
        Ok(kmeans) => kmeans.cluster(),
    };
    drop(entered);

    // Old collections are only deleted once clustering has succeeded.
    async {
        for collection in Collection::read_all(db).await? {
            Collection::delete(db, collection.id.clone()).await?;
        }

        <Result<(), Error>>::Ok(())
    }
    .instrument(tracing::info_span!("Deleting old collections"))
    .await?;

    async {
        let clusters = model.extract_analysis_clusters(samples);

        // Empty clusters are skipped; `i` still reflects the filtered order.
        for (i, cluster) in clusters.iter().filter(|c| !c.is_empty()).enumerate() {
            let collection = Collection::create(
                db,
                Collection {
                    id: Collection::generate_id(),
                    name: format!("Collection {i}"),
                    runtime: Duration::default(),
                    song_count: Default::default(),
                },
            )
            .await?
            .ok_or(Error::NotCreated)?;

            let mut songs = Vec::with_capacity(cluster.len());

            async {
                // Resolve each analysis back to its song, then attach the
                // whole batch to the new collection.
                for analysis in cluster {
                    songs.push(Analysis::read_song(db, analysis.id.clone()).await?.id);
                }

                Collection::add_songs(db, collection.id.clone(), songs).await?;

                <Result<(), Error>>::Ok(())
            }
            .instrument(tracing::info_span!("Adding songs to collection"))
            .await?;
        }
        Ok::<(), Error>(())
    }
    .instrument(tracing::info_span!("Creating new collections"))
    .await?;

    info!("Library recluster complete");
    info!("Library brief: {:?}", brief(db).await?);

    Ok(())
}
419
420#[instrument]
426pub async fn brief<C: Connection>(db: &Surreal<C>) -> Result<LibraryBrief, Error> {
427 Ok(LibraryBrief {
428 artists: count_artists(db).await?,
429 albums: count_albums(db).await?,
430 songs: count_songs(db).await?,
431 playlists: count_playlists(db).await?,
432 collections: count_collections(db).await?,
433 dynamic_playlists: count_dynamic_playlists(db).await?,
434 })
435}
436
437#[instrument]
443pub async fn full<C: Connection>(db: &Surreal<C>) -> Result<LibraryFull, Error> {
444 Ok(LibraryFull {
445 artists: Artist::read_all(db).await?.into(),
446 albums: Album::read_all(db).await?.into(),
447 songs: Song::read_all(db).await?.into(),
448 playlists: Playlist::read_all(db).await?.into(),
449 collections: Collection::read_all(db).await?.into(),
450 dynamic_playlists: DynamicPlaylist::read_all(db).await?.into(),
451 })
452}
453
454#[instrument]
462pub async fn health<C: Connection>(db: &Surreal<C>) -> Result<LibraryHealth, Error> {
463 Ok(LibraryHealth {
464 artists: count_artists(db).await?,
465 albums: count_albums(db).await?,
466 songs: count_songs(db).await?,
467 #[cfg(feature = "analysis")]
468 unanalyzed_songs: Some(count_unanalyzed_songs(db).await?),
469 #[cfg(not(feature = "analysis"))]
470 unanalyzed_songs: None,
471 playlists: count_playlists(db).await?,
472 collections: count_collections(db).await?,
473 dynamic_playlists: count_dynamic_playlists(db).await?,
474 orphaned_artists: count_orphaned_artists(db).await?,
475 orphaned_albums: count_orphaned_albums(db).await?,
476 orphaned_playlists: count_orphaned_playlists(db).await?,
477 orphaned_collections: count_orphaned_collections(db).await?,
478 })
479}
480
481#[cfg(test)]
482mod tests {
483 use super::*;
484 use crate::test_utils::init;
485
486 use mecomp_core::config::{ClusterAlgorithm, ProjectionMethod};
487 use mecomp_storage::db::schemas::song::{SongChangeSet, SongMetadata};
488 use mecomp_storage::test_utils::{
489 ARTIST_NAME_SEPARATOR, SongCase, arb_analysis_features, arb_song_case, arb_vec,
490 create_song_metadata, create_song_with_overrides, init_test_database,
491 };
492 use one_or_many::OneOrMany;
493 use pretty_assertions::assert_eq;
494 use rstest::rstest;
495
    /// End-to-end rescan: new songs get indexed, missing files get deleted,
    /// outdated metadata gets overwritten, and artist/album links are built.
    #[tokio::test]
    #[allow(clippy::too_many_lines)]
    async fn test_rescan() {
        init();
        let tempdir = tempfile::tempdir().unwrap();
        let db = init_test_database().await.unwrap();

        // Valid songs on disk that are not yet in the database.
        let song_cases = arb_vec(&arb_song_case(), 10..=15)();
        let metadatas = song_cases
            .into_iter()
            .map(|song_case| create_song_metadata(&tempdir, song_case))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        // A db entry whose file does not exist on disk (should be deleted).
        let song_with_nonexistent_path = create_song_with_overrides(
            &db,
            arb_song_case()(),
            SongChangeSet {
                path: Some(tempdir.path().join("nonexistent.mp3")),
                ..Default::default()
            },
        )
        .await
        .unwrap();
        // A db entry whose on-disk metadata differs (genre stripped in the db).
        let mut metadata_of_song_with_outdated_metadata =
            create_song_metadata(&tempdir, arb_song_case()()).unwrap();
        metadata_of_song_with_outdated_metadata.genre = OneOrMany::None;
        let song_with_outdated_metadata =
            Song::try_load_into_db(&db, metadata_of_song_with_outdated_metadata)
                .await
                .unwrap();
        // An invalid file on disk with no db entry (rescan should ignore it).
        let invalid_song_path = tempdir.path().join("invalid1.mp3");
        std::fs::write(&invalid_song_path, "this is not a song").unwrap();
        // A second invalid file, this one referenced by a db entry
        // (shadowing of `invalid_song_path` is intentional).
        let invalid_song_path = tempdir.path().join("invalid2.mp3");
        std::fs::write(&invalid_song_path, "this is not a song").unwrap();
        let song_with_invalid_metadata = create_song_with_overrides(
            &db,
            arb_song_case()(),
            SongChangeSet {
                path: Some(tempdir.path().join("invalid2.mp3")),
                ..Default::default()
            },
        )
        .await
        .unwrap();

        rescan(
            &db,
            &[tempdir.path().to_owned()],
            &OneOrMany::One(ARTIST_NAME_SEPARATOR.to_string()),
            &OneOrMany::None,
            Some(ARTIST_NAME_SEPARATOR),
            MetadataConflictResolution::Overwrite,
        )
        .await
        .unwrap();

        // The missing-file song was removed from the index.
        assert_eq!(
            Song::read(&db, song_with_nonexistent_path.id)
                .await
                .unwrap(),
            None
        );
        // The unreadable-metadata song was removed from the index.
        assert_eq!(
            Song::read(&db, song_with_invalid_metadata.id)
                .await
                .unwrap(),
            None
        );
        // Overwrite mode restored the genre from the on-disk metadata.
        assert!(
            Song::read(&db, song_with_outdated_metadata.id)
                .await
                .unwrap()
                .unwrap()
                .genre
                .is_some()
        );
        // Every valid song was indexed with its metadata and relations intact.
        for metadata in metadatas {
            let song = Song::read_by_path(&db, metadata.path.clone())
                .await
                .unwrap();
            assert!(song.is_some());
            let song = song.unwrap();

            assert_eq!(SongMetadata::from(&song), metadata);

            // Each artist exists and is linked to the song (both directions).
            let artists = Artist::read_by_names(&db, Vec::from(metadata.artist.clone()))
                .await
                .unwrap();
            assert_eq!(artists.len(), metadata.artist.len());
            for artist in &artists {
                assert!(metadata.artist.contains(&artist.name));
                assert!(
                    Artist::read_songs(&db, artist.id.clone())
                        .await
                        .unwrap()
                        .contains(&song)
                );
            }
            if let Ok(song_artists) = Song::read_artist(&db, song.id.clone()).await {
                for artist in &artists {
                    assert!(song_artists.contains(artist));
                }
            } else {
                panic!("Error reading song artists");
            }

            // The album exists and is linked to the song (both directions).
            let album = Album::read_by_name_and_album_artist(
                &db,
                &metadata.album,
                metadata.album_artist.clone(),
            )
            .await
            .unwrap();
            assert!(album.is_some());
            let album = album.unwrap();
            assert_eq!(
                Song::read_album(&db, song.id.clone()).await.unwrap(),
                Some(album.clone())
            );
            assert!(
                Album::read_songs(&db, album.id.clone())
                    .await
                    .unwrap()
                    .contains(&song)
            );

            // Album artists exist and are linked to the album.
            let album_artists =
                Artist::read_by_names(&db, Vec::from(metadata.album_artist.clone()))
                    .await
                    .unwrap();
            assert_eq!(album_artists.len(), metadata.album_artist.len());
            for album_artist in album_artists {
                assert!(metadata.album_artist.contains(&album_artist.name));
                assert!(
                    Artist::read_albums(&db, album_artist.id.clone())
                        .await
                        .unwrap()
                        .contains(&album)
                );
            }
        }
    }
660
    /// Orphans that exist before the scan starts (artist/album left behind
    /// after a song was deleted) are cleaned up by the repair phase.
    #[tokio::test]
    async fn rescan_deletes_preexisting_orphans() {
        init();
        let tempdir = tempfile::tempdir().unwrap();
        let db = init_test_database().await.unwrap();

        let metadata = create_song_metadata(&tempdir, arb_song_case()()).unwrap();
        let song = Song::try_load_into_db(&db, metadata.clone()).await.unwrap();

        // Remove the file, then delete the song.
        // NOTE(review): the `false` in the delete tuple presumably means
        // "don't clean up related records", leaving the artist/album
        // orphaned — confirm against `Song::delete`'s signature.
        std::fs::remove_file(&song.path).unwrap();
        Song::delete(&db, (song.id.clone(), false)).await.unwrap();

        rescan(
            &db,
            &[tempdir.path().to_owned()],
            &OneOrMany::One(ARTIST_NAME_SEPARATOR.to_string()),
            &OneOrMany::None,
            Some(ARTIST_NAME_SEPARATOR),
            MetadataConflictResolution::Overwrite,
        )
        .await
        .unwrap();

        // Everything should be gone after the repair phase.
        assert_eq!(Song::read_all(&db).await.unwrap().len(), 0);
        assert_eq!(Album::read_all(&db).await.unwrap().len(), 0);
        assert_eq!(Artist::read_all(&db).await.unwrap().len(), 0);
    }
692
    /// When a song's file disappears, rescan deletes the song and the repair
    /// phase then removes the album and artist it leaves orphaned.
    #[tokio::test]
    async fn rescan_deletes_orphaned_albums_and_artists() {
        init();
        let tempdir = tempfile::tempdir().unwrap();
        let db = init_test_database().await.unwrap();

        // Index one song and grab its artist and album for later checks.
        let metadata = create_song_metadata(&tempdir, arb_song_case()()).unwrap();
        let song = Song::try_load_into_db(&db, metadata.clone()).await.unwrap();
        let artist = Artist::read_by_names(&db, Vec::from(metadata.artist.clone()))
            .await
            .unwrap()
            .pop()
            .unwrap();
        let album = Album::read_by_name_and_album_artist(
            &db,
            &metadata.album,
            metadata.album_artist.clone(),
        )
        .await
        .unwrap()
        .unwrap();

        // Delete only the file; the db entry stays until rescan notices.
        std::fs::remove_file(&song.path).unwrap();

        rescan(
            &db,
            &[tempdir.path().to_owned()],
            &OneOrMany::One(ARTIST_NAME_SEPARATOR.to_string()),
            &OneOrMany::None,
            Some(ARTIST_NAME_SEPARATOR),
            MetadataConflictResolution::Overwrite,
        )
        .await
        .unwrap();

        // Both the artist and album should have been repaired away.
        assert_eq!(Artist::read(&db, artist.id.clone()).await.unwrap(), None);
        assert_eq!(Album::read(&db, album.id.clone()).await.unwrap(), None);
    }
735
    /// `analyze` produces exactly one analysis per song, and nearest-neighbor
    /// queries return distinct neighbors excluding the query itself.
    #[tokio::test]
    async fn test_analyze() {
        init();
        let dir = tempfile::tempdir().unwrap();
        let db = init_test_database().await.unwrap();

        let song_cases = arb_vec(&arb_song_case(), 10..=15)();
        // Give each case a unique track number so the generated songs are
        // guaranteed distinct.
        let song_cases = song_cases.into_iter().enumerate().map(|(i, sc)| SongCase {
            song: u8::try_from(i).unwrap(),
            ..sc
        });
        let metadatas = song_cases
            .into_iter()
            .map(|song_case| create_song_metadata(&dir, song_case))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        for metadata in &metadatas {
            Song::try_load_into_db(&db, metadata.clone()).await.unwrap();
        }

        // Before: every song is unanalyzed.
        assert_eq!(
            Analysis::read_songs_without_analysis(&db)
                .await
                .unwrap()
                .len(),
            metadatas.len()
        );

        analyze(&db, true).await.unwrap();

        // After: no song is left unanalyzed.
        assert_eq!(
            Analysis::read_songs_without_analysis(&db)
                .await
                .unwrap()
                .len(),
            0
        );
        for metadata in &metadatas {
            let song = Song::read_by_path(&db, metadata.path.clone())
                .await
                .unwrap()
                .unwrap();
            let analysis = Analysis::read_for_song(&db, song.id.clone()).await.unwrap();
            assert!(analysis.is_some());
        }

        // Nearest neighbors: never includes the query analysis, covers all
        // other songs, and contains no duplicates.
        for analysis in Analysis::read_all(&db).await.unwrap() {
            let neighbors = Analysis::nearest_neighbors(&db, analysis.id.clone(), 100)
                .await
                .unwrap();
            assert!(!neighbors.contains(&analysis));
            assert_eq!(neighbors.len(), metadatas.len() - 1);
            assert_eq!(
                neighbors.len(),
                neighbors
                    .iter()
                    .map(|n| n.id.clone())
                    .collect::<HashSet<_>>()
                    .len()
            );
        }
    }
803
    /// `recluster` builds at least one non-empty collection for each
    /// supported projection method.
    #[rstest]
    #[tokio::test]
    async fn test_recluster(
        #[values(ProjectionMethod::TSne, ProjectionMethod::None, ProjectionMethod::Pca)]
        projection_method: ProjectionMethod,
    ) {
        init();
        let dir = tempfile::tempdir().unwrap();
        let db = init_test_database().await.unwrap();
        let settings = ReclusterSettings {
            gap_statistic_reference_datasets: 5,
            max_clusters: 18,
            algorithm: ClusterAlgorithm::GMM,
            projection_method,
        };

        // A fixed batch of 32 songs with unique track numbers.
        let song_cases = arb_vec(&arb_song_case(), 32..=32)();
        let song_cases = song_cases.into_iter().enumerate().map(|(i, sc)| SongCase {
            song: u8::try_from(i).unwrap(),
            ..sc
        });
        let metadatas = song_cases
            .into_iter()
            .map(|song_case| create_song_metadata(&dir, song_case))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        let mut songs = Vec::with_capacity(metadatas.len());
        for metadata in &metadatas {
            songs.push(Song::try_load_into_db(&db, metadata.clone()).await.unwrap());
        }

        // Attach synthetic (arbitrary) analysis features to every song so
        // clustering has inputs without running the real analyzer.
        for song in &songs {
            Analysis::create(
                &db,
                song.id.clone(),
                Analysis {
                    id: Analysis::generate_id(),
                    features: arb_analysis_features()(),
                },
            )
            .await
            .unwrap();
        }

        recluster(&db, &settings).await.unwrap();

        // At least one collection, and none of them are empty.
        let collections = Collection::read_all(&db).await.unwrap();
        assert!(!collections.is_empty());
        for collection in collections {
            let songs = Collection::read_songs(&db, collection.id.clone())
                .await
                .unwrap();
            assert!(!songs.is_empty());
        }
    }
863
    /// `brief` reports all-zero counts on an empty database.
    #[tokio::test]
    async fn test_brief() {
        init();
        let db = init_test_database().await.unwrap();
        let brief = brief(&db).await.unwrap();
        assert_eq!(brief.artists, 0);
        assert_eq!(brief.albums, 0);
        assert_eq!(brief.songs, 0);
        assert_eq!(brief.playlists, 0);
        assert_eq!(brief.collections, 0);
    }
875
    /// `full` returns empty entity lists on an empty database.
    #[tokio::test]
    async fn test_full() {
        init();
        let db = init_test_database().await.unwrap();
        let full = full(&db).await.unwrap();
        assert_eq!(full.artists.len(), 0);
        assert_eq!(full.albums.len(), 0);
        assert_eq!(full.songs.len(), 0);
        assert_eq!(full.playlists.len(), 0);
        assert_eq!(full.collections.len(), 0);
    }
887
    /// `health` reports all-zero counts on an empty database, with the
    /// unanalyzed count matching the `analysis` feature flag.
    #[tokio::test]
    async fn test_health() {
        init();
        let db = init_test_database().await.unwrap();
        let health = health(&db).await.unwrap();
        assert_eq!(health.artists, 0);
        assert_eq!(health.albums, 0);
        assert_eq!(health.songs, 0);
        // `Some(0)` with the feature on, `None` with it off.
        #[cfg(feature = "analysis")]
        assert_eq!(health.unanalyzed_songs, Some(0));
        #[cfg(not(feature = "analysis"))]
        assert_eq!(health.unanalyzed_songs, None);
        assert_eq!(health.playlists, 0);
        assert_eq!(health.collections, 0);
        assert_eq!(health.orphaned_artists, 0);
        assert_eq!(health.orphaned_albums, 0);
        assert_eq!(health.orphaned_playlists, 0);
        assert_eq!(health.orphaned_collections, 0);
    }
907}