//! lastfm_client/file_handler.rs — file I/O handlers (JSON, CSV, NDJSON, optional SQLite).
1use chrono::Local;
2use csv::Writer;
3use serde::Serialize;
4use std::collections::HashMap;
5use std::fs::{self, File, OpenOptions};
6use std::io::{BufRead, BufReader, Result, Write as _};
7
8#[cfg(feature = "sqlite")]
9use rusqlite::Connection as SqliteConnection;
10
11use crate::types::TrackPlayInfo;
12
/// File format options for saving track data.
///
/// Marked `#[non_exhaustive]` so new formats can be added later without
/// breaking downstream `match` statements.
#[derive(Debug)]
#[allow(dead_code)] // NOTE(review): suggests some variants are unused in this build — confirm against callers
#[non_exhaustive]
pub enum FileFormat {
    /// Save as JSON format with pretty printing
    Json,
    /// Save as CSV format with headers
    Csv,
    /// Save as NDJSON (Newline Delimited JSON) - one compact JSON object per line
    Ndjson,
}
25
/// Handler for file I/O operations (JSON, CSV, NDJSON, and SQLite behind the
/// `sqlite` feature).
///
/// Stateless unit struct: all functionality is exposed as associated functions.
#[derive(Debug)]
#[non_exhaustive]
pub struct FileHandler;
30
31impl FileHandler {
32 /// Save data to a file in the data directory.
33 ///
34 /// Files are saved to the `data/` directory (created if it doesn't exist) with a timestamp in the filename.
35 ///
36 /// # Arguments
37 /// * `data` - Data to save (must implement Serialize)
38 /// * `format` - File format to save as (`FileFormat::Json` for JSON or `FileFormat::Csv` for CSV)
39 /// * `filename_prefix` - Prefix for the filename. The final filename will be `{prefix}_{timestamp}.{extension}`
40 ///
41 /// # Errors
42 /// * `std::io::Error` - If the file cannot be opened or written to, or if the data directory cannot be created
43 /// * `serde_json::Error` - If the JSON cannot be serialized
44 ///
45 /// # Returns
46 /// * `Result<String>` - Full path to the saved file (e.g., `data/recent_tracks_20240101_120000.json`)
47 pub fn save<T: Serialize>(
48 data: &[T],
49 format: &FileFormat,
50 filename_prefix: &str,
51 ) -> Result<String> {
52 // Create data directory if it doesn't exist
53 fs::create_dir_all("data")?;
54
55 // Generate timestamp
56 let timestamp = Local::now().format("%Y%m%d_%H%M%S");
57
58 // Create filename with timestamp
59 let filename = format!(
60 "data/{}_{}.{}",
61 filename_prefix,
62 timestamp,
63 match format {
64 FileFormat::Json => "json",
65 FileFormat::Csv => "csv",
66 FileFormat::Ndjson => "ndjson",
67 }
68 );
69
70 match format {
71 FileFormat::Json => {
72 // Special case: if T is a HashMap with track info
73 if std::any::type_name::<T>()
74 == std::any::type_name::<HashMap<String, TrackPlayInfo>>()
75 && let Some(single_item) = data.first()
76 {
77 Self::save_single(single_item, &filename)?;
78 return Ok(filename);
79 }
80 Self::save_as_json(data, &filename)
81 }
82 FileFormat::Csv => Self::save_as_csv(data, &filename),
83 FileFormat::Ndjson => Self::save_as_ndjson(data, &filename),
84 }?;
85
86 Ok(filename)
87 }
88
89 /// Save data to a JSON file.
90 ///
91 /// # Arguments
92 /// * `data` - Data to save
93 /// * `filename` - Filename to save as
94 #[allow(dead_code)]
95 fn save_as_json<T: Serialize>(data: &[T], filename: &str) -> Result<()> {
96 let json = serde_json::to_string_pretty(data)?;
97 let mut file = File::create(filename)?;
98
99 file.write_all(json.as_bytes())?;
100
101 Ok(())
102 }
103
104 /// Save data to a CSV file.
105 ///
106 /// # Arguments
107 /// * `data` - Data to save
108 /// * `filename` - Filename to save as
109 fn save_as_csv<T: Serialize>(data: &[T], filename: &str) -> Result<()> {
110 let mut writer = Writer::from_path(filename)?;
111
112 for item in data {
113 writer.serialize(item)?;
114 }
115
116 writer.flush()?;
117 Ok(())
118 }
119
120 /// Save data to an NDJSON file - one compact JSON object per line.
121 ///
122 /// # Arguments
123 /// * `data` - Data to save
124 /// * `filename` - Filename to save as
125 fn save_as_ndjson<T: Serialize>(data: &[T], filename: &str) -> Result<()> {
126 let mut file = File::create(filename)?;
127 for item in data {
128 let line = serde_json::to_string(item)?;
129 file.write_all(line.as_bytes())?;
130 file.write_all(b"\n")?;
131 }
132 Ok(())
133 }
134
135 /// Append items to an existing NDJSON file as new lines.
136 ///
137 /// # Arguments
138 /// * `data` - Data to append
139 /// * `file_path` - Path to the target file
140 fn append_ndjson_lines<T: Serialize>(data: &[T], file_path: &str) -> Result<()> {
141 let mut file = OpenOptions::new().append(true).open(file_path)?;
142 for item in data {
143 let line = serde_json::to_string(item)?;
144 file.write_all(line.as_bytes())?;
145 file.write_all(b"\n")?;
146 }
147 Ok(())
148 }
149
150 /// Load existing NDJSON data from a file - one item per line.
151 ///
152 /// # Arguments
153 /// * `file_path` - Path to the NDJSON file to read
154 ///
155 /// # Errors
156 /// * `std::io::Error` - If the file cannot be opened
157 /// * `serde_json::Error` - If a line cannot be deserialized into `T`
158 pub fn load_ndjson<T: serde::de::DeserializeOwned>(file_path: &str) -> Result<Vec<T>> {
159 let file = File::open(file_path)?;
160 let reader = BufReader::new(file);
161 let mut items = Vec::new();
162 for line in reader.lines() {
163 let line = line?;
164 if line.is_empty() {
165 continue;
166 }
167 let item: T = serde_json::from_str(&line)?;
168 items.push(item);
169 }
170 Ok(items)
171 }
172
173 /// Append new items to an existing NDJSON file, or create it if it does not exist.
174 ///
175 /// # Arguments
176 /// * `new_data` - New items to append
177 /// * `file_path` - Path to the target NDJSON file
178 ///
179 /// # Errors
180 /// * `std::io::Error` - If the file cannot be opened or written
181 pub fn append_or_create_ndjson<T: Serialize>(new_data: &[T], file_path: &str) -> Result<()> {
182 if std::path::Path::new(file_path).exists() {
183 Self::append_ndjson_lines(new_data, file_path)
184 } else {
185 if let Some(parent) = std::path::Path::new(file_path).parent() {
186 fs::create_dir_all(parent)?;
187 }
188 Self::save_as_ndjson(new_data, file_path)
189 }
190 }
191
192 /// Append data to an existing file.
193 ///
194 /// # Arguments
195 /// * `data` - Data to append
196 /// * `file_path` - Path to the file to append to
197 ///
198 /// # Returns
199 /// * `Result<String>` - Path of the updated file
200 ///
201 /// Append data to an existing file.
202 ///
203 /// # Arguments
204 /// * `data` - Data to append
205 /// * `file_path` - Path to the file to append to
206 ///
207 /// # Errors
208 /// * `std::io::Error` - If an I/O error occurs
209 ///
210 /// # Returns
211 /// * `Result<String>` - Path of the updated file
212 #[allow(dead_code)]
213 pub fn append<T: Serialize + for<'de> serde::Deserialize<'de> + Clone>(
214 data: &[T],
215 file_path: &str,
216 ) -> Result<String> {
217 // Determine file format from extension
218 let ext = std::path::Path::new(file_path)
219 .extension()
220 .and_then(|e| e.to_str())
221 .map(str::to_ascii_lowercase);
222
223 let format = match ext.as_deref() {
224 Some("json") => FileFormat::Json,
225 Some("csv") => FileFormat::Csv,
226 Some("ndjson") => FileFormat::Ndjson,
227 _ => {
228 return Err(std::io::Error::new(
229 std::io::ErrorKind::InvalidInput,
230 "Unsupported file format",
231 ));
232 }
233 };
234
235 match format {
236 FileFormat::Json => {
237 // For JSON, we need to read the existing data, combine it, and write it back
238 let file = File::open(file_path)?;
239 let mut existing_data: Vec<T> = serde_json::from_reader(file)?;
240
241 existing_data.extend(data.iter().cloned());
242
243 Self::save_as_json(&existing_data, file_path)?;
244 }
245 FileFormat::Csv => {
246 // For CSV, we can simply append to the file
247 let mut writer =
248 Writer::from_writer(OpenOptions::new().append(true).open(file_path)?);
249
250 for item in data {
251 writer.serialize(item)?;
252 }
253 writer.flush()?;
254 }
255 FileFormat::Ndjson => {
256 Self::append_ndjson_lines(data, file_path)?;
257 }
258 }
259
260 Ok(file_path.to_string())
261 }
262
263 /// Save a single item to a JSON file
264 ///
265 /// # Errors
266 /// * `std::io::Error` - If there was an error reading or writing the file
267 /// * `serde_json::Error` - If there was an error serializing the data
268 ///
269 /// # Arguments
270 /// * `data` - Data to save
271 /// * `filename` - Filename to save as
272 pub fn save_single<T: Serialize>(data: &T, filename: &str) -> Result<()> {
273 let json = serde_json::to_string_pretty(data)?;
274 let mut file = File::create(filename)?;
275 file.write_all(json.as_bytes())?;
276 Ok(())
277 }
278
279 /// Load existing JSON data from a file.
280 ///
281 /// # Arguments
282 /// * `file_path` - Path to the JSON file to read
283 ///
284 /// # Errors
285 /// * `std::io::Error` - If the file cannot be opened
286 /// * `serde_json::Error` - If the JSON cannot be deserialized into `Vec<T>`
287 pub fn load<T: serde::de::DeserializeOwned>(file_path: &str) -> Result<Vec<T>> {
288 let file = File::open(file_path)?;
289 let data: Vec<T> = serde_json::from_reader(file)?;
290 Ok(data)
291 }
292
293 /// Return the path of the sidecar metadata file for `file_path`.
294 ///
295 /// The sidecar stores the latest known Unix timestamp so subsequent update calls do not
296 /// need to deserialize the full data file.
297 #[must_use]
298 pub fn sidecar_path(file_path: &str) -> String {
299 format!("{file_path}.meta")
300 }
301
302 /// Read the latest timestamp from a sidecar metadata file.
303 ///
304 /// Returns `None` if the sidecar does not exist or cannot be parsed.
305 #[must_use]
306 pub fn read_sidecar_timestamp(file_path: &str) -> Option<u32> {
307 fs::read_to_string(Self::sidecar_path(file_path))
308 .ok()
309 .and_then(|s| s.trim().parse().ok())
310 }
311
312 /// Write a timestamp to the sidecar metadata file associated with `file_path`.
313 ///
314 /// # Errors
315 /// * `std::io::Error` - If the sidecar file cannot be written
316 pub fn write_sidecar_timestamp(file_path: &str, timestamp: u32) -> Result<()> {
317 fs::write(Self::sidecar_path(file_path), timestamp.to_string())
318 }
319
320 /// Append new items to an existing CSV file, or create it with headers if it does not exist.
321 ///
322 /// When appending to an existing file the header row is omitted so it is not duplicated.
323 ///
324 /// # Arguments
325 /// * `new_data` - New items to append
326 /// * `file_path` - Path to the target CSV file
327 ///
328 /// # Errors
329 /// * `std::io::Error` - If the file cannot be opened or written
330 /// * `csv::Error` - If serialization fails
331 pub fn append_or_create_csv<T: Serialize>(new_data: &[T], file_path: &str) -> Result<()> {
332 if std::path::Path::new(file_path).exists() {
333 let mut writer = csv::WriterBuilder::new()
334 .has_headers(false)
335 .from_writer(OpenOptions::new().append(true).open(file_path)?);
336 for item in new_data {
337 writer.serialize(item)?;
338 }
339 writer.flush()?;
340 } else {
341 if let Some(parent) = std::path::Path::new(file_path).parent() {
342 fs::create_dir_all(parent)?;
343 }
344 Self::save_as_csv(new_data, file_path)?;
345 }
346 Ok(())
347 }
348
349 /// Save data to a new `SQLite` database file.
350 ///
351 /// Creates a timestamped `.db` file under `data/`. All rows are inserted in a single
352 /// transaction for performance.
353 ///
354 /// # Arguments
355 /// * `data` - Data to save (must implement `SqliteExportable`)
356 /// * `filename_prefix` - Prefix for the generated filename
357 ///
358 /// # Errors
359 /// * `std::io::Error` - If the data directory cannot be created or the database cannot be opened or written
360 ///
361 /// # Returns
362 /// * `Result<String>` - Full path to the saved database file (e.g., `data/recent_tracks_20240101_120000.db`)
363 #[cfg(feature = "sqlite")]
364 pub fn save_sqlite<T: crate::sqlite::SqliteExportable>(
365 data: &[T],
366 filename_prefix: &str,
367 ) -> Result<String> {
368 fs::create_dir_all("data")?;
369 let timestamp = Local::now().format("%Y%m%d_%H%M%S");
370 let filename = format!("data/{filename_prefix}_{timestamp}.db");
371
372 let mut conn =
373 SqliteConnection::open(&filename).map_err(|e| std::io::Error::other(e.to_string()))?;
374
375 conn.execute_batch(T::create_table_sql())
376 .map_err(|e| std::io::Error::other(e.to_string()))?;
377
378 let tx = conn
379 .transaction()
380 .map_err(|e| std::io::Error::other(e.to_string()))?;
381
382 {
383 let mut stmt = tx
384 .prepare(T::insert_sql())
385 .map_err(|e| std::io::Error::other(e.to_string()))?;
386
387 for item in data {
388 item.bind_and_execute(&mut stmt)
389 .map_err(|e| std::io::Error::other(e.to_string()))?;
390 }
391 }
392
393 tx.commit()
394 .map_err(|e| std::io::Error::other(e.to_string()))?;
395
396 Ok(filename)
397 }
398
399 /// Append new items to an existing `SQLite` database, or create it if it does not exist.
400 ///
401 /// Opens the database at `file_path`, creates the table if it does not already exist,
402 /// and inserts all rows in a single transaction.
403 ///
404 /// # Arguments
405 /// * `data` - Data to insert
406 /// * `file_path` - Path to the target `.db` file
407 ///
408 /// # Errors
409 /// * `std::io::Error` - If the file cannot be opened or the data cannot be written
410 #[cfg(feature = "sqlite")]
411 pub fn append_or_create_sqlite<T: crate::sqlite::SqliteExportable>(
412 data: &[T],
413 file_path: &str,
414 ) -> Result<()> {
415 if let Some(parent) = std::path::Path::new(file_path).parent()
416 && !parent.as_os_str().is_empty()
417 {
418 fs::create_dir_all(parent)?;
419 }
420
421 let mut conn =
422 SqliteConnection::open(file_path).map_err(|e| std::io::Error::other(e.to_string()))?;
423
424 conn.execute_batch(T::create_table_sql())
425 .map_err(|e| std::io::Error::other(e.to_string()))?;
426
427 let tx = conn
428 .transaction()
429 .map_err(|e| std::io::Error::other(e.to_string()))?;
430
431 {
432 let mut stmt = tx
433 .prepare(T::insert_sql())
434 .map_err(|e| std::io::Error::other(e.to_string()))?;
435
436 for item in data {
437 item.bind_and_execute(&mut stmt)
438 .map_err(|e| std::io::Error::other(e.to_string()))?;
439 }
440 }
441
442 tx.commit()
443 .map_err(|e| std::io::Error::other(e.to_string()))?;
444
445 Ok(())
446 }
447
448 /// Query the maximum `date_uts` value stored in a `SQLite` table.
449 ///
450 /// Used by the update flow to determine the latest timestamp already present in the
451 /// database, so only newer records need to be fetched from the API.
452 ///
453 /// Returns `None` if the file does not exist, the table is empty, or the query fails.
454 ///
455 /// # Arguments
456 /// * `file_path` - Path to the `.db` file
457 /// * `table_name` - Name of the table to query
458 #[cfg(feature = "sqlite")]
459 #[must_use]
460 pub fn read_sqlite_max_timestamp(file_path: &str, table_name: &str) -> Option<u32> {
461 if !std::path::Path::new(file_path).exists() {
462 return None;
463 }
464 let conn = SqliteConnection::open(file_path).ok()?;
465 conn.query_row(
466 &format!("SELECT MAX(date_uts) FROM {table_name}"),
467 [],
468 |row| row.get::<_, Option<u32>>(0),
469 )
470 .ok()
471 .flatten()
472 }
473
474 /// Prepend new items to an existing JSON file, or create the file if it does not exist.
475 ///
476 /// New items are placed before existing items so the result remains sorted newest-first,
477 /// which matches the order returned by the Last.fm API.
478 ///
479 /// # Arguments
480 /// * `new_data` - New items to prepend
481 /// * `file_path` - Path to the target JSON file
482 ///
483 /// # Errors
484 /// * `std::io::Error` - If the file cannot be read or written
485 /// * `serde_json::Error` - If serialization or deserialization fails
486 pub fn prepend_json<T: Serialize + serde::de::DeserializeOwned + Clone>(
487 new_data: &[T],
488 file_path: &str,
489 ) -> Result<()> {
490 let existing: Vec<T> = if std::path::Path::new(file_path).exists() {
491 Self::load(file_path)?
492 } else {
493 // Ensure the parent directory exists before creating the file
494 if let Some(parent) = std::path::Path::new(file_path).parent() {
495 fs::create_dir_all(parent)?;
496 }
497 vec![]
498 };
499
500 let mut combined = new_data.to_vec();
501 combined.extend(existing);
502 Self::save_as_json(&combined, file_path)
503 }
504}