reinhardt_admin/core/import.rs

//! Import functionality for admin data
//!
//! This module provides import capabilities for admin data from various formats
//! including CSV, TSV, and JSON.

use crate::types::{AdminError, AdminResult};
use csv::ReaderBuilder;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::Cursor;

/// Import format
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ImportFormat {
	/// Comma-separated values
	CSV,
	/// JSON format
	JSON,
	/// Tab-separated values
	TSV,
}

impl ImportFormat {
	/// Get file extensions for this format
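	///
	/// # Examples
	///
	/// A brief sketch, mirroring the doc examples elsewhere in this module:
	///
	/// ```
	/// use reinhardt_admin::core::ImportFormat;
	///
	/// assert_eq!(ImportFormat::CSV.extensions(), &["csv"]);
	/// assert_eq!(ImportFormat::TSV.extensions(), &["tsv", "tab"]);
	/// ```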
	pub fn extensions(&self) -> &[&'static str] {
		match self {
			ImportFormat::CSV => &["csv"],
			ImportFormat::JSON => &["json"],
			ImportFormat::TSV => &["tsv", "tab"],
		}
	}

	/// Detect format from filename
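	///
	/// Matching is based on the (lowercased) final extension.
	///
	/// # Examples
	///
	/// A brief sketch of the expected results:
	///
	/// ```
	/// use reinhardt_admin::core::ImportFormat;
	///
	/// assert_eq!(ImportFormat::from_filename("data.csv"), Some(ImportFormat::CSV));
	/// assert_eq!(ImportFormat::from_filename("export.JSON"), Some(ImportFormat::JSON));
	/// assert_eq!(ImportFormat::from_filename("notes.txt"), None);
	/// ```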
	pub fn from_filename(filename: &str) -> Option<Self> {
		let ext = filename.split('.').next_back()?.to_lowercase();
		match ext.as_str() {
			"csv" => Some(ImportFormat::CSV),
			"json" => Some(ImportFormat::JSON),
			"tsv" | "tab" => Some(ImportFormat::TSV),
			_ => None,
		}
	}

	/// Detect format from Content-Type header
	///
	/// Supports standard MIME types and common variations.
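	///
	/// # Examples
	///
	/// A brief sketch; parameters such as `charset` are ignored and matching is
	/// case-insensitive:
	///
	/// ```
	/// use reinhardt_admin::core::ImportFormat;
	///
	/// assert_eq!(
	///     ImportFormat::from_content_type("text/csv; charset=utf-8"),
	///     Some(ImportFormat::CSV)
	/// );
	/// assert_eq!(ImportFormat::from_content_type("text/html"), None);
	/// ```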
	pub fn from_content_type(content_type: &str) -> Option<Self> {
		// Extract the main MIME type (ignore charset and other parameters)
		let mime_type = content_type.split(';').next()?.trim().to_lowercase();

		match mime_type.as_str() {
			// JSON formats
			"application/json" | "text/json" => Some(ImportFormat::JSON),
			// CSV formats
			"text/csv" | "application/csv" => Some(ImportFormat::CSV),
			// TSV formats
			"text/tab-separated-values" | "text/tsv" => Some(ImportFormat::TSV),
			_ => None,
		}
	}
}

/// Import configuration
///
/// # Examples
///
/// ```
/// use reinhardt_admin::core::{ImportConfig, ImportFormat};
///
/// let config = ImportConfig::new("User", ImportFormat::CSV)
///     .with_field_mapping("username", "login")
///     .skip_duplicates(true)
///     .update_existing(false);
///
/// assert_eq!(config.model_name(), "User");
/// ```
#[derive(Debug, Clone)]
pub struct ImportConfig {
	/// Model name
	model_name: String,
	/// Import format
	format: ImportFormat,
	/// Field mappings (import_field -> model_field)
	field_mappings: HashMap<String, String>,
	/// Fields to skip during import
	skip_fields: Vec<String>,
	/// Skip duplicate records
	skip_duplicates: bool,
	/// Update existing records
	update_existing: bool,
	/// Key field for duplicate detection
	key_field: Option<String>,
	/// Maximum records to import
	max_records: Option<usize>,
	/// Skip header row (for CSV/TSV)
	skip_header: bool,
	/// Validate before import
	validate_first: bool,
}

impl ImportConfig {
	/// Create a new import configuration
	pub fn new(model_name: impl Into<String>, format: ImportFormat) -> Self {
		Self {
			model_name: model_name.into(),
			format,
			field_mappings: HashMap::new(),
			skip_fields: Vec::new(),
			skip_duplicates: false,
			update_existing: false,
			key_field: None,
			max_records: None,
			skip_header: true,
			validate_first: true,
		}
	}

	/// Get model name
	pub fn model_name(&self) -> &str {
		&self.model_name
	}

	/// Get import format
	pub fn format(&self) -> ImportFormat {
		self.format
	}

	/// Add field mapping
	pub fn with_field_mapping(
		mut self,
		import_field: impl Into<String>,
		model_field: impl Into<String>,
	) -> Self {
		self.field_mappings
			.insert(import_field.into(), model_field.into());
		self
	}

	/// Get field mappings
	pub fn field_mappings(&self) -> &HashMap<String, String> {
		&self.field_mappings
	}

	/// Map import field to model field
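	///
	/// Fields without a mapping are returned unchanged.
	///
	/// # Examples
	///
	/// A brief sketch of the fallback behavior:
	///
	/// ```
	/// use reinhardt_admin::core::{ImportConfig, ImportFormat};
	///
	/// let config = ImportConfig::new("User", ImportFormat::CSV)
	///     .with_field_mapping("username", "login");
	///
	/// assert_eq!(config.map_field("username"), "login");
	/// assert_eq!(config.map_field("email"), "email");
	/// ```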
	pub fn map_field<'a>(&'a self, import_field: &'a str) -> &'a str {
		self.field_mappings
			.get(import_field)
			.map(|s| s.as_str())
			.unwrap_or(import_field)
	}

	/// Add field to skip
	pub fn skip_field(mut self, field: impl Into<String>) -> Self {
		self.skip_fields.push(field.into());
		self
	}

	/// Get skip fields
	pub fn skip_fields(&self) -> &[String] {
		&self.skip_fields
	}

	/// Set whether to skip duplicates
	pub fn skip_duplicates(mut self, skip: bool) -> Self {
		self.skip_duplicates = skip;
		self
	}

	/// Check if duplicates should be skipped
	pub fn should_skip_duplicates(&self) -> bool {
		self.skip_duplicates
	}

	/// Set whether to update existing records
	pub fn update_existing(mut self, update: bool) -> Self {
		self.update_existing = update;
		self
	}

	/// Check if existing records should be updated
	pub fn should_update_existing(&self) -> bool {
		self.update_existing
	}

	/// Set key field for duplicate detection
	pub fn with_key_field(mut self, field: impl Into<String>) -> Self {
		self.key_field = Some(field.into());
		self
	}

	/// Get key field
	pub fn key_field(&self) -> Option<&String> {
		self.key_field.as_ref()
	}

	/// Set maximum records to import
	pub fn with_max_records(mut self, max: usize) -> Self {
		self.max_records = Some(max);
		self
	}

	/// Get maximum records
	pub fn max_records(&self) -> Option<usize> {
		self.max_records
	}

	/// Set whether to skip header row
	pub fn with_skip_header(mut self, skip: bool) -> Self {
		self.skip_header = skip;
		self
	}

	/// Check if header should be skipped
	pub fn should_skip_header(&self) -> bool {
		self.skip_header
	}

	/// Set whether to validate before import
	pub fn with_validation(mut self, validate: bool) -> Self {
		self.validate_first = validate;
		self
	}

	/// Check if validation should be performed
	pub fn should_validate(&self) -> bool {
		self.validate_first
	}
}

/// Import result
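///
/// # Examples
///
/// A small sketch of tallying an import run (this assumes `ImportResult` and
/// `ImportError` are re-exported from `core` like the other types in this module):
///
/// ```
/// use reinhardt_admin::core::{ImportError, ImportResult};
///
/// let mut result = ImportResult::new();
/// result.add_imported();
/// result.add_failed(ImportError::new(2, "missing name"));
///
/// assert_eq!(result.total_processed(), 2);
/// assert!(!result.is_successful());
/// ```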
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportResult {
	/// Number of records imported
	pub imported_count: usize,
	/// Number of records updated
	pub updated_count: usize,
	/// Number of records skipped
	pub skipped_count: usize,
	/// Number of records failed
	pub failed_count: usize,
	/// Error messages
	pub errors: Vec<ImportError>,
}

impl ImportResult {
	/// Create a new import result
	pub fn new() -> Self {
		Self {
			imported_count: 0,
			updated_count: 0,
			skipped_count: 0,
			failed_count: 0,
			errors: Vec::new(),
		}
	}

	/// Get total processed count
	pub fn total_processed(&self) -> usize {
		self.imported_count + self.updated_count + self.skipped_count + self.failed_count
	}

	/// Check if import was successful (no failures)
	pub fn is_successful(&self) -> bool {
		self.failed_count == 0
	}

	/// Add imported record
	pub fn add_imported(&mut self) {
		self.imported_count += 1;
	}

	/// Add updated record
	pub fn add_updated(&mut self) {
		self.updated_count += 1;
	}

	/// Add skipped record
	pub fn add_skipped(&mut self) {
		self.skipped_count += 1;
	}

	/// Add failed record
	pub fn add_failed(&mut self, error: ImportError) {
		self.failed_count += 1;
		self.errors.push(error);
	}
}

impl Default for ImportResult {
	fn default() -> Self {
		Self::new()
	}
}

/// Import error
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportError {
	/// Row number (1-indexed)
	pub row_number: usize,
	/// Error message
	pub message: String,
	/// Failed data (optional)
	pub data: Option<HashMap<String, String>>,
}

impl ImportError {
	/// Create a new import error
	pub fn new(row_number: usize, message: impl Into<String>) -> Self {
		Self {
			row_number,
			message: message.into(),
			data: None,
		}
	}

	/// Create import error with data
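	///
	/// # Examples
	///
	/// A small sketch of attaching the offending row (this assumes `ImportError` is
	/// re-exported from `core` like the other types in this module):
	///
	/// ```
	/// use std::collections::HashMap;
	/// use reinhardt_admin::core::ImportError;
	///
	/// let mut row = HashMap::new();
	/// row.insert("id".to_string(), "1".to_string());
	///
	/// let error = ImportError::with_data(3, "missing name", row);
	/// assert_eq!(error.row_number, 3);
	/// assert!(error.data.is_some());
	/// ```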
	pub fn with_data(
		row_number: usize,
		message: impl Into<String>,
		data: HashMap<String, String>,
	) -> Self {
		Self {
			row_number,
			message: message.into(),
			data: Some(data),
		}
	}
}

/// CSV importer
pub struct CsvImporter;

impl CsvImporter {
	/// Import data from CSV format
	///
	/// # Examples
	///
	/// ```
	/// use reinhardt_admin::core::CsvImporter;
	///
	/// let csv_data = b"id,name\n1,Alice\n2,Bob";
	/// let result = CsvImporter::import(csv_data, true);
	///
	/// assert!(result.is_ok());
	/// ```
	pub fn import(data: &[u8], skip_header: bool) -> AdminResult<Vec<HashMap<String, String>>> {
		// Use csv crate for RFC 4180 compliant parsing
		let mut reader = ReaderBuilder::new()
			.has_headers(true) // Always expect headers
			.flexible(false) // Strict column count validation
			.trim(csv::Trim::All) // Trim whitespace
			.from_reader(Cursor::new(data));

		// Get headers
		let headers = reader
			.headers()
			.map_err(|e| AdminError::ValidationError(format!("Failed to read CSV headers: {}", e)))?
			.iter()
			.map(|h| h.to_string())
			.collect::<Vec<_>>();

		if headers.is_empty() {
			return Err(AdminError::ValidationError(
				"CSV header is empty".to_string(),
			));
		}

		// Parse records
		let mut records = Vec::new();
		let mut row_num = 1; // Header is row 1; data rows are reported starting at row 2

		for result in reader.records() {
			row_num += 1;

			let record = result.map_err(|e| {
				AdminError::ValidationError(format!("Row {}: CSV parse error: {}", row_num, e))
			})?;

			// Validate column count
			if record.len() != headers.len() {
				return Err(AdminError::ValidationError(format!(
					"Row {}: Expected {} columns, got {}",
					row_num,
					headers.len(),
					record.len()
				)));
			}

			// Convert to HashMap
			let mut map = HashMap::new();
			for (header, value) in headers.iter().zip(record.iter()) {
				map.insert(header.clone(), value.to_string());
			}

			records.push(map);
		}

		// If skip_header is false, include the header as a data row (the csv crate has
		// already consumed it as headers), mapping each column name to itself. This is
		// unusual, but it preserves the behavior of the previous implementation.
		if !skip_header && !headers.is_empty() {
			// Insert header row at the beginning
			let mut header_record = HashMap::new();
			for header in &headers {
				header_record.insert(header.clone(), header.clone());
			}
			records.insert(0, header_record);
		}

		Ok(records)
	}
}

/// JSON importer
pub struct JsonImporter;

impl JsonImporter {
	/// Import data from JSON format
	///
	/// # Examples
	///
	/// ```
	/// use reinhardt_admin::core::JsonImporter;
	///
	/// let json_data = br#"[{"id":"1","name":"Alice"}]"#;
	/// let result = JsonImporter::import(json_data);
	///
	/// assert!(result.is_ok());
	/// ```
	pub fn import(data: &[u8]) -> AdminResult<Vec<HashMap<String, String>>> {
		let value: serde_json::Value = serde_json::from_slice(data)
			.map_err(|e| AdminError::ValidationError(format!("Invalid JSON: {}", e)))?;

		let array = value
			.as_array()
			.ok_or_else(|| AdminError::ValidationError("JSON must be an array".to_string()))?;

		// Use parallel processing for large JSON arrays (more than 1000 items)
		let records: Vec<HashMap<String, String>> = if array.len() > 1000 {
			// Parallel processing with rayon; note that non-object items are silently
			// skipped here, whereas the sequential path below returns an error for them.
			array
				.par_iter()
				.filter_map(|item| {
					let obj = item.as_object()?;

					let mut record = HashMap::new();
					for (key, value) in obj {
						let value_str = match value {
							serde_json::Value::String(s) => s.clone(),
							serde_json::Value::Number(n) => n.to_string(),
							serde_json::Value::Bool(b) => b.to_string(),
							serde_json::Value::Null => String::new(),
							_ => value.to_string(),
						};
						record.insert(key.clone(), value_str);
					}

					Some(record)
				})
				.collect()
		} else {
			// Sequential processing for small arrays
			let mut records = Vec::new();

			for (idx, item) in array.iter().enumerate() {
				let obj = item.as_object().ok_or_else(|| {
					AdminError::ValidationError(format!("Item {} is not an object", idx))
				})?;

				let mut record = HashMap::new();
				for (key, value) in obj {
					let value_str = match value {
						serde_json::Value::String(s) => s.clone(),
						serde_json::Value::Number(n) => n.to_string(),
						serde_json::Value::Bool(b) => b.to_string(),
						serde_json::Value::Null => String::new(),
						_ => value.to_string(),
					};
					record.insert(key.clone(), value_str);
				}

				records.push(record);
			}

			records
		};

		Ok(records)
	}
}

/// TSV (Tab-Separated Values) importer
pub struct TsvImporter;

impl TsvImporter {
	/// Import data from TSV format
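	///
	/// # Examples
	///
	/// A small sketch mirroring the CSV example above (this assumes `TsvImporter` is
	/// re-exported from `core` like the other importers):
	///
	/// ```
	/// use reinhardt_admin::core::TsvImporter;
	///
	/// let tsv_data = b"id\tname\n1\tAlice\n2\tBob";
	/// let result = TsvImporter::import(tsv_data, true);
	///
	/// assert!(result.is_ok());
	/// ```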
	pub fn import(data: &[u8], skip_header: bool) -> AdminResult<Vec<HashMap<String, String>>> {
		let content = String::from_utf8(data.to_vec())
			.map_err(|e| AdminError::ValidationError(format!("Invalid UTF-8: {}", e)))?;

		let lines: Vec<&str> = content.lines().collect();

		if lines.is_empty() {
			return Ok(Vec::new());
		}

		// Parse header
		let headers: Vec<String> = lines[0].split('\t').map(|s| s.to_string()).collect();

		if headers.is_empty() {
			return Err(AdminError::ValidationError(
				"TSV header is empty".to_string(),
			));
		}

		let start_row = if skip_header { 1 } else { 0 };
		let mut records = Vec::new();

		for (idx, line) in lines.iter().enumerate().skip(start_row) {
			if line.trim().is_empty() {
				continue;
			}

			let values: Vec<String> = line.split('\t').map(|s| s.to_string()).collect();

			if values.len() != headers.len() {
				return Err(AdminError::ValidationError(format!(
					"Row {}: Expected {} columns, got {}",
					idx + 1,
					headers.len(),
					values.len()
				)));
			}

			let mut record = HashMap::new();
			for (header, value) in headers.iter().zip(values.iter()) {
				record.insert(header.clone(), value.clone());
			}

			records.push(record);
		}

		Ok(records)
	}
}

/// Import builder for fluent API
///
/// # Examples
///
/// ```
/// use reinhardt_admin::core::{ImportBuilder, ImportFormat};
///
/// let csv_data = b"id,name\n1,Alice\n2,Bob";
///
/// let result = ImportBuilder::new("User", ImportFormat::CSV)
///     .data(csv_data.to_vec())
///     .skip_duplicates(true)
///     .parse();
///
/// assert!(result.is_ok());
/// ```
pub struct ImportBuilder {
	config: ImportConfig,
	data: Vec<u8>,
}

impl ImportBuilder {
	/// Create a new import builder
	pub fn new(model_name: impl Into<String>, format: ImportFormat) -> Self {
		Self {
			config: ImportConfig::new(model_name, format),
			data: Vec::new(),
		}
	}

	/// Set data
	pub fn data(mut self, data: Vec<u8>) -> Self {
		self.data = data;
		self
	}

	/// Add field mapping
	pub fn field_mapping(
		mut self,
		import_field: impl Into<String>,
		model_field: impl Into<String>,
	) -> Self {
		self.config = self.config.with_field_mapping(import_field, model_field);
		self
	}

	/// Skip duplicates
	pub fn skip_duplicates(mut self, skip: bool) -> Self {
		self.config = self.config.skip_duplicates(skip);
		self
	}

	/// Update existing
	pub fn update_existing(mut self, update: bool) -> Self {
		self.config = self.config.update_existing(update);
		self
	}

	/// Set key field
	pub fn key_field(mut self, field: impl Into<String>) -> Self {
		self.config = self.config.with_key_field(field);
		self
	}

	/// Set maximum records
	pub fn max_records(mut self, max: usize) -> Self {
		self.config = self.config.with_max_records(max);
		self
	}

	/// Parse data
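	///
	/// Parses the raw data in the configured format, applies any field mappings,
	/// and truncates the result to the configured maximum number of records.
	///
	/// # Examples
	///
	/// A small sketch of parsing with a field mapping (mirroring the builder example above):
	///
	/// ```
	/// use reinhardt_admin::core::{ImportBuilder, ImportFormat};
	///
	/// let records = ImportBuilder::new("User", ImportFormat::CSV)
	///     .data(b"id,username\n1,alice".to_vec())
	///     .field_mapping("username", "login")
	///     .parse()
	///     .unwrap();
	///
	/// assert_eq!(records[0].get("login"), Some(&"alice".to_string()));
	/// ```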
	pub fn parse(self) -> AdminResult<Vec<HashMap<String, String>>> {
		let mut records = match self.config.format() {
			ImportFormat::CSV => CsvImporter::import(&self.data, self.config.should_skip_header())?,
			ImportFormat::JSON => JsonImporter::import(&self.data)?,
			ImportFormat::TSV => TsvImporter::import(&self.data, self.config.should_skip_header())?,
		};

		// Apply field mappings
		if !self.config.field_mappings().is_empty() {
			records = records
				.into_iter()
				.map(|mut record| {
					let mut mapped_record = HashMap::new();
					for (key, value) in record.drain() {
						let mapped_key = self.config.map_field(&key).to_string();
						mapped_record.insert(mapped_key, value);
					}
					mapped_record
				})
				.collect();
		}

		// Apply max records limit
		if let Some(max) = self.config.max_records() {
			records.truncate(max);
		}

		Ok(records)
	}
}

#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn test_import_format_from_filename() {
		assert_eq!(
			ImportFormat::from_filename("data.csv"),
			Some(ImportFormat::CSV)
		);
		assert_eq!(
			ImportFormat::from_filename("data.json"),
			Some(ImportFormat::JSON)
		);
		assert_eq!(
			ImportFormat::from_filename("data.tsv"),
			Some(ImportFormat::TSV)
		);
		assert_eq!(ImportFormat::from_filename("data.txt"), None);
	}

	#[test]
	fn test_import_config_new() {
		let config = ImportConfig::new("User", ImportFormat::CSV);
		assert_eq!(config.model_name(), "User");
		assert_eq!(config.format(), ImportFormat::CSV);
		assert!(config.should_skip_header());
		assert!(config.should_validate());
	}

	#[test]
	fn test_import_config_field_mapping() {
		let config =
			ImportConfig::new("User", ImportFormat::CSV).with_field_mapping("username", "login");

		assert_eq!(config.map_field("username"), "login");
		assert_eq!(config.map_field("email"), "email");
	}

	#[test]
	fn test_csv_importer_basic() {
		let csv_data = b"id,name\n1,Alice\n2,Bob";
		let result = CsvImporter::import(csv_data, true);

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
		assert_eq!(records[0].get("id"), Some(&"1".to_string()));
		assert_eq!(records[0].get("name"), Some(&"Alice".to_string()));
	}

	#[test]
	fn test_csv_importer_quoted() {
		let csv_data = b"id,name\n1,\"Smith, John\"\n2,\"Doe, Jane\"";
		let result = CsvImporter::import(csv_data, true);

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
		assert_eq!(records[0].get("name"), Some(&"Smith, John".to_string()));
	}

	#[test]
	fn test_json_importer() {
		let json_data = br#"[{"id":"1","name":"Alice"},{"id":"2","name":"Bob"}]"#;
		let result = JsonImporter::import(json_data);

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
		assert_eq!(records[0].get("id"), Some(&"1".to_string()));
		assert_eq!(records[0].get("name"), Some(&"Alice".to_string()));
	}

	#[test]
	fn test_tsv_importer() {
		let tsv_data = b"id\tname\n1\tAlice\n2\tBob";
		let result = TsvImporter::import(tsv_data, true);

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
		assert_eq!(records[0].get("id"), Some(&"1".to_string()));
		assert_eq!(records[0].get("name"), Some(&"Alice".to_string()));
	}

	#[test]
	fn test_import_builder() {
		let csv_data = b"id,name\n1,Alice\n2,Bob";

		let result = ImportBuilder::new("User", ImportFormat::CSV)
			.data(csv_data.to_vec())
			.parse();

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
	}

	#[test]
	fn test_import_builder_with_mapping() {
		let csv_data = b"id,username\n1,alice\n2,bob";

		let result = ImportBuilder::new("User", ImportFormat::CSV)
			.data(csv_data.to_vec())
			.field_mapping("username", "login")
			.parse();

		let records = result.unwrap();
		assert_eq!(records[0].get("login"), Some(&"alice".to_string()));
		assert_eq!(records[0].get("username"), None);
	}

	#[test]
	fn test_import_builder_max_records() {
		let csv_data = b"id,name\n1,Alice\n2,Bob\n3,Charlie";

		let result = ImportBuilder::new("User", ImportFormat::CSV)
			.data(csv_data.to_vec())
			.max_records(2)
			.parse();

		let records = result.unwrap();
		assert_eq!(records.len(), 2);
	}

	#[test]
	fn test_import_result() {
		let mut result = ImportResult::new();
		assert_eq!(result.total_processed(), 0);
		assert!(result.is_successful());

		result.add_imported();
		result.add_updated();
		assert_eq!(result.imported_count, 1);
		assert_eq!(result.updated_count, 1);
		assert_eq!(result.total_processed(), 2);

		result.add_failed(ImportError::new(1, "Test error".to_string()));
		assert!(!result.is_successful());
		assert_eq!(result.failed_count, 1);
	}

	// ==================== from_content_type tests ====================

	#[test]
	fn test_from_content_type_json() {
		assert_eq!(
			ImportFormat::from_content_type("application/json"),
			Some(ImportFormat::JSON)
		);
	}

	#[test]
	fn test_from_content_type_csv() {
		assert_eq!(
			ImportFormat::from_content_type("text/csv"),
			Some(ImportFormat::CSV)
		);
	}

	#[test]
	fn test_from_content_type_tsv() {
		assert_eq!(
			ImportFormat::from_content_type("text/tab-separated-values"),
			Some(ImportFormat::TSV)
		);
	}

	#[test]
	fn test_from_content_type_with_charset() {
		// Content-Type with charset parameter should still be parsed correctly
		assert_eq!(
			ImportFormat::from_content_type("application/json; charset=utf-8"),
			Some(ImportFormat::JSON)
		);
		assert_eq!(
			ImportFormat::from_content_type("text/csv; charset=utf-8"),
			Some(ImportFormat::CSV)
		);
		assert_eq!(
			ImportFormat::from_content_type("text/tab-separated-values; charset=utf-8"),
			Some(ImportFormat::TSV)
		);
	}

	#[test]
	fn test_from_content_type_unknown() {
		assert_eq!(ImportFormat::from_content_type("text/html"), None);
		assert_eq!(ImportFormat::from_content_type("application/xml"), None);
		assert_eq!(ImportFormat::from_content_type("image/png"), None);
	}

	#[test]
	fn test_from_content_type_empty() {
		assert_eq!(ImportFormat::from_content_type(""), None);
	}

	#[test]
	fn test_from_content_type_case_insensitive() {
		// Content-Type header values should be case-insensitive per RFC
		assert_eq!(
			ImportFormat::from_content_type("Application/JSON"),
			Some(ImportFormat::JSON)
		);
		assert_eq!(
			ImportFormat::from_content_type("TEXT/CSV"),
			Some(ImportFormat::CSV)
		);
		assert_eq!(
			ImportFormat::from_content_type("Text/Tab-Separated-Values"),
			Some(ImportFormat::TSV)
		);
	}

	#[test]
	fn test_from_content_type_text_json() {
		// text/json is an alternative MIME type for JSON
		assert_eq!(
			ImportFormat::from_content_type("text/json"),
			Some(ImportFormat::JSON)
		);
	}

	#[test]
	fn test_from_content_type_application_csv() {
		// application/csv is an alternative MIME type for CSV
		assert_eq!(
			ImportFormat::from_content_type("application/csv"),
			Some(ImportFormat::CSV)
		);
	}

	#[test]
	fn test_from_content_type_text_tsv() {
		// text/tsv is an alternative MIME type for TSV
		assert_eq!(
			ImportFormat::from_content_type("text/tsv"),
			Some(ImportFormat::TSV)
		);
	}

	#[test]
	fn test_from_content_type_with_extra_parameters() {
		// Content-Type with multiple parameters
		assert_eq!(
			ImportFormat::from_content_type("application/json; charset=utf-8; boundary=something"),
			Some(ImportFormat::JSON)
		);
	}

	#[test]
	fn test_from_content_type_whitespace() {
		// Content-Type with whitespace variations
		assert_eq!(
			ImportFormat::from_content_type("  application/json  "),
			Some(ImportFormat::JSON)
		);
		assert_eq!(
			ImportFormat::from_content_type("application/json ;charset=utf-8"),
			Some(ImportFormat::JSON)
		);
	}
}