// alimentar/tui/adapter.rs
1//! Dataset adapter for TUI viewing
2//!
3//! Provides uniform access to Arrow datasets for TUI rendering.
4//! Supports both in-memory and streaming modes for memory efficiency.
5
6use std::sync::Arc;
7
8use arrow::{
9    array::RecordBatch,
10    datatypes::{Schema, SchemaRef},
11};
12use unicode_width::UnicodeWidthStr;
13
14use super::{error::TuiResult, format::format_array_value};
15use crate::{dataset::ArrowDataset, Dataset};
16
17/// Threshold for switching from in-memory to streaming mode (rows)
18const STREAMING_THRESHOLD: usize = 100_000;
19
/// Adapter providing uniform access to Arrow datasets for TUI rendering
///
/// Supports two modes:
/// - `InMemory`: All batches loaded upfront, fast random access
/// - `Streaming`: Lazy batch loading for large datasets (OOM prevention)
///
/// The mode is chosen automatically by `from_dataset` (based on
/// `STREAMING_THRESHOLD`) or explicitly via the `*_from_batches` constructors.
///
/// # Example
///
/// ```ignore
/// use alimentar::tui::DatasetAdapter;
/// use alimentar::ArrowDataset;
///
/// let dataset = ArrowDataset::from_parquet("data.parquet")?;
/// let adapter = DatasetAdapter::from_dataset(&dataset)?;
///
/// println!("Rows: {}", adapter.row_count());
/// println!("Columns: {}", adapter.column_count());
///
/// if let Some(value) = adapter.get_cell(0, 0)? {
///     println!("First cell: {}", value);
/// }
/// ```
#[derive(Debug, Clone)]
pub enum DatasetAdapter {
    /// All batches loaded in memory - fast random access
    InMemory(InMemoryAdapter),
    /// Lazy batch loading for large datasets (currently backed by fully
    /// loaded batches; see `StreamingAdapter::new`)
    Streaming(StreamingAdapter),
}
49
/// In-memory adapter with all batches loaded
///
/// Counts and offsets are cached at construction so the accessor methods
/// are O(1) and cell lookup is O(log n) in the number of batches.
#[derive(Debug, Clone)]
pub struct InMemoryAdapter {
    /// Record batches containing the data
    batches: Vec<RecordBatch>,
    /// Cached schema reference
    schema: SchemaRef,
    /// Cached total row count (sum of rows across all batches)
    total_rows: usize,
    /// Cached column count (number of schema fields)
    column_count: usize,
    /// Cumulative row offsets for batch lookup; entry i is the global index
    /// of the first row of batch i, and the last entry equals `total_rows`
    batch_offsets: Vec<usize>,
}
64
/// Streaming adapter for lazy batch loading (stub implementation)
///
/// This adapter is designed for datasets too large to fit in memory.
/// Batches are loaded on-demand and evicted when not needed.
/// NOTE(review): the current implementation keeps every batch in
/// `loaded_batches` (see `StreamingAdapter::new`); true lazy loading is
/// still a TODO.
#[derive(Debug, Clone)]
pub struct StreamingAdapter {
    /// Cached schema reference
    schema: SchemaRef,
    /// Total row count (known from metadata)
    total_rows: usize,
    /// Column count (number of schema fields)
    column_count: usize,
    /// Currently loaded batches (LRU cache would go here)
    loaded_batches: Vec<RecordBatch>,
    /// Cumulative row offsets; entry i is the global index of the first row
    /// of batch i, and the last entry equals `total_rows`
    batch_offsets: Vec<usize>,
}
82
83impl DatasetAdapter {
84    /// Create adapter from an `ArrowDataset`
85    ///
86    /// Automatically selects InMemory or Streaming mode based on dataset size.
87    ///
88    /// # Arguments
89    /// * `dataset` - The Arrow dataset to adapt
90    ///
91    /// # Returns
92    /// A new adapter, or error if the dataset has no schema
93    pub fn from_dataset(dataset: &ArrowDataset) -> TuiResult<Self> {
94        let schema = dataset.schema();
95        let batches: Vec<_> = dataset.iter().collect();
96        let total_rows: usize = batches.iter().map(|b| b.num_rows()).sum();
97
98        // Choose mode based on dataset size (F103)
99        if total_rows > STREAMING_THRESHOLD {
100            Self::streaming_from_batches(batches, schema)
101        } else {
102            Self::in_memory_from_batches(batches, schema)
103        }
104    }
105
106    /// Create in-memory adapter from record batches and schema
107    pub fn from_batches(batches: Vec<RecordBatch>, schema: SchemaRef) -> TuiResult<Self> {
108        Self::in_memory_from_batches(batches, schema)
109    }
110
111    /// Create in-memory adapter explicitly
112    pub fn in_memory_from_batches(batches: Vec<RecordBatch>, schema: SchemaRef) -> TuiResult<Self> {
113        Ok(Self::InMemory(InMemoryAdapter::new(batches, schema)?))
114    }
115
116    /// Create streaming adapter explicitly
117    pub fn streaming_from_batches(batches: Vec<RecordBatch>, schema: SchemaRef) -> TuiResult<Self> {
118        Ok(Self::Streaming(StreamingAdapter::new(batches, schema)?))
119    }
120
121    /// Create an empty adapter
122    pub fn empty() -> Self {
123        Self::InMemory(InMemoryAdapter::empty())
124    }
125
126    /// Get the schema reference
127    #[inline]
128    pub fn schema(&self) -> &SchemaRef {
129        match self {
130            Self::InMemory(a) => a.schema(),
131            Self::Streaming(a) => a.schema(),
132        }
133    }
134
135    /// Get the total row count
136    #[inline]
137    pub fn row_count(&self) -> usize {
138        match self {
139            Self::InMemory(a) => a.row_count(),
140            Self::Streaming(a) => a.row_count(),
141        }
142    }
143
144    /// Get the column count
145    #[inline]
146    pub fn column_count(&self) -> usize {
147        match self {
148            Self::InMemory(a) => a.column_count(),
149            Self::Streaming(a) => a.column_count(),
150        }
151    }
152
153    /// Check if the dataset is empty
154    #[inline]
155    pub fn is_empty(&self) -> bool {
156        self.row_count() == 0
157    }
158
159    /// Check if this adapter is in streaming mode
160    #[inline]
161    pub fn is_streaming(&self) -> bool {
162        matches!(self, Self::Streaming(_))
163    }
164
165    /// Get a cell value as a formatted string
166    pub fn get_cell(&self, row: usize, col: usize) -> TuiResult<Option<String>> {
167        match self {
168            Self::InMemory(a) => a.get_cell(row, col),
169            Self::Streaming(a) => a.get_cell(row, col),
170        }
171    }
172
173    /// Get a field name by column index
174    pub fn field_name(&self, col: usize) -> Option<&str> {
175        match self {
176            Self::InMemory(a) => a.field_name(col),
177            Self::Streaming(a) => a.field_name(col),
178        }
179    }
180
181    /// Get a field data type description by column index
182    pub fn field_type(&self, col: usize) -> Option<String> {
183        match self {
184            Self::InMemory(a) => a.field_type(col),
185            Self::Streaming(a) => a.field_type(col),
186        }
187    }
188
189    /// Check if a field is nullable
190    pub fn field_nullable(&self, col: usize) -> Option<bool> {
191        match self {
192            Self::InMemory(a) => a.field_nullable(col),
193            Self::Streaming(a) => a.field_nullable(col),
194        }
195    }
196
197    /// Calculate optimal column widths for display
198    ///
199    /// Uses `unicode-width` for correct visual width calculation.
200    pub fn calculate_column_widths(&self, max_width: u16, sample_rows: usize) -> Vec<u16> {
201        match self {
202            Self::InMemory(a) => a.calculate_column_widths(max_width, sample_rows),
203            Self::Streaming(a) => a.calculate_column_widths(max_width, sample_rows),
204        }
205    }
206
207    /// Get all field names as a vector
208    pub fn field_names(&self) -> Vec<&str> {
209        match self {
210            Self::InMemory(a) => a.field_names(),
211            Self::Streaming(a) => a.field_names(),
212        }
213    }
214
215    /// Locate a row within the batch structure
216    pub fn locate_row(&self, global_row: usize) -> Option<(usize, usize)> {
217        match self {
218            Self::InMemory(a) => a.locate_row(global_row),
219            Self::Streaming(a) => a.locate_row(global_row),
220        }
221    }
222
223    /// Search for a substring in string columns, returning first matching row
224    ///
225    /// Linear scan implementation suitable for <100k rows (F101).
226    pub fn search(&self, query: &str) -> Option<usize> {
227        if query.is_empty() {
228            return None;
229        }
230        let query_lower = query.to_lowercase();
231
232        for row in 0..self.row_count() {
233            for col in 0..self.column_count() {
234                if let Ok(Some(value)) = self.get_cell(row, col) {
235                    if value.to_lowercase().contains(&query_lower) {
236                        return Some(row);
237                    }
238                }
239            }
240        }
241        None
242    }
243
244    /// Search continuing from a given row
245    pub fn search_from(&self, query: &str, start_row: usize) -> Option<usize> {
246        if query.is_empty() {
247            return None;
248        }
249        let query_lower = query.to_lowercase();
250
251        for row in start_row..self.row_count() {
252            for col in 0..self.column_count() {
253                if let Ok(Some(value)) = self.get_cell(row, col) {
254                    if value.to_lowercase().contains(&query_lower) {
255                        return Some(row);
256                    }
257                }
258            }
259        }
260        // Wrap around to beginning
261        for row in 0..start_row {
262            for col in 0..self.column_count() {
263                if let Ok(Some(value)) = self.get_cell(row, col) {
264                    if value.to_lowercase().contains(&query_lower) {
265                        return Some(row);
266                    }
267                }
268            }
269        }
270        None
271    }
272}
273
274impl InMemoryAdapter {
275    /// Create a new in-memory adapter
276    #[allow(clippy::unnecessary_wraps)]
277    pub fn new(batches: Vec<RecordBatch>, schema: SchemaRef) -> TuiResult<Self> {
278        let total_rows = batches.iter().map(|b| b.num_rows()).sum();
279        let column_count = schema.fields().len();
280
281        // Pre-compute batch offsets for O(log n) row lookup
282        let mut batch_offsets = Vec::with_capacity(batches.len() + 1);
283        batch_offsets.push(0);
284        let mut offset = 0;
285        for batch in &batches {
286            offset += batch.num_rows();
287            batch_offsets.push(offset);
288        }
289
290        Ok(Self {
291            batches,
292            schema,
293            total_rows,
294            column_count,
295            batch_offsets,
296        })
297    }
298
299    /// Create an empty adapter
300    pub fn empty() -> Self {
301        Self {
302            batches: Vec::new(),
303            schema: Arc::new(Schema::empty()),
304            total_rows: 0,
305            column_count: 0,
306            batch_offsets: vec![0],
307        }
308    }
309
310    #[inline]
311    pub fn schema(&self) -> &SchemaRef {
312        &self.schema
313    }
314
315    #[inline]
316    pub fn row_count(&self) -> usize {
317        self.total_rows
318    }
319
320    #[inline]
321    pub fn column_count(&self) -> usize {
322        self.column_count
323    }
324
325    pub fn get_cell(&self, row: usize, col: usize) -> TuiResult<Option<String>> {
326        if row >= self.total_rows || col >= self.column_count {
327            return Ok(None);
328        }
329
330        let Some((batch_idx, local_row)) = self.locate_row(row) else {
331            return Ok(None);
332        };
333
334        let Some(batch) = self.batches.get(batch_idx) else {
335            return Ok(None);
336        };
337
338        let array = batch.column(col);
339        format_array_value(array.as_ref(), local_row)
340    }
341
342    pub fn field_name(&self, col: usize) -> Option<&str> {
343        self.schema.fields().get(col).map(|f| f.name().as_str())
344    }
345
346    pub fn field_type(&self, col: usize) -> Option<String> {
347        self.schema
348            .fields()
349            .get(col)
350            .map(|f| format!("{:?}", f.data_type()))
351    }
352
353    pub fn field_nullable(&self, col: usize) -> Option<bool> {
354        self.schema.fields().get(col).map(|f| f.is_nullable())
355    }
356
357    pub fn locate_row(&self, global_row: usize) -> Option<(usize, usize)> {
358        if global_row >= self.total_rows {
359            return None;
360        }
361
362        let batch_idx = match self.batch_offsets.binary_search(&global_row) {
363            Ok(idx) => {
364                if idx < self.batches.len() {
365                    idx
366                } else {
367                    idx.saturating_sub(1)
368                }
369            }
370            Err(idx) => idx.saturating_sub(1),
371        };
372
373        let batch_start = self.batch_offsets.get(batch_idx).copied().unwrap_or(0);
374        let local_row = global_row.saturating_sub(batch_start);
375
376        Some((batch_idx, local_row))
377    }
378
379    /// Calculate column widths using unicode-width for correct visual width
380    pub fn calculate_column_widths(&self, max_width: u16, sample_rows: usize) -> Vec<u16> {
381        if self.column_count == 0 {
382            return Vec::new();
383        }
384
385        // Start with header widths (using unicode width)
386        let mut widths: Vec<u16> = self
387            .schema
388            .fields()
389            .iter()
390            .map(|f| {
391                let width = UnicodeWidthStr::width(f.name().as_str()).min(50);
392                u16::try_from(width).unwrap_or(u16::MAX)
393            })
394            .collect();
395
396        // Sample rows for content width
397        let sample_count = sample_rows.min(self.total_rows);
398        for row in 0..sample_count {
399            for col in 0..self.column_count {
400                if let Ok(Some(value)) = self.get_cell(row, col) {
401                    // Use unicode width for correct visual width
402                    let width = UnicodeWidthStr::width(value.as_str()).min(50);
403                    let width_u16 = u16::try_from(width).unwrap_or(u16::MAX);
404                    if let Some(w) = widths.get_mut(col) {
405                        *w = (*w).max(width_u16);
406                    }
407                }
408            }
409        }
410
411        // Ensure minimum width of 3 for each column
412        for w in &mut widths {
413            *w = (*w).max(3);
414        }
415
416        // Calculate separators and available space
417        let num_cols = u16::try_from(self.column_count).unwrap_or(u16::MAX);
418        let separators = num_cols.saturating_sub(1);
419        let available = max_width.saturating_sub(separators);
420
421        // Scale down if needed
422        let total: u16 = widths.iter().sum();
423        if total > available && available > 0 {
424            let scale = f64::from(available) / f64::from(total);
425            for w in &mut widths {
426                #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
427                let scaled = (f64::from(*w) * scale) as u16;
428                *w = scaled.max(3);
429            }
430        }
431
432        widths
433    }
434
435    pub fn field_names(&self) -> Vec<&str> {
436        self.schema
437            .fields()
438            .iter()
439            .map(|f| f.name().as_str())
440            .collect()
441    }
442}
443
444impl StreamingAdapter {
445    /// Create a new streaming adapter
446    ///
447    /// Note: This is currently a stub that loads all batches.
448    /// A full implementation would use an async iterator.
449    #[allow(clippy::unnecessary_wraps)]
450    pub fn new(batches: Vec<RecordBatch>, schema: SchemaRef) -> TuiResult<Self> {
451        let total_rows = batches.iter().map(|b| b.num_rows()).sum();
452        let column_count = schema.fields().len();
453
454        let mut batch_offsets = Vec::with_capacity(batches.len() + 1);
455        batch_offsets.push(0);
456        let mut offset = 0;
457        for batch in &batches {
458            offset += batch.num_rows();
459            batch_offsets.push(offset);
460        }
461
462        Ok(Self {
463            schema,
464            total_rows,
465            column_count,
466            loaded_batches: batches, // TODO: Replace with lazy loading
467            batch_offsets,
468        })
469    }
470
471    #[inline]
472    pub fn schema(&self) -> &SchemaRef {
473        &self.schema
474    }
475
476    #[inline]
477    pub fn row_count(&self) -> usize {
478        self.total_rows
479    }
480
481    #[inline]
482    pub fn column_count(&self) -> usize {
483        self.column_count
484    }
485
486    pub fn get_cell(&self, row: usize, col: usize) -> TuiResult<Option<String>> {
487        if row >= self.total_rows || col >= self.column_count {
488            return Ok(None);
489        }
490
491        let Some((batch_idx, local_row)) = self.locate_row(row) else {
492            return Ok(None);
493        };
494
495        let Some(batch) = self.loaded_batches.get(batch_idx) else {
496            return Ok(None);
497        };
498
499        let array = batch.column(col);
500        format_array_value(array.as_ref(), local_row)
501    }
502
503    pub fn field_name(&self, col: usize) -> Option<&str> {
504        self.schema.fields().get(col).map(|f| f.name().as_str())
505    }
506
507    pub fn field_type(&self, col: usize) -> Option<String> {
508        self.schema
509            .fields()
510            .get(col)
511            .map(|f| format!("{:?}", f.data_type()))
512    }
513
514    pub fn field_nullable(&self, col: usize) -> Option<bool> {
515        self.schema.fields().get(col).map(|f| f.is_nullable())
516    }
517
518    pub fn locate_row(&self, global_row: usize) -> Option<(usize, usize)> {
519        if global_row >= self.total_rows {
520            return None;
521        }
522
523        let batch_idx = match self.batch_offsets.binary_search(&global_row) {
524            Ok(idx) => {
525                if idx < self.loaded_batches.len() {
526                    idx
527                } else {
528                    idx.saturating_sub(1)
529                }
530            }
531            Err(idx) => idx.saturating_sub(1),
532        };
533
534        let batch_start = self.batch_offsets.get(batch_idx).copied().unwrap_or(0);
535        let local_row = global_row.saturating_sub(batch_start);
536
537        Some((batch_idx, local_row))
538    }
539
540    /// Calculate column widths using unicode-width
541    pub fn calculate_column_widths(&self, max_width: u16, sample_rows: usize) -> Vec<u16> {
542        if self.column_count == 0 {
543            return Vec::new();
544        }
545
546        let mut widths: Vec<u16> = self
547            .schema
548            .fields()
549            .iter()
550            .map(|f| {
551                let width = UnicodeWidthStr::width(f.name().as_str()).min(50);
552                u16::try_from(width).unwrap_or(u16::MAX)
553            })
554            .collect();
555
556        let sample_count = sample_rows.min(self.total_rows);
557        for row in 0..sample_count {
558            for col in 0..self.column_count {
559                if let Ok(Some(value)) = self.get_cell(row, col) {
560                    let width = UnicodeWidthStr::width(value.as_str()).min(50);
561                    let width_u16 = u16::try_from(width).unwrap_or(u16::MAX);
562                    if let Some(w) = widths.get_mut(col) {
563                        *w = (*w).max(width_u16);
564                    }
565                }
566            }
567        }
568
569        for w in &mut widths {
570            *w = (*w).max(3);
571        }
572
573        let num_cols = u16::try_from(self.column_count).unwrap_or(u16::MAX);
574        let separators = num_cols.saturating_sub(1);
575        let available = max_width.saturating_sub(separators);
576
577        let total: u16 = widths.iter().sum();
578        if total > available && available > 0 {
579            let scale = f64::from(available) / f64::from(total);
580            for w in &mut widths {
581                #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
582                let scaled = (f64::from(*w) * scale) as u16;
583                *w = scaled.max(3);
584            }
585        }
586
587        widths
588    }
589
590    pub fn field_names(&self) -> Vec<&str> {
591        self.schema
592            .fields()
593            .iter()
594            .map(|f| f.name().as_str())
595            .collect()
596    }
597}
598
599#[cfg(test)]
600mod tests {
601    use arrow::{
602        array::{Float32Array, Int32Array, StringArray},
603        datatypes::{DataType, Field},
604    };
605
606    use super::*;
607
608    fn create_test_schema() -> SchemaRef {
609        Arc::new(Schema::new(vec![
610            Field::new("id", DataType::Utf8, false),
611            Field::new("value", DataType::Int32, false),
612            Field::new("score", DataType::Float32, false),
613        ]))
614    }
615
616    fn create_test_batch(schema: &SchemaRef, start_id: i32, count: usize) -> RecordBatch {
617        let ids: Vec<String> = (0..count)
618            .map(|i| format!("id_{}", start_id + i as i32))
619            .collect();
620        let values: Vec<i32> = (0..count).map(|i| (start_id + i as i32) * 10).collect();
621        let scores: Vec<f32> = (0..count).map(|i| (i as f32) * 0.1).collect();
622
623        RecordBatch::try_new(
624            schema.clone(),
625            vec![
626                Arc::new(StringArray::from(ids)),
627                Arc::new(Int32Array::from(values)),
628                Arc::new(Float32Array::from(scores)),
629            ],
630        )
631        .unwrap()
632    }
633
634    fn create_test_adapter() -> DatasetAdapter {
635        let schema = create_test_schema();
636        let batch1 = create_test_batch(&schema, 0, 5);
637        let batch2 = create_test_batch(&schema, 5, 5);
638        DatasetAdapter::from_batches(vec![batch1, batch2], schema).unwrap()
639    }
640
641    #[test]
642    fn f001_adapter_row_count() {
643        let adapter = create_test_adapter();
644        assert_eq!(adapter.row_count(), 10, "FALSIFIED: Expected 10 rows");
645    }
646
647    #[test]
648    fn f002_adapter_column_count() {
649        let adapter = create_test_adapter();
650        assert_eq!(
651            adapter.column_count(),
652            3,
653            "FALSIFIED: Expected 3 columns (id, value, score)"
654        );
655    }
656
657    #[test]
658    fn f003_adapter_schema_o1() {
659        let adapter = create_test_adapter();
660        let schema = adapter.schema();
661        assert_eq!(schema.fields().len(), 3);
662    }
663
664    #[test]
665    fn f004_adapter_get_cell_first_batch() {
666        let adapter = create_test_adapter();
667        let cell = adapter.get_cell(0, 0).unwrap();
668        assert!(cell.is_some(), "FALSIFIED: Cell should exist");
669        assert_eq!(cell.unwrap(), "id_0");
670    }
671
672    #[test]
673    fn f005_adapter_get_cell_second_batch() {
674        let adapter = create_test_adapter();
675        let cell = adapter.get_cell(5, 0).unwrap();
676        assert!(cell.is_some(), "FALSIFIED: Cell should exist");
677        assert_eq!(cell.unwrap(), "id_5");
678    }
679
680    #[test]
681    fn f006_adapter_get_cell_row_out_of_bounds() {
682        let adapter = create_test_adapter();
683        let cell = adapter.get_cell(100, 0).unwrap();
684        assert!(
685            cell.is_none(),
686            "FALSIFIED: Out of bounds row should return None"
687        );
688    }
689
690    #[test]
691    fn f007_adapter_get_cell_col_out_of_bounds() {
692        let adapter = create_test_adapter();
693        let cell = adapter.get_cell(0, 100).unwrap();
694        assert!(
695            cell.is_none(),
696            "FALSIFIED: Out of bounds column should return None"
697        );
698    }
699
700    #[test]
701    fn f008_adapter_empty() {
702        let adapter = DatasetAdapter::empty();
703        assert_eq!(adapter.row_count(), 0);
704        assert_eq!(adapter.column_count(), 0);
705        assert!(adapter.is_empty());
706    }
707
708    #[test]
709    fn f009_adapter_empty_get_cell() {
710        let adapter = DatasetAdapter::empty();
711        let cell = adapter.get_cell(0, 0).unwrap();
712        assert!(
713            cell.is_none(),
714            "FALSIFIED: Empty adapter should return None"
715        );
716    }
717
718    #[test]
719    fn f010_adapter_field_name() {
720        let adapter = create_test_adapter();
721        assert_eq!(adapter.field_name(0), Some("id"));
722        assert_eq!(adapter.field_name(1), Some("value"));
723        assert_eq!(adapter.field_name(100), None);
724    }
725
726    #[test]
727    fn f011_adapter_field_type() {
728        let adapter = create_test_adapter();
729        let type_str = adapter.field_type(0).unwrap();
730        assert!(type_str.contains("Utf8"), "FALSIFIED: id should be Utf8");
731    }
732
733    #[test]
734    fn f012_adapter_field_nullable() {
735        let adapter = create_test_adapter();
736        assert_eq!(
737            adapter.field_nullable(0),
738            Some(false),
739            "FALSIFIED: id should not be nullable"
740        );
741    }
742
743    #[test]
744    fn f013_adapter_column_widths() {
745        let adapter = create_test_adapter();
746        let widths = adapter.calculate_column_widths(80, 5);
747        assert_eq!(
748            widths.len(),
749            3,
750            "FALSIFIED: Should have width for each column"
751        );
752        for (i, w) in widths.iter().enumerate() {
753            assert!(*w >= 3, "FALSIFIED: Column {} width {} below minimum", i, w);
754        }
755    }
756
757    #[test]
758    fn f014_adapter_column_widths_constrained() {
759        let adapter = create_test_adapter();
760        let widths = adapter.calculate_column_widths(15, 5);
761        let total: u16 = widths.iter().sum();
762        let separators = (widths.len() as u16).saturating_sub(1);
763        assert!(
764            total + separators <= 15,
765            "FALSIFIED: Total width {} exceeds constraint 15",
766            total + separators
767        );
768    }
769
770    #[test]
771    fn f015_adapter_locate_row_first_batch() {
772        let adapter = create_test_adapter();
773        let loc = adapter.locate_row(0);
774        assert_eq!(loc, Some((0, 0)), "FALSIFIED: Row 0 should be in batch 0");
775    }
776
777    #[test]
778    fn f016_adapter_locate_row_second_batch() {
779        let adapter = create_test_adapter();
780        let loc = adapter.locate_row(5);
781        assert_eq!(
782            loc,
783            Some((1, 0)),
784            "FALSIFIED: Row 5 should be first row of batch 1"
785        );
786    }
787
788    #[test]
789    fn f017_adapter_locate_row_last() {
790        let adapter = create_test_adapter();
791        let loc = adapter.locate_row(9);
792        assert_eq!(
793            loc,
794            Some((1, 4)),
795            "FALSIFIED: Row 9 should be last row of batch 1"
796        );
797    }
798
799    #[test]
800    fn f018_adapter_locate_row_out_of_bounds() {
801        let adapter = create_test_adapter();
802        let loc = adapter.locate_row(100);
803        assert_eq!(loc, None, "FALSIFIED: Out of bounds should return None");
804    }
805
806    #[test]
807    fn f019_adapter_is_clone() {
808        let adapter = create_test_adapter();
809        let cloned = adapter.clone();
810        assert_eq!(adapter.row_count(), cloned.row_count());
811        assert_eq!(adapter.column_count(), cloned.column_count());
812    }
813
814    #[test]
815    fn f020_adapter_schema_o1() {
816        let adapter = create_test_adapter();
817        for _ in 0..10000 {
818            let _ = adapter.schema();
819        }
820    }
821
822    #[test]
823    fn f021_adapter_row_count_o1() {
824        let adapter = create_test_adapter();
825        for _ in 0..10000 {
826            let _ = adapter.row_count();
827        }
828    }
829
830    #[test]
831    fn f022_adapter_int_formatting() {
832        let adapter = create_test_adapter();
833        let cell = adapter.get_cell(0, 1).unwrap().unwrap();
834        assert_eq!(cell, "0", "FALSIFIED: First value should be 0");
835    }
836
837    #[test]
838    fn f023_adapter_float_formatting() {
839        let adapter = create_test_adapter();
840        let cell = adapter.get_cell(1, 2).unwrap().unwrap();
841        assert!(cell.contains("0.1"), "FALSIFIED: Score should be ~0.1");
842    }
843
844    #[test]
845    fn f024_adapter_large_row_index() {
846        let adapter = create_test_adapter();
847        let cell = adapter.get_cell(usize::MAX, 0).unwrap();
848        assert!(cell.is_none(), "FALSIFIED: usize::MAX should not panic");
849    }
850
851    #[test]
852    fn f025_adapter_large_col_index() {
853        let adapter = create_test_adapter();
854        let cell = adapter.get_cell(0, usize::MAX).unwrap();
855        assert!(
856            cell.is_none(),
857            "FALSIFIED: usize::MAX column should not panic"
858        );
859    }
860
861    #[test]
862    fn f026_adapter_from_dataset() {
863        let schema = create_test_schema();
864        let batch = create_test_batch(&schema, 0, 5);
865        let dataset = ArrowDataset::from_batch(batch).unwrap();
866
867        let adapter = DatasetAdapter::from_dataset(&dataset).unwrap();
868        assert_eq!(adapter.row_count(), 5);
869        assert_eq!(adapter.column_count(), 3);
870        assert_eq!(adapter.field_name(0), Some("id"));
871    }
872
873    #[test]
874    fn f027_adapter_single_batch() {
875        let schema = create_test_schema();
876        let batch = create_test_batch(&schema, 0, 10);
877        let adapter = DatasetAdapter::from_batches(vec![batch], schema).unwrap();
878
879        assert_eq!(adapter.row_count(), 10);
880        assert_eq!(adapter.get_cell(0, 0).unwrap(), Some("id_0".to_string()));
881        assert_eq!(adapter.get_cell(9, 0).unwrap(), Some("id_9".to_string()));
882    }
883
884    #[test]
885    fn f028_adapter_multi_batch_boundaries() {
886        let schema = create_test_schema();
887        let batch1 = create_test_batch(&schema, 0, 3);
888        let batch2 = create_test_batch(&schema, 3, 3);
889        let batch3 = create_test_batch(&schema, 6, 3);
890        let adapter =
891            DatasetAdapter::from_batches(vec![batch1, batch2, batch3], schema.clone()).unwrap();
892
893        assert_eq!(adapter.row_count(), 9);
894        assert_eq!(adapter.get_cell(2, 0).unwrap(), Some("id_2".to_string()));
895        assert_eq!(adapter.get_cell(3, 0).unwrap(), Some("id_3".to_string()));
896        assert_eq!(adapter.get_cell(8, 0).unwrap(), Some("id_8".to_string()));
897    }
898
899    #[test]
900    fn f029_adapter_empty_schema_columns() {
901        let schema = create_test_schema();
902        let batch1 = create_test_batch(&schema, 0, 5);
903        let batch2 = create_test_batch(&schema, 5, 5);
904        let adapter = DatasetAdapter::from_batches(vec![batch1, batch2], schema.clone()).unwrap();
905
906        let widths = adapter.calculate_column_widths(100, 10);
907        assert_eq!(widths.len(), 3);
908    }
909
910    #[test]
911    fn f030_adapter_empty_batches() {
912        let schema = create_test_schema();
913        let adapter = DatasetAdapter::from_batches(vec![], schema).unwrap();
914        assert!(adapter.is_empty());
915        assert_eq!(adapter.row_count(), 0);
916        assert_eq!(adapter.get_cell(0, 0).unwrap(), None);
917    }
918
919    #[test]
920    fn f031_adapter_empty_schema_field_names() {
921        let schema = create_test_schema();
922        let adapter = DatasetAdapter::from_batches(vec![], schema).unwrap();
923        let names = adapter.field_names();
924        assert_eq!(names, vec!["id", "value", "score"]);
925    }
926
927    // === NEW TESTS FOR STREAMING AND SEARCH ===
928
929    #[test]
930    fn f032_adapter_is_streaming() {
931        let adapter = create_test_adapter();
932        assert!(
933            !adapter.is_streaming(),
934            "FALSIFIED: Small dataset should be InMemory"
935        );
936    }
937
938    #[test]
939    fn f033_adapter_streaming_mode() {
940        let schema = create_test_schema();
941        let batch = create_test_batch(&schema, 0, 5);
942        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
943        assert!(
944            adapter.is_streaming(),
945            "FALSIFIED: Should be Streaming mode"
946        );
947        assert_eq!(adapter.row_count(), 5);
948    }
949
950    #[test]
951    fn f034_adapter_search_finds_match() {
952        let adapter = create_test_adapter();
953        let result = adapter.search("id_5");
954        assert_eq!(
955            result,
956            Some(5),
957            "FALSIFIED: Search should find 'id_5' at row 5"
958        );
959    }
960
961    #[test]
962    fn f035_adapter_search_no_match() {
963        let adapter = create_test_adapter();
964        let result = adapter.search("nonexistent_value");
965        assert_eq!(result, None, "FALSIFIED: Search should return None");
966    }
967
968    #[test]
969    fn f036_adapter_search_empty_query() {
970        let adapter = create_test_adapter();
971        let result = adapter.search("");
972        assert_eq!(result, None, "FALSIFIED: Empty query should return None");
973    }
974
975    #[test]
976    fn f037_adapter_search_case_insensitive() {
977        let adapter = create_test_adapter();
978        let result = adapter.search("ID_3");
979        assert_eq!(
980            result,
981            Some(3),
982            "FALSIFIED: Search should be case insensitive"
983        );
984    }
985
986    #[test]
987    fn f038_adapter_search_from_wraps() {
988        let adapter = create_test_adapter();
989        // Search from row 8, should wrap and find id_0
990        let result = adapter.search_from("id_0", 8);
991        assert_eq!(result, Some(0), "FALSIFIED: Search should wrap around");
992    }
993
994    #[test]
995    fn f039_adapter_unicode_width() {
996        // Test that unicode width is correctly calculated
997        let schema = Arc::new(Schema::new(vec![Field::new(
998            "emoji",
999            DataType::Utf8,
1000            false,
1001        )]));
1002
1003        let batch = RecordBatch::try_new(
1004            schema.clone(),
1005            vec![Arc::new(StringArray::from(vec!["👨‍👩‍👧‍👦", "hello"]))],
1006        )
1007        .unwrap();
1008
1009        let adapter = DatasetAdapter::from_batches(vec![batch], schema).unwrap();
1010        let widths = adapter.calculate_column_widths(80, 10);
1011
1012        // Emoji should have visual width of 2 per component,
1013        // family emoji is complex but should be handled
1014        assert!(
1015            widths[0] >= 3,
1016            "FALSIFIED: Column width should be at least minimum"
1017        );
1018    }
1019
1020    // === Additional coverage tests for StreamingAdapter ===
1021
1022    #[test]
1023    fn f040_streaming_adapter_get_cell() {
1024        let schema = create_test_schema();
1025        let batch = create_test_batch(&schema, 0, 5);
1026        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1027
1028        let cell = adapter.get_cell(0, 0).unwrap();
1029        assert_eq!(cell, Some("id_0".to_string()));
1030    }
1031
1032    #[test]
1033    fn f041_streaming_adapter_field_name() {
1034        let schema = create_test_schema();
1035        let batch = create_test_batch(&schema, 0, 5);
1036        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1037
1038        assert_eq!(adapter.field_name(0), Some("id"));
1039        assert_eq!(adapter.field_name(1), Some("value"));
1040        assert_eq!(adapter.field_name(100), None);
1041    }
1042
1043    #[test]
1044    fn f042_streaming_adapter_field_type() {
1045        let schema = create_test_schema();
1046        let batch = create_test_batch(&schema, 0, 5);
1047        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1048
1049        let type_str = adapter.field_type(0).unwrap();
1050        assert!(type_str.contains("Utf8"));
1051        assert!(adapter.field_type(100).is_none());
1052    }
1053
1054    #[test]
1055    fn f043_streaming_adapter_field_nullable() {
1056        let schema = create_test_schema();
1057        let batch = create_test_batch(&schema, 0, 5);
1058        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1059
1060        assert_eq!(adapter.field_nullable(0), Some(false));
1061        assert!(adapter.field_nullable(100).is_none());
1062    }
1063
1064    #[test]
1065    fn f044_streaming_adapter_locate_row() {
1066        let schema = create_test_schema();
1067        let batch1 = create_test_batch(&schema, 0, 5);
1068        let batch2 = create_test_batch(&schema, 5, 5);
1069        let adapter = DatasetAdapter::streaming_from_batches(vec![batch1, batch2], schema).unwrap();
1070
1071        assert_eq!(adapter.locate_row(0), Some((0, 0)));
1072        assert_eq!(adapter.locate_row(4), Some((0, 4)));
1073        assert_eq!(adapter.locate_row(5), Some((1, 0)));
1074        assert_eq!(adapter.locate_row(9), Some((1, 4)));
1075        assert_eq!(adapter.locate_row(100), None);
1076    }
1077
1078    #[test]
1079    fn f045_streaming_adapter_column_widths() {
1080        let schema = create_test_schema();
1081        let batch = create_test_batch(&schema, 0, 5);
1082        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1083
1084        let widths = adapter.calculate_column_widths(80, 5);
1085        assert_eq!(widths.len(), 3);
1086        for w in &widths {
1087            assert!(*w >= 3);
1088        }
1089    }
1090
1091    #[test]
1092    fn f046_streaming_adapter_field_names() {
1093        let schema = create_test_schema();
1094        let batch = create_test_batch(&schema, 0, 5);
1095        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1096
1097        let names = adapter.field_names();
1098        assert_eq!(names, vec!["id", "value", "score"]);
1099    }
1100
1101    #[test]
1102    fn f047_streaming_adapter_out_of_bounds() {
1103        let schema = create_test_schema();
1104        let batch = create_test_batch(&schema, 0, 5);
1105        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1106
1107        // Row out of bounds
1108        assert_eq!(adapter.get_cell(100, 0).unwrap(), None);
1109        // Column out of bounds
1110        assert_eq!(adapter.get_cell(0, 100).unwrap(), None);
1111    }
1112
1113    #[test]
1114    fn f048_streaming_adapter_schema() {
1115        let schema = create_test_schema();
1116        let batch = create_test_batch(&schema, 0, 5);
1117        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1118
1119        assert_eq!(adapter.schema().fields().len(), 3);
1120    }
1121
1122    #[test]
1123    fn f049_search_from_empty_query() {
1124        let adapter = create_test_adapter();
1125        let result = adapter.search_from("", 0);
1126        assert_eq!(result, None);
1127    }
1128
1129    #[test]
1130    fn f050_search_from_no_wrap_needed() {
1131        let adapter = create_test_adapter();
1132        // Search from row 0, should find id_5 at row 5
1133        let result = adapter.search_from("id_5", 0);
1134        assert_eq!(result, Some(5));
1135    }
1136
1137    #[test]
1138    fn f051_search_from_no_match() {
1139        let adapter = create_test_adapter();
1140        let result = adapter.search_from("nonexistent", 0);
1141        assert_eq!(result, None);
1142    }
1143
1144    #[test]
1145    fn f052_streaming_adapter_empty_batches() {
1146        let schema = create_test_schema();
1147        let adapter = DatasetAdapter::streaming_from_batches(vec![], schema).unwrap();
1148
1149        assert_eq!(adapter.row_count(), 0);
1150        assert_eq!(adapter.column_count(), 3);
1151        assert!(adapter.is_streaming());
1152    }
1153
1154    #[test]
1155    fn f053_streaming_adapter_column_widths_empty() {
1156        let schema = Arc::new(Schema::empty());
1157        let adapter = DatasetAdapter::streaming_from_batches(vec![], schema).unwrap();
1158
1159        let widths = adapter.calculate_column_widths(80, 10);
1160        assert!(widths.is_empty());
1161    }
1162
1163    #[test]
1164    fn f054_streaming_adapter_column_widths_constrained() {
1165        let schema = create_test_schema();
1166        let batch = create_test_batch(&schema, 0, 5);
1167        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1168
1169        // Very constrained width
1170        let widths = adapter.calculate_column_widths(15, 5);
1171        let total: u16 = widths.iter().sum();
1172        let separators = (widths.len() as u16).saturating_sub(1);
1173        assert!(total + separators <= 15);
1174    }
1175
1176    #[test]
1177    fn f055_in_memory_adapter_empty_row_count() {
1178        let schema = create_test_schema();
1179        let adapter = DatasetAdapter::in_memory_from_batches(vec![], schema.clone()).unwrap();
1180
1181        assert_eq!(adapter.row_count(), 0);
1182        assert!(!adapter.is_streaming());
1183    }
1184
1185    #[test]
1186    fn f056_in_memory_adapter_locate_row_boundary() {
1187        let schema = create_test_schema();
1188        let batch1 = create_test_batch(&schema, 0, 3);
1189        let batch2 = create_test_batch(&schema, 3, 3);
1190        let batch3 = create_test_batch(&schema, 6, 4);
1191        let adapter =
1192            DatasetAdapter::in_memory_from_batches(vec![batch1, batch2, batch3], schema).unwrap();
1193
1194        // Test exact batch boundaries
1195        assert_eq!(adapter.locate_row(2), Some((0, 2))); // Last of batch 0
1196        assert_eq!(adapter.locate_row(3), Some((1, 0))); // First of batch 1
1197        assert_eq!(adapter.locate_row(5), Some((1, 2))); // Last of batch 1
1198        assert_eq!(adapter.locate_row(6), Some((2, 0))); // First of batch 2
1199        assert_eq!(adapter.locate_row(9), Some((2, 3))); // Last of batch 2
1200    }
1201
1202    #[test]
1203    fn f057_search_on_streaming_adapter() {
1204        let schema = create_test_schema();
1205        let batch = create_test_batch(&schema, 0, 10);
1206        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1207
1208        let result = adapter.search("id_7");
1209        assert_eq!(result, Some(7));
1210    }
1211
1212    #[test]
1213    fn f058_search_from_on_streaming_adapter() {
1214        let schema = create_test_schema();
1215        let batch = create_test_batch(&schema, 0, 10);
1216        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1217
1218        // Search from row 8, wraps to find id_3
1219        let result = adapter.search_from("id_3", 8);
1220        assert_eq!(result, Some(3));
1221    }
1222
1223    #[test]
1224    fn f059_search_partial_match() {
1225        let adapter = create_test_adapter();
1226        // Should match "id_" prefix
1227        let result = adapter.search("id_");
1228        assert_eq!(result, Some(0));
1229    }
1230
1231    #[test]
1232    fn f060_search_numeric_value() {
1233        let adapter = create_test_adapter();
1234        // Search for numeric value in the value column
1235        let result = adapter.search("10");
1236        assert!(result.is_some());
1237    }
1238
1239    #[test]
1240    fn f061_empty_adapter_search() {
1241        let adapter = DatasetAdapter::empty();
1242        assert_eq!(adapter.search("anything"), None);
1243        assert_eq!(adapter.search_from("anything", 0), None);
1244    }
1245
1246    #[test]
1247    fn f062_column_widths_zero_sample() {
1248        let adapter = create_test_adapter();
1249        let widths = adapter.calculate_column_widths(80, 0);
1250        // Should still have widths based on headers
1251        assert_eq!(widths.len(), 3);
1252    }
1253
1254    #[test]
1255    fn f063_column_widths_large_sample() {
1256        let adapter = create_test_adapter();
1257        // Sample more rows than exist
1258        let widths = adapter.calculate_column_widths(80, 1000);
1259        assert_eq!(widths.len(), 3);
1260    }
1261
1262    #[test]
1263    fn f064_streaming_locate_row_exact_boundary() {
1264        let schema = create_test_schema();
1265        let batch1 = create_test_batch(&schema, 0, 5);
1266        let batch2 = create_test_batch(&schema, 5, 5);
1267        let adapter = DatasetAdapter::streaming_from_batches(vec![batch1, batch2], schema).unwrap();
1268
1269        // Test the binary_search Ok branch
1270        let loc = adapter.locate_row(0);
1271        assert_eq!(loc, Some((0, 0)));
1272
1273        let loc = adapter.locate_row(5);
1274        assert_eq!(loc, Some((1, 0)));
1275    }
1276
1277    #[test]
1278    fn f065_in_memory_adapter_debug() {
1279        let adapter = create_test_adapter();
1280        let debug = format!("{:?}", adapter);
1281        assert!(debug.contains("InMemory"));
1282    }
1283
1284    #[test]
1285    fn f066_streaming_adapter_debug() {
1286        let schema = create_test_schema();
1287        let batch = create_test_batch(&schema, 0, 5);
1288        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1289        let debug = format!("{:?}", adapter);
1290        assert!(debug.contains("Streaming"));
1291    }
1292
1293    #[test]
1294    fn f067_in_memory_empty_direct() {
1295        let adapter = InMemoryAdapter::empty();
1296        assert_eq!(adapter.row_count(), 0);
1297        assert_eq!(adapter.column_count(), 0);
1298        assert!(adapter.schema().fields().is_empty());
1299    }
1300
1301    #[test]
1302    fn f068_adapter_from_dataset_small() {
1303        let schema = create_test_schema();
1304        let batch = create_test_batch(&schema, 0, 50);
1305        let dataset = ArrowDataset::from_batch(batch).unwrap();
1306
1307        let adapter = DatasetAdapter::from_dataset(&dataset).unwrap();
1308        // Should be in-memory mode for small datasets
1309        assert!(!adapter.is_streaming());
1310        assert_eq!(adapter.row_count(), 50);
1311    }
1312
1313    // ========================================================================
1314    // Additional TUI Adapter Tests for Coverage
1315    // ========================================================================
1316
1317    #[test]
1318    fn f069_in_memory_adapter_get_cell_batch_not_found() {
1319        // Test when batch index is invalid
1320        let schema = create_test_schema();
1321        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1322        let result = adapter.get_cell(0, 0).unwrap();
1323        assert!(result.is_none());
1324    }
1325
1326    #[test]
1327    fn f070_streaming_adapter_get_cell_batch_not_found() {
1328        // Test when batch index is invalid in streaming mode
1329        let schema = create_test_schema();
1330        let adapter = StreamingAdapter::new(vec![], schema).unwrap();
1331        let result = adapter.get_cell(0, 0).unwrap();
1332        assert!(result.is_none());
1333    }
1334
1335    #[test]
1336    fn f071_in_memory_adapter_locate_row_at_batch_boundary() {
1337        let schema = create_test_schema();
1338        let batch1 = create_test_batch(&schema, 0, 5);
1339        let batch2 = create_test_batch(&schema, 5, 5);
1340        let adapter = InMemoryAdapter::new(vec![batch1, batch2], schema).unwrap();
1341
1342        // Test exact batch boundary (binary_search returns Ok)
1343        let loc = adapter.locate_row(5);
1344        assert_eq!(loc, Some((1, 0)));
1345
1346        // Test one before boundary
1347        let loc = adapter.locate_row(4);
1348        assert_eq!(loc, Some((0, 4)));
1349    }
1350
1351    #[test]
1352    fn f072_streaming_adapter_locate_row_at_batch_boundary() {
1353        let schema = create_test_schema();
1354        let batch1 = create_test_batch(&schema, 0, 5);
1355        let batch2 = create_test_batch(&schema, 5, 5);
1356        let adapter = StreamingAdapter::new(vec![batch1, batch2], schema).unwrap();
1357
1358        // Test exact batch boundary
1359        let loc = adapter.locate_row(5);
1360        assert_eq!(loc, Some((1, 0)));
1361    }
1362
1363    #[test]
1364    fn f073_in_memory_adapter_schema_access() {
1365        let schema = create_test_schema();
1366        let adapter = InMemoryAdapter::new(vec![], schema.clone()).unwrap();
1367        assert_eq!(adapter.schema().fields().len(), 3);
1368    }
1369
1370    #[test]
1371    fn f074_streaming_adapter_schema_access() {
1372        let schema = create_test_schema();
1373        let adapter = StreamingAdapter::new(vec![], schema.clone()).unwrap();
1374        assert_eq!(adapter.schema().fields().len(), 3);
1375    }
1376
1377    #[test]
1378    fn f075_in_memory_adapter_row_count() {
1379        let schema = create_test_schema();
1380        let batch = create_test_batch(&schema, 0, 7);
1381        let adapter = InMemoryAdapter::new(vec![batch], schema).unwrap();
1382        assert_eq!(adapter.row_count(), 7);
1383    }
1384
1385    #[test]
1386    fn f076_streaming_adapter_row_count() {
1387        let schema = create_test_schema();
1388        let batch = create_test_batch(&schema, 0, 7);
1389        let adapter = StreamingAdapter::new(vec![batch], schema).unwrap();
1390        assert_eq!(adapter.row_count(), 7);
1391    }
1392
1393    #[test]
1394    fn f077_in_memory_adapter_column_count() {
1395        let schema = create_test_schema();
1396        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1397        assert_eq!(adapter.column_count(), 3);
1398    }
1399
1400    #[test]
1401    fn f078_streaming_adapter_column_count() {
1402        let schema = create_test_schema();
1403        let adapter = StreamingAdapter::new(vec![], schema).unwrap();
1404        assert_eq!(adapter.column_count(), 3);
1405    }
1406
1407    #[test]
1408    fn f079_in_memory_adapter_field_names() {
1409        let schema = create_test_schema();
1410        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1411        let names = adapter.field_names();
1412        assert_eq!(names, vec!["id", "value", "score"]);
1413    }
1414
1415    #[test]
1416    fn f080_in_memory_adapter_field_name_out_of_bounds() {
1417        let schema = create_test_schema();
1418        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1419        assert!(adapter.field_name(100).is_none());
1420    }
1421
1422    #[test]
1423    fn f081_in_memory_adapter_field_type_out_of_bounds() {
1424        let schema = create_test_schema();
1425        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1426        assert!(adapter.field_type(100).is_none());
1427    }
1428
1429    #[test]
1430    fn f082_in_memory_adapter_field_nullable_out_of_bounds() {
1431        let schema = create_test_schema();
1432        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1433        assert!(adapter.field_nullable(100).is_none());
1434    }
1435
1436    #[test]
1437    fn f083_streaming_adapter_field_name_out_of_bounds() {
1438        let schema = create_test_schema();
1439        let adapter = StreamingAdapter::new(vec![], schema).unwrap();
1440        assert!(adapter.field_name(100).is_none());
1441    }
1442
1443    #[test]
1444    fn f084_streaming_adapter_field_type_out_of_bounds() {
1445        let schema = create_test_schema();
1446        let adapter = StreamingAdapter::new(vec![], schema).unwrap();
1447        assert!(adapter.field_type(100).is_none());
1448    }
1449
1450    #[test]
1451    fn f085_streaming_adapter_field_nullable_out_of_bounds() {
1452        let schema = create_test_schema();
1453        let adapter = StreamingAdapter::new(vec![], schema).unwrap();
1454        assert!(adapter.field_nullable(100).is_none());
1455    }
1456
1457    #[test]
1458    fn f086_in_memory_calculate_column_widths_empty_schema() {
1459        let schema = Arc::new(Schema::empty());
1460        let adapter = InMemoryAdapter::new(vec![], schema).unwrap();
1461        let widths = adapter.calculate_column_widths(80, 10);
1462        assert!(widths.is_empty());
1463    }
1464
1465    #[test]
1466    fn f087_in_memory_calculate_column_widths_scaling() {
1467        let schema = create_test_schema();
1468        let batch = create_test_batch(&schema, 0, 5);
1469        let adapter = InMemoryAdapter::new(vec![batch], schema).unwrap();
1470
1471        // Test with very narrow width to force scaling
1472        let widths = adapter.calculate_column_widths(12, 5);
1473        let total: u16 = widths.iter().sum();
1474        let separators = (widths.len() as u16).saturating_sub(1);
1475        assert!(total + separators <= 12);
1476    }
1477
1478    #[test]
1479    fn f088_streaming_calculate_column_widths_scaling() {
1480        let schema = create_test_schema();
1481        let batch = create_test_batch(&schema, 0, 5);
1482        let adapter = StreamingAdapter::new(vec![batch], schema).unwrap();
1483
1484        // Test with very narrow width to force scaling
1485        let widths = adapter.calculate_column_widths(12, 5);
1486        let total: u16 = widths.iter().sum();
1487        let separators = (widths.len() as u16).saturating_sub(1);
1488        assert!(total + separators <= 12);
1489    }
1490
1491    #[test]
1492    fn f089_adapter_search_empty_dataset() {
1493        let adapter = DatasetAdapter::empty();
1494        assert!(adapter.search("anything").is_none());
1495    }
1496
1497    #[test]
1498    fn f090_adapter_search_from_start_row_beyond_total() {
1499        let adapter = create_test_adapter();
1500        // Start from row 100 (beyond total of 10)
1501        let result = adapter.search_from("id_0", 100);
1502        // Should wrap and find id_0
1503        assert_eq!(result, Some(0));
1504    }
1505
1506    #[test]
1507    fn f091_in_memory_locate_row_binary_search_ok_branch() {
1508        // Create batches such that row 0 is exactly at batch boundary offset
1509        let schema = create_test_schema();
1510        let batch1 = create_test_batch(&schema, 0, 3);
1511        let batch2 = create_test_batch(&schema, 3, 3);
1512        let adapter = InMemoryAdapter::new(vec![batch1, batch2], schema).unwrap();
1513
1514        // Row 3 should be at offset 3, which is in batch 1
1515        let loc = adapter.locate_row(3);
1516        assert_eq!(loc, Some((1, 0)));
1517
1518        // Row 0 is at offset 0, binary_search returns Ok(0)
1519        let loc = adapter.locate_row(0);
1520        assert_eq!(loc, Some((0, 0)));
1521    }
1522
1523    #[test]
1524    fn f092_streaming_locate_row_binary_search_ok_branch() {
1525        let schema = create_test_schema();
1526        let batch1 = create_test_batch(&schema, 0, 3);
1527        let batch2 = create_test_batch(&schema, 3, 3);
1528        let adapter = StreamingAdapter::new(vec![batch1, batch2], schema).unwrap();
1529
1530        let loc = adapter.locate_row(3);
1531        assert_eq!(loc, Some((1, 0)));
1532
1533        let loc = adapter.locate_row(0);
1534        assert_eq!(loc, Some((0, 0)));
1535    }
1536
1537    #[test]
1538    fn f093_adapter_field_type_streaming_mode() {
1539        let schema = create_test_schema();
1540        let batch = create_test_batch(&schema, 0, 5);
1541        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1542
1543        let type_str = adapter.field_type(0);
1544        assert!(type_str.is_some());
1545        assert!(type_str.unwrap().contains("Utf8"));
1546    }
1547
1548    #[test]
1549    fn f094_adapter_field_nullable_streaming_mode() {
1550        let schema = create_test_schema();
1551        let batch = create_test_batch(&schema, 0, 5);
1552        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1553
1554        let nullable = adapter.field_nullable(0);
1555        assert_eq!(nullable, Some(false));
1556    }
1557
1558    #[test]
1559    fn f095_adapter_locate_row_streaming_mode() {
1560        let schema = create_test_schema();
1561        let batch = create_test_batch(&schema, 0, 5);
1562        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1563
1564        let loc = adapter.locate_row(2);
1565        assert_eq!(loc, Some((0, 2)));
1566
1567        let loc_oob = adapter.locate_row(100);
1568        assert!(loc_oob.is_none());
1569    }
1570
1571    #[test]
1572    fn f096_in_memory_adapter_with_many_batches() {
1573        let schema = create_test_schema();
1574        let batches: Vec<_> = (0..10)
1575            .map(|i| create_test_batch(&schema, i * 5, 5))
1576            .collect();
1577        let adapter = InMemoryAdapter::new(batches, schema).unwrap();
1578
1579        assert_eq!(adapter.row_count(), 50);
1580
1581        // Test locating rows in different batches
1582        assert_eq!(adapter.locate_row(0), Some((0, 0)));
1583        assert_eq!(adapter.locate_row(7), Some((1, 2)));
1584        assert_eq!(adapter.locate_row(49), Some((9, 4)));
1585    }
1586
1587    #[test]
1588    fn f097_streaming_adapter_with_many_batches() {
1589        let schema = create_test_schema();
1590        let batches: Vec<_> = (0..10)
1591            .map(|i| create_test_batch(&schema, i * 5, 5))
1592            .collect();
1593        let adapter = StreamingAdapter::new(batches, schema).unwrap();
1594
1595        assert_eq!(adapter.row_count(), 50);
1596        assert_eq!(adapter.locate_row(7), Some((1, 2)));
1597    }
1598
1599    #[test]
1600    fn f098_adapter_search_in_numeric_column() {
1601        let adapter = create_test_adapter();
1602        // Values column contains "0", "10", "20", etc.
1603        let result = adapter.search("30");
1604        assert!(result.is_some());
1605    }
1606
1607    #[test]
1608    fn f099_adapter_search_partial_match() {
1609        let adapter = create_test_adapter();
1610        // Should find partial matches
1611        let result = adapter.search("d_3");
1612        assert_eq!(result, Some(3));
1613    }
1614
1615    #[test]
1616    fn f100_adapter_is_empty_with_batches() {
1617        let schema = create_test_schema();
1618        let batch = create_test_batch(&schema, 0, 5);
1619        let adapter = DatasetAdapter::from_batches(vec![batch], schema).unwrap();
1620        assert!(!adapter.is_empty());
1621    }
1622
1623    #[test]
1624    fn f101_streaming_adapter_search() {
1625        let schema = create_test_schema();
1626        let batch = create_test_batch(&schema, 0, 10);
1627        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1628
1629        let result = adapter.search("id_5");
1630        assert_eq!(result, Some(5));
1631    }
1632
1633    #[test]
1634    fn f102_streaming_adapter_search_from() {
1635        let schema = create_test_schema();
1636        let batch = create_test_batch(&schema, 0, 10);
1637        let adapter = DatasetAdapter::streaming_from_batches(vec![batch], schema).unwrap();
1638
1639        // Search from row 7, should wrap and find id_2
1640        let result = adapter.search_from("id_2", 7);
1641        assert_eq!(result, Some(2));
1642    }
1643
1644    #[test]
1645    fn f103_calculate_column_widths_with_unicode() {
1646        // Test that unicode width calculation works correctly
1647        let schema = Arc::new(Schema::new(vec![Field::new("name", DataType::Utf8, false)]));
1648
1649        let batch = RecordBatch::try_new(
1650            schema.clone(),
1651            vec![Arc::new(StringArray::from(vec!["Hello", "World"]))],
1652        )
1653        .unwrap();
1654
1655        let adapter = DatasetAdapter::from_batches(vec![batch], schema).unwrap();
1656        let widths = adapter.calculate_column_widths(80, 10);
1657        assert!(!widths.is_empty());
1658        assert!(widths[0] >= 4); // "name" has 4 chars
1659    }
1660
1661    #[test]
1662    fn f104_in_memory_empty_direct_methods() {
1663        let adapter = InMemoryAdapter::empty();
1664        assert_eq!(adapter.row_count(), 0);
1665        assert_eq!(adapter.column_count(), 0);
1666        assert!(adapter.field_name(0).is_none());
1667        assert!(adapter.field_type(0).is_none());
1668        assert!(adapter.field_nullable(0).is_none());
1669        assert!(adapter.locate_row(0).is_none());
1670    }
1671}