aprender-core 0.30.0

Next-generation machine learning library in pure Rust
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
//! Reliable Demo Best Practices (Part IX of chat-template-improvement-spec v1.4.0)
//!
//! Implements world-class developer experience inspired by:
//! - Hugging Face (model cards, auto-download)
//! - Ollama (zero-config CLI)
//! - llama.cpp (performance stats)
//! - llamafile (single-binary deployment)
//!
//! # Checklist Items
//!
//! - RDB-01: Zero-Config Guarantee
//! - RDB-02: Prerequisites & Environment Isolation
//! - RDB-03: Interactive & Non-Interactive Modes
//! - RDB-04: Robust Error Recovery
//! - RDB-05: Performance Transparency
//! - RDB-06: Model Provenance & Licensing

use crate::AprenderError;
use std::io::{self, IsTerminal, Write};
use std::path::{Path, PathBuf};
use std::time::Duration;

#[cfg(feature = "hf-hub-integration")]
use dirs;

// ============================================================================
// RDB-01: Zero-Config Guarantee
// ============================================================================

/// Model cache configuration for zero-config downloads (RDB-01).
///
/// Tracks where models live on disk and whether missing models may be
/// fetched automatically. See the `Default` impl for how the cache
/// directory is chosen.
#[derive(Debug, Clone)]
pub struct ModelCache {
    /// Cache directory path; models are stored directly under this dir
    pub cache_dir: PathBuf,
    /// Whether to auto-download missing models
    pub auto_download: bool,
    /// Maximum cache size in bytes (0 = unlimited)
    pub max_size_bytes: u64,
}

/// Resolve the default cache directory for downloaded models.
///
/// With the `hf-hub-integration` feature, uses the platform cache dir
/// (falling back to the current directory) joined with `apr/models`.
#[cfg(feature = "hf-hub-integration")]
fn default_cache_dir() -> PathBuf {
    dirs::cache_dir()
        .unwrap_or_else(|| PathBuf::from("."))
        .join("apr")
        .join("models")
}

/// Resolve the default cache directory for downloaded models.
///
/// Without the `hf-hub-integration` feature, honors the `APR_CACHE_DIR`
/// environment variable and falls back to `.apr_cache` in the current dir.
#[cfg(not(feature = "hf-hub-integration"))]
fn default_cache_dir() -> PathBuf {
    std::env::var("APR_CACHE_DIR")
        .map_or_else(|_| PathBuf::from(".apr_cache"), PathBuf::from)
        .join("models")
}

impl Default for ModelCache {
    /// Zero-config defaults: auto-download enabled, unlimited cache size.
    fn default() -> Self {
        Self {
            cache_dir: default_cache_dir(),
            auto_download: true,
            max_size_bytes: 0, // 0 means no size cap
        }
    }
}

impl ModelCache {
    /// Create a new model cache with custom directory
    #[must_use]
    pub fn new(cache_dir: PathBuf) -> Self {
        Self {
            cache_dir,
            auto_download: true,
            max_size_bytes: 0,
        }
    }

    /// Get the path where a model should be cached
    #[must_use]
    pub fn model_path(&self, model_name: &str) -> PathBuf {
        self.cache_dir.join(model_name)
    }

    /// Check if a model exists in the cache
    #[must_use]
    pub fn has_model(&self, model_name: &str) -> bool {
        self.model_path(model_name).exists()
    }

    /// Ensure cache directory exists
    pub fn ensure_dir(&self) -> Result<(), AprenderError> {
        std::fs::create_dir_all(&self.cache_dir).map_err(|e| {
            AprenderError::Io(io::Error::other(format!(
                "Failed to create cache directory: {}",
                e
            )))
        })
    }
}

/// Where a model can be obtained from during auto-download.
#[derive(Debug, Clone)]
pub enum ModelSource {
    /// Hugging Face Hub (e.g., "hf://Qwen/Qwen2-0.5B-Instruct")
    HuggingFace { repo_id: String, filename: String },
    /// Direct URL
    Url(String),
    /// Local file path
    Local(PathBuf),
}

impl ModelSource {
    /// Parse a model source string into its kind.
    ///
    /// Recognized formats:
    /// - `hf://owner/repo/file.safetensors` -> [`Self::HuggingFace`]
    ///   (filename defaults to `model.safetensors` when omitted)
    /// - `http://...` / `https://...` -> [`Self::Url`]
    /// - anything else -> [`Self::Local`]
    #[must_use]
    pub fn parse(source: &str) -> Self {
        if let Some(rest) = source.strip_prefix("hf://") {
            // Split into owner / repo / optional filename (at most 3 pieces).
            let mut segments = rest.splitn(3, '/');
            if let (Some(owner), Some(repo)) = (segments.next(), segments.next()) {
                return Self::HuggingFace {
                    repo_id: format!("{}/{}", owner, repo),
                    filename: segments.next().unwrap_or("model.safetensors").to_string(),
                };
            }
            // Malformed hf:// (no repo segment) falls through to Local below,
            // matching the URL checks first, exactly like the http branch.
        }
        if source.starts_with("http://") || source.starts_with("https://") {
            Self::Url(source.to_string())
        } else {
            Self::Local(PathBuf::from(source))
        }
    }

    /// True for sources that may require a network download.
    #[must_use]
    pub fn is_remote(&self) -> bool {
        // Everything except a local path is remote.
        !matches!(self, Self::Local(_))
    }
}

// ============================================================================
// RDB-02: Prerequisites & Environment Isolation
// ============================================================================

/// Outcome of probing for a single external prerequisite (RDB-02).
#[derive(Debug, Clone)]
pub struct PrerequisiteCheck {
    /// Name of the prerequisite
    pub name: String,
    /// Whether it's satisfied
    pub satisfied: bool,
    /// Version found (if applicable)
    pub version: Option<String>,
    /// How to install if missing
    pub install_hint: Option<String>,
}

impl PrerequisiteCheck {
    /// Build a passing check; no version or install hint is recorded.
    #[must_use]
    pub fn satisfied(name: &str) -> Self {
        Self {
            name: name.to_owned(),
            satisfied: true,
            version: None,
            install_hint: None,
        }
    }

    /// Build a failing check carrying an installation hint for the user.
    #[must_use]
    pub fn missing(name: &str, install_hint: &str) -> Self {
        Self {
            name: name.to_owned(),
            satisfied: false,
            version: None,
            install_hint: Some(install_hint.to_owned()),
        }
    }
}

/// Check if an external command exists on the current `PATH`.
///
/// Uses the platform's lookup utility: `which` on Unix-like systems and
/// `where` on Windows (where `which` does not exist, so the original code
/// always reported "missing"). Any failure to spawn the lookup tool is
/// treated as "command not found".
#[must_use]
pub fn check_command(command: &str) -> PrerequisiteCheck {
    // `which` is unavailable on Windows; `where` is the native equivalent.
    let lookup = if cfg!(windows) { "where" } else { "which" };
    let exists = std::process::Command::new(lookup)
        .arg(command)
        .output()
        .map(|o| o.status.success())
        .unwrap_or(false);

    if exists {
        PrerequisiteCheck::satisfied(command)
    } else {
        PrerequisiteCheck::missing(
            command,
            &format!("Please install {}: check your package manager", command),
        )
    }
}

/// Run [`check_command`] for every entry in `required` and collect the results.
pub fn check_prerequisites(required: &[&str]) -> Vec<PrerequisiteCheck> {
    let mut results = Vec::with_capacity(required.len());
    for cmd in required {
        results.push(check_command(cmd));
    }
    results
}

/// Write a human-readable summary of prerequisite checks to stderr,
/// including install hints for anything missing.
pub fn print_prerequisites(checks: &[PrerequisiteCheck]) {
    for check in checks {
        if !check.satisfied {
            eprintln!("  ✗ {} missing", check.name);
            if let Some(hint) = &check.install_hint {
                eprintln!("    → {}", hint);
            }
            continue;
        }
        eprintln!("  ✓ {} found", check.name);
    }
}

// ============================================================================
// RDB-03: Interactive & Non-Interactive Modes
// ============================================================================

/// Execution mode based on terminal detection (RDB-03).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExecutionMode {
    /// Running in a terminal with TTY
    Interactive,
    /// Running in a pipe or batch mode
    Batch,
}

impl ExecutionMode {
    /// Probe stdin and stdout: both must be TTYs for interactive mode.
    #[must_use]
    pub fn detect() -> Self {
        let on_tty = io::stdin().is_terminal() && io::stdout().is_terminal();
        if on_tty {
            Self::Interactive
        } else {
            Self::Batch
        }
    }

    /// Returns true when attached to a terminal.
    #[must_use]
    pub fn is_interactive(&self) -> bool {
        matches!(self, Self::Interactive)
    }

    /// Returns true when running in a pipe or batch context.
    #[must_use]
    pub fn is_batch(&self) -> bool {
        matches!(self, Self::Batch)
    }
}

/// Output formatter that adapts to the detected execution mode (RDB-03).
///
/// Status/progress chatter goes to stderr and is shown only in interactive,
/// non-JSON runs; results always go to stdout, errors always to stderr.
#[derive(Debug)]
pub struct AdaptiveOutput {
    mode: ExecutionMode,
    json_output: bool,
}

impl Default for AdaptiveOutput {
    fn default() -> Self {
        Self::new()
    }
}

impl AdaptiveOutput {
    /// Detect the current execution mode; plain-text output by default.
    #[must_use]
    pub fn new() -> Self {
        Self {
            json_output: false,
            mode: ExecutionMode::detect(),
        }
    }

    /// Builder: switch to JSON output (suppresses status/progress chatter).
    #[must_use]
    pub fn with_json(mut self) -> Self {
        self.json_output = true;
        self
    }

    /// Builder: override the auto-detected execution mode.
    #[must_use]
    pub fn with_mode(mut self, mode: ExecutionMode) -> Self {
        self.mode = mode;
        self
    }

    /// Print a status line to stderr (interactive, non-JSON runs only).
    pub fn status(&self, msg: &str) {
        if self.chatty() {
            eprintln!("{}", msg);
        }
    }

    /// Print an in-place progress update to stderr (interactive, non-JSON only).
    pub fn progress(&self, current: usize, total: usize, msg: &str) {
        if self.chatty() {
            eprint!("\r[{}/{}] {}", current, total, msg);
            // Carriage-return updates need an explicit flush to appear.
            let _ = io::stderr().flush();
        }
    }

    /// Print the final result on stdout — emitted in every mode.
    pub fn result(&self, data: &str) {
        println!("{}", data);
    }

    /// Print an error message on stderr — emitted in every mode.
    pub fn error(&self, msg: &str) {
        eprintln!("Error: {}", msg);
    }

    /// Human-oriented chatter is shown only when interactive and not JSON.
    fn chatty(&self) -> bool {
        self.mode.is_interactive() && !self.json_output
    }
}

// ============================================================================
// RDB-04: Robust Error Recovery
// ============================================================================

/// An error paired with an optional recovery suggestion (RDB-04).
#[derive(Debug)]
pub struct RecoverableError {
    /// Error message
    pub message: String,
    /// Suggested recovery action
    pub recovery: Option<String>,
    /// Whether automatic recovery is possible
    pub auto_recoverable: bool,
}

impl RecoverableError {
    /// Start an error with just a message; no recovery hint, not auto-recoverable.
    #[must_use]
    pub fn new(message: &str) -> Self {
        Self {
            message: message.to_owned(),
            recovery: None,
            auto_recoverable: false,
        }
    }

    /// Builder: attach a suggested recovery action.
    #[must_use]
    pub fn with_recovery(mut self, recovery: &str) -> Self {
        self.recovery = Some(recovery.to_owned());
        self
    }

    /// Builder: mark this error as automatically recoverable.
    #[must_use]
    pub fn auto_recoverable(mut self) -> Self {
        self.auto_recoverable = true;
        self
    }

    /// Render the error (and the recovery hint, when present) for display.
    #[must_use]
    pub fn format(&self) -> String {
        match &self.recovery {
            Some(recovery) => format!(
                "Error: {}\n\nSuggested fix: {}",
                self.message, recovery
            ),
            None => format!("Error: {}", self.message),
        }
    }
}

/// Pre-built [`RecoverableError`]s for common failure scenarios.
pub mod recovery {
    use super::{Path, RecoverableError};

    /// Model file not found at the given path.
    #[must_use]
    pub fn model_not_found(path: &Path) -> RecoverableError {
        let message = format!("Model file not found: {}", path.display());
        RecoverableError::new(&message)
            .with_recovery("Run 'apr download <model>' to fetch the model, or check the path")
    }

    /// Downloaded model's checksum does not match the expected value.
    /// Marked auto-recoverable: a re-download can fix it without user action.
    #[must_use]
    pub fn checksum_mismatch(expected: &str, actual: &str) -> RecoverableError {
        let message = format!(
            "Model checksum mismatch\n  Expected: {}\n  Actual: {}",
            expected, actual
        );
        RecoverableError::new(&message)
            .with_recovery("The model file may be corrupted. Delete it and re-download with 'apr download --force <model>'")
            .auto_recoverable()
    }

    /// GPU was requested but no usable GPU backend is present.
    #[must_use]
    pub fn gpu_not_available() -> RecoverableError {
        let recovery =
            "Falling back to CPU. For GPU support, ensure CUDA/Metal drivers are installed";
        RecoverableError::new("GPU acceleration requested but not available")
            .with_recovery(recovery)
    }

    /// Not enough memory for the requested model.
    #[must_use]
    pub fn out_of_memory(required: usize, available: usize) -> RecoverableError {
        // Byte counts are reported in decimal megabytes (1 MB = 1_000_000 bytes).
        let message = format!(
            "Insufficient memory: need {} MB, have {} MB",
            required / 1_000_000,
            available / 1_000_000
        );
        RecoverableError::new(&message)
            .with_recovery("Try a smaller model or enable quantization with '--quantize int4'")
    }
}

// ============================================================================
// RDB-05: Performance Transparency
// ============================================================================

/// Performance metrics for transparency (RDB-05).
///
/// Plain data carrier for run statistics; all fields start zeroed/empty
/// via `Default`. How the fields are populated lives in the `performance`
/// submodule — TODO confirm against that file.
#[derive(Debug, Clone, Default)]
pub struct PerformanceMetrics {
    /// Model load time
    pub load_time: Duration,
    /// Time to first token (latency before the first generated token)
    pub time_to_first_token: Duration,
    /// Number of tokens generated during the run
    pub tokens_generated: usize,
    /// Total generation time (excludes load time, presumably — verify against producer)
    pub generation_time: Duration,
    /// Peak memory usage (bytes)
    pub peak_memory: usize,
    /// Backend used (e.g., "AVX2", "Metal", "CUDA")
    pub backend: String,
}

mod performance;
pub use performance::*;