// ferrum_cli/commands/list.rs — implementation of the `ferrum list` subcommand.
use std::fs;
use std::path::{Path, PathBuf};

use clap::Args;
use colored::*;
use ferrum_types::Result;

use crate::config::CliConfig;
9
/// Arguments for `ferrum list`: lists locally cached models.
/// Takes no flags; all state comes from the HF cache directory on disk.
#[derive(Args)]
pub struct ListCommand {}
12
13pub async fn execute(_cmd: ListCommand, config: CliConfig) -> Result<()> {
14 let cache_dir = get_hf_cache_dir(&config);
15 let hub_dir = cache_dir.join("hub");
16
17 if !hub_dir.exists() {
18 println!("{}", "No models downloaded yet.".dimmed());
19 println!();
20 println!("Run {} to download a model.", "ferrum pull <model>".cyan());
21 return Ok(());
22 }
23
24 let mut models: Vec<ModelInfo> = Vec::new();
25
26 if let Ok(entries) = fs::read_dir(&hub_dir) {
28 for entry in entries.flatten() {
29 let name = entry.file_name().to_string_lossy().to_string();
30 if name.starts_with("models--") {
31 if let Some(info) = get_model_info(&entry.path()) {
32 models.push(info);
33 }
34 }
35 }
36 }
37
38 if models.is_empty() {
39 println!("{}", "No models downloaded yet.".dimmed());
40 println!();
41 println!("Run {} to download a model.", "ferrum pull <model>".cyan());
42 return Ok(());
43 }
44
45 models.sort_by(|a, b| match (a.is_complete, b.is_complete) {
47 (true, false) => std::cmp::Ordering::Less,
48 (false, true) => std::cmp::Ordering::Greater,
49 _ => a.name.cmp(&b.name),
50 });
51
52 println!(
54 "{:<40} {:<12} {:<10} {:<16}",
55 "NAME".bold(),
56 "SIZE".bold(),
57 "STATUS".bold(),
58 "MODIFIED".bold()
59 );
60
61 for model in models {
63 let status = if model.is_complete {
64 "ready".green().to_string()
65 } else {
66 "incomplete".yellow().to_string()
67 };
68
69 let name_display = if model.is_complete {
70 model.name.normal().to_string()
71 } else {
72 model.name.dimmed().to_string()
73 };
74
75 println!(
76 "{:<40} {:<12} {:<10} {:<16}",
77 name_display,
78 format_size(model.size),
79 status,
80 model.modified
81 );
82 }
83
84 Ok(())
85}
86
/// One row of the `ferrum list` table, derived from a cached model directory.
struct ModelInfo {
    // Model id in "org/name" form, reconstructed from the "models--org--name"
    // cache directory name.
    name: String,
    // Total bytes of all files under the model's blobs/ directory (0 if absent).
    size: u64,
    // Directory mtime formatted "%Y-%m-%d %H:%M", or "unknown" if unreadable.
    modified: String,
    // True when some snapshot dir contains recognizable weight files.
    is_complete: bool,
}
93
94fn get_model_info(model_dir: &PathBuf) -> Option<ModelInfo> {
95 let dir_name = model_dir.file_name()?.to_string_lossy().to_string();
97 let name = dir_name.strip_prefix("models--")?.replace("--", "/");
98
99 let blobs_dir = model_dir.join("blobs");
101 let size = if blobs_dir.exists() {
102 get_dir_size(&blobs_dir)
103 } else {
104 0
105 };
106
107 let snapshots_dir = model_dir.join("snapshots");
109 let is_complete = check_model_complete(&snapshots_dir);
110
111 let modified = if let Ok(metadata) = fs::metadata(model_dir) {
113 if let Ok(time) = metadata.modified() {
114 let datetime: chrono::DateTime<chrono::Local> = time.into();
115 datetime.format("%Y-%m-%d %H:%M").to_string()
116 } else {
117 "unknown".to_string()
118 }
119 } else {
120 "unknown".to_string()
121 };
122
123 Some(ModelInfo {
124 name,
125 size,
126 modified,
127 is_complete,
128 })
129}
130
/// Whether any snapshot directory under `snapshots_dir` contains a
/// recognizable weight file (safetensors or PyTorch, single-file or
/// sharded-index form).
///
/// Returns `false` when `snapshots_dir` is missing or unreadable.
fn check_model_complete(snapshots_dir: &Path) -> bool {
    // Presence of any one of these marks a snapshot as complete.
    const WEIGHT_FILES: [&str; 4] = [
        "model.safetensors",
        "model.safetensors.index.json",
        "pytorch_model.bin",
        "pytorch_model.bin.index.json",
    ];

    // read_dir on a missing dir is Err, which flattens to "no entries",
    // so the explicit exists() pre-check is unnecessary.
    fs::read_dir(snapshots_dir)
        .into_iter()
        .flatten()
        .flatten()
        .map(|entry| entry.path())
        .filter(|path| path.is_dir())
        .any(|snapshot| WEIGHT_FILES.iter().any(|f| snapshot.join(f).exists()))
}
156
/// Recursively sums the sizes (in bytes) of all regular files under `path`.
///
/// Best-effort: unreadable directories or entries contribute 0 rather than
/// failing, and non-file/non-dir entries (e.g. broken symlinks) are skipped.
fn get_dir_size(path: &Path) -> u64 {
    fs::read_dir(path)
        .into_iter()
        .flatten()
        .flatten()
        .map(|entry| {
            let entry_path = entry.path();
            if entry_path.is_file() {
                fs::metadata(&entry_path).map(|m| m.len()).unwrap_or(0)
            } else if entry_path.is_dir() {
                get_dir_size(&entry_path)
            } else {
                0
            }
        })
        .sum()
}
173
/// Renders a byte count as a human-readable string with one decimal place
/// ("1.5 GB", "3.2 MB", "512.0 KB") or plain bytes below 1 KiB ("42 B").
/// Units are binary (1 KB = 1024 B).
fn format_size(bytes: u64) -> String {
    // Largest-first thresholds; the first one that fits wins.
    const UNITS: [(u64, &str); 3] = [
        (1 << 30, "GB"),
        (1 << 20, "MB"),
        (1 << 10, "KB"),
    ];

    for (threshold, label) in UNITS {
        if bytes >= threshold {
            return format!("{:.1} {}", bytes as f64 / threshold as f64, label);
        }
    }
    format!("{} B", bytes)
}
189
190fn get_hf_cache_dir(config: &CliConfig) -> PathBuf {
191 if let Ok(hf_home) = std::env::var("HF_HOME") {
192 return PathBuf::from(hf_home);
193 }
194 let configured = shellexpand::tilde(&config.models.download.hf_cache_dir).to_string();
195 PathBuf::from(configured)
196}