use crate::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[cfg(target_os = "windows")]
use wmi::{COMLibrary, WMIConnection};

// `Command` is used by both the Linux (lsusb) and macOS (sysctl) probes below.
#[cfg(any(target_os = "linux", target_os = "macos"))]
use std::process::Command;

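/// Hardware vendors that the detection code can recognize; `Unknown` keeps the
/// raw vendor string when a device cannot be classified.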
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum NPUVendor {
    Intel,
    Qualcomm,
    Apple,
    AMD,
    Google,
    MediaTek,
    Samsung,
    Hailo,
    Kneron,
    Unknown(String),
}

impl std::fmt::Display for NPUVendor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            NPUVendor::Intel => write!(f, "Intel"),
            NPUVendor::Qualcomm => write!(f, "Qualcomm"),
            NPUVendor::Apple => write!(f, "Apple"),
            NPUVendor::AMD => write!(f, "AMD"),
            NPUVendor::Google => write!(f, "Google"),
            NPUVendor::MediaTek => write!(f, "MediaTek"),
            NPUVendor::Samsung => write!(f, "Samsung"),
            NPUVendor::Hailo => write!(f, "Hailo"),
            NPUVendor::Kneron => write!(f, "Kneron"),
            NPUVendor::Unknown(name) => write!(f, "{name}"),
        }
    }
}

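/// How an NPU is attached to the host system.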
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum NPUType {
    Integrated,
    Discrete,
    USB,
    PCIe,
    M2,
    Unknown,
}

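/// Accelerator architectures that the detectors can classify.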
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum NPUArchitecture {
    IntelMovidius,
    IntelGNA,
    IntelXDNA,
    QualcommHexagon,
    AppleNeuralEngine,
    GoogleTPU,
    AMDRyzenAI,
    MediaTekAPU,
    SamsungNPU,
    HailoNPU,
    KneronKL,
    Unknown(String),
}

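/// Description of a single detected neural processing unit (NPU).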
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NPUInfo {
    /// Vendor of the NPU.
    pub vendor: NPUVendor,

    /// Marketing / model name of the device.
    pub model_name: String,

    /// How the NPU is attached to the system.
    pub npu_type: NPUType,

    /// Underlying accelerator architecture.
    pub architecture: NPUArchitecture,

    /// Peak throughput in TOPS, if known.
    pub tops_performance: Option<f32>,

    /// Dedicated on-device memory in megabytes, if known.
    pub memory_mb: Option<u64>,

    /// Installed driver version, if it could be determined.
    pub driver_version: Option<String>,

    /// Device firmware version, if it could be determined.
    pub firmware_version: Option<String>,

    /// PCI vendor:device identifier, if applicable.
    pub pci_device_id: Option<String>,

    /// USB vendor:product identifier, if applicable.
    pub usb_device_id: Option<String>,

    /// Inference frameworks known to work with this device.
    pub supported_frameworks: Vec<String>,

    /// Typical power draw in watts, if known.
    pub power_consumption: Option<f32>,

    /// Reported device temperature, if available.
    pub temperature: Option<f32>,

    /// Clock frequency in MHz, if known.
    pub clock_frequency: Option<u32>,

    /// Additional free-form capability flags.
    pub capabilities: HashMap<String, String>,
}

impl NPUInfo {
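    /// Runs every vendor-specific probe and returns one entry per detected NPU,
    /// or an empty vector when no supported device is found. An error from any
    /// individual probe is propagated to the caller.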
    pub fn query_all() -> Result<Vec<NPUInfo>> {
        let mut npus = Vec::new();

        npus.extend(Self::detect_intel_npus()?);
        npus.extend(Self::detect_qualcomm_npus()?);
        npus.extend(Self::detect_apple_neural_engine()?);
        npus.extend(Self::detect_amd_npus()?);
        npus.extend(Self::detect_google_tpus()?);
        npus.extend(Self::detect_usb_npus()?);
        npus.extend(Self::detect_pcie_npus()?);

        Ok(npus)
    }

    fn detect_intel_npus() -> Result<Vec<NPUInfo>> {
        let mut npus = Vec::new();

        // USB Neural Compute Sticks (Movidius).
        npus.extend(Self::detect_intel_ncs()?);

        // Integrated Gaussian Neural Accelerator.
        npus.extend(Self::detect_intel_gna()?);

        // Integrated NPU on newer Intel client CPUs.
        npus.extend(Self::detect_intel_xdna()?);

        Ok(npus)
    }

    fn detect_intel_ncs() -> Result<Vec<NPUInfo>> {
        let mut npus = Vec::new();

        #[cfg(target_os = "linux")]
        {
            // Movidius devices enumerate with USB vendor ID 03e7.
            if let Ok(output) = Command::new("lsusb").output() {
                let output_str = String::from_utf8_lossy(&output.stdout);
                for line in output_str.lines() {
                    if line.contains("03e7") {
                        if line.contains("2485") {
                            // Product ID 2485: Neural Compute Stick 2.
                            npus.push(NPUInfo {
                                vendor: NPUVendor::Intel,
                                model_name: "Intel Neural Compute Stick 2".to_string(),
                                npu_type: NPUType::USB,
                                architecture: NPUArchitecture::IntelMovidius,
                                tops_performance: Some(4.0),
                                memory_mb: None,
                                driver_version: Self::get_intel_ncs_driver_version(),
                                firmware_version: None,
                                pci_device_id: None,
                                usb_device_id: Some("03e7:2485".to_string()),
                                supported_frameworks: vec![
                                    "OpenVINO".to_string(),
                                    "ONNX Runtime".to_string(),
                                ],
                                power_consumption: Some(1.0),
                                temperature: None,
                                clock_frequency: None,
                                capabilities: HashMap::new(),
                            });
                        } else if line.contains("f63b") {
                            // Product ID f63b: first-generation Neural Compute Stick.
                            npus.push(NPUInfo {
                                vendor: NPUVendor::Intel,
                                model_name: "Intel Neural Compute Stick".to_string(),
                                npu_type: NPUType::USB,
                                architecture: NPUArchitecture::IntelMovidius,
                                tops_performance: Some(0.1),
                                memory_mb: None,
                                driver_version: Self::get_intel_ncs_driver_version(),
                                firmware_version: None,
                                pci_device_id: None,
                                usb_device_id: Some("03e7:f63b".to_string()),
                                supported_frameworks: vec!["OpenVINO".to_string()],
                                power_consumption: Some(1.0),
                                temperature: None,
                                clock_frequency: None,
                                capabilities: HashMap::new(),
                            });
                        }
                    }
                }
            }
        }

        #[cfg(target_os = "windows")]
        {
            if let Ok(com_con) = COMLibrary::new() {
                if let Ok(wmi_con) = WMIConnection::new(com_con) {
                    let query = "SELECT * FROM Win32_USBHub WHERE DeviceID LIKE '%VID_03E7%'";
                    if let Ok(results) = wmi_con.raw_query(query) {
                        let results: Vec<HashMap<String, wmi::Variant>> = results;
                        for device in results {
                            if let Some(device_id) = device.get("DeviceID") {
                                let device_id_str = format!("{device_id:?}");
                                if device_id_str.contains("PID_2485") {
                                    npus.push(NPUInfo {
                                        vendor: NPUVendor::Intel,
                                        model_name: "Intel Neural Compute Stick 2".to_string(),
                                        npu_type: NPUType::USB,
                                        architecture: NPUArchitecture::IntelMovidius,
                                        tops_performance: Some(4.0),
                                        memory_mb: None,
                                        driver_version: Self::get_intel_ncs_driver_version(),
                                        firmware_version: None,
                                        pci_device_id: None,
                                        usb_device_id: Some("03e7:2485".to_string()),
                                        supported_frameworks: vec![
                                            "OpenVINO".to_string(),
                                            "ONNX Runtime".to_string(),
                                        ],
                                        power_consumption: Some(1.0),
                                        temperature: None,
                                        clock_frequency: None,
                                        capabilities: HashMap::new(),
                                    });
                                }
                            }
                        }
                    }
                }
            }
        }

        Ok(npus)
    }

    fn detect_intel_gna() -> Result<Vec<NPUInfo>> {
        let mut npus = Vec::new();

        #[cfg(target_os = "linux")]
        {
            // The GNA kernel driver exposes a sysfs class when it is loaded.
            if std::path::Path::new("/sys/class/intel_gna").exists() {
                npus.push(NPUInfo {
                    vendor: NPUVendor::Intel,
                    model_name: "Intel Gaussian Neural Accelerator".to_string(),
                    npu_type: NPUType::Integrated,
                    architecture: NPUArchitecture::IntelGNA,
                    tops_performance: Some(1.0),
                    memory_mb: None,
                    driver_version: Self::get_intel_gna_driver_version(),
                    firmware_version: None,
                    pci_device_id: None,
                    usb_device_id: None,
                    supported_frameworks: vec![
                        "OpenVINO".to_string(),
                        "Intel GNA Library".to_string(),
                    ],
                    power_consumption: Some(0.5),
                    temperature: None,
                    clock_frequency: Some(400),
                    capabilities: HashMap::from([
                        ("keyword_spotting".to_string(), "true".to_string()),
                        ("noise_reduction".to_string(), "true".to_string()),
                    ]),
                });
            }
        }

        #[cfg(target_os = "windows")]
        {
            if let Ok(com_con) = COMLibrary::new() {
                if let Ok(wmi_con) = WMIConnection::new(com_con) {
                    let query = "SELECT * FROM Win32_PnPEntity WHERE Description LIKE '%GNA%' OR Name LIKE '%Gaussian%'";
                    if let Ok(results) = wmi_con.raw_query(query) {
                        let results: Vec<HashMap<String, wmi::Variant>> = results;
                        if !results.is_empty() {
                            npus.push(NPUInfo {
                                vendor: NPUVendor::Intel,
                                model_name: "Intel Gaussian Neural Accelerator".to_string(),
                                npu_type: NPUType::Integrated,
                                architecture: NPUArchitecture::IntelGNA,
                                tops_performance: Some(1.0),
                                memory_mb: None,
                                driver_version: Self::get_intel_gna_driver_version(),
                                firmware_version: None,
                                pci_device_id: None,
                                usb_device_id: None,
                                supported_frameworks: vec![
                                    "OpenVINO".to_string(),
                                    "Intel GNA Library".to_string(),
                                ],
                                power_consumption: Some(0.5),
                                temperature: None,
                                clock_frequency: Some(400),
                                capabilities: HashMap::from([
                                    ("keyword_spotting".to_string(), "true".to_string()),
                                    ("noise_reduction".to_string(), "true".to_string()),
                                ]),
                            });
                        }
                    }
                }
            }
        }

        Ok(npus)
    }

    fn detect_intel_xdna() -> Result<Vec<NPUInfo>> {
        let mut npus = Vec::new();

        #[cfg(target_os = "windows")]
        {
            if let Ok(com_con) = COMLibrary::new() {
                if let Ok(wmi_con) = WMIConnection::new(com_con) {
                    let query = "SELECT * FROM Win32_PnPEntity WHERE Description LIKE '%NPU%' OR Name LIKE '%Neural%'";
                    if let Ok(results) = wmi_con.raw_query(query) {
                        let results: Vec<HashMap<String, wmi::Variant>> = results;
                        for device in results {
                            if let Some(name) = device.get("Name") {
                                let name_str = format!("{name:?}");
                                if name_str.contains("Intel") && (name_str.contains("NPU") || name_str.contains("Neural")) {
                                    npus.push(NPUInfo {
                                        vendor: NPUVendor::Intel,
                                        model_name: "Intel XDNA NPU".to_string(),
                                        npu_type: NPUType::Integrated,
                                        architecture: NPUArchitecture::IntelXDNA,
                                        tops_performance: Some(11.5),
                                        memory_mb: None,
                                        driver_version: Self::get_intel_npu_driver_version(),
                                        firmware_version: None,
                                        pci_device_id: None,
                                        usb_device_id: None,
                                        supported_frameworks: vec![
                                            "OpenVINO".to_string(),
                                            "ONNX Runtime".to_string(),
                                            "DirectML".to_string(),
                                        ],
                                        power_consumption: Some(2.0),
                                        temperature: None,
                                        clock_frequency: Some(1400),
                                        capabilities: HashMap::from([
                                            ("int8".to_string(), "true".to_string()),
                                            ("fp16".to_string(), "true".to_string()),
                                            ("dynamic_shapes".to_string(), "true".to_string()),
                                        ]),
                                    });
                                }
                            }
                        }
                    }
                }
            }
        }

        Ok(npus)
    }

    fn get_intel_ncs_driver_version() -> Option<String> {
        // Uses the installed OpenVINO version as a stand-in for a dedicated
        // driver version, since OpenVINO is the runtime that drives the stick.
        #[cfg(target_os = "linux")]
        {
            if let Ok(output) = Command::new("python3")
                .args(["-c", "import openvino; print(openvino.__version__)"])
                .output()
            {
                if output.status.success() {
                    return Some(String::from_utf8_lossy(&output.stdout).trim().to_string());
                }
            }
        }
        None
    }

    fn get_intel_gna_driver_version() -> Option<String> {
        #[cfg(target_os = "linux")]
        {
            if let Ok(contents) = std::fs::read_to_string("/sys/module/intel_gna/version") {
                return Some(contents.trim().to_string());
            }
        }
        None
    }

    fn get_intel_npu_driver_version() -> Option<String> {
        // No driver-version lookup is implemented for the integrated NPU yet.
        None
    }

    fn detect_qualcomm_npus() -> Result<Vec<NPUInfo>> {
        Ok(Vec::new())
    }

    fn detect_apple_neural_engine() -> Result<Vec<NPUInfo>> {
        #[cfg(target_os = "macos")]
        {
            if let Ok(output) = Command::new("sysctl")
                .args(["-n", "machdep.cpu.brand_string"])
                .output()
            {
                let cpu_brand = String::from_utf8_lossy(&output.stdout);
                if cpu_brand.contains("Apple M") {
                    let mut npus = Vec::new();

                    // Neural Engine throughput by SoC generation, in TOPS.
                    let (tops, model_name) = if cpu_brand.contains("M1") {
                        (11.0, "Apple Neural Engine (M1)")
                    } else if cpu_brand.contains("M2") {
                        (15.8, "Apple Neural Engine (M2)")
                    } else if cpu_brand.contains("M3") {
                        (18.0, "Apple Neural Engine (M3)")
                    } else {
                        (15.8, "Apple Neural Engine")
                    };

                    npus.push(NPUInfo {
                        vendor: NPUVendor::Apple,
                        model_name: model_name.to_string(),
                        npu_type: NPUType::Integrated,
                        architecture: NPUArchitecture::AppleNeuralEngine,
                        tops_performance: Some(tops),
                        memory_mb: None,
                        driver_version: None,
                        firmware_version: None,
                        pci_device_id: None,
                        usb_device_id: None,
                        supported_frameworks: vec![
                            "Core ML".to_string(),
                            "ONNX Runtime".to_string(),
                            "TensorFlow Lite".to_string(),
                        ],
                        power_consumption: Some(1.0),
                        temperature: None,
                        clock_frequency: None,
                        capabilities: HashMap::from([
                            ("16_core".to_string(), "true".to_string()),
                            ("matrix_operations".to_string(), "true".to_string()),
                            ("convolution".to_string(), "true".to_string()),
                        ]),
                    });

                    return Ok(npus);
                }
            }
        }
        Ok(Vec::new())
    }

    fn detect_amd_npus() -> Result<Vec<NPUInfo>> {
        Ok(Vec::new())
    }

    fn detect_google_tpus() -> Result<Vec<NPUInfo>> {
        Ok(Vec::new())
    }

    fn detect_usb_npus() -> Result<Vec<NPUInfo>> {
        Ok(Vec::new())
    }

    fn detect_pcie_npus() -> Result<Vec<NPUInfo>> {
        Ok(Vec::new())
    }

    pub fn vendor(&self) -> &NPUVendor {
        &self.vendor
    }

    pub fn model_name(&self) -> &str {
        &self.model_name
    }

    pub fn npu_type(&self) -> &NPUType {
        &self.npu_type
    }

    pub fn tops_performance(&self) -> Option<f32> {
        self.tops_performance
    }

    pub fn supports_framework(&self, framework: &str) -> bool {
        self.supported_frameworks
            .iter()
            .any(|f| f.to_lowercase().contains(&framework.to_lowercase()))
    }
}
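
// A minimal test sketch, not part of the original module: it only checks that
// detection runs without returning an error and that `supports_framework`
// matches case-insensitively on a hand-built `NPUInfo`. The field values below
// are arbitrary placeholders, not real device data.
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    #[test]
    fn query_all_runs() {
        // On machines without any supported NPU this should yield an empty list.
        let result = NPUInfo::query_all();
        assert!(result.is_ok());
        for npu in result.unwrap_or_default() {
            assert!(!npu.model_name().is_empty());
        }
    }

    #[test]
    fn supports_framework_is_case_insensitive() {
        let npu = NPUInfo {
            vendor: NPUVendor::Intel,
            model_name: "Test NPU".to_string(),
            npu_type: NPUType::USB,
            architecture: NPUArchitecture::IntelMovidius,
            tops_performance: Some(4.0),
            memory_mb: None,
            driver_version: None,
            firmware_version: None,
            pci_device_id: None,
            usb_device_id: Some("03e7:2485".to_string()),
            supported_frameworks: vec!["OpenVINO".to_string(), "ONNX Runtime".to_string()],
            power_consumption: None,
            temperature: None,
            clock_frequency: None,
            capabilities: HashMap::new(),
        };
        assert!(npu.supports_framework("openvino"));
        assert!(npu.supports_framework("onnx"));
        assert!(!npu.supports_framework("TensorRT"));
    }
}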