1use std::process::Output;
2
3use clap::ArgMatches;
4use log::{debug, error};
5use regex::Regex;
6use serde::{Deserialize, Serialize};
7use tokio::process::Command;
8
9use gst::prelude::DeviceExt;
10use gst::prelude::DeviceProviderExtManual;
11
12use crate::error::PrintNannySettingsError;
13
// Colorimetry string applied when building GStreamer caps (ITU-R BT.709,
// the common colorimetry for HD video).
const DEFAULT_COLORIMETRY: &str = "bt709";
// Default pixel format requested from cameras (packed 4:2:2 YUV).
const DEFAULT_PIXEL_FORMAT: &str = "YUY2";
// Pixel formats the downstream pipeline can consume; probed caps in any
// other format are filtered out (see `list_available_caps`).
const COMPAT_PIXEL_FORMATS: [&str; 1] = ["YUY2"];
17
/// Kind of video input selectable from the CLI (used as a `clap::ValueEnum`).
#[derive(Debug, Clone, clap::ValueEnum, Deserialize, Serialize, PartialEq, Eq)]
pub enum VideoSrcType {
    /// Play back a local video file.
    File,
    /// Raspberry Pi ribbon-cable (CSI) camera.
    CSI,
    /// USB webcam.
    USB,
    /// Arbitrary stream URI.
    Uri,
}
25
/// Settings for the TensorFlow Lite detection model (paths + tensor geometry).
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
pub struct TfliteModelSettings {
    /// Path to the class-label text file.
    pub label_file: String,
    /// Path to the `.tflite` model file.
    pub model_file: String,
    /// Non-maximum-suppression threshold. NOTE(review): stored as an integer
    /// (default 66) — presumably a scaled percentage; confirm against the
    /// detection pipeline before relying on units.
    pub nms_threshold: i32,
    /// Inference batch size.
    pub tensor_batch_size: i32,
    /// Input tensor channel count (3 = RGB).
    pub tensor_channels: i32,
    /// Input tensor height in pixels.
    pub tensor_height: i32,
    /// Input tensor width in pixels.
    pub tensor_width: i32,
    /// Frames per second fed to the model.
    pub tensor_framerate: i32,
}
37
38impl Default for TfliteModelSettings {
39 fn default() -> Self {
40 Self {
41 label_file: "/usr/share/printnanny/model/labels.txt".into(),
42 model_file: "/usr/share/printnanny/model/model.tflite".into(),
43 nms_threshold: 66,
44 tensor_batch_size: 40,
45 tensor_channels: 3,
46 tensor_height: 320,
47 tensor_width: 320,
48 tensor_framerate: 2,
49 }
50 }
51}
52
impl From<&ArgMatches> for TfliteModelSettings {
    /// Build model settings from parsed CLI arguments.
    ///
    /// # Panics
    /// Panics if `--label-file` / `--model-file` are missing or if any tensor /
    /// threshold argument is not a valid integer. The clap command definition
    /// (elsewhere) is expected to declare these as required / validated, so a
    /// panic here indicates a CLI-definition bug rather than bad user input.
    fn from(args: &ArgMatches) -> Self {
        // Required string arguments.
        let label_file = args
            .value_of("label_file")
            .expect("--label-file is required")
            .into();
        let model_file = args
            .value_of("model_file")
            .expect("--model-file is required")
            .into();
        // Integer arguments; value_of_t parses the raw string to i32.
        let tensor_batch_size: i32 = args
            .value_of_t::<i32>("tensor_batch_size")
            .expect("--tensor-batch-size must be an integer");

        let tensor_height: i32 = args
            .value_of_t::<i32>("tensor_height")
            .expect("--tensor-height must be an integer");

        let tensor_width: i32 = args
            .value_of_t::<i32>("tensor_width")
            .expect("--tensor-width must be an integer");

        let tensor_channels: i32 = args
            .value_of_t::<i32>("tensor_channels")
            .expect("--tensor-channels must be an integer");

        let tensor_framerate: i32 = args
            .value_of_t::<i32>("tensor_framerate")
            .expect("--tensor-framerate must be an integer");

        let nms_threshold: i32 = args
            .value_of_t::<i32>("nms_threshold")
            .expect("--nms-threshold must be an integer");

        Self {
            label_file,
            model_file,
            nms_threshold,
            tensor_batch_size,
            tensor_channels,
            tensor_height,
            tensor_width,
            tensor_framerate,
        }
    }
}
99
/// A physical camera discovered via libcamera.
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
pub struct CameraVideoSource {
    /// Index as reported by `cam --list`.
    pub index: i32,
    /// libcamera device path, e.g. `/base/soc/i2c0mux/i2c@1/imx219@10`.
    pub device_name: String,
    /// Human-readable sensor/product name, e.g. `imx219`.
    pub label: String,
    /// Currently-selected GStreamer capabilities for this camera.
    pub caps: printnanny_asyncapi_models::GstreamerCaps,
}
108
109impl Default for CameraVideoSource {
110 fn default() -> Self {
111 Self {
112 caps: Self::default_caps(),
113 device_name: "/base/soc/i2c0mux/i2c@1/imx219@10".into(),
114 label: "imx219".into(),
115 index: 0,
116 }
117 }
118}
119
120impl CameraVideoSource {
121 pub fn default_caps() -> printnanny_asyncapi_models::GstreamerCaps {
122 printnanny_asyncapi_models::GstreamerCaps {
123 colorimetry: DEFAULT_COLORIMETRY.into(),
124 media_type: "video/x-raw".into(),
125 format: DEFAULT_PIXEL_FORMAT.into(),
126 width: 640,
127 height: 480,
128 }
129 }
130
131 pub fn camera_source_type(&self) -> printnanny_asyncapi_models::CameraSourceType {
132 match &self.device_name.contains("usb") {
133 true => printnanny_asyncapi_models::CameraSourceType::Usb,
134 false => printnanny_asyncapi_models::CameraSourceType::Csi,
135 }
136 }
137
    /// Probe GStreamer's libcamera device provider for the capabilities this
    /// camera supports, keeping only caps in a pipeline-compatible pixel format.
    ///
    /// Falls back to `vec![Self::default_caps()]` whenever probing cannot give a
    /// unique answer: provider factory missing, provider unavailable, zero or
    /// multiple devices matching `self.device_name`, or a device with no caps.
    ///
    /// # Panics
    /// Panics if GStreamer itself fails to initialize (`gst::init().unwrap()`).
    pub fn list_available_caps(&self) -> Vec<printnanny_asyncapi_models::GstreamerCaps> {
        gst::init().unwrap();
        let get_factory = gst::DeviceProviderFactory::find("libcameraprovider");
        let results = if let Some(libcamera_device_provider_factory) = get_factory {
            match libcamera_device_provider_factory.get() {
                Some(provider) => {
                    // Match provider devices by display name against our device path.
                    let devices: Vec<gst::Device> = provider
                        .devices()
                        .filter(|d| {
                            let display_name = d.display_name();
                            display_name == self.device_name
                        })
                        .collect();
                    if devices.len() > 1 {
                        // Ambiguous match — log and fall back to defaults.
                        error!(
                            "libcameraprovider detected multiple devices matching name: {}",
                            self.device_name
                        );
                        vec![Self::default_caps()]
                    } else if devices.len() == 1 {
                        let device = devices.first().unwrap();
                        match device.caps() {
                            Some(caps) => {
                                // Each caps structure should carry width/height/format;
                                // structures missing a field are logged and skipped.
                                caps.into_iter()
                                    .filter_map(|(s, _c)| {
                                        let height: Result<i32, gst::structure::GetError<_>> =
                                            s.get("height");
                                        let width: Result<i32, gst::structure::GetError<_>> =
                                            s.get("width");
                                        let format: Result<String, gst::structure::GetError<_>> =
                                            s.get("format");

                                        if let (Ok(height), Ok(width), Ok(format)) =
                                            (&height, &width, &format)
                                        {
                                            // Colorimetry is not probed; assume the default.
                                            let media_type = s.name().into();
                                            Some(printnanny_asyncapi_models::GstreamerCaps {
                                                colorimetry: DEFAULT_COLORIMETRY.into(),
                                                height: *height,
                                                width: *width,
                                                format: format.into(),
                                                media_type,
                                            })
                                        } else {
                                            // Report exactly which field(s) failed, then
                                            // drop this structure from the result set.
                                            match &height {
                                                Ok(_) => (),
                                                Err(e) => {
                                                    error!(
                                                        "Failed to parse i32 from caps height={:?} with error={}",
                                                        &height, e
                                                    );
                                                }
                                            };
                                            match &width {
                                                Ok(_) => (),
                                                Err(e) => {
                                                    error!(
                                                        "Failed to parse i32 from caps width={:?} with error={}",
                                                        &width, e
                                                    );
                                                }
                                            };
                                            match &format {
                                                Ok(_) => (),
                                                Err(e) =>
                                                    error!(
                                                        "Failed to read caps format={:?} with error={}",
                                                        &format, e
                                                    )
                                            };
                                            None
                                        }
                                    })
                                    .collect()
                            }
                            None => vec![Self::default_caps()],
                        }
                    } else {
                        error!(
                            "libcameraprovider detected 0 devices matching name {}",
                            self.device_name
                        );
                        vec![Self::default_caps()]
                    }
                }
                None => vec![Self::default_caps()],
            }
        } else {
            vec![Self::default_caps()]
        };
        // Keep only formats the downstream pipeline supports.
        results
            .into_iter()
            .filter(|caps| COMPAT_PIXEL_FORMATS.contains(&caps.format.as_str()))
            .collect()
    }
233
234 pub async fn list_cameras_command_output() -> std::io::Result<Output> {
235 Command::new("cam")
236 .env("LIBCAMERA_LOG_LEVELS", "*:ERROR") .args(["--list", "--list-properties"])
238 .output()
239 .await
240 }
241
242 pub fn parse_list_camera_line(line: &str) -> Option<CameraVideoSource> {
243 let re = Regex::new(r"(\d): '(.*)' \((.*)\)").unwrap();
244 match re.captures(line) {
245 Some(caps) => {
246 let index = caps.get(1).map(|s| s.as_str());
247 let label = caps.get(2).map(|s| s.as_str());
248 let device_name = caps.get(3).map(|s| s.as_str());
249 debug!(
250 "parse_list_camera_line capture groups: {:#?} {:#?} {:#?}",
251 &index, &label, &device_name
252 );
253
254 match index {
255 Some(index) => match index.parse::<i32>() {
256 Ok(index) => match device_name {
257 Some(device_name) => label.map(|label| CameraVideoSource {
258 index,
259 device_name: device_name.into(),
260 label: label.into(),
261 caps: Self::default_caps(),
262 }),
263 None => None,
264 },
265 Err(e) => {
266 error!("Failed to parse integer from {}, error: {}", &index, &e);
267 None
268 }
269 },
270 _ => None,
271 }
272 }
273 None => None,
274 }
275 }
276
277 pub fn parse_list_cameras_command_output(stdout: &str) -> Vec<CameraVideoSource> {
278 let remove_str = "Available cameras:";
279 let filtered = stdout.replace(remove_str, "");
280 filtered
281 .lines()
282 .filter_map(Self::parse_list_camera_line)
283 .collect()
284 }
285
286 pub async fn from_libcamera_list() -> Result<Vec<CameraVideoSource>, PrintNannySettingsError> {
287 match Self::list_cameras_command_output().await {
288 Ok(output) => {
289 let utf8output = String::from_utf8(output.stdout)?;
290 Ok(Self::parse_list_cameras_command_output(&utf8output))
291 }
292 Err(e) => {
293 error!("Error listing libcamera devices {}", e);
294 Ok(vec![])
295 }
296 }
297 }
298}
299
/// A non-camera video source addressed by URI or file path.
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
pub struct MediaVideoSource {
    /// File path or stream URI to play back.
    pub uri: String,
}
304
/// Tagged union of all video inputs; serialized with a `src_type` tag
/// taking one of "csi" | "usb" | "file" | "uri".
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize)]
#[serde(tag = "src_type")]
pub enum VideoSource {
    /// Raspberry Pi ribbon-cable camera.
    #[serde(rename = "csi")]
    CSI(CameraVideoSource),
    /// USB webcam.
    #[serde(rename = "usb")]
    USB(CameraVideoSource),
    /// Local video file.
    #[serde(rename = "file")]
    File(MediaVideoSource),
    /// Arbitrary stream URI.
    #[serde(rename = "uri")]
    Uri(MediaVideoSource),
}
317
318impl From<&CameraVideoSource> for printnanny_asyncapi_models::camera::Camera {
319 fn from(obj: &CameraVideoSource) -> printnanny_asyncapi_models::camera::Camera {
320 let src_type = obj.camera_source_type();
321 let available_caps = obj.list_available_caps();
322 printnanny_asyncapi_models::camera::Camera {
323 selected_caps: Box::new(obj.caps.clone()),
324 available_caps,
325 index: obj.index,
326 label: obj.label.clone(),
327 device_name: obj.device_name.clone(),
328 src_type: Box::new(src_type),
329 }
330 }
331}
332
333impl From<printnanny_asyncapi_models::Camera> for VideoSource {
334 fn from(camera: printnanny_asyncapi_models::Camera) -> VideoSource {
335 match *camera.src_type {
336 printnanny_asyncapi_models::CameraSourceType::Csi => {
337 VideoSource::CSI(CameraVideoSource {
338 caps: *camera.selected_caps,
339 index: camera.index,
340 device_name: camera.device_name,
341 label: camera.label,
342 })
343 }
344 printnanny_asyncapi_models::CameraSourceType::Usb => {
345 VideoSource::USB(CameraVideoSource {
346 caps: *camera.selected_caps,
347
348 index: camera.index,
349 device_name: camera.device_name,
350 label: camera.label,
351 })
352 }
353 }
354 }
355}
356
357impl From<VideoSource> for printnanny_asyncapi_models::Camera {
358 fn from(obj: VideoSource) -> printnanny_asyncapi_models::Camera {
359 match &obj {
360 VideoSource::CSI(camera) => printnanny_asyncapi_models::Camera {
361 selected_caps: Box::new(camera.caps.clone()),
362 src_type: Box::new(printnanny_asyncapi_models::CameraSourceType::Csi),
363 index: camera.index,
364 label: camera.label.clone(),
365 device_name: camera.device_name.clone(),
366 available_caps: camera.list_available_caps(),
367 },
368 VideoSource::USB(camera) => printnanny_asyncapi_models::Camera {
369 selected_caps: Box::new(camera.caps.clone()),
370 src_type: Box::new(printnanny_asyncapi_models::CameraSourceType::Usb),
371 index: camera.index,
372 label: camera.label.clone(),
373 device_name: camera.device_name.clone(),
374 available_caps: camera.list_available_caps(),
375 },
376
377 _ => todo!(),
378 }
379 }
380}
381
/// Aggregated settings for the whole video stream pipeline, grouped per
/// subsystem. Mirrors `printnanny_asyncapi_models::VideoStreamSettings`
/// field-for-field (see the `From` conversions in this file).
#[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)]
pub struct VideoStreamSettings {
    /// Camera selection and capture geometry.
    #[serde(rename = "camera")]
    pub camera: Box<printnanny_asyncapi_models::CameraSettings>,
    /// TFLite detection settings.
    #[serde(rename = "detection")]
    pub detection: Box<printnanny_asyncapi_models::DetectionSettings>,
    /// HLS segmenter/playlist settings.
    #[serde(rename = "hls")]
    pub hls: Box<printnanny_asyncapi_models::HlsSettings>,
    /// Local video recording settings.
    #[serde(rename = "recording")]
    pub recording: Box<printnanny_asyncapi_models::RecordingSettings>,
    /// RTP port assignments.
    #[serde(rename = "rtp")]
    pub rtp: Box<printnanny_asyncapi_models::RtpSettings>,
    /// Still-snapshot settings.
    #[serde(rename = "snapshot")]
    pub snapshot: Box<printnanny_asyncapi_models::SnapshotSettings>,
}
397
398impl From<VideoStreamSettings> for printnanny_asyncapi_models::VideoStreamSettings {
399 fn from(obj: VideoStreamSettings) -> printnanny_asyncapi_models::VideoStreamSettings {
400 printnanny_asyncapi_models::VideoStreamSettings {
401 camera: obj.camera,
402 detection: obj.detection,
403 hls: obj.hls,
404 recording: obj.recording,
405 snapshot: obj.snapshot,
406 rtp: obj.rtp,
407 }
408 }
409}
410
411impl From<printnanny_asyncapi_models::VideoStreamSettings> for VideoStreamSettings {
412 fn from(obj: printnanny_asyncapi_models::VideoStreamSettings) -> VideoStreamSettings {
413 VideoStreamSettings {
414 camera: obj.camera,
415 detection: obj.detection,
416 hls: obj.hls,
417 recording: obj.recording,
418 snapshot: obj.snapshot,
419 rtp: obj.rtp,
420 }
421 }
422}
423
impl Default for VideoStreamSettings {
    /// Defaults targeting a Raspberry Pi with the IMX219 CSI camera and the
    /// PrintNanny OS filesystem layout.
    fn default() -> Self {
        // 640x480 @ 16 fps from the Pi CSI camera, in the default pixel
        // format / colorimetry.
        let camera = Box::new(printnanny_asyncapi_models::CameraSettings {
            width: 640,
            height: 480,
            framerate_n: 16,
            framerate_d: 1,
            device_name: "/base/soc/i2c0mux/i2c@1/imx219@10".into(),
            format: DEFAULT_PIXEL_FORMAT.into(),
            label: "Raspberry Pi imx219".into(),
            colorimetry: DEFAULT_COLORIMETRY.into()
        });

        // NOTE(review): these model defaults duplicate TfliteModelSettings::default()
        // — keep the two in sync when changing either.
        let detection = Box::new(printnanny_asyncapi_models::DetectionSettings {
            graphs: true,
            overlay: true,
            nats_server_uri: "nats://127.0.0.1:4223".into(),
            label_file: "/usr/share/printnanny/model/labels.txt".into(),
            model_file: "/usr/share/printnanny/model/model.tflite".into(),
            nms_threshold: 66,
            tensor_batch_size: 40,
            tensor_height: 320,
            tensor_width: 320,
            tensor_framerate: 2,
        });

        // HLS segments/playlist written to a runtime dir, served under /printnanny-hls/.
        let hls = Box::new(printnanny_asyncapi_models::HlsSettings {
            enabled: true,
            segments: "/var/run/printnanny-hls/segment%05d.ts".into(),
            playlist: "/var/run/printnanny-hls/playlist.m3u8".into(),
            playlist_root: "/printnanny-hls/".into(),
        });

        let recording = Box::new(printnanny_asyncapi_models::RecordingSettings {
            path: "/home/printnanny/.local/share/printnanny/video".into(),
            auto_start: true,
            cloud_sync: true,
        });

        // Adjacent UDP ports for the video and detection-overlay RTP streams.
        let rtp = Box::new(printnanny_asyncapi_models::RtpSettings {
            video_udp_port: 20001,
            overlay_udp_port: 20002,
        });

        let snapshot = Box::new(printnanny_asyncapi_models::SnapshotSettings {
            path: "/var/run/printnanny-snapshot/snapshot-%d.jpg".into(),
            enabled: true,
        });

        Self {
            camera,
            detection,
            hls,
            recording,
            rtp,
            snapshot,
        }
    }
}
483
484impl VideoStreamSettings {
485 pub async fn hotplug(mut self) -> Result<Self, PrintNannySettingsError> {
486 let camera_sources = CameraVideoSource::from_libcamera_list().await?;
488 let selected_camera = *(self.camera.clone());
489 if camera_sources.is_empty() {
491 Ok(self)
492 } else {
493 for camera in camera_sources.iter() {
495 if camera.device_name == selected_camera.device_name {
497 return Ok(self);
498 }
499 }
500 let selected = camera_sources.first().unwrap();
502 self.camera = Box::new(printnanny_asyncapi_models::CameraSettings {
503 device_name: selected.device_name.clone(),
504 label: selected.label.clone(),
505 height: selected.caps.height,
506 width: selected.caps.width,
507 format: selected.caps.format.clone(),
508 ..selected_camera
509 });
510 Ok(self)
511 }
512 }
513}
514
#[cfg(test)]
mod tests {
    use super::*;

    // Fixtures mimicking `cam --list` output for various attached-camera combos.
    const MULTIPLE_CAMERAS: &str = r#"Available cameras:
1: 'imx219' (/base/soc/i2c0mux/i2c@1/imx219@10)
2: 'Logitech BRIO' (/base/scb/pcie@7d500000/pci@0,0/usb@0,0-1:1.0-046d:085e)"#;

    const ONE_CSI_CAMERA: &str = r#"Available cameras:
1: 'imx219' (/base/soc/i2c0mux/i2c@1/imx219@10)"#;

    const ONE_USB_CAMERA: &str = r#"Available cameras:
1: 'Logitech BRIO' (/base/scb/pcie@7d500000/pci@0,0/usb@0,0-1:1.0-046d:085e)"#;

    // Both cameras should be parsed, in listing order, with default caps.
    #[test_log::test]
    fn test_parse_multiple_libcamera_list_command_output() {
        let result = CameraVideoSource::parse_list_cameras_command_output(MULTIPLE_CAMERAS);

        assert_eq!(
            *result.get(0).unwrap(),
            CameraVideoSource {
                index: 1,
                label: "imx219".into(),
                device_name: "/base/soc/i2c0mux/i2c@1/imx219@10".into(),
                caps: CameraVideoSource::default_caps()
            }
        );
        assert_eq!(
            *result.get(1).unwrap(),
            CameraVideoSource {
                index: 2,
                label: "Logitech BRIO".into(),
                device_name: "/base/scb/pcie@7d500000/pci@0,0/usb@0,0-1:1.0-046d:085e".into(),
                caps: CameraVideoSource::default_caps()
            }
        )
    }

    // A single CSI camera parses into one source.
    #[test_log::test]
    fn test_parse_one_csi_libcamera_list_command_output() {
        let result = CameraVideoSource::parse_list_cameras_command_output(ONE_CSI_CAMERA);

        assert_eq!(
            *result.get(0).unwrap(),
            CameraVideoSource {
                index: 1,
                label: "imx219".into(),
                device_name: "/base/soc/i2c0mux/i2c@1/imx219@10".into(),
                caps: CameraVideoSource::default_caps()
            }
        );
    }

    // A single USB camera parses into one source.
    #[test_log::test]
    fn test_parse_one_usb_libcamera_list_command_output() {
        let result = CameraVideoSource::parse_list_cameras_command_output(ONE_USB_CAMERA);
        assert_eq!(
            *result.get(0).unwrap(),
            CameraVideoSource {
                index: 1,
                label: "Logitech BRIO".into(),
                device_name: "/base/scb/pcie@7d500000/pci@0,0/usb@0,0-1:1.0-046d:085e".into(),
                caps: CameraVideoSource::default_caps()
            }
        )
    }

    // Empty output (no header, no cameras) yields an empty list.
    #[test_log::test]
    fn test_parse_no_libcamera_list_command_output() {
        let result = CameraVideoSource::parse_list_cameras_command_output("");
        assert_eq!(result.len(), 0)
    }
}