use std::{fs::OpenOptions, io::Write, process::exit, time::Duration};

use libcamera::{
    camera::CameraConfigurationStatus,
    camera_manager::CameraManager,
    framebuffer::AsFrameBuffer,
    framebuffer_allocator::{FrameBuffer, FrameBufferAllocator},
    framebuffer_map::MemoryMappedFrameBuffer,
    pixel_format::PixelFormat,
    properties,
    request::ReuseFlag,
    stream::StreamRole,
};
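
// MJPEG pixel format, built from the raw 'MJPG' fourcc with a zero modifier.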
const PIXEL_FORMAT_MJPEG: PixelFormat = PixelFormat::new(u32::from_le_bytes([b'M', b'J', b'P', b'G']), 0);

fn main() {
    // The output path is taken from the first command-line argument.
    let filename = match std::env::args().nth(1) {
        Some(f) => f,
        None => {
            println!("Error: missing output file parameter");
            println!("Usage: ./video_capture </path/to/output.mjpeg>");
            exit(1);
        }
    };
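
    // Enumerate available cameras and use the first one.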
    let mgr = CameraManager::new().unwrap();
    let cameras = mgr.cameras();
    let cam = cameras.get(0).expect("No cameras found");

    println!(
        "Using camera: {}",
        *cam.properties().get::<properties::Model>().unwrap()
    );

    // Acquire exclusive access before configuring the camera.
    let mut cam = cam.acquire().expect("Unable to acquire camera");
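
    // Generate a default configuration for video recording, then request MJPEG
    // so encoded frames can be written straight to the output file.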
    let mut cfgs = cam.generate_configuration(&[StreamRole::VideoRecording]).unwrap();
    cfgs.get_mut(0).unwrap().set_pixel_format(PIXEL_FORMAT_MJPEG);

    println!("Generated config: {cfgs:#?}");

    match cfgs.validate() {
        CameraConfigurationStatus::Valid => println!("Camera configuration valid!"),
        CameraConfigurationStatus::Adjusted => println!("Camera configuration was adjusted: {cfgs:#?}"),
        CameraConfigurationStatus::Invalid => panic!("Error validating camera configuration"),
    }

    // Ensure validation did not replace the requested pixel format.
    assert_eq!(
        cfgs.get(0).unwrap().get_pixel_format(),
        PIXEL_FORMAT_MJPEG,
        "MJPEG is not supported by the camera"
    );

    cam.configure(&mut cfgs).expect("Unable to configure camera");
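
    // Allocate frame buffers for the configured stream.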
    let mut alloc = FrameBufferAllocator::new(&cam);
    let cfg = cfgs.get(0).unwrap();
    let stream = cfg.stream().unwrap();
    let buffers = alloc.alloc(&stream).unwrap();
    println!("Allocated {} buffers", buffers.len());
    let buffers = buffers
        .into_iter()
        .map(|buf| MemoryMappedFrameBuffer::new(buf).unwrap())
        .collect::<Vec<_>>();
    let reqs = buffers
        .into_iter()
        .enumerate()
        .map(|(i, buf)| {
            let mut req = cam.create_request(Some(i as u64)).unwrap();
            req.add_buffer(&stream, buf).unwrap();
            req
        })
        .collect::<Vec<_>>();
    let (tx, rx) = std::sync::mpsc::channel();
    cam.on_request_completed(move |req| {
        tx.send(req).unwrap();
    });

    cam.start(None).unwrap();

    // Queue all requests up front; each one is re-queued after its frame is written.
    for req in reqs {
        println!("Request queued for execution: {req:#?}");
        cam.queue_request(req).map_err(|(_, e)| e).unwrap();
    }
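
    // Open the output file in append mode; frames are written back to back as a raw MJPEG stream.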
    let mut file = OpenOptions::new()
        .append(true)
        .create(true)
        .open(&filename)
        .expect("Unable to open output file");
    let mut count = 0;
    while count < 60 {
        println!("Waiting for camera request execution");
        let mut req = rx.recv_timeout(Duration::from_secs(5)).expect("Camera request failed");
        println!("Camera request {req:?} completed!");
        println!("Metadata: {:#?}", req.metadata());

        // The MJPEG frame occupies a single plane; only the bytes reported as used are valid.
        let framebuffer: &MemoryMappedFrameBuffer<FrameBuffer> = req.buffer(&stream).unwrap();
        println!("FrameBuffer metadata: {:#?}", framebuffer.metadata());
        let planes = framebuffer.data();
        let frame_data = planes.first().unwrap();
        let bytes_used = framebuffer.metadata().unwrap().planes().get(0).unwrap().bytes_used as usize;

        file.write_all(&frame_data[..bytes_used]).unwrap();
        println!("Wrote {} bytes to {}", bytes_used, &filename);

        // Recycle the request (keeping its buffer) and queue it again for the next frame.
        req.reuse(ReuseFlag::REUSE_BUFFERS);
        cam.queue_request(req).map_err(|(_, e)| e).unwrap();
        count += 1;
    }
}