use smelling_salts::{Device, Watcher};
use std::{
convert::TryInto,
future::Future,
mem::{size_of, MaybeUninit},
os::{
raw::{c_void, c_int, c_ulong, c_long, c_char},
unix::{fs::OpenOptionsExt, io::IntoRawFd},
},
ptr::null_mut,
pin::Pin,
task::{Context, Poll},
fs::{self, OpenOptions},
collections::HashSet,
io::ErrorKind,
};
use pix::rgb::SRgba8;
use pix::Raster;
// Raw layout of Linux's `struct inotify_event` as read from an inotify fd.
// The kernel struct ends in a variable-length name; a fixed 256-byte buffer
// is reserved here so a whole event can be pulled in with a single read().
#[repr(C)]
struct InotifyEv {
// Watch descriptor the event belongs to.
wd: c_int,
// IN_* event bits (this file checks 0x100 = IN_CREATE, 0x200 = IN_DELETE).
mask: u32,
// Cookie tying related events together (e.g. rename pairs).
cookie: u32,
// Number of bytes of `name` the kernel actually filled in.
len: u32,
// NUL-terminated file name, relative to the watched directory.
name: [c_char; 256],
}
// C `struct timeval` (seconds + microseconds); used for V4l2Buffer timestamps.
#[repr(C)]
struct TimeVal {
tv_sec: c_long,
tv_usec: c_long,
}
// V4L2 buffer/stream types, mirroring `enum v4l2_buf_type` (videodev2.h).
// Only `VideoCapture` is used by this file; the rest exist for completeness.
#[repr(C)]
#[allow(unused)]
enum V4l2BufType {
VideoCapture = 1,
VideoCaptureMPlane = 9,
VideoOutput = 2,
VideoOutputMPlane = 10,
VideoOverlay = 3,
VbiCapture = 4,
VbiOutput = 5,
SlicedVbiCapture = 6,
SlicedVbiOutput = 7,
VideoOutputOverlay = 8,
SdrCapture = 11,
SdrOutput = 12,
}
// Result of VIDIOC_QUERYCAP, mirroring `struct v4l2_capability`.
// NOTE(review): newer kernels split `capabilities` into capabilities +
// device_caps with reserved[3]; total size (104 bytes) is the same either
// way, and this code never reads the fields, so the layout is compatible.
#[repr(C)]
struct V4l2Capability {
driver: [u8; 16],
card: [u8; 32],
bus_info: [u8; 32],
version: u32,
capabilities: u32,
reserved: [u32; 4],
}
// Field (interlacing) order, mirroring `enum v4l2_field`.
#[repr(C)]
#[derive(Copy, Clone)]
#[allow(unused)]
enum V4l2Field {
Any = 0,
None = 1,
Top = 2,
Bottom = 3,
Interlaced = 4,
SeqTopBottom = 5,
SeqBottomTop = 6,
Alternate = 7,
}
// Colorspace identifiers, mirroring `enum v4l2_colorspace`.
#[repr(C)]
#[derive(Copy, Clone)]
#[allow(unused)]
enum V4l2Colorspace {
Unset = 0,
Smpte170M = 1,
Smpte240M = 2,
Rec709 = 3,
Bt878 = 4,
System470M = 5,
System470BG = 6,
Jpeg = 7,
Srgb = 8,
}
// Single-planar pixel format description, mirroring `struct v4l2_pix_format`.
#[repr(C)]
#[derive(Copy, Clone)]
struct V4l2PixFormat {
width: u32,
height: u32,
// FourCC code, e.g. V4L2_PIX_FMT_MJPEG below.
pixelformat: u32,
field: V4l2Field,
bytesperline: u32,
sizeimage: u32,
colorspace: V4l2Colorspace,
private: u32,
}
// Rectangle on the device's coordinate grid (`struct v4l2_rect`).
#[repr(C)]
#[derive(Copy, Clone)]
struct V4l2Rect {
left: i32,
top: i32,
width: i32,
height: i32,
}
// Linked clip list element for overlay windows (`struct v4l2_clip`).
#[repr(C)]
#[derive(Copy, Clone)]
struct V4l2Clip {
c: V4l2Rect,
next: *mut V4l2Clip,
}
// Overlay window description (`struct v4l2_window`); unused by this file
// but required so V4l2FormatUnion matches the kernel union's members.
#[repr(C)]
#[derive(Copy, Clone)]
struct V4l2Window {
w: V4l2Rect,
field: V4l2Field,
chromakey: u32,
clips: *mut V4l2Clip,
clipcount: u32,
bitmap: *mut c_void,
}
// Frame timecode (`struct v4l2_timecode`), carried inside V4l2Buffer.
#[repr(C)]
struct V4l2Timecode {
type_: u32,
flags: u32,
frames: u8,
seconds: u8,
minutes: u8,
hours: u8,
userbits: [u8; 4],
}
// The `m` union of `struct v4l2_buffer`: `offset` is valid for MMAP memory,
// `userptr` for user-pointer I/O (per the V4L2 buffer documentation).
#[repr(C)]
union V4l2BufferUnion {
offset: u32,
userptr: c_ulong
}
// Mirrors `struct v4l2_buffer` (88 bytes on 64-bit), used with
// VIDIOC_QUERYBUF / VIDIOC_QBUF / VIDIOC_DQBUF.
// NOTE(review): `input` matches the older kernel field now called
// `reserved2`; the layout/size is unchanged.
#[repr(C)]
struct V4l2Buffer {
index: u32,
type_: V4l2BufType,
bytesused: u32,
flags: u32,
field: V4l2Field,
timestamp: TimeVal,
timecode: V4l2Timecode,
sequence: u32,
memory: V4l2Memory,
m: V4l2BufferUnion,
// Size of the buffer (filled in by VIDIOC_QUERYBUF for MMAP buffers).
length: u32,
input: u32,
reserved: u32,
}
// Raw VBI format (`struct v4l2_vbi_format`); present only so the format
// union below has the same members as the kernel's.
#[repr(C)]
#[derive(Copy, Clone)]
struct V4l2VbiFormat {
sampling_rate: u32,
offset: u32,
samples_per_line: u32,
sample_format: u32,
start: [i32; 2],
count: [u32; 2],
flags: u32,
reserved: [u32; 2],
}
// Payload of `struct v4l2_format`, interpreted according to `type_`.
// `raw_data` pads the union to the kernel's 200-byte reserved size so the
// overall struct size (208 bytes) matches what the ioctl encoding expects.
#[repr(C)]
union V4l2FormatUnion {
pix: V4l2PixFormat, win: V4l2Window, vbi: V4l2VbiFormat, raw_data: [u8; 200], }
// `struct v4l2_format`: a buffer-type tag plus the matching description.
#[repr(C)]
struct V4l2Format {
type_: V4l2BufType,
fmt: V4l2FormatUnion,
}
// How buffer memory is shared with the driver (`enum v4l2_memory`).
#[repr(C)]
#[allow(unused)]
enum V4l2Memory {
Mmap = 1,
UserPtr = 2,
MemoryOverlay = 3,
}
// Argument to VIDIOC_REQBUFS: ask the driver to allocate `count` buffers of
// the given type/memory (`struct v4l2_requestbuffers`).
#[repr(C)]
struct V4l2RequestBuffers {
count: u32,
type_: V4l2BufType,
memory: V4l2Memory,
reserved: [u32; 2],
}
/// Encode a write-direction V4L2 ioctl request: `_IOW('V', num, size)`.
///
/// Linux packs ioctl requests as `dir << 30 | size << 16 | type << 8 | nr`
/// with `_IOC_WRITE = 1` and a 14-bit size field (`_IOC_SIZEMASK = 0x3fff`).
/// BUG FIX: the direction bits were `0x8000_0000` (that is `_IOC_READ`),
/// which the kernel rejects with ENOTTY for the VIDIOC_STREAMON/OFF calls
/// built from this helper; `_IOW` is `0x4000_0000`.  The size mask was also
/// widened from 13 to the kernel's 14 bits.
const fn iow_v(size: usize, num: u8) -> c_ulong {
    (0x40 << 24) | ((size as c_ulong & 0x3fff) << 16) | ((b'V' as c_ulong) << 8) | num as c_ulong
}
/// Encode a read-direction V4L2 ioctl request: `_IOR('V', num, size)`.
///
/// BUG FIX: direction bits were `0x4000_0000` (`_IOC_WRITE`); `_IOR` is
/// `_IOC_READ = 2 << 30 = 0x8000_0000` (e.g. VIDIOC_QUERYCAP = 0x80685600).
const fn ior_v(size: usize, num: u8) -> c_ulong {
    (0x80 << 24) | ((size as c_ulong & 0x3fff) << 16) | ((b'V' as c_ulong) << 8) | num as c_ulong
}
/// Encode a read/write V4L2 ioctl request: `_IOWR('V', num, size)`.
/// Direction bits were already correct (`0xc000_0000`); only the size mask
/// was widened to 14 bits for consistency with the kernel encoding.
const fn iowr_v(size: usize, num: u8) -> c_ulong {
    (0xc0 << 24) | ((size as c_ulong & 0x3fff) << 16) | ((b'V' as c_ulong) << 8) | num as c_ulong
}
// V4L2 ioctl request numbers (videodev2.h).  The encoded value embeds the
// argument struct's size, so the #[repr(C)] layouts above must match the
// kernel's exactly for these to be accepted.
const VIDIOC_STREAMON: c_ulong = iow_v(size_of::<c_int>(), 18);
const VIDIOC_STREAMOFF: c_ulong = iow_v(size_of::<c_int>(), 19);
const VIDIOC_QUERYCAP: c_ulong = ior_v(size_of::<V4l2Capability>(), 0);
const VIDIOC_S_FMT: c_ulong = iowr_v(size_of::<V4l2Format>(), 5);
const VIDIOC_REQBUFS: c_ulong = iowr_v(size_of::<V4l2RequestBuffers>(), 8);
const VIDIOC_QUERYBUF: c_ulong = iowr_v(size_of::<V4l2Buffer>(), 9);
const VIDIOC_QBUF: c_ulong = iowr_v(size_of::<V4l2Buffer>(), 15);
const VIDIOC_DQBUF: c_ulong = iowr_v(size_of::<V4l2Buffer>(), 17);
/// Pack a four-character code (e.g. `b"MJPG"`) into the little-endian `u32`
/// form V4L2 uses for pixel-format identifiers: byte 0 is least significant.
const fn v4l2_fourcc(a: &[u8; 4]) -> u32 {
    let low = (a[0] as u32) | ((a[1] as u32) << 8);
    let high = ((a[2] as u32) << 16) | ((a[3] as u32) << 24);
    low | high
}
// FourCC requested from the driver: Motion-JPEG compressed frames.
const V4L2_PIX_FMT_MJPEG: u32 = v4l2_fourcc(b"MJPG");
// mmap(2) protection and sharing flags, per Linux <sys/mman.h>.
// BUG FIX: PROT_READ was 0x04 — on Linux that value is PROT_EXEC — and
// MAP_SHARED was 0x0010, which is MAP_FIXED.  With the old values any
// future mmap of the capture buffer would have been executable-only and
// pinned to an address, failing or corrupting the mapping.
const PROT_READ: c_int = 0x01;
const PROT_WRITE: c_int = 0x02;
const MAP_SHARED: c_int = 0x01;
/// Issue `ioctl(2)` on `fd`, transparently retrying while the call is
/// interrupted by a signal (errno 4 == EINTR).  Returns the final ioctl
/// result (-1 on any other failure, with errno set).
fn xioctl(fd: c_int, request: c_ulong, arg: *mut c_void) -> c_int {
    loop {
        let ret = unsafe { ioctl(fd, request, arg) };
        // Anything other than an EINTR-flavored -1 is a definitive result.
        if ret != -1 || errno() != 4 {
            return ret;
        }
    }
}
// Read the calling thread's current `errno` via glibc's __errno_location().
#[inline(always)]
fn errno() -> c_int {
// SAFETY: on glibc targets __errno_location() always returns a valid
// pointer to the thread-local errno.
unsafe { *__errno_location() }
}
// Raw libc bindings used by this backend (no libc crate dependency).
extern "C" {
// Variadic ioctl(2); always called here with a single pointer argument.
fn ioctl(fd: c_int, request: c_ulong, ...) -> c_int;
// mmap(2)/munmap(2) for mapping driver-allocated capture buffers.
fn mmap(addr: *mut c_void, length: usize, prot: c_int, flags: c_int,
fd: c_int, offset: isize) -> *mut c_void;
fn munmap(addr: *mut c_void, length: usize) -> c_int;
fn read(fd: c_int, buf: *mut c_void, count: usize) -> isize;
// NOTE(review): write() is declared but never called in this chunk.
fn write(fd: c_int, buf: *const c_void, count: usize) -> isize;
fn close(fd: c_int) -> c_int;
// glibc's accessor for the thread-local errno.
fn __errno_location() -> *mut c_int;
// inotify(7): directory-watch machinery used to detect device hotplug.
fn inotify_init1(flags: c_int) -> c_int;
fn inotify_add_watch(fd: c_int, path: *const c_char, mask: u32) -> c_int;
}
// Events emitted by this camera backend.
// NOTE(review): `Event` is never constructed in this chunk — `Rig`'s Future
// resolves directly to `Camera` instead.  Confirm whether it is still used.
pub enum Event {
Connect(Box<Camera>),
}
// Hotplug monitor: watches `/dev/` through inotify for camera device nodes
// appearing or disappearing.
pub struct Rig {
// Async wrapper around the inotify file descriptor.
device: Device,
// Device names (e.g. "video0") already opened, so they aren't re-opened.
connected: HashSet<String>,
}
impl Rig {
    /// Create a camera-hotplug monitor.
    ///
    /// Sets up a non-blocking inotify instance (0o4000 == IN_NONBLOCK)
    /// watching `/dev/` for node creation (0x100 == IN_CREATE) and deletion
    /// (0x200 == IN_DELETE), and hands its fd to the async reactor.
    ///
    /// # Panics
    /// Panics if the inotify instance or the `/dev/` watch cannot be created.
    pub fn new() -> Self {
        let fd = unsafe { inotify_init1(0o0004000) };
        if fd == -1 {
            panic!("Couldn't create inotify (1)!");
        }
        let watch = unsafe {
            inotify_add_watch(
                fd,
                b"/dev/\0".as_ptr() as *const _,
                0x0000_0200 | 0x0000_0100,
            )
        };
        if watch == -1 {
            panic!("Couldn't create inotify (2)!");
        }
        Rig {
            device: Device::new(fd, Watcher::new().input()),
            connected: HashSet::new(),
        }
    }
}
impl Future for Rig {
    type Output = Camera;
    /// Resolve with the next camera that becomes available.
    ///
    /// Drains one inotify event per call; when no event is pending, scans
    /// `/dev/` for not-yet-connected `video*` nodes and resolves with the
    /// first one that opens as a working camera, otherwise registers the
    /// waker and stays pending.
    fn poll(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Camera> {
        let mut ev = MaybeUninit::<InotifyEv>::uninit();
        let ev = unsafe {
            // Non-blocking read of one inotify event; <= 0 means the queue
            // is empty (the fd was created with IN_NONBLOCK).
            if read(
                self.device.fd(),
                ev.as_mut_ptr().cast(),
                std::mem::size_of::<InotifyEv>(),
            ) <= 0
            {
                'fds: for file in fs::read_dir("/dev/").unwrap() {
                    let file = file.unwrap().file_name().into_string().unwrap();
                    if file.starts_with("video") {
                        if self.connected.contains(&file) {
                            continue 'fds;
                        }
                        let mut filename = "/dev/".to_string();
                        filename.push_str(&file);
                        // Nodes that can't be opened yet (e.g. udev hasn't
                        // granted permissions) are simply retried on a later
                        // poll; the dead `all_open` bookkeeping was removed.
                        let fd = match OpenOptions::new()
                            .read(true)
                            .append(true)
                            .open(filename)
                        {
                            Ok(f) => f,
                            Err(_e) => continue 'fds,
                        };
                        self.connected.insert(file);
                        if let Some(camera) = Camera::new(fd.into_raw_fd(), Raster::with_clear(640, 480)) {
                            return Poll::Ready(
                                camera
                            );
                        }
                    }
                }
                self.device.register_waker(cx.waker());
                return Poll::Pending;
            }
            ev.assume_init()
        };
        // IN_DELETE (0x200): a node disappeared; forget it so a re-plugged
        // camera with the same name is picked up again.
        // BUG FIX: the old test was `ends_with("-event-joystick")` (copied
        // from a joystick backend) which never matches "videoN" names, so
        // unplugged cameras were never removed from `connected`.
        if (ev.mask & 0x0000_0200) != 0 {
            let name = unsafe { std::ffi::CStr::from_ptr(ev.name.as_ptr()) };
            let file = name.to_string_lossy().to_string();
            if file.starts_with("video") {
                let _ = self.connected.remove(&file);
            }
        }
        // Recurse to drain further queued events and rescan for devices.
        self.poll(cx)
    }
}
impl Drop for Rig {
// Detach the inotify fd from the reactor, then close it.
fn drop(&mut self) {
let fd = self.device.fd();
// NOTE(review): `old()` presumably unregisters the fd from the reactor
// before it is closed — confirm against the smelling_salts Device API.
self.device.old();
assert_ne!(unsafe { close(fd) }, -1);
}
}
// A single V4L2 capture device with one driver-allocated MMAP buffer.
pub struct Camera {
// Async wrapper around the camera's file descriptor.
device: Device,
// Mapped frame buffer.  NOTE(review): never assigned a mapping in this
// chunk — it stays null from Camera::new.
buffer: *mut c_void,
// The queued/dequeued V4L2 buffer descriptor, reused every frame.
buf: V4l2Buffer,
// `data` is unused in this chunk; `size` is the driver-reported buffer
// length from VIDIOC_QUERYBUF.
data: *mut c_void, size: u32,
// Destination raster for decoded frames (640x480 sRGB+alpha).
raster: Raster<SRgba8>,
}
impl Camera {
/// Open and configure a V4L2 capture device, returning `None` when the
/// device cannot be opened.
///
/// Setup sequence: open -> QUERYCAP -> S_FMT (MJPEG) -> REQBUFS (one MMAP
/// buffer) -> QUERYBUF -> QBUF -> STREAMON.
///
/// NOTE(review): the `fd` parameter is ignored — the function re-opens the
/// hard-coded "/dev/video0" instead of using the descriptor the caller
/// (Rig::poll) already opened, leaking the caller's fd.  Also, the queried
/// buffer is never mmap()ed here, so `buffer`/`data` stay null.  Confirm
/// whether this is unfinished work.
///
/// # Panics
/// Panics if any of the V4L2 setup ioctls fail.
pub fn new(fd: c_int, raster: Raster<SRgba8>) -> Option<Camera> {
let filename = "/dev/video0";
// NOTE(review): custom flag 0x0004 is not O_NONBLOCK (0o4000) on Linux —
// verify which open flag was intended.
let fd = match OpenOptions::new()
.read(true)
.append(true)
.mode(0)
.custom_flags(0x0004 )
.open(filename)
{
Ok(f) => f.into_raw_fd(),
Err(_e) => return None,
};
if fd == -1 {
return None;
}
// Confirm the node speaks V4L2; the returned capabilities are not read.
let mut caps: MaybeUninit<V4l2Capability> = MaybeUninit::uninit();
if xioctl(fd, VIDIOC_QUERYCAP, caps.as_mut_ptr().cast()) == -1 {
panic!("Failed Querying Capabilites\n");
}
// Request MJPEG output; width/height of 0 leaves the frame size to the
// driver, which rewrites this struct with the values it chose.
let mut fmt = V4l2Format {
type_: V4l2BufType::VideoCapture,
fmt: V4l2FormatUnion {
pix: V4l2PixFormat {
width: 0, height: 0, pixelformat: V4L2_PIX_FMT_MJPEG,
field: V4l2Field::None,
bytesperline: 0,
sizeimage: 0,
colorspace: V4l2Colorspace::Unset,
private: 0,
},
},
};
if xioctl(fd, VIDIOC_S_FMT, (&mut fmt as *mut V4l2Format).cast()) == -1 {
panic!("Error setting Pixel Format\n");
}
// Ask the driver to allocate a single memory-mapped capture buffer.
let mut req = V4l2RequestBuffers {
count: 1,
type_: V4l2BufType::VideoCapture,
memory: V4l2Memory::Mmap,
reserved: [0; 2],
};
if xioctl(fd, VIDIOC_REQBUFS, (&mut req as *mut V4l2RequestBuffers).cast()) == -1 {
panic!("Error Requesting Buffer\n");
}
// QUERYBUF fills in the buffer's length (and mmap offset in `m`).
let mut buf = V4l2Buffer {
index: 0,
type_: V4l2BufType::VideoCapture,
bytesused: 0,
flags: 0,
field: V4l2Field::Any,
timestamp: TimeVal {
tv_sec: 0,
tv_usec: 0,
},
timecode: V4l2Timecode {
type_: 0,
flags: 0,
frames: 0,
seconds: 0,
minutes: 0,
hours: 0,
userbits: [0; 4],
},
sequence: 0,
memory: V4l2Memory::Mmap,
m: V4l2BufferUnion { userptr: 0 },
length: 0,
input: 0,
reserved: 0,
};
if xioctl(fd, VIDIOC_QUERYBUF, (&mut buf as *mut V4l2Buffer).cast()) == -1 {
panic!("Error Querying Buffer\n");
}
// Queue the buffer so the driver can fill it, then start streaming.
if xioctl(fd, VIDIOC_QBUF, (&mut buf as *mut V4l2Buffer).cast()) == -1 {
panic!("Error: VIDIOC_QBUF");
}
let mut type_ = V4l2BufType::VideoCapture;
if xioctl(fd, VIDIOC_STREAMON, (&mut type_ as *mut V4l2BufType).cast()) == -1 {
panic!("Error: VIDIOC_STREAMON");
}
Some(Camera {
device: Device::new(fd, Watcher::new().input()),
size: buf.length,
buf,
buffer: null_mut(),
data: null_mut(),
raster,
})
}
}
impl Future for Camera {
    type Output = ();
    /// Resolve once the driver has filled the queued buffer with a frame.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let fd = self.device.fd();
        // Try to dequeue the filled capture buffer.
        if xioctl(fd, VIDIOC_DQBUF, (&mut self.buf as *mut V4l2Buffer).cast()) == -1 {
            let err = errno();
            // errno 11 == EAGAIN: no frame ready on this non-blocking fd yet.
            if err == 11 {
                self.device.register_waker(cx.waker());
                return Poll::Pending;
            }
            // Any other failure is fatal; close the fd before panicking.
            unsafe {
                close(fd);
            }
            panic!("Error retrieving frame {}\n", err);
        }
        // Immediately hand the buffer back so the driver can capture the
        // next frame into it.
        if xioctl(fd, VIDIOC_QBUF, (&mut self.buf as *mut V4l2Buffer).cast()) == -1 {
            panic!("VIDIOC_QBUF");
        }
        Poll::Ready(())
    }
}
impl Drop for Camera {
    /// Stop the capture stream, release any buffer mapping, and close the fd.
    fn drop(&mut self) {
        let mut type_ = V4l2BufType::VideoCapture;
        if xioctl(self.device.fd(), VIDIOC_STREAMOFF, (&mut type_ as *mut V4l2BufType).cast()) == -1 {
            panic!("Error VIDIOC_STREAMOFF");
        }
        // BUG FIX: `buffer` is initialized to null and nothing in this file
        // ever mmap()s it, so munmap(NULL, len) returned -1 (EINVAL) and
        // every Camera drop panicked.  Only unmap a mapping that exists.
        if !self.buffer.is_null()
            && unsafe { munmap(self.buffer, self.size.try_into().unwrap()) == -1 }
        {
            panic!("Error munmap");
        }
        if unsafe { close(self.device.fd()) == -1 } {
            panic!("Error close");
        }
    }
}