use std::error::Error;
use std::time::Duration;
use depthai::camera::{CameraNode, CameraOutputConfig};
use depthai::common::{CameraBoardSocket, ImageFrameType, ResizeMode};
use depthai::{
Device, Pipeline, RerunHostNode, RerunHostNodeConfig, RerunViewer, RerunWebConfig,
VideoEncoderNode, VideoEncoderProfile,
};
use rerun as rr;
/// Streams H.265-encoded frames from a DepthAI camera to a Rerun viewer.
///
/// Builds a device pipeline hosting a Rerun web viewer, requests an NV12
/// camera output, routes it through an H.265 encoder, and forwards every
/// encoded sample to Rerun over gRPC until the process is interrupted.
fn main() -> Result<(), Box<dyn Error>> {
    let dev = Device::new()?;
    let pipe = Pipeline::new().with_device(&dev).build()?;

    // Host node that serves the embedded Rerun web viewer; the handle is kept
    // alive (underscore-prefixed) for the lifetime of the program.
    let _web = pipe.create_with::<RerunHostNode, _>(RerunHostNodeConfig {
        app_id: "depthai_h265_server".to_string(),
        viewer: RerunViewer::Web(RerunWebConfig {
            open_browser: false,
            ..Default::default()
        }),
        ..Default::default()
    })?;

    // Recording stream carrying the encoded video samples over gRPC.
    let rec = rr::RecordingStreamBuilder::new("depthai_h265").connect_grpc()?;

    let camera = pipe.create_with::<CameraNode, _>(CameraBoardSocket::CamA)?;
    let frame_rate = 30.0;
    let (width, height) = (640, 400);

    // Request an NV12 stream from the camera, cropped to the target size —
    // NV12 is what the video encoder node consumes.
    let nv12_out = camera.request_output(CameraOutputConfig {
        size: (width, height),
        frame_type: Some(ImageFrameType::NV12),
        resize_mode: ResizeMode::Crop,
        fps: Some(frame_rate),
        enable_undistortion: None,
    })?;

    let encoder = pipe.create::<VideoEncoderNode>()?;
    encoder.validate_nv12_size(width, height)?;
    encoder.set_default_profile_preset(frame_rate, VideoEncoderProfile::H265Main);
    nv12_out.link(&encoder.input()?)?;

    // Queue of up to 8 encoded frames; the boolean configures queue behavior
    // when full — NOTE(review): presumed to mean "blocking/overwrite", confirm
    // against the depthai crate docs.
    let encoded = encoder.out()?.create_encoded_frame_queue(8, true)?;

    // Declare the codec once, statically, so the viewer knows how to decode
    // the per-frame samples logged below.
    rec.log_static("video", &rr::VideoStream::new(rr::components::VideoCodec::H265))?;

    pipe.start()?;
    eprintln!("Streaming H.265 to Rerun (press Ctrl-C to stop)...");
    eprintln!("If the web viewer can't fetch data, make sure the gRPC /proxy port (default 9876) is reachable from your browser (e.g. port-forward it if you're remote).");

    let mut seq: i64 = 0;
    loop {
        // Poll with a 500 ms timeout; a timeout yields None and we simply
        // retry rather than treating it as an error.
        let Some(frame) = encoded.blocking_next(Some(Duration::from_millis(500)))? else {
            continue;
        };
        rec.set_time_sequence("frame", seq);
        seq += 1;
        rec.log(
            "video",
            &rr::VideoStream::update_fields()
                .with_sample(rr::components::VideoSample::from(frame.bytes())),
        )?;
    }
}