use openigtlink_rust::error::Result;
use openigtlink_rust::io::{ClientBuilder, SyncIgtlClient};
use openigtlink_rust::protocol::message::IgtlMessage;
use openigtlink_rust::protocol::types::{CodecType, VideoMessage};
use std::env;
use std::thread;
use std::time::{Duration, Instant};
/// Entry point: run the demo and convert any error into a non-zero exit.
fn main() {
    match run() {
        Ok(()) => {}
        Err(e) => {
            eprintln!("[ERROR] {}", e);
            std::process::exit(1);
        }
    }
}
/// Connects to the local OpenIGTLink server and dispatches to the
/// codec-specific streaming routine chosen on the command line.
fn run() -> Result<()> {
    let codec = parse_codec();
    let mut client = ClientBuilder::new().tcp("127.0.0.1:18944").sync().build()?;
    println!("[INFO] Connected to OpenIGTLink server\n");
    let outcome = match codec {
        CodecType::MJPEG => stream_mjpeg(&mut client),
        CodecType::H264 => stream_h264(&mut client),
        CodecType::Raw => stream_raw(&mut client),
        // parse_codec() only produces the three variants above, but the
        // enum is wider; bail out gracefully on anything else.
        _ => {
            eprintln!("Unsupported codec type");
            return Ok(());
        }
    };
    outcome?;
    println!("\n[INFO] Video streaming completed successfully");
    Ok(())
}
/// Reads the codec name from the first CLI argument ("mjpeg" | "h264" |
/// "raw", case-insensitive). Prints usage and falls back to MJPEG when
/// the argument is absent or unrecognized.
fn parse_codec() -> CodecType {
    if let Some(choice) = env::args().nth(1) {
        match choice.to_lowercase().as_str() {
            "mjpeg" => return CodecType::MJPEG,
            "h264" => return CodecType::H264,
            "raw" => return CodecType::Raw,
            _ => {}
        }
    }
    println!("Usage: cargo run --example video_streaming [mjpeg|h264|raw]");
    println!("Defaulting to MJPEG...\n");
    CodecType::MJPEG
}
/// Streams 10 seconds of simulated 640x480 MJPEG frames at 30 fps over
/// `client`, then reports the achieved average frame rate.
fn stream_mjpeg(client: &mut SyncIgtlClient) -> Result<()> {
    println!("=== MJPEG Video Streaming ===");
    println!("Resolution: 640x480");
    println!("Frame Rate: 30 fps");
    println!("Duration: 10 seconds");
    println!("Codec: Motion JPEG\n");
    let width = 640;
    let height = 480;
    let fps: usize = 30;
    let duration_sec = 10;
    let total_frames = fps * duration_sec;
    // from_millis(1000 / 30) truncates 33.33 ms to 33 ms, pacing ~1% fast;
    // from_secs_f64 keeps the exact frame period.
    let frame_interval = Duration::from_secs_f64(1.0 / fps as f64);
    let stream_start = Instant::now();
    for frame_num in 0..total_frames {
        let start_time = Instant::now();
        let frame_data = generate_mjpeg_frame(frame_num, width, height);
        let video = VideoMessage::new(CodecType::MJPEG, width, height, frame_data);
        let msg = IgtlMessage::new(video, "LaparoscopicCamera")?;
        client.send(&msg)?;
        // Update the progress line once per second of video.
        if frame_num % fps == 0 {
            let seconds = frame_num / fps;
            print!("\r[MJPEG] Streaming: {}/{} seconds", seconds, duration_sec);
            std::io::Write::flush(&mut std::io::stdout()).ok();
        }
        // Sleep off the remainder of the frame period to pace the stream.
        let elapsed = start_time.elapsed();
        if elapsed < frame_interval {
            thread::sleep(frame_interval - elapsed);
        }
    }
    println!();
    // Average over the whole run including pacing sleeps. The original
    // sampled 1/elapsed *before* sleeping, which measured generation+send
    // speed (tens of thousands of fps), not the delivered stream rate.
    let total_elapsed = stream_start.elapsed().as_secs_f64();
    println!("Average FPS: {:.2}", total_frames as f64 / total_elapsed);
    Ok(())
}
/// Streams 5 seconds of simulated Full-HD H.264 frames at 60 fps over
/// `client`, then reports total payload size and average bitrate.
fn stream_h264(client: &mut SyncIgtlClient) -> Result<()> {
    println!("=== H.264 Video Streaming ===");
    println!("Resolution: 1920x1080 (Full HD)");
    println!("Frame Rate: 60 fps");
    println!("Duration: 5 seconds");
    println!("Codec: H.264/AVC\n");
    let width = 1920;
    let height = 1080;
    let fps: usize = 60;
    let duration_sec = 5;
    let total_frames = fps * duration_sec;
    // from_millis(1000 / 60) truncates 16.67 ms to 16 ms, pacing ~4% fast;
    // from_secs_f64 keeps the exact frame period.
    let frame_interval = Duration::from_secs_f64(1.0 / fps as f64);
    let mut total_bytes = 0;
    for frame_num in 0..total_frames {
        let start_time = Instant::now();
        let frame_data = generate_h264_frame(frame_num, width, height);
        total_bytes += frame_data.len();
        let video = VideoMessage::new(CodecType::H264, width, height, frame_data);
        let msg = IgtlMessage::new(video, "SurgicalMicroscope")?;
        client.send(&msg)?;
        // Update the progress line once per second of video.
        if frame_num % fps == 0 {
            let seconds = frame_num / fps;
            print!("\r[H.264] Streaming: {}/{} seconds", seconds, duration_sec);
            std::io::Write::flush(&mut std::io::stdout()).ok();
        }
        // Sleep off the remainder of the frame period to pace the stream.
        let elapsed = start_time.elapsed();
        if elapsed < frame_interval {
            thread::sleep(frame_interval - elapsed);
        }
    }
    println!();
    println!(
        "Total data sent: {:.2} MB",
        total_bytes as f64 / 1_048_576.0
    );
    // Bitrate is computed against the nominal duration of the clip.
    println!(
        "Average bitrate: {:.2} Mbps",
        (total_bytes as f64 * 8.0) / (duration_sec as f64 * 1_000_000.0)
    );
    Ok(())
}
/// Streams 10 seconds of uncompressed 320x240 RGB24 frames at 15 fps over
/// `client`, then reports the total payload size.
fn stream_raw(client: &mut SyncIgtlClient) -> Result<()> {
    println!("=== Raw Uncompressed Video Streaming ===");
    println!("Resolution: 320x240");
    println!("Frame Rate: 15 fps");
    println!("Duration: 10 seconds");
    println!("Format: RGB24 (uncompressed)\n");
    let width = 320;
    let height = 240;
    let fps: usize = 15;
    let duration_sec = 10;
    let total_frames = fps * duration_sec;
    // from_millis(1000 / 15) truncates 66.67 ms to 66 ms, pacing ~1% fast;
    // from_secs_f64 keeps the exact frame period.
    let frame_interval = Duration::from_secs_f64(1.0 / fps as f64);
    let mut total_bytes = 0;
    for frame_num in 0..total_frames {
        let start_time = Instant::now();
        let frame_data = generate_raw_frame(frame_num, width, height);
        total_bytes += frame_data.len();
        let video = VideoMessage::new(CodecType::Raw, width, height, frame_data);
        let msg = IgtlMessage::new(video, "MonitorCamera")?;
        client.send(&msg)?;
        // Low frame count, so the progress line is refreshed every frame.
        print!("\r[RAW] Streaming frame {}/{}", frame_num + 1, total_frames);
        std::io::Write::flush(&mut std::io::stdout()).ok();
        // Sleep off the remainder of the frame period to pace the stream.
        let elapsed = start_time.elapsed();
        if elapsed < frame_interval {
            thread::sleep(frame_interval - elapsed);
        }
    }
    println!();
    println!(
        "Total data sent: {:.2} MB",
        total_bytes as f64 / 1_048_576.0
    );
    Ok(())
}
/// Builds a fake MJPEG frame: JPEG SOI marker, `width*height/20` bytes of
/// deterministic payload (varying with `frame_num`), then the EOI marker.
fn generate_mjpeg_frame(frame_num: usize, width: u16, height: u16) -> Vec<u8> {
    // Simulated ~5% "compression ratio" payload size.
    let content_size = (width as usize * height as usize) / 20;
    // Preallocate instead of growing: payload + 2-byte SOI + 2-byte EOI.
    let mut data = Vec::with_capacity(content_size + 4);
    data.extend_from_slice(&[0xFF, 0xD8]); // SOI marker
    data.extend((0..content_size).map(|i| ((frame_num + i) % 256) as u8));
    data.extend_from_slice(&[0xFF, 0xD9]); // EOI marker
    data
}
/// Builds a fake H.264 NAL unit: Annex-B start code, one NAL header byte
/// (IDR slice every 30th frame, non-IDR otherwise), then a deterministic
/// pseudo-payload sized ~3% of the raw RGB frame.
fn generate_h264_frame(frame_num: usize, width: u16, height: u16) -> Vec<u8> {
    let content_size = (width as usize * height as usize * 3) / 100;
    // Preallocate instead of growing: 4-byte start code + 1 NAL byte + payload.
    let mut data = Vec::with_capacity(content_size + 5);
    data.extend_from_slice(&[0x00, 0x00, 0x00, 0x01]); // Annex-B start code
    // 0x65 = IDR slice (keyframe), 0x41 = non-IDR slice.
    let is_keyframe = frame_num % 30 == 0;
    data.push(if is_keyframe { 0x65 } else { 0x41 });
    data.extend((0..content_size).map(|i| ((frame_num * 7 + i * 13) % 256) as u8));
    data
}
/// Renders a synthetic RGB24 test pattern (animated sine/cosine gradients
/// driven by `frame_num`) and returns width*height*3 bytes in row-major
/// R, G, B order.
fn generate_raw_frame(frame_num: usize, width: u16, height: u16) -> Vec<u8> {
    let mut pixels = Vec::with_capacity(width as usize * height as usize * 3);
    // Animation phase is constant for a given frame; hoisted out of the loop.
    let phase = frame_num as f32 * 0.1;
    for row in 0..height {
        let ny = row as f32 / height as f32;
        for col in 0..width {
            let nx = col as f32 / width as f32;
            // Identical per-channel formulas as before; only the loop
            // structure and naming differ.
            pixels.push(((nx * 255.0 + phase * 10.0).sin() * 127.0 + 128.0) as u8);
            pixels.push(((ny * 255.0 + phase * 15.0).cos() * 127.0 + 128.0) as u8);
            pixels.push((((nx + ny) * 255.0 + phase * 20.0).sin() * 127.0 + 128.0) as u8);
        }
    }
    pixels
}