use crate::core::{
plot_renderer::{PlotRenderConfig, PlotRenderer},
Camera,
};
use crate::plots::Figure;
#[cfg(feature = "gui")]
use egui::{Align2, Color32, FontId, Pos2};
#[cfg(feature = "gui")]
use egui_wgpu;
use futures::channel::oneshot;
use std::io::Cursor;
use std::path::Path;
use std::sync::Arc;
use wgpu::{Device, Queue, TextureFormat};
/// Offscreen exporter that rasterizes a `Figure` to RGBA pixels (and PNG)
/// using wgpu, without requiring a window or surface.
pub struct ImageExporter {
    /// GPU device used to create all offscreen resources.
    device: Arc<Device>,
    /// Queue paired with `device`; receives render and copy submissions.
    queue: Arc<Queue>,
    /// Color format used for the export render target (`Rgba8UnormSrgb`).
    #[allow(dead_code)]
    format: TextureFormat,
    /// Output size, background color, and encoder options.
    settings: ImageExportSettings,
}
/// Options controlling how a figure is rasterized for export.
#[derive(Debug, Clone)]
pub struct ImageExportSettings {
    /// Output image width in pixels.
    pub width: u32,
    /// Output image height in pixels.
    pub height: u32,
    /// Requested MSAA sample count. NOTE(review): the export path in this
    /// file renders at 1x regardless (`msaa_samples = 1`, texture
    /// `sample_count: 1`) — confirm whether this field is honored elsewhere.
    pub samples: u32,
    /// RGBA clear color, each channel in `[0.0, 1.0]`.
    pub background_color: [f32; 4],
    /// Encoder quality hint in `[0.0, 1.0]`. NOTE(review): not referenced by
    /// the visible (PNG) export paths — presumably for lossy formats.
    pub quality: f32,
    /// Whether to embed metadata in the encoded image. NOTE(review): not
    /// referenced by the visible export paths — confirm where it is consumed.
    pub include_metadata: bool,
}
/// Raster output formats supported by the exporter.
///
/// NOTE(review): only PNG encoding is exercised by the code visible in this
/// file; the remaining variants are presumably dispatched elsewhere.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ImageFormat {
    Png,
    Jpeg,
    WebP,
    Bmp,
}
impl Default for ImageExportSettings {
fn default() -> Self {
Self {
width: 800,
height: 600,
samples: 4, background_color: [1.0, 1.0, 1.0, 1.0], quality: 0.95,
include_metadata: true,
}
}
}
impl ImageExporter {
    /// Create an exporter with [`ImageExportSettings::default`].
    pub async fn new() -> Result<Self, String> {
        Self::with_settings(ImageExportSettings::default()).await
    }

    /// Create an exporter with explicit settings.
    ///
    /// Prefers the process-wide shared wgpu context when one exists;
    /// otherwise requests a high-performance adapter and a dedicated
    /// device/queue pair. Failures are reported as human-readable strings.
    pub async fn with_settings(settings: ImageExportSettings) -> Result<Self, String> {
        // Fast path: reuse the shared device/queue (e.g. one created by a GUI).
        if let Some(ctx) = crate::context::shared_wgpu_context() {
            return Ok(Self {
                device: ctx.device,
                queue: ctx.queue,
                format: TextureFormat::Rgba8UnormSrgb,
                settings,
            });
        }
        // Headless fallback: stand up our own wgpu instance/adapter/device.
        let instance = wgpu::Instance::new(wgpu::InstanceDescriptor::default());
        let adapter = instance
            .request_adapter(&wgpu::RequestAdapterOptions {
                power_preference: wgpu::PowerPreference::HighPerformance,
                // Offscreen rendering: no window surface to be compatible with.
                compatible_surface: None,
                force_fallback_adapter: false,
            })
            .await
            .ok_or("Failed to find suitable GPU adapter")?;
        let (device, queue) = adapter
            .request_device(&wgpu::DeviceDescriptor::default(), None)
            .await
            .map_err(|e| format!("Failed to create device: {e}"))?;
        Ok(Self {
            device: Arc::new(device),
            queue: Arc::new(queue),
            format: TextureFormat::Rgba8UnormSrgb,
            settings,
        })
    }

    /// Render `figure` and save it to `path` as a PNG file.
    pub async fn export_png<P: AsRef<Path>>(
        &self,
        figure: &mut Figure,
        path: P,
    ) -> Result<(), String> {
        let pixels = self.render_rgba(figure, None).await?;
        self.save_png(&pixels, path).await
    }

    /// Render `figure` and return PNG-encoded bytes.
    pub async fn render_png_bytes(&self, figure: &mut Figure) -> Result<Vec<u8>, String> {
        let pixels = self.render_rgba(figure, None).await?;
        self.encode_png_bytes(&pixels)
    }

    /// Render `figure` and return tightly packed RGBA8 pixels
    /// (`width * height * 4` bytes, row-major).
    pub async fn render_rgba_bytes(&self, figure: &mut Figure) -> Result<Vec<u8>, String> {
        self.render_rgba(figure, None).await
    }

    /// Render `figure` using `camera` instead of the renderer's own camera
    /// and return PNG-encoded bytes. The override applies to the
    /// single-axes path only.
    pub async fn render_png_bytes_with_camera(
        &self,
        figure: &mut Figure,
        camera: &Camera,
    ) -> Result<Vec<u8>, String> {
        let pixels = self.render_rgba(figure, Some(camera)).await?;
        self.encode_png_bytes(&pixels)
    }

    /// Render `figure` using `camera` and return raw RGBA8 pixels.
    pub async fn render_rgba_bytes_with_camera(
        &self,
        figure: &mut Figure,
        camera: &Camera,
    ) -> Result<Vec<u8>, String> {
        self.render_rgba(figure, Some(camera)).await
    }

    /// Render a multi-axes figure, overriding per-axes cameras by index,
    /// and return PNG-encoded bytes.
    pub async fn render_png_bytes_with_axes_cameras(
        &self,
        figure: &mut Figure,
        axes_cameras: &[Camera],
    ) -> Result<Vec<u8>, String> {
        let pixels = self
            .render_rgba_with_axes_cameras(figure, axes_cameras)
            .await?;
        self.encode_png_bytes(&pixels)
    }

    /// Render a multi-axes figure, overriding per-axes cameras by index,
    /// and return raw RGBA8 pixels.
    pub async fn render_rgba_bytes_with_axes_cameras(
        &self,
        figure: &mut Figure,
        axes_cameras: &[Camera],
    ) -> Result<Vec<u8>, String> {
        self.render_rgba_with_axes_cameras(figure, axes_cameras)
            .await
    }

    /// Thin wrapper over [`Self::render_rgba_internal`] for the
    /// single-camera path.
    async fn render_rgba(
        &self,
        figure: &mut Figure,
        camera_override: Option<&Camera>,
    ) -> Result<Vec<u8>, String> {
        self.render_rgba_internal(figure, camera_override, None)
            .await
    }

    /// Thin wrapper over [`Self::render_rgba_internal`] for the
    /// per-axes-camera path.
    async fn render_rgba_with_axes_cameras(
        &self,
        figure: &mut Figure,
        axes_cameras: &[Camera],
    ) -> Result<Vec<u8>, String> {
        self.render_rgba_internal(figure, None, Some(axes_cameras))
            .await
    }

    /// Core offscreen render path shared by every public render method.
    ///
    /// Steps: build a `PlotRenderer`, clear a fresh color texture to the
    /// background color, render the figure (subplot grid or single axes),
    /// optionally draw an egui overlay (title / axis labels, `gui` feature),
    /// then copy the texture to a mappable buffer and return tightly packed
    /// RGBA8 pixels.
    ///
    /// `camera_override` affects the single-axes path only;
    /// `axes_camera_overrides` replaces the renderer's per-axes cameras by
    /// index (extra entries are silently ignored).
    async fn render_rgba_internal(
        &self,
        figure: &mut Figure,
        camera_override: Option<&Camera>,
        axes_camera_overrides: Option<&[Camera]>,
    ) -> Result<Vec<u8>, String> {
        // PlotRenderer is configured through a SurfaceConfiguration even
        // though we render offscreen; present_mode/alpha_mode have no
        // effect without an actual surface.
        let sc_desc = wgpu::SurfaceConfiguration {
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            format: self.format,
            width: self.settings.width,
            height: self.settings.height,
            present_mode: wgpu::PresentMode::Fifo,
            alpha_mode: wgpu::CompositeAlphaMode::Opaque,
            view_formats: vec![],
            desired_maximum_frame_latency: 1,
        };
        let device: Arc<wgpu::Device> = self.device.clone();
        let queue: Arc<wgpu::Queue> = self.queue.clone();
        let mut plot_renderer = PlotRenderer::new(device.clone(), queue.clone(), sc_desc)
            .await
            .map_err(|e| format!("plot renderer init failed: {e}"))?;
        plot_renderer.set_figure(figure.clone());
        // Apply per-axes camera overrides; indices with no matching axes
        // are skipped.
        if let Some(overrides) = axes_camera_overrides {
            for (idx, camera) in overrides.iter().enumerate() {
                if let Some(target) = plot_renderer.axes_camera_mut(idx) {
                    *target = camera.clone();
                }
            }
        }
        // Encoder that records the plot render passes; submitted near the
        // end of this function.
        let mut encoder = self
            .device
            .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                label: Some("image_export_encoder"),
            });
        // Color target: COPY_SRC is required for the CPU readback below.
        let color_texture = self.device.create_texture(&wgpu::TextureDescriptor {
            label: Some("export_color"),
            size: wgpu::Extent3d {
                width: self.settings.width,
                height: self.settings.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: self.format,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
            view_formats: &[],
        });
        let color_view = color_texture.create_view(&wgpu::TextureViewDescriptor::default());
        // NOTE(review): this depth texture and its view are created but never
        // attached to any render pass below — confirm whether PlotRenderer
        // needs them or whether they can be removed.
        let depth_texture = self.device.create_texture(&wgpu::TextureDescriptor {
            label: Some("export_depth"),
            size: wgpu::Extent3d {
                width: self.settings.width,
                height: self.settings.height,
                depth_or_array_layers: 1,
            },
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: wgpu::TextureFormat::Depth24Plus,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
            view_formats: &[],
        });
        let _depth_view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default());
        // Clear the color target to the configured background color. Uses
        // its own encoder, submitted immediately, so the clear executes
        // before any subsequently submitted passes.
        {
            let mut clear_pass =
                self.device
                    .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                        label: Some("export_clear_encoder"),
                    });
            {
                let rp = clear_pass.begin_render_pass(&wgpu::RenderPassDescriptor {
                    label: Some("export_clear_pass"),
                    color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                        view: &color_view,
                        resolve_target: None,
                        ops: wgpu::Operations {
                            load: wgpu::LoadOp::Clear(wgpu::Color {
                                r: self.settings.background_color[0] as f64,
                                g: self.settings.background_color[1] as f64,
                                b: self.settings.background_color[2] as f64,
                                a: self.settings.background_color[3] as f64,
                            }),
                            store: wgpu::StoreOp::Store,
                        },
                    })],
                    depth_stencil_attachment: None,
                    occlusion_query_set: None,
                    timestamp_writes: None,
                });
                // The pass only needs to begin/end to perform the clear.
                drop(rp);
            }
            self.queue.submit(Some(clear_pass.finish()));
        }
        let mut cfg = PlotRenderConfig {
            width: self.settings.width,
            height: self.settings.height,
            ..Default::default()
        };
        // Force 1x MSAA to match the export color texture's sample_count.
        cfg.msaa_samples = 1;
        let (rows, cols) = plot_renderer.figure_axes_grid();
        if rows * cols > 1 {
            // Subplot grid: divide the image into equal cells separated by
            // fixed 8px gaps (integer division; remainder pixels are unused).
            let hgap: u32 = 8;
            let vgap: u32 = 8;
            let total_hgap = hgap * (cols.saturating_sub(1) as u32);
            let total_vgap = vgap * (rows.saturating_sub(1) as u32);
            let cell_w = (self.settings.width.saturating_sub(total_hgap)) / (cols as u32);
            let cell_h = (self.settings.height.saturating_sub(total_vgap)) / (rows as u32);
            // Viewports in row-major order: (x, y, w, h), clamped to >= 1px.
            let mut viewports: Vec<(u32, u32, u32, u32)> = Vec::with_capacity(rows * cols);
            for r in 0..rows {
                for c in 0..cols {
                    let x = c as u32 * (cell_w + hgap);
                    let y = r as u32 * (cell_h + vgap);
                    viewports.push((x, y, cell_w.max(1), cell_h.max(1)));
                }
            }
            plot_renderer
                .render_axes_to_viewports(&mut encoder, &color_view, &viewports, 1, &cfg)
                .map_err(|e| format!("render subplot failed: {e}"))?;
        } else {
            // Single axes: render into the full image, using the caller's
            // camera override when provided.
            let viewport = (0u32, 0u32, self.settings.width, self.settings.height);
            let cam = camera_override
                .cloned()
                .unwrap_or_else(|| plot_renderer.camera().clone());
            plot_renderer
                .render_camera_to_viewport(&mut encoder, &color_view, viewport, &cfg, &cam)
                .map_err(|e| format!("render failed: {e}"))?;
        }
        // Optional text overlay (figure title and per-cell axis labels),
        // drawn with a headless egui context at 1.0 pixels-per-point.
        #[cfg(feature = "gui")]
        {
            let egui_ctx = egui::Context::default();
            let mut raw_input = egui::RawInput::default();
            raw_input.viewports.insert(
                egui::viewport::ViewportId::ROOT,
                egui::ViewportInfo {
                    native_pixels_per_point: Some(1.0),
                    ..Default::default()
                },
            );
            let full_output = egui_ctx.run(raw_input, |ctx| {
                egui::CentralPanel::default()
                    .frame(egui::Frame::none())
                    .show(ctx, |ui| {
                        // Figure title, centered near the top edge.
                        if let Some(title) = &figure.title {
                            ui.painter().text(
                                Pos2::new(self.settings.width as f32 * 0.5, 24.0),
                                Align2::CENTER_CENTER,
                                title,
                                FontId::proportional(18.0),
                                Color32::BLACK,
                            );
                        }
                        // Mirror the same grid geometry as the render pass
                        // above so labels line up with their cells.
                        let (rows, cols) = figure.axes_grid();
                        let hgap: f32 = 8.0;
                        let vgap: f32 = 8.0;
                        let total_hgap = hgap * (cols.saturating_sub(1) as f32);
                        let total_vgap = vgap * (rows.saturating_sub(1) as f32);
                        let cell_w = (self.settings.width as f32 - total_hgap).max(1.0)
                            / (cols.max(1) as f32);
                        let cell_h = (self.settings.height as f32 - total_vgap).max(1.0)
                            / (rows.max(1) as f32);
                        for r in 0..rows {
                            for c in 0..cols {
                                let vp_x = c as f32 * (cell_w + hgap);
                                let vp_y = r as f32 * (cell_h + vgap);
                                let vp_center_x = vp_x + cell_w * 0.5;
                                let vp_max_y = vp_y + cell_h;
                                let vp_center_y = vp_y + cell_h * 0.5;
                                let vp_min_x = vp_x;
                                // X label: centered below the cell.
                                if let Some(xl) = &figure.x_label {
                                    ui.painter().text(
                                        Pos2::new(vp_center_x, vp_max_y + 20.0),
                                        Align2::CENTER_CENTER,
                                        xl,
                                        FontId::proportional(12.0),
                                        Color32::BLACK,
                                    );
                                }
                                // Y label: centered left of the cell
                                // (drawn horizontally, not rotated).
                                if let Some(yl) = &figure.y_label {
                                    ui.painter().text(
                                        Pos2::new(vp_min_x - 24.0, vp_center_y),
                                        Align2::CENTER_CENTER,
                                        yl,
                                        FontId::proportional(12.0),
                                        Color32::BLACK,
                                    );
                                }
                            }
                        }
                    });
            });
            // Fresh egui renderer per export; upload any font/texture deltas.
            let mut egui_renderer = egui_wgpu::Renderer::new(&self.device, self.format, None, 1);
            for (id, image_delta) in &full_output.textures_delta.set {
                egui_renderer.update_texture(&self.device, &self.queue, *id, image_delta);
            }
            let shapes = egui_ctx.tessellate(full_output.shapes, 1.0);
            let mut enc_overlay =
                self.device
                    .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                        label: Some("image_overlay_encoder"),
                    });
            egui_renderer.update_buffers(
                &self.device,
                &self.queue,
                &mut enc_overlay,
                &shapes,
                &egui_wgpu::ScreenDescriptor {
                    size_in_pixels: [self.settings.width, self.settings.height],
                    pixels_per_point: 1.0,
                },
            );
            {
                // LoadOp::Load preserves whatever is already in the color
                // target; the overlay is composited on top.
                let mut rp = enc_overlay.begin_render_pass(&wgpu::RenderPassDescriptor {
                    label: Some("image_overlay_pass"),
                    color_attachments: &[Some(wgpu::RenderPassColorAttachment {
                        view: &color_view,
                        resolve_target: None,
                        ops: wgpu::Operations {
                            load: wgpu::LoadOp::Load,
                            store: wgpu::StoreOp::Store,
                        },
                    })],
                    depth_stencil_attachment: None,
                    occlusion_query_set: None,
                    timestamp_writes: None,
                });
                egui_renderer.render(
                    &mut rp,
                    &shapes,
                    &egui_wgpu::ScreenDescriptor {
                        size_in_pixels: [self.settings.width, self.settings.height],
                        pixels_per_point: 1.0,
                    },
                );
            }
            // NOTE(review): the overlay encoder is submitted here, BEFORE the
            // main plot `encoder` is submitted below, so the GPU executes the
            // overlay pass first and the plot passes afterwards — plot pixels
            // may draw over the title/labels. Confirm this ordering is
            // intentional.
            self.queue.submit(Some(enc_overlay.finish()));
            for id in &full_output.textures_delta.free {
                egui_renderer.free_texture(id);
            }
        }
        self.queue.submit(Some(encoder.finish()));
        // Readback: wgpu requires bytes_per_row aligned to
        // COPY_BYTES_PER_ROW_ALIGNMENT (256 bytes), so rows may be padded.
        let bytes_per_pixel = 4u32;
        let padded_bytes_per_row = (self.settings.width * bytes_per_pixel).div_ceil(256) * 256;
        let output_buffer_size =
            (padded_bytes_per_row * self.settings.height) as wgpu::BufferAddress;
        let output_buffer = self.device.create_buffer(&wgpu::BufferDescriptor {
            label: Some("export_readback"),
            size: output_buffer_size,
            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
            mapped_at_creation: false,
        });
        let mut encoder2 = self
            .device
            .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                label: Some("export_copy_encoder"),
            });
        encoder2.copy_texture_to_buffer(
            wgpu::ImageCopyTexture {
                texture: &color_texture,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::ImageCopyBuffer {
                buffer: &output_buffer,
                layout: wgpu::ImageDataLayout {
                    offset: 0,
                    bytes_per_row: Some(padded_bytes_per_row),
                    rows_per_image: Some(self.settings.height),
                },
            },
            wgpu::Extent3d {
                width: self.settings.width,
                height: self.settings.height,
                depth_or_array_layers: 1,
            },
        );
        self.queue.submit(Some(encoder2.finish()));
        let buffer_slice = output_buffer.slice(..);
        map_read_async(&self.device, &buffer_slice).await?;
        let data = buffer_slice.get_mapped_range();
        // Strip the per-row alignment padding into a tightly packed buffer.
        let mut pixels = vec![0u8; (self.settings.width * self.settings.height * 4) as usize];
        for y in 0..self.settings.height as usize {
            let src_start = y * padded_bytes_per_row as usize;
            let dst_start = y * (self.settings.width as usize) * 4;
            pixels[dst_start..dst_start + (self.settings.width as usize) * 4]
                .copy_from_slice(&data[src_start..src_start + (self.settings.width as usize) * 4]);
        }
        // Mapped view must be dropped before unmapping the buffer.
        drop(data);
        output_buffer.unmap();
        Ok(pixels)
    }

    /// Encode `data` (tightly packed RGBA8 at the configured size) and save
    /// it to `path`; the image format is inferred from the file extension
    /// by the `image` crate.
    async fn save_png<P: AsRef<Path>>(&self, data: &[u8], path: P) -> Result<(), String> {
        use image::{ImageBuffer, Rgba};
        let image = ImageBuffer::<Rgba<u8>, _>::from_raw(
            self.settings.width,
            self.settings.height,
            data.to_vec(),
        )
        .ok_or("Failed to create image buffer")?;
        image
            .save(path)
            .map_err(|e| format!("Failed to save PNG: {e}"))?;
        log::debug!(target: "runmat_plot", "png export completed");
        Ok(())
    }

    /// Encode `data` (tightly packed RGBA8 at the configured size) as PNG
    /// and return the encoded bytes. PNG is lossless, so
    /// `settings.quality` does not apply here.
    fn encode_png_bytes(&self, data: &[u8]) -> Result<Vec<u8>, String> {
        use image::{ImageBuffer, ImageOutputFormat, Rgba};
        let image = ImageBuffer::<Rgba<u8>, _>::from_raw(
            self.settings.width,
            self.settings.height,
            data.to_vec(),
        )
        .ok_or("Failed to create image buffer")?;
        let mut cursor = Cursor::new(Vec::new());
        image
            .write_to(&mut cursor, ImageOutputFormat::Png)
            .map_err(|e| format!("Failed to encode PNG: {e}"))?;
        Ok(cursor.into_inner())
    }

    /// Replace the export settings used by subsequent renders.
    pub fn set_settings(&mut self, settings: ImageExportSettings) {
        self.settings = settings;
    }

    /// Current export settings.
    pub fn settings(&self) -> &ImageExportSettings {
        &self.settings
    }
}
/// Wait until `slice` is mapped for read access.
///
/// Registers a `map_async` callback that forwards its result through a
/// oneshot channel, drives the device so the callback can fire, then awaits
/// the outcome. On native targets `Maintain::Wait` blocks until the queue is
/// drained; on wasm the mapping completes via the browser event loop, so a
/// single non-blocking `Maintain::Poll` is issued instead.
async fn map_read_async(
    device: &wgpu::Device,
    slice: &wgpu::BufferSlice<'_>,
) -> Result<(), String> {
    let (sender, receiver) = oneshot::channel();
    slice.map_async(wgpu::MapMode::Read, move |res| {
        // The receiver may already be gone; a failed send is harmless.
        let _ = sender.send(res);
    });
    // Drive the device so the mapping callback gets invoked.
    #[cfg(not(target_arch = "wasm32"))]
    device.poll(wgpu::Maintain::Wait);
    #[cfg(target_arch = "wasm32")]
    device.poll(wgpu::Maintain::Poll);
    match receiver.await {
        Ok(Ok(())) => Ok(()),
        Ok(Err(_)) => Err("map error".to_string()),
        Err(_) => Err("map failed".to_string()),
    }
}