/// Wire format used when encoding mesh vertex positions for streaming export.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum StreamFormat {
    /// 12 bytes per vertex: three little-endian IEEE-754 f32 components.
    BinaryFloat32,
    /// 6 bytes per vertex: three u16 values quantized over the [-100, 100] range
    /// (see `encode_chunk_f16` / `decode_chunk_f16`).
    BinaryFloat16,
    /// Human-readable text, one "x,y,z\n" line per vertex.
    AsciiCsv,
}
14
/// Configuration for chunked export of mesh positions.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct StreamingExportConfig {
    /// Maximum vertices per chunk; `stream_mesh_positions` treats 0 as 4096.
    pub chunk_size: usize,
    /// Encoding applied to each chunk's payload.
    pub format: StreamFormat,
    // NOTE(review): nothing in this file reads `compress`; presumably a hook
    // for a future compression pass — confirm before relying on it.
    pub compress: bool,
}
24
impl Default for StreamingExportConfig {
    /// Defaults: 4096 vertices per chunk, uncompressed binary f32 payloads.
    fn default() -> Self {
        Self {
            chunk_size: 4096,
            format: StreamFormat::BinaryFloat32,
            compress: false,
        }
    }
}
34
/// One encoded slice of the vertex stream, self-describing enough to be
/// reordered and reassembled (see `reassemble_chunks`).
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct StreamChunk {
    /// Position of this chunk in the stream (0-based).
    pub chunk_index: u32,
    /// Index of this chunk's first vertex within the original position slice.
    pub vertex_offset: u32,
    /// Number of vertices encoded in `data`.
    pub vertex_count: u32,
    /// Encoded payload; format is determined by the exporting config.
    pub data: Vec<u8>,
}
44
/// Summary of a completed streaming export, consumed by
/// `streaming_export_stats` for human-readable reporting.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub struct StreamingExportResult {
    /// Total number of chunks emitted.
    pub total_chunks: u32,
    /// Total vertices across all chunks.
    pub total_vertices: u32,
    /// Total encoded payload size in bytes.
    pub total_bytes: usize,
    /// Encoding that was used for every chunk.
    pub format: StreamFormat,
}
53
/// Encode vertex positions as raw little-endian f32 triples.
///
/// Output is exactly `positions.len() * 12` bytes: x, y, z for each vertex in
/// order, each component as 4 little-endian bytes. Lossless; inverse of
/// `decode_chunk_f32`.
#[allow(dead_code)]
pub fn encode_chunk_f32(positions: &[[f32; 3]]) -> Vec<u8> {
    positions
        .iter()
        .flat_map(|vertex| vertex.iter().flat_map(|component| component.to_le_bytes()))
        .collect()
}
67
/// Encode vertex positions as quantized u16 triples (6 bytes per vertex).
///
/// Each component is mapped linearly from the [-100.0, 100.0] range onto
/// 0..=65535 and written as 2 little-endian bytes. Components outside that
/// range saturate to 0 / 65535 (Rust float-to-int `as` casts saturate).
/// Approximate inverse of `decode_chunk_f16`.
#[allow(dead_code)]
pub fn encode_chunk_f16(positions: &[[f32; 3]]) -> Vec<u8> {
    positions
        .iter()
        .flat_map(|vertex| {
            vertex.iter().flat_map(|&component| {
                let quantized = ((component + 100.0) * 65535.0 / 200.0).round() as u16;
                quantized.to_le_bytes()
            })
        })
        .collect()
}
81
/// Encode vertex positions as ASCII CSV: one "x,y,z\n" line per vertex,
/// using Rust's default f32 `Display` formatting.
#[allow(dead_code)]
pub fn encode_chunk_csv(positions: &[[f32; 3]]) -> Vec<u8> {
    positions
        .iter()
        .map(|v| format!("{},{},{}\n", v[0], v[1], v[2]))
        .collect::<String>()
        .into_bytes()
}
91
/// Decode raw little-endian f32 triples back into vertex positions.
///
/// Consumes the input in 12-byte groups (x, y, z as 4 little-endian bytes
/// each); any trailing bytes that do not form a full vertex are ignored.
/// Inverse of `encode_chunk_f32`.
#[allow(dead_code)]
pub fn decode_chunk_f32(data: &[u8]) -> Vec<[f32; 3]> {
    data.chunks_exact(12)
        .map(|record| {
            let read = |at: usize| {
                f32::from_le_bytes([record[at], record[at + 1], record[at + 2], record[at + 3]])
            };
            [read(0), read(4), read(8)]
        })
        .collect()
}
108
/// Decode quantized u16 triples back into approximate vertex positions.
///
/// Consumes the input in 6-byte groups; each little-endian u16 is mapped
/// linearly from 0..=65535 back onto [-100.0, 100.0]. Trailing bytes that do
/// not form a full vertex are ignored. Approximate inverse of
/// `encode_chunk_f16` (quantization error up to ~0.0015 per component).
#[allow(dead_code)]
pub fn decode_chunk_f16(data: &[u8]) -> Vec<[f32; 3]> {
    data.chunks_exact(6)
        .map(|record| {
            let dequantize = |at: usize| {
                let q = u16::from_le_bytes([record[at], record[at + 1]]);
                (q as f32) * 200.0 / 65535.0 - 100.0
            };
            [dequantize(0), dequantize(2), dequantize(4)]
        })
        .collect()
}
129
130#[allow(dead_code)]
134pub fn stream_mesh_positions(
135 positions: &[[f32; 3]],
136 cfg: &StreamingExportConfig,
137) -> Vec<StreamChunk> {
138 if positions.is_empty() {
139 return Vec::new();
140 }
141 let chunk_size = if cfg.chunk_size == 0 {
142 4096
143 } else {
144 cfg.chunk_size
145 };
146 let total = positions.len();
147 let total_chunks = total.div_ceil(chunk_size);
148 let mut chunks = Vec::with_capacity(total_chunks);
149
150 for chunk_index in 0..total_chunks {
151 let offset = chunk_index * chunk_size;
152 let end = (offset + chunk_size).min(total);
153 let slice = &positions[offset..end];
154 let data = match cfg.format {
155 StreamFormat::BinaryFloat32 => encode_chunk_f32(slice),
156 StreamFormat::BinaryFloat16 => encode_chunk_f16(slice),
157 StreamFormat::AsciiCsv => encode_chunk_csv(slice),
158 };
159 chunks.push(StreamChunk {
160 chunk_index: chunk_index as u32,
161 vertex_offset: offset as u32,
162 vertex_count: slice.len() as u32,
163 data,
164 });
165 }
166 chunks
167}
168
169#[allow(dead_code)]
171pub fn reassemble_chunks(chunks: &[StreamChunk]) -> Vec<[f32; 3]> {
172 if chunks.is_empty() {
173 return Vec::new();
174 }
175 let mut sorted_indices: Vec<usize> = (0..chunks.len()).collect();
177 sorted_indices.sort_by_key(|&i| chunks[i].chunk_index);
178
179 let total_vertices: usize = chunks.iter().map(|c| c.vertex_count as usize).sum();
182 let total_bytes: usize = chunks.iter().map(|c| c.data.len()).sum();
183
184 let mut out = Vec::with_capacity(total_vertices);
185
186 for idx in sorted_indices {
187 let chunk = &chunks[idx];
188 let vertex_count = chunk.vertex_count as usize;
189 let byte_count = chunk.data.len();
190
191 let decoded = if vertex_count > 0 && byte_count == vertex_count * 12 {
192 decode_chunk_f32(&chunk.data)
193 } else if vertex_count > 0 && byte_count == vertex_count * 6 {
194 decode_chunk_f16(&chunk.data)
195 } else {
196 let text = std::str::from_utf8(&chunk.data).unwrap_or("");
198 text.lines()
199 .filter_map(|line| {
200 let parts: Vec<&str> = line.split(',').collect();
201 if parts.len() == 3 {
202 let x = parts[0].trim().parse::<f32>().ok()?;
203 let y = parts[1].trim().parse::<f32>().ok()?;
204 let z = parts[2].trim().parse::<f32>().ok()?;
205 Some([x, y, z])
206 } else {
207 None
208 }
209 })
210 .collect()
211 };
212 out.extend_from_slice(&decoded);
213 }
214
215 let _ = total_bytes;
217 out
218}
219
220#[allow(dead_code)]
222pub fn streaming_export_stats(result: &StreamingExportResult) -> String {
223 let fmt = match result.format {
224 StreamFormat::BinaryFloat32 => "BinaryFloat32",
225 StreamFormat::BinaryFloat16 => "BinaryFloat16",
226 StreamFormat::AsciiCsv => "AsciiCsv",
227 };
228 format!(
229 "StreamingExport: {} vertices, {} chunks, {} bytes, format={}",
230 result.total_vertices, result.total_chunks, result.total_bytes, fmt
231 )
232}
233
#[cfg(test)]
mod tests {
    use super::*;

    // Deterministic fixture: n vertices at [0.1*i, 0.2*i, 0.3*i].
    fn sample_positions(n: usize) -> Vec<[f32; 3]> {
        (0..n)
            .map(|i| {
                let f = i as f32;
                [f * 0.1, f * 0.2, f * 0.3]
            })
            .collect()
    }

    // f32 encoding is lossless, so the round trip must match to float epsilon.
    #[test]
    fn encode_decode_f32_round_trip() {
        let positions = sample_positions(10);
        let encoded = encode_chunk_f32(&positions);
        let decoded = decode_chunk_f32(&encoded);
        assert_eq!(decoded.len(), positions.len());
        for (a, b) in positions.iter().zip(decoded.iter()) {
            assert!((a[0] - b[0]).abs() < 1e-6);
            assert!((a[1] - b[1]).abs() < 1e-6);
            assert!((a[2] - b[2]).abs() < 1e-6);
        }
    }

    // u16 quantization over [-100, 100] has step ~0.003, so 0.01 tolerance.
    #[test]
    fn encode_decode_f16_approximate_round_trip() {
        let positions = vec![[0.0f32, 50.0, -50.0], [10.0, -10.0, 99.0]];
        let encoded = encode_chunk_f16(&positions);
        let decoded = decode_chunk_f16(&encoded);
        assert_eq!(decoded.len(), positions.len());
        for (a, b) in positions.iter().zip(decoded.iter()) {
            assert!((a[0] - b[0]).abs() < 0.01, "x: {} vs {}", a[0], b[0]);
            assert!((a[1] - b[1]).abs() < 0.01, "y: {} vs {}", a[1], b[1]);
            assert!((a[2] - b[2]).abs() < 0.01, "z: {} vs {}", a[2], b[2]);
        }
    }

    // One CSV line per vertex.
    #[test]
    fn encode_chunk_csv_correct_line_count() {
        let positions = sample_positions(7);
        let csv_bytes = encode_chunk_csv(&positions);
        let text = std::str::from_utf8(&csv_bytes).expect("should succeed");
        let line_count = text.lines().count();
        assert_eq!(line_count, 7);
    }

    // Chunk count is ceil(total / chunk_size).
    #[test]
    fn stream_mesh_positions_chunk_count() {
        let positions = sample_positions(10000);
        let cfg = StreamingExportConfig {
            chunk_size: 4096,
            format: StreamFormat::BinaryFloat32,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        let expected = (10000usize).div_ceil(4096);
        assert_eq!(chunks.len(), expected);
    }

    // 5 vertices at chunk_size 2 -> chunks of 2, 2, 1.
    #[test]
    fn stream_mesh_positions_small_chunk() {
        let positions = sample_positions(5);
        let cfg = StreamingExportConfig {
            chunk_size: 2,
            format: StreamFormat::BinaryFloat32,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        assert_eq!(chunks.len(), 3); }

    // Splitting then reassembling f32 chunks reproduces the input exactly.
    #[test]
    fn reassemble_chunks_f32_reconstructs_full() {
        let positions = sample_positions(100);
        let cfg = StreamingExportConfig {
            chunk_size: 30,
            format: StreamFormat::BinaryFloat32,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        let reconstructed = reassemble_chunks(&chunks);
        assert_eq!(reconstructed.len(), positions.len());
        for (a, b) in positions.iter().zip(reconstructed.iter()) {
            assert!((a[0] - b[0]).abs() < 1e-5);
        }
    }

    // CSV chunks go through reassemble_chunks' text-parsing fallback.
    #[test]
    fn reassemble_chunks_csv_reconstructs_full() {
        let positions = sample_positions(20);
        let cfg = StreamingExportConfig {
            chunk_size: 8,
            format: StreamFormat::AsciiCsv,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        let reconstructed = reassemble_chunks(&chunks);
        assert_eq!(reconstructed.len(), positions.len());
    }

    #[test]
    fn empty_positions_yields_zero_chunks() {
        let cfg = StreamingExportConfig::default();
        let chunks = stream_mesh_positions(&[], &cfg);
        assert_eq!(chunks.len(), 0);
    }

    #[test]
    fn streaming_export_stats_non_empty() {
        let result = StreamingExportResult {
            total_chunks: 3,
            total_vertices: 100,
            total_bytes: 1200,
            format: StreamFormat::BinaryFloat32,
        };
        let s = streaming_export_stats(&result);
        assert!(!s.is_empty());
        assert!(s.contains("100"));
        assert!(s.contains("BinaryFloat32"));
    }

    // 3 components x 4 bytes each per vertex.
    #[test]
    fn f32_chunk_size_is_n_times_12() {
        let positions = sample_positions(50);
        let encoded = encode_chunk_f32(&positions);
        assert_eq!(encoded.len(), 50 * 12);
    }

    // 3 components x 2 bytes each per vertex.
    #[test]
    fn f16_chunk_size_is_n_times_6() {
        let positions = sample_positions(50);
        let encoded = encode_chunk_f16(&positions);
        assert_eq!(encoded.len(), 50 * 6);
    }

    // Offsets advance by chunk_size per chunk.
    #[test]
    fn stream_chunk_vertex_offsets_are_correct() {
        let positions = sample_positions(10);
        let cfg = StreamingExportConfig {
            chunk_size: 3,
            format: StreamFormat::BinaryFloat32,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        assert_eq!(chunks[0].vertex_offset, 0);
        assert_eq!(chunks[1].vertex_offset, 3);
        assert_eq!(chunks[2].vertex_offset, 6);
    }

    // No vertices are lost or duplicated across chunk boundaries.
    #[test]
    fn total_vertex_count_matches_sum_of_chunk_vertex_counts() {
        let positions = sample_positions(97);
        let cfg = StreamingExportConfig {
            chunk_size: 20,
            format: StreamFormat::BinaryFloat16,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        let total: u32 = chunks.iter().map(|c| c.vertex_count).sum();
        assert_eq!(total, 97);
    }

    // Quantized chunks survive the stream/reassemble cycle within tolerance.
    #[test]
    fn reassemble_chunks_f16_approximate() {
        let positions = vec![[1.0f32, 2.0, 3.0], [-5.0, 10.0, -10.0]];
        let cfg = StreamingExportConfig {
            chunk_size: 10,
            format: StreamFormat::BinaryFloat16,
            compress: false,
        };
        let chunks = stream_mesh_positions(&positions, &cfg);
        let reconstructed = reassemble_chunks(&chunks);
        assert_eq!(reconstructed.len(), 2);
        for (a, b) in positions.iter().zip(reconstructed.iter()) {
            assert!((a[0] - b[0]).abs() < 0.01);
            assert!((a[1] - b[1]).abs() < 0.01);
            assert!((a[2] - b[2]).abs() < 0.01);
        }
    }
}