1use proc_macro::TokenStream;
9use quote::quote;
10use syn::{Attribute, Data, DeriveInput, LitStr, parse_macro_input};
11
12#[proc_macro_derive(ParcodeObject, attributes(parcode))]
14pub fn derive_parcode_object(input: TokenStream) -> TokenStream {
15 let input = parse_macro_input!(input as DeriveInput);
16 let name = input.ident;
17
18 let data_struct = match input.data {
19 Data::Struct(ds) => ds,
20 _ => {
21 return syn::Error::new(name.span(), "ParcodeObject only supports structs")
22 .to_compile_error()
23 .into();
24 }
25 };
26
27 let mut locals = Vec::new();
28 let mut remotes = Vec::new();
29
30 for field in data_struct.fields {
31 let (is_chunkable, compression_id, is_map) = match parse_attributes(&field.attrs) {
33 Ok(res) => res,
34 Err(e) => return e.to_compile_error().into(),
35 };
36
37 if is_chunkable || is_map {
39 remotes.push(RemoteField {
40 ident: field.ident.clone().unwrap(),
41 ty: field.ty.clone(),
42 compression_id,
43 is_map,
44 });
45 } else {
46 locals.push(LocalField {
47 ident: field.ident.clone().unwrap(),
48 ty: field.ty.clone(),
49 });
50 }
51 }
52
53 let impl_visitor = generate_visitor(&name, &remotes, &locals);
54 let impl_job = generate_serialization_job(&name, &locals);
55 let impl_native = generate_native_reader(&name, &locals, &remotes);
56 let impl_item = generate_parcode_item(&name, &locals, &remotes);
57 let impl_lazy = generate_lazy_mirror(&name, &locals, &remotes);
58
59 let expanded = quote! {
60 #impl_visitor
61 #impl_job
62 #impl_native
63 #impl_item
64 #impl_lazy
65 };
66
67 TokenStream::from(expanded)
68}
69
/// A field serialized inline with its parent's chunk payload.
struct LocalField {
    ident: syn::Ident,
    ty: syn::Type,
}
/// A field stored in its own child chunk, i.e. one marked
/// `#[parcode(chunkable)]` and/or `#[parcode(map)]`.
struct RemoteField {
    ident: syn::Ident,
    ty: syn::Type,
    /// Compression algorithm id as parsed by `parse_attributes`
    /// (0 = none, 1 = lz4, 2 = zstd).
    compression_id: u8,
    /// True when the field was tagged `#[parcode(map)]`.
    is_map: bool,
}
81
82fn parse_attributes(attrs: &[Attribute]) -> syn::Result<(bool, u8, bool)> {
84 let mut is_chunkable = false;
85 let mut is_map = false;
86 let mut compression_id = 0;
87
88 for attr in attrs {
89 if attr.path().is_ident("parcode") {
90 attr.parse_nested_meta(|meta| {
91 if meta.path.is_ident("chunkable") {
92 is_chunkable = true;
93 return Ok(());
94 }
95
96 if meta.path.is_ident("map") {
97 is_map = true;
98 return Ok(());
99 }
100
101 if meta.path.is_ident("compression") {
102 let value = meta.value()?;
103 let s: LitStr = value.parse()?;
104 compression_id = match s.value().to_lowercase().as_str() {
105 "lz4" => 1,
106 "zstd" => 2,
107 "none" => 0,
108 _ => return Err(meta.error("Unknown compression algorithm")),
109 };
110 return Ok(());
111 }
112 Err(meta
113 .error("Unknown parcode attribute key. Supported: chunkable, map, compression"))
114 })?;
115 }
116 }
117 Ok((is_chunkable, compression_id, is_map))
118}
119
120fn generate_visitor(
123 name: &syn::Ident,
124 remotes: &[RemoteField],
125 _locals: &[LocalField],
126) -> proc_macro2::TokenStream {
127 let visit_children_standalone = remotes.iter().map(|f| {
128 let fname = &f.ident;
129 let cid = f.compression_id;
130 let is_map = f.is_map;
131
132 let config_expr = if cid > 0 || is_map {
133 quote! {
134 Some(parcode::graph::JobConfig {
135 compression_id: #cid,
136 is_map: #is_map
137 })
138 }
139 } else {
140 quote! { None }
141 };
142
143 quote! { self.#fname.visit(graph, Some(my_id), #config_expr); }
144 });
145
146 let visit_children_inlined = remotes.iter().map(|f| {
147 let fname = &f.ident;
148 let cid = f.compression_id;
149 let is_map = f.is_map;
150
151 let config_expr = if cid > 0 || is_map {
152 quote! {
153 Some(parcode::graph::JobConfig {
154 compression_id: #cid,
155 is_map: #is_map
156 })
157 }
158 } else {
159 quote! { None }
160 };
161
162 quote! { self.#fname.visit(graph, Some(pid), #config_expr); }
163 });
164
165 let serialize_locals = _locals.iter().map(|f| {
167 let fname = &f.ident;
168 quote! {
169 parcode::internal::bincode::serde::encode_into_std_write(
170 &self.#fname, writer, parcode::internal::bincode::config::standard()
171 ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
172 }
173 });
174
175 quote! {
176 impl parcode::visitor::ParcodeVisitor for #name {
177 fn visit<'a>(&'a self, graph: &mut parcode::graph::TaskGraph<'a>, parent_id: Option<parcode::graph::ChunkId>, config_override: Option<parcode::graph::JobConfig>) {
178 let job = self.create_job(config_override);
180 let my_id = graph.add_node(job);
181 if let Some(pid) = parent_id {
182 graph.link_parent_child(pid, my_id);
183 }
184 #(#visit_children_standalone)*
185 }
186
187 fn visit_inlined<'a>(&'a self, graph: &mut parcode::graph::TaskGraph<'a>, pid: parcode::graph::ChunkId, _config_override: Option<parcode::graph::JobConfig>) {
188 #(#visit_children_inlined)*
189 }
190
191 fn create_job<'a>(&'a self, config_override: Option<parcode::graph::JobConfig>) -> Box<dyn parcode::graph::SerializationJob<'a> + 'a> {
192 let base_job = Box::new(self.clone());
193 if let Some(cfg) = config_override { Box::new(parcode::rt::ConfiguredJob::new(base_job, cfg)) } else { base_job }
194 }
195
196 fn serialize_shallow<W: std::io::Write>(&self, writer: &mut W) -> parcode::Result<()>
197 where
198 Self: serde::Serialize,
199 {
200 #(#serialize_locals)*
201 Ok(())
202 }
203
204 fn serialize_slice<W: std::io::Write>(slice: &[Self], writer: &mut W) -> parcode::Result<()>
205 where
206 Self: Sized + serde::Serialize,
207 {
208 let len = slice.len() as u64;
209 parcode::internal::bincode::serde::encode_into_std_write(
210 &len,
211 writer,
212 parcode::internal::bincode::config::standard(),
213 ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
214
215 for item in slice {
216 item.serialize_shallow(writer)?;
217 }
218 Ok(())
219 }
220 }
221 }
222}
223
224fn generate_serialization_job(
226 name: &syn::Ident,
227 locals: &[LocalField],
228) -> proc_macro2::TokenStream {
229 let serialize_stmts = locals.iter().map(|f| {
230 let fname = &f.ident;
231 quote! {
232 parcode::internal::bincode::serde::encode_into_std_write(
233 &self.#fname, &mut writer, parcode::internal::bincode::config::standard()
234 ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
235 }
236 });
237
238 quote! {
239 impl<'a> parcode::graph::SerializationJob<'a> for #name {
240 fn execute(&self, _children_refs: &[parcode::format::ChildRef]) -> parcode::Result<Vec<u8>> {
241 let mut buffer = Vec::new();
242 let mut writer = std::io::BufWriter::new(&mut buffer);
243 #(#serialize_stmts)*
244 use std::io::Write;
245 writer.flush()?;
246 drop(writer);
247 Ok(buffer)
248 }
249 fn estimated_size(&self) -> usize { std::mem::size_of::<Self>() }
250 }
251 }
252}
253
254fn generate_native_reader(
256 name: &syn::Ident,
257 locals: &[LocalField],
258 remotes: &[RemoteField],
259) -> proc_macro2::TokenStream {
260 let read_locals = locals.iter().map(|f| {
261 let fname = &f.ident;
262 let fty = &f.ty;
263 quote! {
264 let #fname: #fty = parcode::internal::bincode::serde::decode_from_std_read(
265 &mut reader, parcode::internal::bincode::config::standard()
266 ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
267 }
268 });
269
270 let read_remotes = remotes.iter().map(|f| {
271 let fname = &f.ident;
272 let fty = &f.ty;
273 quote! {
274 let child_node = child_iter.next().ok_or_else(|| parcode::ParcodeError::Format(format!("Missing child for '{}'", stringify!(#fname))))?;
275 let #fname: #fty = parcode::reader::ParcodeNative::from_node(&child_node)?;
276 }
277 });
278
279 let mut field_names = Vec::new();
280 for f in locals {
281 field_names.push(&f.ident);
282 }
283 for f in remotes {
284 field_names.push(&f.ident);
285 }
286
287 quote! {
288 impl parcode::reader::ParcodeNative for #name {
289 fn from_node(node: &parcode::reader::ChunkNode<'_>) -> parcode::Result<Self> {
290 let payload = node.read_raw()?;
291 let mut reader = std::io::Cursor::new(payload);
292 #(#read_locals)*
293 let children = node.children()?;
294 let mut child_iter = children.into_iter();
295 #(#read_remotes)*
296 Ok(Self { #(#field_names),* })
297 }
298 }
299 }
300}
301
302fn generate_parcode_item(
304 name: &syn::Ident,
305 locals: &[LocalField],
306 remotes: &[RemoteField],
307) -> proc_macro2::TokenStream {
308 let read_locals = locals.iter().map(|f| {
309 let fname = &f.ident;
310 let fty = &f.ty;
311 quote! {
312 let #fname: #fty = parcode::internal::bincode::serde::decode_from_std_read(
313 reader, parcode::internal::bincode::config::standard()
314 ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
315 }
316 });
317
318 let read_remotes = remotes.iter().map(|f| {
319 let fname = &f.ident;
320 let fty = &f.ty;
321 quote! {
322 let child_node = children.next().ok_or_else(|| parcode::ParcodeError::Format(format!("Missing child for '{}'", stringify!(#fname))))?;
323 let #fname: #fty = parcode::reader::ParcodeNative::from_node(&child_node)?;
324 }
325 });
326
327 let mut field_names = Vec::new();
328 for f in locals {
329 field_names.push(&f.ident);
330 }
331 for f in remotes {
332 field_names.push(&f.ident);
333 }
334
335 quote! {
336 impl parcode::reader::ParcodeItem for #name {
337 fn read_from_shard(
338 reader: &mut std::io::Cursor<&[u8]>,
339 children: &mut std::vec::IntoIter<parcode::reader::ChunkNode<'_>>,
340 ) -> parcode::Result<Self> {
341 #(#read_locals)*
342 #(#read_remotes)*
343 Ok(Self { #(#field_names),* })
344 }
345 }
346 }
347}
348
/// Generates the lazy mirror for `name`: a `{Name}Lazy<'a>` struct whose
/// local fields are decoded eagerly while each remote field is replaced by
/// the `Lazy` associated type of its `ParcodeLazyRef` impl, plus the
/// `ParcodeLazyRef` implementation that builds the mirror from a chunk node.
fn generate_lazy_mirror(
    name: &syn::Ident,
    locals: &[LocalField],
    remotes: &[RemoteField],
) -> proc_macro2::TokenStream {
    // The mirror type is named after the target, e.g. `Foo` -> `FooLazy`.
    let lazy_name = syn::Ident::new(&format!("{}Lazy", name), name.span());

    // Field declarations for the mirror struct: locals keep their original
    // type; remotes are projected through `<T as ParcodeLazyRef<'a>>::Lazy`.
    let lazy_fields_def = locals
        .iter()
        .map(|f| {
            let n = &f.ident;
            let t = &f.ty;
            quote! { pub #n: #t }
        })
        .chain(remotes.iter().map(|f| {
            let n = &f.ident;
            let t = &f.ty;
            quote! { pub #n: <#t as parcode::rt::ParcodeLazyRef<'a>>::Lazy }
        }));

    let read_logic = {
        // Decode every inline field from the payload, in declaration order.
        let read_locals = locals.iter().map(|f| {
            let n = &f.ident;
            let t = &f.ty;
            quote! {
                let #n: #t = parcode::internal::bincode::serde::decode_from_std_read(
                    &mut reader, parcode::internal::bincode::config::standard()
                ).map_err(|e| parcode::ParcodeError::Serialization(e.to_string()))?;
            }
        });

        // Pair each remote field with the next child node; error out when
        // the chunk has fewer children than the struct declares.
        let assign_remotes = remotes.iter().map(|f| {
            let n = &f.ident;
            let t = &f.ty;
            quote! {
                let child_node = child_iter.next().ok_or_else(|| parcode::ParcodeError::Format(format!("Missing child node for field '{}'", stringify!(#n))))?;
                let #n = <#t as parcode::rt::ParcodeLazyRef<'a>>::create_lazy(child_node)?;
            }
        });

        // Struct literal order matches decode order: locals, then remotes.
        let field_names = locals
            .iter()
            .map(|f| &f.ident)
            .chain(remotes.iter().map(|f| &f.ident));

        quote! {
            #(#read_locals)*
            #(#assign_remotes)*

            Ok(#lazy_name {
                #(#field_names,)*
                _marker: std::marker::PhantomData,
            })
        }
    };

    quote! {
        #[derive(Debug)]
        #[allow(missing_docs)]
        pub struct #lazy_name<'a> {
            #(#lazy_fields_def,)*
            // PhantomData keeps lifetime 'a used even when the struct has
            // no remote fields referencing it.
            _marker: std::marker::PhantomData<&'a ()>,
        }

        impl<'a> parcode::rt::ParcodeLazyRef<'a> for #name {
            type Lazy = #lazy_name<'a>;

            fn create_lazy(node: parcode::reader::ChunkNode<'a>) -> parcode::Result<Self::Lazy> {
                let payload = node.read_raw()?;
                let mut reader = std::io::Cursor::new(payload.as_ref());
                let children = node.children()?;
                let mut child_iter = children.into_iter();

                Self::read_lazy_from_stream(&mut reader, &mut child_iter)
            }

            fn read_lazy_from_stream(
                mut reader: &mut std::io::Cursor<&[u8]>,
                mut child_iter: &mut std::vec::IntoIter<parcode::reader::ChunkNode<'a>>
            ) -> parcode::Result<Self::Lazy> {
                #read_logic
            }
        }
    }
}