endian_writer_derive/lib.rs

#![doc = include_str!("../README.MD")]
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput, Fields};

#[proc_macro_derive(EndianWritable)]
pub fn derive_endian(input: TokenStream) -> TokenStream {
    derive_endian_impl(input)
}

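// Illustrative only: a hypothetical struct on the caller's side, used by the
// sketches in the comments further down. `Header`, `magic`, and `version` are
// not part of this crate; any struct with named fields works the same way.
//
//     #[derive(EndianWritable)]
//     struct Header {
//         magic: u32,
//         version: u16,
//     }
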
pub(crate) fn derive_endian_impl(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    let name = input.ident;

    // Only structs with named fields are supported; anything else is a compile error.
    let fields = match input.data {
        Data::Struct(data_struct) => match data_struct.fields {
            Fields::Named(fields_named) => fields_named.named,
            _ => {
                return syn::Error::new_spanned(
                    data_struct.struct_token,
                    "EndianWritable can only be derived for structs with named fields",
                )
                .to_compile_error()
                .into();
            }
        },
        _ => {
            return syn::Error::new_spanned(name, "EndianWritable can only be derived for structs")
                .to_compile_error()
                .into();
        }
    };

    // Collect field identifiers and types in declaration order.
    let mut field_names = Vec::new();
    let mut field_types = Vec::new();

    for field in fields.iter() {
        let field_ident = match &field.ident {
            Some(ident) => ident.clone(),
            None => {
                return syn::Error::new_spanned(&field.ty, "All fields must have names")
                    .to_compile_error()
                    .into();
            }
        };
        field_names.push(field_ident.clone());
        field_types.push(field.ty.clone());
    }

    // `HasSize`: the struct's size is the sum of its fields' `HasSize::SIZE` constants.
    let has_size_impl = {
        let sizes = field_types.iter().map(|ty| {
            quote! {
                <#ty as HasSize>::SIZE
            }
        });

        let sum_sizes = sizes.fold(quote! { 0 }, |acc, size| {
            quote! { #acc + #size }
        });

        quote! {
            impl HasSize for #name {
                const SIZE: usize = #sum_sizes;
            }
        }
    };

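    // For the hypothetical `Header` above, this block would emit roughly
    // (a sketch, not verbatim expansion output):
    //
    //     impl HasSize for Header {
    //         const SIZE: usize = 0 + <u32 as HasSize>::SIZE + <u16 as HasSize>::SIZE;
    //     }
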
    // `EndianWritableAt`: write each field at its running offset. `sum_expr`
    // accumulates the sizes of the fields already written.
    let writable_impl = {
        let mut sum_expr = quote! { 0 };
        let write_fields = field_names
            .iter()
            .zip(field_types.iter())
            .map(|(field, ty)| {
                let current_offset = if sum_expr.to_string() == "0" {
                    quote! { offset }
                } else {
                    quote! { offset + #sum_expr }
                };

                let write_field = quote! {
                    let #field = self.#field;
                    writer.write_at(&#field, #current_offset);
                };

                let new_sum_expr = quote! { #sum_expr + <#ty as HasSize>::SIZE as isize };

                sum_expr = new_sum_expr.clone();

                write_field
            });

        quote! {
            impl EndianWritableAt for #name {
                unsafe fn write_at<W: EndianWriter>(&self, writer: &mut W, offset: isize) {
                    #(
                        #write_fields
                    )*
                }
            }
        }
    };

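    // For `Header`, the writer generated above would look roughly like the
    // following (the leading `0` in the second offset comes from the initial
    // `sum_expr`):
    //
    //     impl EndianWritableAt for Header {
    //         unsafe fn write_at<W: EndianWriter>(&self, writer: &mut W, offset: isize) {
    //             let magic = self.magic;
    //             writer.write_at(&magic, offset);
    //             let version = self.version;
    //             writer.write_at(&version, offset + 0 + <u32 as HasSize>::SIZE as isize);
    //         }
    //     }
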
    // `EndianReadableAt`: read each field at the same running offsets, then
    // assemble the struct from the freshly read locals.
    let readable_impl = {
        let mut sum_expr = quote! { 0 };
        let read_fields = field_names
            .iter()
            .zip(field_types.iter())
            .map(|(field, ty)| {
                let current_offset = if sum_expr.to_string() == "0" {
                    quote! { offset }
                } else {
                    quote! { offset + #sum_expr }
                };

                let read_field = quote! {
                    let #field = <#ty as EndianReadableAt>::read_at(reader, #current_offset);
                };

                let new_sum_expr = quote! { #sum_expr + <#ty as HasSize>::SIZE as isize };

                sum_expr = new_sum_expr.clone();

                read_field
            });

        let assign_fields = field_names.iter();

        quote! {
            impl EndianReadableAt for #name {
                unsafe fn read_at<R: EndianReader>(reader: &mut R, offset: isize) -> Self {
                    #(
                        #read_fields
                    )*
                    Self {
                        #(
                            #assign_fields,
                        )*
                    }
                }
            }
        }
    };

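    // And the matching reader for `Header`, again a rough sketch:
    //
    //     impl EndianReadableAt for Header {
    //         unsafe fn read_at<R: EndianReader>(reader: &mut R, offset: isize) -> Self {
    //             let magic = <u32 as EndianReadableAt>::read_at(reader, offset);
    //             let version = <u16 as EndianReadableAt>::read_at(
    //                 reader,
    //                 offset + 0 + <u32 as HasSize>::SIZE as isize,
    //             );
    //             Self { magic, version }
    //         }
    //     }
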
    // The generated code `use`s `endian_writer::*` at the expansion site, so
    // the deriving crate needs `endian_writer` available as a dependency.
    let expanded = quote! {
        use endian_writer::*;
        #has_size_impl
        #writable_impl
        #readable_impl
    };

    TokenStream::from(expanded)
}
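
// With these impls derived, a hypothetical caller could round-trip a value
// through any `EndianWriter`/`EndianReader` pair (a sketch only; `writer`,
// `reader`, and `header` are assumed bindings, and concrete reader/writer
// types come from the `endian_writer` crate, not from this derive):
//
//     unsafe {
//         header.write_at(&mut writer, 0);
//         let decoded = Header::read_at(&mut reader, 0);
//     }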