1use padlock_core::arch::ArchConfig;
8use padlock_core::ir::{AccessPattern, Field, StructLayout, TypeInfo};
9use quote::ToTokens;
10use syn::{visit::Visit, Fields, ItemStruct, Type};
11
12pub fn extract_guard_from_attrs(attrs: &[syn::Attribute]) -> Option<String> {
22 for attr in attrs {
23 let path = attr.path();
24 if path.is_ident("lock_protected_by") || path.is_ident("protected_by") {
26 if let syn::Meta::NameValue(nv) = &attr.meta {
27 if let syn::Expr::Lit(syn::ExprLit {
28 lit: syn::Lit::Str(s),
29 ..
30 }) = &nv.value
31 {
32 return Some(s.value());
33 }
34 }
35 }
36 if path.is_ident("guarded_by") || path.is_ident("pt_guarded_by") {
38 if let Ok(s) = attr.parse_args::<syn::LitStr>() {
40 return Some(s.value());
41 }
42 if let Ok(id) = attr.parse_args::<syn::Ident>() {
44 return Some(id.to_string());
45 }
46 }
47 }
48 None
49}
50
51fn rust_type_size_align(ty: &Type, arch: &'static ArchConfig) -> (usize, usize, TypeInfo) {
54 match ty {
55 Type::Path(tp) => {
56 let name = tp
57 .path
58 .segments
59 .last()
60 .map(|s| s.ident.to_string())
61 .unwrap_or_default();
62 let (size, align) = primitive_size_align(&name, arch);
63 (size, align, TypeInfo::Primitive { name, size, align })
64 }
65 Type::Ptr(_) | Type::Reference(_) => {
66 let s = arch.pointer_size;
67 (s, s, TypeInfo::Pointer { size: s, align: s })
68 }
69 Type::Array(arr) => {
70 let (elem_size, elem_align, elem_ty) = rust_type_size_align(&arr.elem, arch);
71 let count = array_len_from_expr(&arr.len);
72 let size = elem_size * count;
73 (
74 size,
75 elem_align,
76 TypeInfo::Array {
77 element: Box::new(elem_ty),
78 count,
79 size,
80 align: elem_align,
81 },
82 )
83 }
84 _ => {
85 let s = arch.pointer_size;
86 (
87 s,
88 s,
89 TypeInfo::Opaque {
90 name: "(unknown)".into(),
91 size: s,
92 align: s,
93 },
94 )
95 }
96 }
97}
98
99fn primitive_size_align(name: &str, arch: &'static ArchConfig) -> (usize, usize) {
100 match name {
101 "bool" | "u8" | "i8" => (1, 1),
102 "u16" | "i16" => (2, 2),
103 "u32" | "i32" | "f32" => (4, 4),
104 "u64" | "i64" | "f64" => (8, 8),
105 "u128" | "i128" => (16, 16),
106 "usize" | "isize" => (arch.pointer_size, arch.pointer_size),
107 "char" => (4, 4), "__m64" => (8, 8),
110 "__m128" | "__m128d" | "__m128i" => (16, 16),
111 "__m256" | "__m256d" | "__m256i" => (32, 32),
112 "__m512" | "__m512d" | "__m512i" => (64, 64),
113 "f32x4" | "i32x4" | "u32x4" => (16, 16),
115 "f64x2" | "i64x2" | "u64x2" => (16, 16),
116 "f32x8" | "i32x8" | "u32x8" => (32, 32),
117 "f64x4" | "i64x4" | "u64x4" => (32, 32),
118 "f32x16" | "i32x16" | "u32x16" => (64, 64),
119 _ => (arch.pointer_size, arch.pointer_size),
120 }
121}
122
123fn array_len_from_expr(expr: &syn::Expr) -> usize {
124 if let syn::Expr::Lit(syn::ExprLit {
125 lit: syn::Lit::Int(n),
126 ..
127 }) = expr
128 {
129 n.base10_parse::<usize>().unwrap_or(0)
130 } else {
131 0
132 }
133}
134
135fn is_packed(attrs: &[syn::Attribute]) -> bool {
138 attrs
139 .iter()
140 .any(|a| a.path().is_ident("repr") && a.to_token_stream().to_string().contains("packed"))
141}
142
143fn simulate_rust_layout(
144 name: String,
145 fields: &[(String, Type)],
146 packed: bool,
147 arch: &'static ArchConfig,
148) -> StructLayout {
149 let mut offset = 0usize;
150 let mut struct_align = 1usize;
151 let mut out_fields: Vec<Field> = Vec::new();
152
153 for (fname, ty) in fields {
154 let (size, align, type_info) = rust_type_size_align(ty, arch);
155 let effective_align = if packed { 1 } else { align };
156
157 if effective_align > 0 {
158 offset = offset.next_multiple_of(effective_align);
159 }
160 struct_align = struct_align.max(effective_align);
161
162 out_fields.push(Field {
163 name: fname.clone(),
164 ty: type_info,
165 offset,
166 size,
167 align: effective_align,
168 source_file: None,
169 source_line: None,
170 access: AccessPattern::Unknown,
171 });
172 offset += size;
173 }
174
175 if !packed && struct_align > 0 {
176 offset = offset.next_multiple_of(struct_align);
177 }
178
179 StructLayout {
180 name,
181 total_size: offset,
182 align: struct_align,
183 fields: out_fields,
184 source_file: None,
185 source_line: None,
186 arch,
187 is_packed: packed,
188 is_union: false,
189 }
190}
191
/// AST visitor that collects a simulated `StructLayout` for every
/// `struct` item encountered while walking a parsed file.
struct StructVisitor {
    // Target architecture used for all size/alignment simulation.
    arch: &'static ArchConfig,
    // One layout per visited struct, in visit order.
    layouts: Vec<StructLayout>,
}
198
199impl<'ast> Visit<'ast> for StructVisitor {
200 fn visit_item_struct(&mut self, node: &'ast ItemStruct) {
201 syn::visit::visit_item_struct(self, node); let name = node.ident.to_string();
204 let packed = is_packed(&node.attrs);
205
206 let fields: Vec<(String, Type, Option<String>)> = match &node.fields {
208 Fields::Named(nf) => nf
209 .named
210 .iter()
211 .map(|f| {
212 let fname = f.ident.as_ref().map(|i| i.to_string()).unwrap_or_default();
213 let guard = extract_guard_from_attrs(&f.attrs);
214 (fname, f.ty.clone(), guard)
215 })
216 .collect(),
217 Fields::Unnamed(uf) => uf
218 .unnamed
219 .iter()
220 .enumerate()
221 .map(|(i, f)| {
222 let guard = extract_guard_from_attrs(&f.attrs);
223 (format!("_{i}"), f.ty.clone(), guard)
224 })
225 .collect(),
226 Fields::Unit => vec![],
227 };
228
229 let name_ty: Vec<(String, Type)> = fields
230 .iter()
231 .map(|(n, t, _)| (n.clone(), t.clone()))
232 .collect();
233 let mut layout = simulate_rust_layout(name, &name_ty, packed, self.arch);
234
235 for (i, (_, _, guard)) in fields.iter().enumerate() {
238 if let Some(g) = guard {
239 layout.fields[i].access = AccessPattern::Concurrent {
240 guard: Some(g.clone()),
241 is_atomic: false,
242 };
243 }
244 }
245
246 self.layouts.push(layout);
247 }
248}
249
250pub fn parse_rust(source: &str, arch: &'static ArchConfig) -> anyhow::Result<Vec<StructLayout>> {
253 let file: syn::File = syn::parse_str(source)?;
254 let mut visitor = StructVisitor {
255 arch,
256 layouts: Vec::new(),
257 };
258 visitor.visit_file(&file);
259 Ok(visitor.layouts)
260}
261
#[cfg(test)]
mod tests {
    use super::*;
    use padlock_core::arch::X86_64_SYSV;

    #[test]
    fn parse_simple_struct() {
        let src = "struct Foo { a: u8, b: u64, c: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 1);

        let l = &layouts[0];
        assert_eq!(l.name, "Foo");
        assert_eq!(l.fields.len(), 3);
        assert_eq!(l.fields[0].size, 1);
        assert_eq!(l.fields[1].size, 8);
        assert_eq!(l.fields[2].size, 4);
    }

    #[test]
    fn layout_includes_padding() {
        // u8 followed by u64 forces 7 bytes of interior padding.
        let src = "struct T { a: u8, b: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].offset, 0);
        assert_eq!(l.fields[1].offset, 8);
        assert_eq!(l.total_size, 16);
        let gaps = padlock_core::ir::find_padding(l);
        assert_eq!(gaps[0].bytes, 7);
    }

    #[test]
    fn multiple_structs_parsed() {
        let src = "struct A { x: u32 } struct B { y: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 2);
    }

    #[test]
    fn packed_struct_no_padding() {
        let src = "#[repr(packed)] struct P { a: u8, b: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert!(l.is_packed);
        // Packed layout places b immediately after a.
        assert_eq!(l.fields[1].offset, 1);
        let gaps = padlock_core::ir::find_padding(l);
        assert!(gaps.is_empty());
    }

    #[test]
    fn pointer_field_uses_arch_size() {
        let src = "struct S { p: *const u8 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 8);
    }

    #[test]
    fn lock_protected_by_attr_sets_guard() {
        let src = r#"
struct Cache {
    #[lock_protected_by = "mu"]
    readers: u64,
    mu: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let readers = &layouts[0].fields[0];
        assert_eq!(readers.name, "readers");
        if let AccessPattern::Concurrent { guard, .. } = &readers.access {
            assert_eq!(guard.as_deref(), Some("mu"));
        } else {
            panic!("expected Concurrent, got {:?}", readers.access);
        }
    }

    #[test]
    fn guarded_by_string_attr_sets_guard() {
        let src = r#"
struct S {
    #[guarded_by("lock")]
    value: u32,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("lock"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn guarded_by_ident_attr_sets_guard() {
        let src = r#"
struct S {
    #[guarded_by(mu)]
    count: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("mu"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn protected_by_attr_sets_guard() {
        let src = r#"
struct S {
    #[protected_by = "lock_a"]
    x: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("lock_a"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn different_guards_on_same_cache_line_is_false_sharing() {
        // Two fields under different locks within one cache line.
        let src = r#"
struct HotPath {
    #[lock_protected_by = "mu_a"]
    readers: u64,
    #[lock_protected_by = "mu_b"]
    writers: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(padlock_core::analysis::false_sharing::has_false_sharing(
            &layouts[0]
        ));
    }

    #[test]
    fn same_guard_on_same_cache_line_is_not_false_sharing() {
        let src = r#"
struct Safe {
    #[lock_protected_by = "mu"]
    a: u64,
    #[lock_protected_by = "mu"]
    b: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(!padlock_core::analysis::false_sharing::has_false_sharing(
            &layouts[0]
        ));
    }

    #[test]
    fn unannotated_field_stays_unknown() {
        let src = "struct S { x: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(matches!(
            layouts[0].fields[0].access,
            AccessPattern::Unknown
        ));
    }
}