1use padlock_core::arch::ArchConfig;
8use padlock_core::ir::{AccessPattern, Field, StructLayout, TypeInfo};
9use quote::ToTokens;
10use syn::{Fields, ItemEnum, ItemStruct, Type, visit::Visit};
11
12pub fn extract_guard_from_attrs(attrs: &[syn::Attribute]) -> Option<String> {
22 for attr in attrs {
23 let path = attr.path();
24 if (path.is_ident("lock_protected_by") || path.is_ident("protected_by"))
26 && let syn::Meta::NameValue(nv) = &attr.meta
27 && let syn::Expr::Lit(syn::ExprLit {
28 lit: syn::Lit::Str(s),
29 ..
30 }) = &nv.value
31 {
32 return Some(s.value());
33 }
34 if path.is_ident("guarded_by") || path.is_ident("pt_guarded_by") {
36 if let Ok(s) = attr.parse_args::<syn::LitStr>() {
38 return Some(s.value());
39 }
40 if let Ok(id) = attr.parse_args::<syn::Ident>() {
42 return Some(id.to_string());
43 }
44 }
45 }
46 None
47}
48
49fn rust_type_size_align(ty: &Type, arch: &'static ArchConfig) -> (usize, usize, TypeInfo) {
52 match ty {
53 Type::Path(tp) => {
54 let seg = tp.path.segments.last();
55 let name = seg.map(|s| s.ident.to_string()).unwrap_or_default();
56
57 if matches!(
62 name.as_str(),
63 "Cell" | "MaybeUninit" | "UnsafeCell" | "Wrapping" | "Saturating" | "ManuallyDrop"
64 ) && let Some(inner_ty) = seg.and_then(|s| {
65 if let syn::PathArguments::AngleBracketed(ref ab) = s.arguments {
66 ab.args.iter().find_map(|a| {
67 if let syn::GenericArgument::Type(t) = a {
68 Some(t)
69 } else {
70 None
71 }
72 })
73 } else {
74 None
75 }
76 }) {
77 let (size, align, _) = rust_type_size_align(inner_ty, arch);
78 return (size, align, TypeInfo::Primitive { name, size, align });
79 }
80
81 if name == "Option"
93 && let Some(inner) = seg.and_then(|s| {
94 if let syn::PathArguments::AngleBracketed(ref ab) = s.arguments {
95 ab.args.iter().find_map(|a| {
96 if let syn::GenericArgument::Type(t) = a {
97 Some(t)
98 } else {
99 None
100 }
101 })
102 } else {
103 None
104 }
105 })
106 {
107 if let Some((sz, al)) = option_niche_size(inner, arch) {
108 return (
109 sz,
110 al,
111 TypeInfo::Primitive {
112 name,
113 size: sz,
114 align: al,
115 },
116 );
117 }
118 let (inner_size, inner_align, _) = rust_type_size_align(inner, arch);
121 let sz = if inner_size == 0 {
122 1 } else {
124 (inner_size + 1).next_multiple_of(inner_align.max(1))
125 };
126 return (
127 sz,
128 inner_align.max(1),
129 TypeInfo::Primitive {
130 name,
131 size: sz,
132 align: inner_align.max(1),
133 },
134 );
135 }
136
137 let is_fat = matches!(name.as_str(), "Box" | "Arc" | "Rc" | "Weak")
140 && seg
141 .map(|s| {
142 if let syn::PathArguments::AngleBracketed(ref ab) = s.arguments {
143 ab.args.iter().any(|a| {
144 matches!(a, syn::GenericArgument::Type(Type::TraitObject(_)))
145 })
146 } else {
147 false
148 }
149 })
150 .unwrap_or(false);
151 let (size, align) = if is_fat {
152 (arch.pointer_size * 2, arch.pointer_size)
153 } else {
154 primitive_size_align(&name, arch)
155 };
156 (size, align, TypeInfo::Primitive { name, size, align })
157 }
158 Type::Ptr(p) => {
159 let s = arch.pointer_size;
160 let is_fat = matches!(*p.elem, Type::TraitObject(_));
162 let sz = if is_fat { s * 2 } else { s };
163 (sz, s, TypeInfo::Pointer { size: sz, align: s })
164 }
165 Type::Reference(r) => {
166 let s = arch.pointer_size;
167 let is_fat = matches!(*r.elem, Type::TraitObject(_));
169 let sz = if is_fat { s * 2 } else { s };
170 (sz, s, TypeInfo::Pointer { size: sz, align: s })
171 }
172 Type::Array(arr) => {
173 let (elem_size, elem_align, elem_ty) = rust_type_size_align(&arr.elem, arch);
174 let count = array_len_from_expr(&arr.len);
175 let size = elem_size * count;
176 (
177 size,
178 elem_align,
179 TypeInfo::Array {
180 element: Box::new(elem_ty),
181 count,
182 size,
183 align: elem_align,
184 },
185 )
186 }
187 _ => {
188 let s = arch.pointer_size;
189 (
190 s,
191 s,
192 TypeInfo::Opaque {
193 name: "(unknown)".into(),
194 size: s,
195 align: s,
196 },
197 )
198 }
199 }
200}
201
202fn option_niche_size(inner: &Type, arch: &'static ArchConfig) -> Option<(usize, usize)> {
210 match inner {
211 Type::Path(tp) => {
212 let name = tp
213 .path
214 .segments
215 .last()
216 .map(|s| s.ident.to_string())
217 .unwrap_or_default();
218 match name.as_str() {
219 "NonZeroU8" | "NonZeroI8" => Some((1, 1)),
220 "NonZeroU16" | "NonZeroI16" => Some((2, 2)),
221 "NonZeroU32" | "NonZeroI32" => Some((4, 4)),
222 "NonZeroU64" | "NonZeroI64" => Some((8, 8)),
223 "NonZeroU128" | "NonZeroI128" => Some((16, 16)),
224 "NonZeroUsize" | "NonZeroIsize" => {
225 let ps = arch.pointer_size;
226 Some((ps, ps))
227 }
228 "Box" | "NonNull" | "Arc" | "Rc" => {
230 let ps = arch.pointer_size;
231 Some((ps, ps))
232 }
233 _ => None,
234 }
235 }
236 Type::Reference(_) => {
238 let ps = arch.pointer_size;
239 Some((ps, ps))
240 }
241 _ => None,
242 }
243}
244
245fn primitive_size_align(name: &str, arch: &'static ArchConfig) -> (usize, usize) {
246 let ps = arch.pointer_size;
247 match name {
248 "bool" | "u8" | "i8" => (1, 1),
250 "u16" | "i16" | "f16" => (2, 2),
251 "u32" | "i32" | "f32" => (4, 4),
252 "u64" | "i64" | "f64" => (8, 8),
253 "u128" | "i128" | "f128" => (16, 16),
254 "usize" | "isize" => (ps, ps),
255 "char" => (4, 4), "NonZeroU8" | "NonZeroI8" => (1, 1),
261 "NonZeroU16" | "NonZeroI16" => (2, 2),
262 "NonZeroU32" | "NonZeroI32" => (4, 4),
263 "NonZeroU64" | "NonZeroI64" => (8, 8),
264 "NonZeroU128" | "NonZeroI128" => (16, 16),
265 "NonZeroUsize" | "NonZeroIsize" => (ps, ps),
266
267 "Wrapping" | "Saturating" => (ps, ps),
272
273 "MaybeUninit" | "UnsafeCell" => (ps, ps),
277
278 "AtomicBool" | "AtomicU8" | "AtomicI8" => (1, 1),
280 "AtomicU16" | "AtomicI16" => (2, 2),
281 "AtomicU32" | "AtomicI32" => (4, 4),
282 "AtomicU64" | "AtomicI64" => (8, 8),
283 "AtomicUsize" | "AtomicIsize" | "AtomicPtr" => (ps, ps),
284
285 "Vec" | "String" | "OsString" | "CString" | "PathBuf" => (3 * ps, ps),
288 "VecDeque" | "LinkedList" | "BinaryHeap" => (3 * ps, ps),
289 "HashMap" | "HashSet" | "BTreeMap" | "BTreeSet" => (3 * ps, ps),
290
291 "Box" | "Rc" | "Arc" | "Weak" | "NonNull" | "Cell" => (ps, ps),
295
296 "RefCell" | "Mutex" | "RwLock" => (ps, ps),
300
301 "Sender" | "Receiver" | "SyncSender" => (ps, ps),
303
304 "PhantomData" | "PhantomPinned" => (0, 1),
306
307 "Duration" => (16, 8),
310 "Instant" | "SystemTime" => (16, 8),
311
312 "Pin" => (ps, ps),
314
315 "__m64" => (8, 8),
317 "__m128" | "__m128d" | "__m128i" => (16, 16),
318 "__m256" | "__m256d" | "__m256i" => (32, 32),
319 "__m512" | "__m512d" | "__m512i" => (64, 64),
320
321 "f32x4" | "i32x4" | "u32x4" => (16, 16),
323 "f64x2" | "i64x2" | "u64x2" => (16, 16),
324 "f32x8" | "i32x8" | "u32x8" => (32, 32),
325 "f64x4" | "i64x4" | "u64x4" => (32, 32),
326 "f32x16" | "i32x16" | "u32x16" => (64, 64),
327
328 _ => (ps, ps),
330 }
331}
332
333fn array_len_from_expr(expr: &syn::Expr) -> usize {
334 if let syn::Expr::Lit(syn::ExprLit {
335 lit: syn::Lit::Int(n),
336 ..
337 }) = expr
338 {
339 n.base10_parse::<usize>().unwrap_or(0)
340 } else {
341 0
342 }
343}
344
345fn is_packed(attrs: &[syn::Attribute]) -> bool {
348 attrs
349 .iter()
350 .any(|a| a.path().is_ident("repr") && a.to_token_stream().to_string().contains("packed"))
351}
352
/// A type is treated as `repr(Rust)` unless a `#[repr(...)]` attribute
/// opts into `C`, `packed`, or `transparent` layout.
///
/// NOTE(review): the check is textual over the token stream, so primitive
/// discriminant reprs like `#[repr(u8)]` still count as repr(Rust) here —
/// confirm that is the intended classification.
fn is_repr_rust(attrs: &[syn::Attribute]) -> bool {
    !attrs.iter().any(|a| {
        if !a.path().is_ident("repr") {
            return false;
        }
        // Stringified tokens are crude but sufficient for the three
        // layout-changing repr keywords checked below.
        let ts = a.to_token_stream().to_string();
        ts.contains('C') || ts.contains("packed") || ts.contains("transparent")
    })
}
366
367fn repr_align(attrs: &[syn::Attribute]) -> Option<usize> {
369 for attr in attrs {
370 if !attr.path().is_ident("repr") {
371 continue;
372 }
373 let ts = attr.to_token_stream().to_string();
374 if let Some(start) = ts.find("align") {
377 let after = ts[start..].trim_start_matches("align").trim_start();
378 if after.starts_with('(') {
379 let inner = after.trim_start_matches('(');
380 let num_str: String = inner.chars().take_while(|c| c.is_ascii_digit()).collect();
381 if let Ok(n) = num_str.parse::<usize>()
382 && n > 0
383 && n.is_power_of_two()
384 {
385 return Some(n);
386 }
387 }
388 }
389 }
390 None
391}
392
393fn simulate_rust_layout(
394 name: String,
395 fields: &[(String, Type)],
396 packed: bool,
397 forced_align: Option<usize>,
398 arch: &'static ArchConfig,
399) -> StructLayout {
400 let mut offset = 0usize;
401 let mut struct_align = 1usize;
402 let mut out_fields: Vec<Field> = Vec::new();
403
404 for (fname, ty) in fields {
405 let (size, align, type_info) = rust_type_size_align(ty, arch);
406 let effective_align = if packed { 1 } else { align };
407
408 if effective_align > 0 {
409 offset = offset.next_multiple_of(effective_align);
410 }
411 struct_align = struct_align.max(effective_align);
412
413 out_fields.push(Field {
414 name: fname.clone(),
415 ty: type_info,
416 offset,
417 size,
418 align: effective_align,
419 source_file: None,
420 source_line: None,
421 access: AccessPattern::Unknown,
422 });
423 offset += size;
424 }
425
426 if let Some(fa) = forced_align
428 && fa > struct_align
429 {
430 struct_align = fa;
431 }
432
433 if !packed && struct_align > 0 {
434 offset = offset.next_multiple_of(struct_align);
435 }
436
437 StructLayout {
438 name,
439 total_size: offset,
440 align: struct_align,
441 fields: out_fields,
442 source_file: None,
443 source_line: None,
444 arch,
445 is_packed: packed,
446 is_union: false,
447 is_repr_rust: false, suppressed_findings: Vec::new(), uncertain_fields: Vec::new(),
450 }
451}
452
/// AST visitor that collects a simulated `StructLayout` for every
/// non-generic struct and enum it encounters.
struct StructVisitor<'src> {
    // Target ABI: supplies pointer size and primitive layout rules.
    arch: &'static ArchConfig,
    // Layouts accumulated in visitation order.
    layouts: Vec<StructLayout>,
    // Original source text, used to look up suppression annotations by line.
    source: &'src str,
}
460
impl<'ast, 'src> Visit<'ast> for StructVisitor<'src> {
    /// Records a layout for each concrete (non-generic) struct definition.
    fn visit_item_struct(&mut self, node: &'ast ItemStruct) {
        // Recurse into the node first, then decide whether to record it.
        syn::visit::visit_item_struct(self, node);
        // Generic structs are skipped: their layout depends on type
        // arguments this source-level pass cannot resolve.
        if !node.generics.params.is_empty() {
            let name = node.ident.to_string();
            eprintln!(
                "padlock: note: skipping '{name}' — generic struct \
                (layout depends on type arguments; use binary analysis for accurate results)"
            );
            crate::record_skipped(
                &name,
                "generic struct — layout depends on type arguments; \
                use binary analysis for accurate results",
            );
            return;
        }

        let name = node.ident.to_string();
        let packed = is_packed(&node.attrs);
        let forced_align = repr_align(&node.attrs);

        // Collect (name, type, optional guard annotation, source line)
        // for every field. Tuple fields are named `_0`, `_1`, ... and
        // carry line 0 (no identifier to take a span from).
        let fields: Vec<(String, Type, Option<String>, u32)> = match &node.fields {
            Fields::Named(nf) => nf
                .named
                .iter()
                .map(|f| {
                    let fname = f.ident.as_ref().map(|i| i.to_string()).unwrap_or_default();
                    let guard = extract_guard_from_attrs(&f.attrs);
                    let line = f
                        .ident
                        .as_ref()
                        .map(|i| i.span().start().line as u32)
                        .unwrap_or(0);
                    (fname, f.ty.clone(), guard, line)
                })
                .collect(),
            Fields::Unnamed(uf) => uf
                .unnamed
                .iter()
                .enumerate()
                .map(|(i, f)| {
                    let guard = extract_guard_from_attrs(&f.attrs);
                    (format!("_{i}"), f.ty.clone(), guard, 0u32)
                })
                .collect(),
            Fields::Unit => vec![],
        };

        // Simulate placement from (name, type) pairs only; guard/line
        // metadata is re-attached to the produced fields below.
        let name_ty: Vec<(String, Type)> = fields
            .iter()
            .map(|(n, t, _, _)| (n.clone(), t.clone()))
            .collect();
        let mut layout = simulate_rust_layout(name, &name_ty, packed, forced_align, self.arch);
        let struct_line = node.ident.span().start().line as u32;
        layout.source_line = Some(struct_line);
        layout.is_repr_rust = is_repr_rust(&node.attrs);
        layout.suppressed_findings =
            super::suppress::suppressed_from_source_line(self.source, struct_line);

        // Re-attach per-field source lines and guard annotations; the
        // simulated fields are index-aligned with the collected fields.
        for (i, (_, _, guard, field_line)) in fields.iter().enumerate() {
            if *field_line > 0 {
                layout.fields[i].source_line = Some(*field_line);
            }
            if let Some(g) = guard {
                layout.fields[i].access = AccessPattern::Concurrent {
                    guard: Some(g.clone()),
                    is_atomic: false,
                    is_annotated: true,
                };
            }
        }

        self.layouts.push(layout);
    }

    /// Records an approximate layout for each concrete enum definition:
    /// the largest variant payload (as one opaque field) followed by a
    /// discriminant sized from the variant count.
    fn visit_item_enum(&mut self, node: &'ast ItemEnum) {
        syn::visit::visit_item_enum(self, node);

        // Generic enums are skipped for the same reason as generic structs.
        if !node.generics.params.is_empty() {
            let name = node.ident.to_string();
            eprintln!(
                "padlock: note: skipping '{name}' — generic enum \
                (layout depends on type arguments; use binary analysis for accurate results)"
            );
            crate::record_skipped(
                &name,
                "generic enum — layout depends on type arguments; \
                use binary analysis for accurate results",
            );
            return;
        }

        let name = node.ident.to_string();
        let n_variants = node.variants.len();
        // Uninhabited enums (e.g. `enum Never {}`) produce no layout.
        if n_variants == 0 {
            return;
        }

        // Smallest unsigned integer that can index every variant
        // (256 variants still fit in a u8: discriminants 0..=255).
        let disc_size: usize = if n_variants <= 256 {
            1
        } else if n_variants <= 65536 {
            2
        } else {
            4
        };

        let all_unit = node
            .variants
            .iter()
            .all(|v| matches!(v.fields, Fields::Unit));

        // Fieldless enums are just the discriminant.
        if all_unit {
            let enum_line = node.ident.span().start().line as u32;
            let layout = StructLayout {
                name,
                total_size: disc_size,
                align: disc_size,
                fields: vec![Field {
                    name: "__discriminant".to_string(),
                    ty: TypeInfo::Primitive {
                        name: format!("u{}", disc_size * 8),
                        size: disc_size,
                        align: disc_size,
                    },
                    offset: 0,
                    size: disc_size,
                    align: disc_size,
                    source_file: None,
                    source_line: None,
                    access: AccessPattern::Unknown,
                }],
                source_file: None,
                source_line: Some(enum_line),
                arch: self.arch,
                is_packed: false,
                is_union: false,
                is_repr_rust: is_repr_rust(&node.attrs),
                suppressed_findings: super::suppress::suppressed_from_source_line(
                    self.source,
                    enum_line,
                ),
                uncertain_fields: Vec::new(),
            };
            self.layouts.push(layout);
            return;
        }

        // Data-carrying enum: size each variant's payload as if it were a
        // standalone struct and keep the largest size and alignment.
        let mut max_payload_size = 0usize;
        let mut max_payload_align = 1usize;

        for variant in &node.variants {
            let var_fields: Vec<(String, Type)> = match &variant.fields {
                Fields::Named(nf) => nf
                    .named
                    .iter()
                    .map(|f| {
                        let n = f.ident.as_ref().map(|i| i.to_string()).unwrap_or_default();
                        (n, f.ty.clone())
                    })
                    .collect(),
                Fields::Unnamed(uf) => uf
                    .unnamed
                    .iter()
                    .enumerate()
                    .map(|(i, f)| (format!("_{i}"), f.ty.clone()))
                    .collect(),
                Fields::Unit => vec![],
            };

            if !var_fields.is_empty() {
                let var_layout =
                    simulate_rust_layout(String::new(), &var_fields, false, None, self.arch);
                if var_layout.total_size > max_payload_size {
                    max_payload_size = var_layout.total_size;
                }
                max_payload_align = max_payload_align.max(var_layout.align);
            }
        }

        // Model: payload at offset 0, discriminant after it, total size
        // rounded up to the combined alignment.
        let payload_align = max_payload_align.max(1);
        let disc_offset = max_payload_size;
        let total_before_pad = disc_offset + disc_size;
        let total_align = payload_align.max(disc_size);
        let total_size = total_before_pad.next_multiple_of(total_align);

        let mut fields: Vec<Field> = Vec::new();
        if max_payload_size > 0 {
            fields.push(Field {
                name: "__payload".to_string(),
                ty: TypeInfo::Opaque {
                    name: format!("largest_variant_payload ({}B)", max_payload_size),
                    size: max_payload_size,
                    align: payload_align,
                },
                offset: 0,
                size: max_payload_size,
                align: payload_align,
                source_file: None,
                source_line: None,
                access: AccessPattern::Unknown,
            });
        }
        fields.push(Field {
            name: "__discriminant".to_string(),
            ty: TypeInfo::Primitive {
                name: format!("u{}", disc_size * 8),
                size: disc_size,
                align: disc_size,
            },
            offset: disc_offset,
            size: disc_size,
            align: disc_size,
            source_file: None,
            source_line: None,
            access: AccessPattern::Unknown,
        });

        let enum_line = node.ident.span().start().line as u32;
        self.layouts.push(StructLayout {
            name,
            total_size,
            align: total_align,
            fields,
            source_file: None,
            source_line: Some(enum_line),
            arch: self.arch,
            is_packed: false,
            is_union: false,
            is_repr_rust: is_repr_rust(&node.attrs),
            suppressed_findings: super::suppress::suppressed_from_source_line(
                self.source,
                enum_line,
            ),
            uncertain_fields: Vec::new(),
        });
    }
}
714
715pub fn parse_rust(source: &str, arch: &'static ArchConfig) -> anyhow::Result<Vec<StructLayout>> {
718 let file: syn::File = syn::parse_str(source)?;
719 let mut visitor = StructVisitor {
720 arch,
721 layouts: Vec::new(),
722 source,
723 };
724 visitor.visit_file(&file);
725 Ok(visitor.layouts)
726}
727
#[cfg(test)]
mod tests {
    use super::*;
    use padlock_core::arch::X86_64_SYSV;

    // --- basic struct parsing and padding ---

    #[test]
    fn parse_simple_struct() {
        let src = "struct Foo { a: u8, b: u64, c: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 1);
        let l = &layouts[0];
        assert_eq!(l.name, "Foo");
        assert_eq!(l.fields.len(), 3);
        assert_eq!(l.fields[0].size, 1);
        assert_eq!(l.fields[1].size, 8);
        assert_eq!(l.fields[2].size, 4);
    }

    #[test]
    fn layout_includes_padding() {
        // u8 followed by u64: 7 bytes of padding before the u64.
        let src = "struct T { a: u8, b: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].offset, 0);
        assert_eq!(l.fields[1].offset, 8);
        assert_eq!(l.total_size, 16);
        let gaps = padlock_core::ir::find_padding(l);
        assert_eq!(gaps[0].bytes, 7);
    }

    #[test]
    fn multiple_structs_parsed() {
        let src = "struct A { x: u32 } struct B { y: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 2);
    }

    #[test]
    fn packed_struct_no_padding() {
        let src = "#[repr(packed)] struct P { a: u8, b: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert!(l.is_packed);
        assert_eq!(l.fields[1].offset, 1);
        let gaps = padlock_core::ir::find_padding(l);
        assert!(gaps.is_empty());
    }

    #[test]
    fn pointer_field_uses_arch_size() {
        let src = "struct S { p: *const u8 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 8);
    }

    // --- guard-annotation attributes ---

    #[test]
    fn lock_protected_by_attr_sets_guard() {
        let src = r#"
struct Cache {
    #[lock_protected_by = "mu"]
    readers: u64,
    mu: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let readers = &layouts[0].fields[0];
        assert_eq!(readers.name, "readers");
        if let AccessPattern::Concurrent { guard, .. } = &readers.access {
            assert_eq!(guard.as_deref(), Some("mu"));
        } else {
            panic!("expected Concurrent, got {:?}", readers.access);
        }
    }

    #[test]
    fn guarded_by_string_attr_sets_guard() {
        let src = r#"
struct S {
    #[guarded_by("lock")]
    value: u32,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("lock"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn guarded_by_ident_attr_sets_guard() {
        let src = r#"
struct S {
    #[guarded_by(mu)]
    count: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("mu"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn protected_by_attr_sets_guard() {
        let src = r#"
struct S {
    #[protected_by = "lock_a"]
    x: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        if let AccessPattern::Concurrent { guard, .. } = &layouts[0].fields[0].access {
            assert_eq!(guard.as_deref(), Some("lock_a"));
        } else {
            panic!("expected Concurrent");
        }
    }

    #[test]
    fn different_guards_on_same_cache_line_is_false_sharing() {
        let src = r#"
struct HotPath {
    #[lock_protected_by = "mu_a"]
    readers: u64,
    #[lock_protected_by = "mu_b"]
    writers: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(padlock_core::analysis::false_sharing::has_false_sharing(
            &layouts[0]
        ));
    }

    #[test]
    fn same_guard_on_same_cache_line_is_not_false_sharing() {
        let src = r#"
struct Safe {
    #[lock_protected_by = "mu"]
    a: u64,
    #[lock_protected_by = "mu"]
    b: u64,
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(!padlock_core::analysis::false_sharing::has_false_sharing(
            &layouts[0]
        ));
    }

    #[test]
    fn unannotated_field_stays_unknown() {
        let src = "struct S { x: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(matches!(
            layouts[0].fields[0].access,
            AccessPattern::Unknown
        ));
    }

    // --- std container and primitive sizes ---

    #[test]
    fn vec_field_has_three_pointer_size() {
        let src = "struct S { items: Vec<u64> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 24);
    }

    #[test]
    fn string_field_has_three_pointer_size() {
        let src = "struct S { name: String }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 24);
    }

    #[test]
    fn box_field_has_pointer_size() {
        let src = "struct S { inner: Box<u64> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 8);
    }

    #[test]
    fn arc_field_has_pointer_size() {
        let src = "struct S { shared: Arc<Vec<u8>> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 8);
    }

    #[test]
    fn phantom_data_is_zero_sized() {
        let src = "struct S { a: u64, _marker: PhantomData<u8> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let marker = layouts[0]
            .fields
            .iter()
            .find(|f| f.name == "_marker")
            .unwrap();
        assert_eq!(marker.size, 0);
    }

    #[test]
    fn duration_field_is_16_bytes() {
        let src = "struct S { timeout: Duration }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 16);
    }

    #[test]
    fn atomic_u64_has_correct_size() {
        let src = "struct S { counter: AtomicU64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 8);
    }

    #[test]
    fn atomic_bool_has_correct_size() {
        let src = "struct S { flag: AtomicBool }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 1);
    }

    // --- generic types are skipped ---

    #[test]
    fn generic_struct_is_skipped() {
        let src = "struct Wrapper<T> { value: T, count: usize }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            layouts.is_empty(),
            "generic structs should be skipped; got {:?}",
            layouts.iter().map(|l| &l.name).collect::<Vec<_>>()
        );
    }

    #[test]
    fn generic_struct_with_multiple_params_is_skipped() {
        let src = "struct Pair<A, B> { first: A, second: B }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(layouts.is_empty());
    }

    #[test]
    fn non_generic_struct_still_parsed_when_generic_sibling_exists() {
        let src = r#"
struct Generic<T> { value: T }
struct Concrete { a: u32, b: u64 }
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 1);
        assert_eq!(layouts[0].name, "Concrete");
    }

    // --- enums ---

    #[test]
    fn unit_enum_is_just_discriminant() {
        let src = "enum Color { Red, Green, Blue }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 1);
        let l = &layouts[0];
        assert_eq!(l.name, "Color");
        assert_eq!(l.total_size, 1);
        assert_eq!(l.fields.len(), 1);
        assert_eq!(l.fields[0].name, "__discriminant");
    }

    #[test]
    fn unit_enum_with_many_variants_uses_u16_discriminant() {
        // 300 variants exceed u8's 256-value range.
        let variants: String = (0..300)
            .map(|i| format!("V{i}"))
            .collect::<Vec<_>>()
            .join(", ");
        let src = format!("enum Big {{ {variants} }}");
        let layouts = parse_rust(&src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.total_size, 2);
        assert_eq!(l.fields[0].size, 2);
    }

    #[test]
    fn data_enum_total_size_covers_largest_variant() {
        let src = r#"
enum Message {
    Quit,
    Move { x: i32, y: i32 },
    Write(String),
}
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.name, "Message");
        assert_eq!(l.total_size, 32);
        assert_eq!(l.fields.len(), 2);
        let payload = l.fields.iter().find(|f| f.name == "__payload").unwrap();
        assert_eq!(payload.size, 24);
    }

    #[test]
    fn generic_enum_is_skipped() {
        let src = "enum Wrapper<T> { Some(T), None }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            layouts.is_empty(),
            "generic enums should be skipped; got {:?}",
            layouts.iter().map(|l| &l.name).collect::<Vec<_>>()
        );
    }

    #[test]
    fn empty_enum_is_skipped() {
        let src = "enum Never {}";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(layouts.is_empty());
    }

    #[test]
    fn enum_with_only_unit_variants_has_no_payload_field() {
        let src = "enum Dir { North, South, East, West }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(!layouts[0].fields.iter().any(|f| f.name == "__payload"));
    }

    #[test]
    fn data_enum_and_sibling_struct_both_parsed() {
        let src = r#"
enum Status { Ok, Err(u32) }
struct Conn { port: u16, status: u32 }
"#;
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 2);
        assert!(layouts.iter().any(|l| l.name == "Status"));
        assert!(layouts.iter().any(|l| l.name == "Conn"));
    }

    #[test]
    fn enum_with_only_zero_sized_variants_has_payload_size_zero() {
        let src = "enum E { A, B }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.total_size, 1);
    }

    #[test]
    fn enum_mixed_unit_and_data_includes_max_payload() {
        let src = "enum E { Nothing, Data(u64) }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        let payload = l.fields.iter().find(|f| f.name == "__payload").unwrap();
        assert_eq!(payload.size, 8);
    }

    // --- repr(align(N)) ---

    #[test]
    fn repr_align_raises_struct_alignment() {
        let src = "#[repr(align(64))]\nstruct CacheLine { a: u8, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(
            l.align, 64,
            "repr(align(64)) must set struct alignment to 64"
        );
        assert_eq!(l.total_size, 64, "size must be padded to 64 bytes");
    }

    #[test]
    fn repr_align_does_not_shrink_natural_alignment() {
        let src = "#[repr(align(1))]\nstruct S { a: u64 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(
            l.align, 8,
            "natural align must not be reduced below repr(align)"
        );
    }

    #[test]
    fn repr_align_adds_trailing_padding() {
        let src = "#[repr(align(8))]\nstruct S { a: u8, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.total_size, 8);
    }

    #[test]
    fn no_repr_align_has_natural_size() {
        let src = "struct S { a: u8, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.total_size, 8);
        assert_eq!(l.align, 4);
    }

    // --- tuple structs ---

    #[test]
    fn tuple_struct_fields_named_by_index() {
        let src = "struct Pair(u64, u8);";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].name, "_0");
        assert_eq!(l.fields[1].name, "_1");
    }

    #[test]
    fn tuple_struct_layout_follows_alignment() {
        let src = "struct S(u64, u8);";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].offset, 0);
        assert_eq!(l.fields[0].size, 8);
        assert_eq!(l.fields[1].offset, 8);
        assert_eq!(l.fields[1].size, 1);
        assert_eq!(l.total_size, 16);
    }

    #[test]
    fn tuple_struct_with_padding_waste_detected() {
        let src = "struct S(u8, u64);";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].offset, 0);
        assert_eq!(l.fields[1].offset, 8);
        assert_eq!(l.total_size, 16);
        let gaps = padlock_core::ir::find_padding(l);
        assert_eq!(gaps[0].bytes, 7);
    }

    // --- NonZero and float width table entries ---

    #[test]
    fn nonzero_types_same_size_as_base() {
        assert_eq!(primitive_size_align("NonZeroU8", &X86_64_SYSV), (1, 1));
        assert_eq!(primitive_size_align("NonZeroI8", &X86_64_SYSV), (1, 1));
        assert_eq!(primitive_size_align("NonZeroU16", &X86_64_SYSV), (2, 2));
        assert_eq!(primitive_size_align("NonZeroU32", &X86_64_SYSV), (4, 4));
        assert_eq!(primitive_size_align("NonZeroU64", &X86_64_SYSV), (8, 8));
        assert_eq!(primitive_size_align("NonZeroU128", &X86_64_SYSV), (16, 16));
        assert_eq!(
            primitive_size_align("NonZeroUsize", &X86_64_SYSV),
            (X86_64_SYSV.pointer_size, X86_64_SYSV.pointer_size)
        );
    }

    #[test]
    fn float16_and_float128_correct_size() {
        assert_eq!(primitive_size_align("f16", &X86_64_SYSV), (2, 2));
        assert_eq!(primitive_size_align("f128", &X86_64_SYSV), (16, 16));
    }

    #[test]
    fn rust_struct_with_nonzero_fields() {
        let src = "struct Counts { hits: NonZeroU64, misses: NonZeroU32, flags: u8 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].size, 8);
        assert_eq!(l.fields[1].size, 4);
        assert_eq!(l.fields[2].size, 1);
        assert_eq!(l.total_size, 16);
    }

    // --- repr(Rust) classification ---

    #[test]
    fn plain_struct_is_repr_rust() {
        let src = "struct Foo { a: u64, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(layouts[0].is_repr_rust, "plain struct should be repr(Rust)");
    }

    #[test]
    fn repr_c_struct_is_not_repr_rust() {
        let src = "#[repr(C)] struct Foo { a: u64, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            !layouts[0].is_repr_rust,
            "repr(C) struct must not be repr(Rust)"
        );
    }

    #[test]
    fn repr_packed_struct_is_not_repr_rust() {
        let src = "#[repr(packed)] struct Foo { a: u64, b: u32 }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            !layouts[0].is_repr_rust,
            "repr(packed) struct must not be repr(Rust)"
        );
    }

    #[test]
    fn repr_transparent_struct_is_not_repr_rust() {
        let src = "#[repr(transparent)] struct Wrapper(u64);";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            !layouts[0].is_repr_rust,
            "repr(transparent) struct must not be repr(Rust)"
        );
    }

    #[test]
    fn plain_enum_is_repr_rust() {
        let src = "enum Color { Red, Green, Blue }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(layouts[0].is_repr_rust, "plain enum should be repr(Rust)");
    }

    #[test]
    fn repr_c_enum_is_not_repr_rust() {
        let src = "#[repr(C)] enum Dir { North, South }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert!(
            !layouts[0].is_repr_rust,
            "repr(C) enum must not be repr(Rust)"
        );
    }

    // --- fat (trait-object) pointers ---

    #[test]
    fn box_dyn_trait_is_fat_pointer() {
        let src = "struct S { handler: Box<dyn std::any::Any> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 16,
            "Box<dyn Trait> must be 16 bytes (fat pointer)"
        );
    }

    #[test]
    fn arc_dyn_trait_is_fat_pointer() {
        let src = "struct S { shared: Arc<dyn std::fmt::Display> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts[0].fields[0].size, 16);
    }

    #[test]
    fn ref_dyn_trait_is_fat_pointer() {
        let src = "struct S { cb: &'static dyn Fn() }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(layouts.len(), 1, "S must be parsed");
        assert_eq!(
            layouts[0].fields[0].size, 16,
            "&dyn Trait must be a 16-byte fat pointer"
        );
    }

    #[test]
    fn box_concrete_type_is_single_pointer() {
        let src = "struct S { inner: Box<u64> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 8,
            "Box<concrete> must remain 8 bytes"
        );
    }

    // --- layout-transparent wrappers ---

    #[test]
    fn cell_u8_is_one_byte() {
        let src = "struct S { x: Cell<u8> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 1,
            "Cell<u8> must be 1 byte, not pointer-sized"
        );
    }

    #[test]
    fn maybe_uninit_u32_is_four_bytes() {
        let src = "struct S { x: MaybeUninit<u32> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 4,
            "MaybeUninit<u32> must be 4 bytes"
        );
    }

    #[test]
    fn wrapping_i16_is_two_bytes() {
        let src = "struct S { x: Wrapping<i16> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 2,
            "Wrapping<i16> must be 2 bytes"
        );
    }

    #[test]
    fn manually_drop_u64_is_eight_bytes() {
        let src = "struct S { x: ManuallyDrop<u64> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 8,
            "ManuallyDrop<u64> must be 8 bytes"
        );
    }

    #[test]
    fn unsafe_cell_u32_is_four_bytes() {
        let src = "struct S { x: UnsafeCell<u32> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        assert_eq!(
            layouts[0].fields[0].size, 4,
            "UnsafeCell<u32> must be 4 bytes"
        );
    }

    #[test]
    fn transparent_wrapper_affects_total_size() {
        let src = "struct S { a: bool, b: Cell<u16> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].size, 1);
        assert_eq!(l.fields[1].size, 2);
        assert_eq!(l.total_size, 4);
    }

    #[test]
    fn struct_with_box_dyn_has_correct_layout() {
        let src = "struct Handler { active: bool, err: Box<dyn std::error::Error> }";
        let layouts = parse_rust(src, &X86_64_SYSV).unwrap();
        let l = &layouts[0];
        assert_eq!(l.fields[0].size, 1);
        assert_eq!(l.fields[1].size, 16);
        assert_eq!(l.fields[1].offset, 8);
        assert_eq!(l.total_size, 24);
    }
}