// diskann_wide/arch/aarch64/u64x2_.rs
use crate::{
7 Emulated,
8 arch::Scalar,
9 constant::Const,
10 helpers,
11 traits::{SIMDMask, SIMDMulAdd, SIMDPartialEq, SIMDPartialOrd, SIMDVector},
12};
13
14use super::{
16 Neon, internal,
17 macros::{self, AArchLoadStore, AArchSplat},
18 masks::mask64x2,
19};
20
21use std::arch::aarch64::*;
23
24#[inline(always)]
29pub(super) unsafe fn emulated_vmvnq_u64(x: uint64x2_t) -> uint64x2_t {
30 let x: [u64; 2] = u64x2(x).to_array();
31 let mapped: [u64; 2] = core::array::from_fn(|i| !x[i]);
32 u64x2::from_array(unsafe { Neon::new() }, mapped).0
35}
36
37#[inline(always)]
38pub(super) unsafe fn emulated_vminq_u64(x: uint64x2_t, y: uint64x2_t) -> uint64x2_t {
39 let x = u64x2(x).to_array();
40 let y = u64x2(y).to_array();
41 let mapped: [u64; 2] = core::array::from_fn(|i| x[i].min(y[i]));
42 u64x2::from_array(unsafe { Neon::new() }, mapped).0
45}
46
// Core register plumbing: defines the `u64x2` newtype over `uint64x2_t`
// with mask type `mask64x2`, element type `u64`, 2 lanes, gated on the
// `Neon` arch token.
macros::aarch64_define_register!(u64x2, uint64x2_t, mask64x2, u64, 2, Neon);
// Splat (broadcast one u64 into both lanes) via the native `vmovq_n_u64`.
macros::aarch64_define_splat!(u64x2, vmovq_n_u64);
// Loads/stores use the native `vld1q_u64`/`vst1q_u64`; `load_first` is
// presumably the partial-load fallback for fewer than 2 valid lanes —
// TODO(review): confirm against the macro definition.
macros::aarch64_define_loadstore!(u64x2, vld1q_u64, internal::load_first::u64x2, vst1q_u64, 2);

// Lane-wise add/sub each map directly onto a single NEON intrinsic.
helpers::unsafe_map_binary_op!(u64x2, std::ops::Add, add, vaddq_u64, "neon");
helpers::unsafe_map_binary_op!(u64x2, std::ops::Sub, sub, vsubq_u64, "neon");
53
54impl std::ops::Mul for u64x2 {
55 type Output = Self;
56 #[inline(always)]
57 fn mul(self, rhs: Self) -> Self {
58 let x = Emulated::<u64, 2>::from_array(Scalar, self.to_array());
59 let y = Emulated::<u64, 2>::from_array(Scalar, rhs.to_array());
60 Self::from_array(self.arch(), (x * y).to_array())
61 }
62}
63
// Fused multiply-add; `integer` presumably selects a mul-then-add path
// (integer ops have no rounding, so "fused" vs. separate is equivalent) —
// TODO(review): confirm against the macro definition.
macros::aarch64_define_fma!(u64x2, integer);

// Comparisons: equality uses native `vceqq_u64`; the parenthesized
// `emulated_vmvnq_u64` derives inequality by inverting the equality mask
// (AArch64 has no 64-bit `vmvnq`). The ordered compares map to the native
// unsigned u64 comparison intrinsics.
macros::aarch64_define_cmp!(
    u64x2,
    vceqq_u64,
    (emulated_vmvnq_u64),
    vcltq_u64,
    vcleq_u64,
    vcgtq_u64,
    vcgeq_u64
);
// Bitwise ops: NOT is emulated (no 64-bit `vmvnq`); AND/OR/XOR are native.
// The shift tuple: `vshlq_u64` shifts left by a signed per-lane amount, so
// right shifts presumably negate the count via `vnegq_s64`; counts appear
// to be clamped to the 64-bit lane width with `emulated_vminq_u64`, with
// `vreinterpretq_s64_u64` converting counts to the signed form `vshlq_u64`
// expects. The final tuple supplies the scalar element type, shift-count
// type, and the splat for broadcasting a scalar shift amount.
macros::aarch64_define_bitops!(
    u64x2,
    emulated_vmvnq_u64,
    vandq_u64,
    vorrq_u64,
    veorq_u64,
    (
        vshlq_u64,
        64,
        vnegq_s64,
        emulated_vminq_u64,
        vreinterpretq_s64_u64,
        std::convert::identity
    ),
    (u64, i64, vmovq_n_s64),
);
91
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{arch::aarch64::test_neon, reference::ReferenceScalarOps, test_utils};

    // Each test only runs when `test_neon()` reports NEON as available;
    // otherwise the body is skipped and the test passes vacuously.
    #[test]
    fn miri_test_load() {
        if let Some(arch) = test_neon() {
            test_utils::test_load_simd::<u64, 2, u64x2>(arch);
        }
    }

    #[test]
    fn miri_test_store() {
        if let Some(arch) = test_neon() {
            test_utils::test_store_simd::<u64, 2, u64x2>(arch);
        }
    }

    #[test]
    fn test_constructors() {
        if let Some(arch) = test_neon() {
            test_utils::ops::test_splat::<u64, 2, u64x2>(arch);
        }
    }

    // Macro-generated operator tests comparing against the scalar
    // reference implementation. The hex constant is presumably an RNG
    // seed for randomized inputs — TODO(review): confirm in test_utils.
    test_utils::ops::test_add!(u64x2, 0x8d7bf28b1c6e2545, test_neon());
    test_utils::ops::test_sub!(u64x2, 0x4a1c644a1a910bed, test_neon());
    test_utils::ops::test_mul!(u64x2, 0xf42ee707a808fd10, test_neon());
    test_utils::ops::test_fma!(u64x2, 0x28540d9936a9e803, test_neon());

    test_utils::ops::test_cmp!(u64x2, 0xfae27072c6b70885, test_neon());

    test_utils::ops::test_bitops!(u64x2, 0xbe927713ea310164, test_neon());
}