// diskann_wide/arch/aarch64/i64x2_.rs
1/*
2 * Copyright (c) Microsoft Corporation. All rights reserved.
3 * Licensed under the MIT license.
4 */
5
6use crate::{
7    Emulated, SIMDAbs, SIMDMask, SIMDMulAdd, SIMDPartialEq, SIMDPartialOrd, SIMDVector,
8    arch::Scalar, constant::Const, helpers,
9};
10
11// AArch64 masks
12use super::{
13    Neon, internal,
14    macros::{self, AArchLoadStore, AArchSplat},
15    masks::mask64x2,
16    u64x2_::{emulated_vminq_u64, emulated_vmvnq_u64},
17};
18
19// AArch64 intrinsics
20use std::arch::aarch64::*;
21
22///////////////////
23// 64-bit signed //
24///////////////////
25
26#[inline(always)]
27pub(super) unsafe fn emulated_vmvnq_s64(x: int64x2_t) -> int64x2_t {
28    let x: [i64; 2] = i64x2(x).to_array();
29    let mapped: [i64; 2] = core::array::from_fn(|i| !x[i]);
30    // SAFETY: This is only called in a context where the caller guarantees `Neon` is
31    // available.
32    i64x2::from_array(unsafe { Neon::new() }, mapped).0
33}
34
// Core plumbing for `i64x2`: defines the newtype over `int64x2_t` (elements
// `i64`, width 2, mask type `mask64x2`, arch token `Neon`), a splat
// constructor based on `vmovq_n_s64`, and load/store entry points (full
// vectors via `vld1q_s64`/`vst1q_s64`, partial loads via the shared
// `load_first` helper).
macros::aarch64_define_register!(i64x2, int64x2_t, mask64x2, i64, 2, Neon);
macros::aarch64_define_splat!(i64x2, vmovq_n_s64);
macros::aarch64_define_loadstore!(i64x2, vld1q_s64, internal::load_first::i64x2, vst1q_s64, 2);

// Lane-wise add/sub/abs each map directly onto a single NEON intrinsic;
// the "neon" string is the target-feature gate used by the helper macros.
helpers::unsafe_map_binary_op!(i64x2, std::ops::Add, add, vaddq_s64, "neon");
helpers::unsafe_map_binary_op!(i64x2, std::ops::Sub, sub, vsubq_s64, "neon");
helpers::unsafe_map_unary_op!(i64x2, SIMDAbs, abs_simd, vabsq_s64, "neon");
42
43impl std::ops::Mul for i64x2 {
44    type Output = Self;
45    #[inline(always)]
46    fn mul(self, rhs: Self) -> Self {
47        let x = Emulated::<i64, 2>::from_array(Scalar, self.to_array());
48        let y = Emulated::<i64, 2>::from_array(Scalar, rhs.to_array());
49        Self::from_array(self.arch(), (x * y).to_array())
50    }
51}
52
// Fused multiply-add; the `integer` flavor presumably lowers `mul_add` to a
// separate multiply + add (there is no integer FMA instruction) — see
// `macros::aarch64_define_fma` for the exact expansion.
macros::aarch64_define_fma!(i64x2, integer);

// Comparison operators, each backed by a native NEON 64-bit compare.
// The parenthesized `(emulated_vmvnq_u64)` is the NOT used to derive `!=`
// from `vceqq_s64`, emulated because NEON lacks `vmvnq` for 64-bit lanes
// (compare results are unsigned masks, hence the u64 variant).
macros::aarch64_define_cmp!(
    i64x2,
    vceqq_s64,
    (emulated_vmvnq_u64),
    vcltq_s64,
    vcleq_s64,
    vcgtq_s64,
    vcgeq_s64
);
// Bitwise operators: NOT is emulated (again, no 64-bit `vmvnq`), while
// AND/OR/XOR are native. The first tuple configures shifts on top of
// `vshlq_s64` — lane width 64, `vnegq_s64` to negate counts (vshl shifts
// right for negative counts), `emulated_vminq_u64` to clamp counts, and the
// two reinterpret casts to move between signed and unsigned views. The last
// tuple supplies the scalar shift-count types and splat used by the macro —
// NOTE(review): exact expansion lives in `macros::aarch64_define_bitops`.
macros::aarch64_define_bitops!(
    i64x2,
    emulated_vmvnq_s64,
    vandq_s64,
    vorrq_s64,
    veorq_s64,
    (
        vshlq_s64,
        64,
        vnegq_s64,
        emulated_vminq_u64,
        vreinterpretq_s64_u64,
        vreinterpretq_u64_s64
    ),
    (u64, i64, vmovq_n_s64),
);
80
81///////////
82// Tests //
83///////////
84
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{arch::aarch64::test_neon, reference::ReferenceScalarOps, test_utils};

    // Load/store round-trips; each test is a no-op when NEON is unavailable.

    #[test]
    fn miri_test_load() {
        match test_neon() {
            Some(arch) => test_utils::test_load_simd::<i64, 2, i64x2>(arch),
            None => {}
        }
    }

    #[test]
    fn miri_test_store() {
        match test_neon() {
            Some(arch) => test_utils::test_store_simd::<i64, 2, i64x2>(arch),
            None => {}
        }
    }

    // constructors
    #[test]
    fn test_constructors() {
        match test_neon() {
            Some(arch) => test_utils::ops::test_splat::<i64, 2, i64x2>(arch),
            None => {}
        }
    }

    // Binary Ops — the hex literals are fixed RNG seeds for the generated tests.
    test_utils::ops::test_add!(i64x2, 0x8d7bf28b1c6e2545, test_neon());
    test_utils::ops::test_sub!(i64x2, 0x4a1c644a1a910bed, test_neon());
    test_utils::ops::test_mul!(i64x2, 0xf42ee707a808fd10, test_neon());
    test_utils::ops::test_fma!(i64x2, 0x28540d9936a9e803, test_neon());
    test_utils::ops::test_abs!(i64x2, 0xb8f702ba85375041, test_neon());

    test_utils::ops::test_cmp!(i64x2, 0xfae27072c6b70885, test_neon());

    // Bit ops
    test_utils::ops::test_bitops!(i64x2, 0xbe927713ea310164, test_neon());
}