use crate::{backend, hash::HashInto, marker::*, op};
use core::{
marker::PhantomData,
ops::{AddAssign, MulAssign, SubAssign},
};
use digest::{self, generic_array::typenum::U32};
use rand_core::RngCore;
/// A secp256k1 scalar: an integer modulo the curve order.
///
/// The generic markers record what the type system knows about the value:
/// `S` is the secrecy marker (`Secret` by default, or `Public`) and the second
/// parameter records whether the value may be zero (`NonZero` by default, or
/// `Zero`). The markers live only in `PhantomData` and have no runtime cost;
/// the actual value is the `pub(crate)` backend scalar.
pub struct Scalar<S = Secret, Z = NonZero>(pub(crate) backend::Scalar, PhantomData<(Z, S)>);
impl<Z, S> Copy for Scalar<S, Z> {}
impl<S, Z> AsRef<backend::Scalar> for Scalar<S, Z> {
fn as_ref(&self) -> &backend::Scalar {
&self.0
}
}
impl<S, Z> AsRef<Scalar<S, Z>> for Scalar<S, Z> {
fn as_ref(&self) -> &Scalar<S, Z> {
self
}
}
impl<S, Z> Clone for Scalar<S, Z> {
fn clone(&self) -> Self {
*self
}
}
/// Hashing is only provided for *public* scalars and feeds the canonical
/// 32-byte encoding to the hasher.
impl<Z> core::hash::Hash for Scalar<Public, Z> {
    fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
        let bytes = self.to_bytes();
        bytes.hash(hasher)
    }
}
impl<Z, S> Scalar<S, Z> {
    /// Serializes the scalar to its 32-byte big-endian representation.
    pub fn to_bytes(&self) -> [u8; 32] {
        backend::BackendScalar::to_bytes(&self.0)
    }

    /// Deserializes a scalar from 32 big-endian bytes.
    ///
    /// Returns `None` if the backend rejects the bytes, or if the value is
    /// zero while `Z` is `NonZero`.
    pub fn from_bytes(bytes: [u8; 32]) -> Option<Self>
    where
        Z: ZeroChoice,
    {
        let inner: backend::Scalar = backend::BackendScalar::from_bytes(bytes)?;
        let scalar = Self::from_inner(inner);
        match (op::scalar_is_zero(&scalar), Z::is_zero()) {
            // value is zero but the marker forbids zero
            (true, false) => None,
            _ => Some(scalar),
        }
    }

    /// Like [`Self::from_bytes`] but from a slice; returns `None` unless the
    /// slice is exactly 32 bytes long (or the bytes are otherwise invalid).
    pub fn from_slice(slice: &[u8]) -> Option<Self>
    where
        Z: ZeroChoice,
    {
        if slice.len() != 32 {
            return None;
        }
        let mut bytes = [0u8; 32];
        bytes.copy_from_slice(slice);
        Self::from_bytes(bytes)
    }

    /// Negates the scalar in place if `cond` is true.
    pub fn conditional_negate(&mut self, cond: bool) {
        op::scalar_conditional_negate(self, cond)
    }

    /// Whether the scalar is greater than half the curve order.
    pub fn is_high(&self) -> bool {
        op::scalar_is_high(self)
    }

    /// Whether the scalar is zero.
    pub fn is_zero(&self) -> bool {
        op::scalar_is_zero(self)
    }

    // Internal constructor from a raw backend scalar; markers are chosen by inference.
    pub(crate) fn from_inner(inner: backend::Scalar) -> Self {
        Scalar(inner, PhantomData)
    }

    /// Re-labels the scalar with a new secrecy marker (no runtime effect).
    pub fn set_secrecy<SNew>(self) -> Scalar<SNew, Z> {
        Scalar(self.0, PhantomData)
    }

    /// Marks the scalar `Public`.
    pub fn public(self) -> Scalar<Public, Z> {
        Scalar(self.0, PhantomData)
    }

    /// Marks the scalar `Secret`.
    pub fn secret(self) -> Scalar<Secret, Z> {
        Scalar(self.0, PhantomData)
    }

    /// Weakens the zero marker to `Zero` (always sound: non-zero ⊆ maybe-zero).
    pub fn mark_zero(self) -> Scalar<S, Zero> {
        Scalar(self.0, PhantomData)
    }
}
impl<S> Scalar<S, NonZero> {
    /// Computes the multiplicative inverse of the scalar modulo the curve order.
    pub fn invert(&self) -> Self {
        op::scalar_invert(self)
    }

    /// Returns the scalar `1`.
    pub fn one() -> Self {
        // `expect` (rather than a bare unwrap) states the invariant, matching
        // the style used by `random`/`from_hash`.
        Scalar::<S, Zero>::from(1u32)
            .non_zero()
            .expect("1 is non-zero")
    }

    /// Returns the scalar `-1` (i.e. the curve order minus one).
    pub fn minus_one() -> Self {
        Self::from_inner(backend::BackendScalar::minus_one())
    }

    /// Converts the `NonZero` marker to any `ZeroChoice` marker; always sound
    /// since a non-zero value is a valid maybe-zero value.
    pub fn mark_zero_choice<Z: ZeroChoice>(self) -> Scalar<S, Z> {
        Scalar::from_inner(self.0)
    }
}
impl Scalar<Secret, NonZero> {
    // Reduces 32 bytes mod the curve order and asserts the result is non-zero.
    // Shared by `random` and `from_hash`; hitting zero requires the input to
    // reduce exactly to 0 mod the order, which is computationally unreachable
    // for uniformly random / hash-derived bytes.
    fn from_bytes_mod_order_nonzero(bytes: [u8; 32]) -> Self {
        Scalar::from_bytes_mod_order(bytes)
            .non_zero()
            .expect("computationally unreachable")
    }

    /// Generates a uniformly random non-zero secret scalar from `rng`.
    pub fn random<R: RngCore>(rng: &mut R) -> Self {
        let mut bytes = [0u8; 32];
        rng.fill_bytes(&mut bytes);
        Self::from_bytes_mod_order_nonzero(bytes)
    }

    /// Derives a scalar from a 32-byte hash output, reducing it mod the curve order.
    pub fn from_hash(hash: impl digest::FixedOutput<OutputSize = U32>) -> Self {
        let mut bytes = [0u8; 32];
        bytes.copy_from_slice(hash.finalize_fixed().as_slice());
        Self::from_bytes_mod_order_nonzero(bytes)
    }
}
impl<S> Scalar<S, Zero> {
    /// Promotes the scalar to `NonZero`, returning `None` if it is actually zero.
    pub fn non_zero(self) -> Option<Scalar<S, NonZero>> {
        if self.is_zero() {
            return None;
        }
        Some(Scalar::from_inner(self.0))
    }

    /// Returns the zero scalar.
    pub fn zero() -> Self {
        Self::from_inner(backend::BackendScalar::zero())
    }

    /// Interprets 32 big-endian bytes as a scalar, reducing modulo the curve order.
    pub fn from_bytes_mod_order(bytes: [u8; 32]) -> Self {
        Self::from_inner(backend::BackendScalar::from_bytes_mod_order(bytes))
    }

    /// Like [`Self::from_bytes_mod_order`] but from a slice; returns `None`
    /// unless the slice is exactly 32 bytes long.
    pub fn from_slice_mod_order(slice: &[u8]) -> Option<Self> {
        if slice.len() == 32 {
            let mut bytes = [0u8; 32];
            bytes.copy_from_slice(slice);
            Some(Self::from_bytes_mod_order(bytes))
        } else {
            None
        }
    }
}
impl<Z1, Z2, S1, S2> PartialEq<Scalar<S2, Z2>> for Scalar<S1, Z1> {
fn eq(&self, rhs: &Scalar<S2, Z2>) -> bool {
crate::op::scalar_eq(self, rhs)
}
}
impl<Z, S> Eq for Scalar<Z, S> {}
// Wires up parsing/deserialization for `Scalar` in terms of its 32-byte
// representation (presumably `FromStr` + serde `Deserialize` — see the macro
// definition for the exact impls generated).
crate::impl_fromstr_deserialize! {
name => "secp256k1 scalar",
fn from_bytes<S, Z: ZeroChoice>(bytes: [u8;32]) -> Option<Scalar<S,Z>> {
Scalar::from_bytes(bytes)
}
}
// Wires up `Display`/`Debug`/serialization in terms of the same 32-byte form.
crate::impl_display_debug_serialize! {
fn to_bytes<Z,S>(scalar: &Scalar<S,Z>) -> [u8;32] {
scalar.to_bytes()
}
}
impl<S, Z> core::ops::Neg for Scalar<S, Z> {
type Output = Scalar<S, Z>;
fn neg(self) -> Self::Output {
crate::op::scalar_negate(self)
}
}
impl<S, Z> core::ops::Neg for &Scalar<S, Z> {
type Output = Scalar<S, Z>;
fn neg(self) -> Self::Output {
crate::op::scalar_negate(self)
}
}
impl<S, Z> HashInto for Scalar<S, Z> {
    /// Feeds the scalar's canonical 32-byte encoding into the digest.
    fn hash_into(self, hash: &mut impl digest::Update) {
        let bytes = self.to_bytes();
        hash.update(&bytes)
    }
}
impl<S> Default for Scalar<S, Zero>
where
    S: Secrecy,
{
    /// A maybe-zero scalar defaults to `0`.
    fn default() -> Self {
        Self::zero()
    }
}

impl<S> Default for Scalar<S, NonZero>
where
    S: Secrecy,
{
    /// A non-zero scalar defaults to `1`.
    fn default() -> Self {
        Scalar::one()
    }
}
impl<SL, SR, ZR> AddAssign<Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn add_assign(&mut self, rhs: Scalar<SR, ZR>) {
*self = crate::op::scalar_add(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR, ZR> AddAssign<&Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn add_assign(&mut self, rhs: &Scalar<SR, ZR>) {
*self = crate::op::scalar_add(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR, ZR> SubAssign<&Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn sub_assign(&mut self, rhs: &Scalar<SR, ZR>) {
*self = crate::op::scalar_sub(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR, ZR> SubAssign<Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn sub_assign(&mut self, rhs: Scalar<SR, ZR>) {
*self = crate::op::scalar_sub(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR> MulAssign<Scalar<SR, NonZero>> for Scalar<SL, NonZero> {
fn mul_assign(&mut self, rhs: Scalar<SR, NonZero>) {
*self = crate::op::scalar_mul(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR> MulAssign<&Scalar<SR, NonZero>> for Scalar<SL, NonZero> {
fn mul_assign(&mut self, rhs: &Scalar<SR, NonZero>) {
*self = crate::op::scalar_mul(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR, ZR: ZeroChoice> MulAssign<Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn mul_assign(&mut self, rhs: Scalar<SR, ZR>) {
*self = crate::op::scalar_mul(*self, rhs).set_secrecy::<SL>();
}
}
impl<SL, SR, ZR: ZeroChoice> MulAssign<&Scalar<SR, ZR>> for Scalar<SL, Zero> {
fn mul_assign(&mut self, rhs: &Scalar<SR, ZR>) {
*self = crate::op::scalar_mul(*self, rhs).set_secrecy::<SL>();
}
}
// Ordering is only defined for public scalars and compares the big-endian
// byte encodings lexicographically (a total order, so `partial_cmp` is always `Some`).
impl<Z1, Z2> PartialOrd<Scalar<Public, Z2>> for Scalar<Public, Z1> {
    fn partial_cmp(&self, other: &Scalar<Public, Z2>) -> Option<core::cmp::Ordering> {
        self.to_bytes().partial_cmp(&other.to_bytes())
    }
}

impl<Z> Ord for Scalar<Public, Z> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        Ord::cmp(&self.to_bytes(), &other.to_bytes())
    }
}
mod conversion_impls {
    use super::*;
    use core::{any::type_name, convert::TryFrom, fmt, marker::PhantomData, mem};
    use subtle::ConstantTimeEq;

    /// Error returned when a scalar's value does not fit into the target
    /// integer type `T`.
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub struct ScalarTooLarge<T>(PhantomData<T>);

    impl<T> core::fmt::Display for ScalarTooLarge<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "scalar value does not fit into {}", type_name::<T>())
        }
    }
    impl<T> core::fmt::Debug for ScalarTooLarge<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.debug_tuple("ScalarTooLarge")
                .field(&type_name::<T>())
                .finish()
        }
    }
    #[cfg(feature = "std")]
    impl<T> std::error::Error for ScalarTooLarge<T> {}

    /// Error returned when converting the integer `0` into a `NonZero` scalar.
    // Derives added for consistency with `ScalarTooLarge`, so both conversion
    // error types can be copied and compared by callers.
    #[derive(Clone, Copy, PartialEq, Eq)]
    pub struct ZeroScalar<T>(PhantomData<T>);

    impl<T> core::fmt::Display for ZeroScalar<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(
                f,
                "cannot convert zero {} to NonZero scalar",
                type_name::<T>()
            )
        }
    }
    impl<T> core::fmt::Debug for ZeroScalar<T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.debug_tuple("ZeroScalar")
                .field(&type_name::<T>())
                .finish()
        }
    }
    #[cfg(feature = "std")]
    impl<T> std::error::Error for ZeroScalar<T> {}

    // Generates, for each unsigned integer type:
    //   From<$t>            for Scalar<S, Zero>      (infallible — any $t < curve order)
    //   TryFrom<$t>         for Scalar<S, NonZero>   (fails on 0)
    //   TryFrom<Scalar>     for $t                   (fails if the scalar is too large)
    //   From<NonZero<$t>>   for Scalar<S, NonZero>
    //   TryFrom<Scalar<S, NonZero>> for NonZero<$t>
    macro_rules! impl_scalar_conversions {
        ($($t:ty),+ $(,)?) => {
            $(
                impl<S> From<$t> for Scalar<S, Zero> {
                    fn from(value: $t) -> Self {
                        let mut bytes = [0u8; 32];
                        let int_bytes = value.to_be_bytes();
                        bytes[32 - int_bytes.len() ..].copy_from_slice(&int_bytes);
                        // Every value of $t (≤ u128::MAX) is below the curve order.
                        Scalar::<S, Zero>::from_bytes(bytes).unwrap()
                    }
                }
                impl<S> TryFrom<$t> for Scalar<S, NonZero> {
                    type Error = ZeroScalar<$t>;
                    fn try_from(value: $t) -> Result<Self, Self::Error> {
                        let mut bytes = [0u8; 32];
                        let int_bytes = value.to_be_bytes();
                        bytes[32 - int_bytes.len() ..].copy_from_slice(&int_bytes);
                        let scalar = Scalar::<S, Zero>::from_bytes(bytes).unwrap();
                        if value == 0 {
                            Err(ZeroScalar(PhantomData))
                        } else {
                            Ok(scalar.non_zero().unwrap())
                        }
                    }
                }
                impl<S, Z> TryFrom<Scalar<S, Z>> for $t {
                    type Error = ScalarTooLarge<$t>;
                    fn try_from(value: Scalar<S, Z>) -> Result<Self, Self::Error> {
                        let bytes = value.to_bytes();
                        // The high bytes must all be zero for the value to fit in $t;
                        // checked in constant time via `subtle`.
                        let high = &bytes[.. 32 - mem::size_of::<$t>()];
                        if high.ct_eq(&[0x0;32 - mem::size_of::<$t>()]).into() {
                            let mut buf = [0u8; mem::size_of::<$t>()];
                            buf.copy_from_slice(&bytes[32 - mem::size_of::<$t>() ..]);
                            Ok(<$t>::from_be_bytes(buf))
                        } else {
                            Err(ScalarTooLarge(PhantomData))
                        }
                    }
                }
                impl<S> From<core::num::NonZero<$t>> for Scalar<S, NonZero> {
                    fn from(value: core::num::NonZero<$t>) -> Self {
                        let mut bytes = [0u8; 32];
                        let int_bytes = value.get().to_be_bytes();
                        bytes[32 - int_bytes.len() ..].copy_from_slice(&int_bytes);
                        Scalar::<S, Zero>::from_bytes(bytes).unwrap().non_zero().unwrap()
                    }
                }
                impl<S> TryFrom<Scalar<S, NonZero>> for core::num::NonZero<$t> {
                    type Error = ScalarTooLarge<$t>;
                    fn try_from(value: Scalar<S, NonZero>) -> Result<Self, Self::Error> {
                        let bytes = value.to_bytes();
                        let high = &bytes[.. 32 - mem::size_of::<$t>()];
                        if high.ct_eq(&[0x0;32 - mem::size_of::<$t>()]).into() {
                            let mut buf = [0u8; mem::size_of::<$t>()];
                            buf.copy_from_slice(&bytes[32 - mem::size_of::<$t>() ..]);
                            // The scalar is NonZero, so the integer cannot be zero.
                            Ok(core::num::NonZero::new(<$t>::from_be_bytes(buf)).unwrap())
                        } else {
                            Err(ScalarTooLarge(PhantomData))
                        }
                    }
                }
            )*
        };
    }
    impl_scalar_conversions!(u8, u16, u32, u64, usize, u128);
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::{hex, s};
    #[cfg(feature = "alloc")]
    use proptest::prelude::*;
    #[cfg(target_arch = "wasm32")]
    use wasm_bindgen_test::wasm_bindgen_test as test;

    // Fixed typo in test name: "rountrip" -> "roundtrip".
    #[cfg(all(feature = "serde", feature = "bincode"))]
    #[test]
    fn scalar_serde_roundtrip() {
        let original = Scalar::random(&mut rand::thread_rng());
        let serialized = bincode::encode_to_vec(
            bincode::serde::Compat(&original),
            bincode::config::standard(),
        )
        .unwrap();
        let deserialized = bincode::decode_from_slice::<bincode::serde::Compat<Scalar>, _>(
            &serialized[..],
            bincode::config::standard(),
        )
        .unwrap()
        .0;
        assert_eq!(deserialized.0, original)
    }

    #[test]
    fn random() {
        let scalar_1 = Scalar::random(&mut rand::thread_rng());
        let scalar_2 = Scalar::random(&mut rand::thread_rng());
        assert_ne!(scalar_1, scalar_2);
    }

    #[cfg(feature = "alloc")]
    proptest! {
        #[test]
        fn invert(x in any::<Scalar>(), y in any::<Scalar<Public>>()) {
            prop_assert_eq!(s!(x * { x.invert() }), s!(1));
            prop_assert_eq!(s!(y * { y.invert() }), s!(1));
        }

        #[test]
        fn sub(a in any::<Scalar>(),
            b in any::<Scalar<Public>>(),
            c in any::<Scalar<Public,Zero>>(),
            d in any::<Scalar<Secret,Zero>>(),
        ) {
            prop_assert_eq!(s!(a - a), s!(0));
            prop_assert_eq!(s!(b - b), s!(0));
            prop_assert_eq!(s!(c - c), s!(0));
            prop_assert_eq!(s!(d - d), s!(0));
            prop_assert_eq!(s!(a - a), s!(-a + a));
            prop_assert_eq!(s!(a - b), s!(-b + a));
            prop_assert_eq!(s!(a - c), s!(-c + a));
            prop_assert_eq!(s!(a - d), s!(-d + a));
            if a != b {
                prop_assert_ne!(s!(a - b), s!(b - a));
            }
            if c != d {
                prop_assert_ne!(s!(c - d), s!(d - c));
            }
        }

        #[test]
        fn u128_roundtrip(xs in any::<u128>()) {
            let s: Scalar<Public, Zero> = xs.into();
            let back = u128::try_from(s)
                .expect("a u128 always fits inside a 256-bit scalar");
            prop_assert_eq!(xs, back);
        }

        #[test]
        fn nz_u128_roundtrip(xs in any::<core::num::NonZero<u128>>()) {
            let s: Scalar<Public, NonZero> = xs.into();
            let back = core::num::NonZero::<u128>::try_from(s)
                .expect("a u128 always fits inside a 256-bit scalar");
            prop_assert_eq!(xs, back);
        }
    }

    #[test]
    fn scalar_subtraction_is_not_commutative() {
        let two = s!(2);
        let three = s!(3);
        let minus_1 = s!(-1);
        let one = s!(1);
        assert_eq!(s!(two - three), minus_1);
        assert_eq!(s!(three - two), one);
    }

    #[test]
    fn one() {
        assert_eq!(
            Scalar::<Secret, NonZero>::one(),
            Scalar::<Secret, _>::from(1u32)
        );
        assert_eq!(
            Scalar::<Secret, NonZero>::minus_one(),
            -Scalar::<Secret, NonZero>::one()
        );
        assert_eq!(
            op::scalar_mul(s!(3), Scalar::<Secret, NonZero>::minus_one()),
            -s!(3)
        );
    }

    #[test]
    fn zero() {
        assert_eq!(
            Scalar::<Secret, Zero>::zero(),
            Scalar::<Secret, _>::from(0u32)
        );
    }

    #[test]
    fn from_slice() {
        assert!(
            Scalar::<Secret, NonZero>::from_slice(b"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx".as_ref())
                .is_some()
        );
        // 33 bytes — wrong length must be rejected
        assert!(
            Scalar::<Secret, NonZero>::from_slice(b"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx".as_ref())
                .is_none()
        );
        // all-ones is above the curve order, so `from_slice` (no reduction) rejects it
        assert!(
            Scalar::<Secret, NonZero>::from_slice(
                hex::decode_array::<32>(
                    "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
                )
                .unwrap()
                .as_ref()
            )
            .is_none()
        );
    }

    #[test]
    fn from_slice_mod_order() {
        assert_eq!(
            Scalar::<Secret, _>::from_slice_mod_order(b"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx".as_ref())
                .unwrap()
                .to_bytes(),
            *b"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
        );
        // the curve order plus one reduces to 1
        assert_eq!(
            Scalar::<Secret, _>::from_slice_mod_order(
                hex::decode_array::<32>(
                    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364142"
                )
                .unwrap()
                .as_ref()
            )
            .unwrap(),
            Scalar::<Secret, _>::from(1u32)
        )
    }

    #[test]
    fn minus_one() {
        assert_eq!(
            Scalar::<Secret, _>::minus_one(),
            Scalar::<Secret, _>::from_bytes_mod_order(
                hex::decode_array(
                    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140"
                )
                .unwrap()
            )
        );
    }

    #[test]
    fn assign_tests() {
        let mut a = Scalar::<Secret, _>::from(42u8);
        let b = Scalar::<Secret, _>::from(1337u16).public();
        a += b;
        assert_eq!(a, Scalar::<Secret, _>::from(1379u16));
        a -= b;
        assert_eq!(a, Scalar::<Secret, _>::from(42u32));
        a *= b;
        assert_eq!(a, Scalar::<Secret, _>::from(42u16 * 1337u16));
    }

    #[test]
    fn scalar_ord() {
        assert!(Scalar::<Public, _>::from(1337u32) > Scalar::<Public, _>::from(42u8));
        assert!(Scalar::<Public, _>::from(42u32) < Scalar::<Public, _>::from(1337u16));
        assert!(Scalar::<Public, _>::from(41u32) < Scalar::<Public, _>::from(42u32));
        assert!(Scalar::<Public, _>::from(42u32) <= Scalar::<Public, _>::from(42u32));
    }

    #[test]
    fn try_from_zero_to_nonzero() {
        use core::convert::TryFrom;
        let result = Scalar::<Secret, NonZero>::try_from(0u32);
        assert!(result.is_err());
        let result = Scalar::<Secret, NonZero>::try_from(42u32);
        assert!(result.is_ok());
        assert_eq!(
            result.unwrap(),
            Scalar::<Secret, Zero>::from(42u32).non_zero().unwrap()
        );
        assert!(Scalar::<Public, NonZero>::try_from(0u8).is_err());
        assert!(Scalar::<Public, NonZero>::try_from(0u16).is_err());
        assert!(Scalar::<Public, NonZero>::try_from(0u64).is_err());
        assert!(Scalar::<Public, NonZero>::try_from(1u8).is_ok());
        assert!(Scalar::<Public, NonZero>::try_from(1u16).is_ok());
        assert!(Scalar::<Public, NonZero>::try_from(1u64).is_ok());
        let _zero_scalar: Scalar<Secret, Zero> = 0u32.into();
        let _nonzero_scalar: Scalar<Secret, Zero> = 42u32.into();
    }
}