use aes_gcm::{aes::Aes256, Aes256Gcm};
use blake2::{Digest, digest::{FixedOutputReset, Output}};
use chacha20::{cipher::{BlockEncrypt, KeyIvInit, StreamCipher, StreamCipherSeek, StreamCipherSeekCore}, ChaCha20};
use chacha20poly1305::{aead::AeadInPlace, ChaCha20Poly1305, KeyInit};
use rand_core::{CryptoRng, RngCore};
use zeroize::Zeroize;

use crate::{crypto::{noise::{x25519::SecretKey, AskChain, HandshakeState}, transport::BuiltinTransportCrypto, CryptoBackend, HYPHAE_AEAD_NONCE_LEN, HYPHAE_AEAD_TAG_LEN}, handshake::HYPHAE_KEY_ASK_LABEL};

use crate::crypto::{CryptoError, SymmetricKey, HYPHAE_HEADER_MASK_MAX_LEN, HYPHAE_HEADER_SAMPLE_LEN};

use super::{AeadBackend, HashBackend};

pub struct RustCryptoBackend;

impl RustCryptoBackend {
    pub const X25519_KEY_SIZE: usize = SecretKey::SIZE;

    pub fn new_secret_key_into(&self, rng: &mut (impl RngCore + CryptoRng), secret_key: &mut [u8; Self::X25519_KEY_SIZE]) {
        rng.fill_bytes(secret_key.as_mut_slice());
    }

    pub fn new_secret_key(&self, rng: &mut (impl RngCore + CryptoRng)) -> [u8; Self::X25519_KEY_SIZE] {
        let mut secret_key = [0u8; Self::X25519_KEY_SIZE];
        self.new_secret_key_into(rng, &mut secret_key);
        secret_key
    }

    pub fn public_key(&self, secret_key: &[u8; Self::X25519_KEY_SIZE]) -> [u8; Self::X25519_KEY_SIZE] {
        SecretKey::public_from_bytes(secret_key)
    }
}
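
// Usage sketch (illustrative only; assumes a `rand_core::OsRng` entropy
// source is available to the caller):
//
//     let backend = RustCryptoBackend;
//     let secret = backend.new_secret_key(&mut rand_core::OsRng);
//     let public = backend.public_key(&secret);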

impl CryptoBackend for RustCryptoBackend {
    type InitialCrypto = BuiltinTransportCrypto<ChaChaPoly, Blake2s>;

    type NoiseHandshake = HandshakeState<AnyAead, AnyHash>;

    type TransportCrypto = BuiltinTransportCrypto<AnyAead, AnyHash>;

    type TransportRekey = AskChain<AnyHash>;

    fn protocol_supported(&self, noise_protocol: &str) -> bool {
        Self::NoiseHandshake::parse_protocol(noise_protocol).is_ok()
    }

    fn initial_crypto(&self) -> Self::InitialCrypto {
        BuiltinTransportCrypto::new(ChaChaPoly, Blake2s)
    }

    fn new_handshake(&self) -> Result<Self::NoiseHandshake, CryptoError> {
        Ok(Default::default())
    }

    fn transport_crypto(&self, handshake: &Self::NoiseHandshake) -> Result<Self::TransportCrypto, CryptoError> {
        Ok(BuiltinTransportCrypto::new(handshake.aead_backend()?, handshake.hash_backend()?))
    }

    fn export_1rtt_rekey(&self, handshake: &mut Self::NoiseHandshake, rekey: &mut Self::TransportRekey) -> Result<(), CryptoError> {
        handshake.export_ask_into(rekey, HYPHAE_KEY_ASK_LABEL)
    }
}
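
// Initial-packet protection always uses the fixed ChaChaPoly/BLAKE2s suite,
// while the 1-RTT transport crypto and the rekey chain are built from
// whichever AEAD and hash backends the negotiated Noise protocol selects.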

#[derive(Default, Clone, Zeroize)]
pub struct AnyAead (RustCryptoAeadProtocol);

#[derive(Default, Clone, Zeroize)]
enum RustCryptoAeadProtocol {
    #[default]
    Uninitialized,
    AesGcm,
    ChaChaPoly,
}

impl AnyAead {
    fn panic_uninitialized() -> ! {
        panic!("uninitialized aead backend");
    }
}

impl AeadBackend for AnyAead {
    fn initialize(&mut self, aead_protocol: &str) -> Result<(), CryptoError> {
        if aead_protocol == "AESGCM" {
            self.0 = RustCryptoAeadProtocol::AesGcm;
        } else if aead_protocol == "ChaChaPoly" {
            self.0 = RustCryptoAeadProtocol::ChaChaPoly;
        } else {
            return Err(CryptoError::UnsupportedProtocol);
        }
        Ok(())
    }

    fn encrypt_in_place(&self, key: &SymmetricKey, nonce: u64, ad: &[u8], buffer: &mut [u8]) -> Result<(), CryptoError> {
        match self.0 {
            RustCryptoAeadProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoAeadProtocol::AesGcm => AesGcm.encrypt_in_place(key, nonce, ad, buffer),
            RustCryptoAeadProtocol::ChaChaPoly => ChaChaPoly.encrypt_in_place(key, nonce, ad, buffer),
        }
    }

    fn decrypt_in_place<'a> (&self, key: &SymmetricKey, nonce: u64, ad: &[u8], buffer: &'a mut [u8]) -> Result<&'a [u8], CryptoError> {
        match self.0 {
            RustCryptoAeadProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoAeadProtocol::AesGcm => AesGcm.decrypt_in_place(key, nonce, ad, buffer),
            RustCryptoAeadProtocol::ChaChaPoly => ChaChaPoly.decrypt_in_place(key, nonce, ad, buffer),
        }
    }

    fn header_protection_mask(&self, key: &SymmetricKey, sample: &[u8], mask: &mut [u8]) -> Result<(), CryptoError> {
        match self.0 {
            RustCryptoAeadProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoAeadProtocol::AesGcm => AesGcm.header_protection_mask(key, sample, mask),
            RustCryptoAeadProtocol::ChaChaPoly => ChaChaPoly.header_protection_mask(key, sample, mask),
        }
    }

    fn confidentiality_limit(&self) -> u64 {
        match self.0 {
            RustCryptoAeadProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoAeadProtocol::AesGcm => AesGcm.confidentiality_limit(),
            RustCryptoAeadProtocol::ChaChaPoly => ChaChaPoly.confidentiality_limit(),
        }
    }

    fn integrity_limit(&self) -> u64 {
        match self.0 {
            RustCryptoAeadProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoAeadProtocol::AesGcm => AesGcm.integrity_limit(),
            RustCryptoAeadProtocol::ChaChaPoly => ChaChaPoly.integrity_limit(),
        }
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct AesGcm;

impl AesGcm {
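    // The 96-bit AEAD nonce is four zero bytes followed by the 64-bit counter
    // in big-endian byte order, matching the Noise spec's nonce encoding for
    // AESGCM.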
    fn nonce(nonce_u64: u64) -> [u8; HYPHAE_AEAD_NONCE_LEN] {
        let mut nonce = [0u8; HYPHAE_AEAD_NONCE_LEN];
        nonce[4..].copy_from_slice(&nonce_u64.to_be_bytes());
        nonce
    }
}

impl AeadBackend for AesGcm {
    fn initialize(&mut self, aead_protocol: &str) -> Result<(), CryptoError> {
        if aead_protocol == "AESGCM" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn encrypt_in_place(&self, packet_key: &SymmetricKey, packet_id: u64, ad: &[u8], buffer: &mut [u8]) -> Result<(), CryptoError> {
        if buffer.len() < HYPHAE_AEAD_TAG_LEN {
            return Err(CryptoError::Internal);
        }

        let (packet, tag) = buffer.split_at_mut(buffer.len() - HYPHAE_AEAD_TAG_LEN);
        let aead = Aes256Gcm::new(packet_key.as_ref().into());
        let tag_temp = aead.encrypt_in_place_detached(Self::nonce(packet_id).as_ref().into(), ad, packet)
            .map_err(|_| CryptoError::Internal)?;
        tag.copy_from_slice(&tag_temp);
        Ok(())
    }

    fn decrypt_in_place<'a> (&self, packet_key: &SymmetricKey, packet_id: u64, ad: &[u8], buffer: &'a mut [u8]) -> Result<&'a [u8], CryptoError> {
        if buffer.len() < HYPHAE_AEAD_TAG_LEN {
            return Err(CryptoError::Internal);
        }
        let (packet, tag) = buffer.split_at_mut(buffer.len() - HYPHAE_AEAD_TAG_LEN);

        let aead = Aes256Gcm::new(packet_key.as_ref().into());
        aead.decrypt_in_place_detached(Self::nonce(packet_id).as_ref().into(), ad, packet, tag.as_ref().into())
            .map_err(|_| CryptoError::DecryptionFailed)?;

        Ok(packet)
    }
    fn header_protection_mask(&self, header_key: &SymmetricKey, sample: &[u8], mask: &mut [u8]) -> Result<(), CryptoError> {
        if sample.len() != HYPHAE_HEADER_SAMPLE_LEN ||
            mask.len() > HYPHAE_HEADER_MASK_MAX_LEN
        {
            return Err(CryptoError::Internal)
        }

        let mut sample_block = [0u8; HYPHAE_HEADER_SAMPLE_LEN];
        sample_block.copy_from_slice(sample);

        // Encrypt the sampled block in place and take the mask from the
        // resulting ciphertext. (Encrypting a temporary copy of the block
        // would discard the ciphertext and leave the mask equal to the raw
        // sample.)
        let cipher = Aes256::new(header_key.as_ref().into());
        cipher.encrypt_block((&mut sample_block).into());
        mask.copy_from_slice(&sample_block[0..mask.len()]);

        Ok(())
    }

    fn confidentiality_limit(&self) -> u64 {
        2u64.pow(23)
    }

    fn integrity_limit(&self) -> u64 {
        2u64.pow(52)
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct ChaChaPoly;

impl ChaChaPoly {
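    // The 96-bit AEAD nonce is four zero bytes followed by the 64-bit counter
    // in little-endian byte order, matching the Noise spec's nonce encoding
    // for ChaChaPoly.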
    fn nonce(nonce_u64: u64) -> [u8; HYPHAE_AEAD_NONCE_LEN] {
        let mut nonce = [0u8; HYPHAE_AEAD_NONCE_LEN];
        nonce[4..].copy_from_slice(&nonce_u64.to_le_bytes());
        nonce
    }
}

impl AeadBackend for ChaChaPoly {
    fn initialize(&mut self, aead_protocol: &str) -> Result<(), CryptoError> {
        if aead_protocol == "ChaChaPoly" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn encrypt_in_place(&self, packet_key: &SymmetricKey, packet_id: u64, ad: &[u8], buffer: &mut [u8]) -> Result<(), CryptoError> {
        if buffer.len() < HYPHAE_AEAD_TAG_LEN {
            return Err(CryptoError::Internal);
        }

        let (packet, tag) = buffer.split_at_mut(buffer.len() - HYPHAE_AEAD_TAG_LEN);
        let aead = ChaCha20Poly1305::new(packet_key.as_ref().into());
        let tag_temp = aead.encrypt_in_place_detached(Self::nonce(packet_id).as_ref().into(), ad, packet)
            .map_err(|_| CryptoError::Internal)?;
        tag.copy_from_slice(&tag_temp);
        Ok(())
    }

    fn decrypt_in_place<'a> (&self, packet_key: &SymmetricKey, packet_id: u64, ad: &[u8], buffer: &'a mut [u8]) -> Result<&'a [u8], CryptoError> {
        if buffer.len() < HYPHAE_AEAD_TAG_LEN {
            return Err(CryptoError::Internal);
        }
        let (packet, tag) = buffer.split_at_mut(buffer.len() - HYPHAE_AEAD_TAG_LEN);

        let aead = ChaCha20Poly1305::new(packet_key.as_ref().into());
        aead.decrypt_in_place_detached(Self::nonce(packet_id).as_ref().into(), ad, packet, tag.as_ref().into())
            .map_err(|_| CryptoError::DecryptionFailed)?;

        Ok(packet)
    }

    fn header_protection_mask(&self, header_key: &SymmetricKey, sample: &[u8], mask: &mut [u8]) -> Result<(), CryptoError> {
        if sample.len() != HYPHAE_HEADER_SAMPLE_LEN ||
            mask.len() > HYPHAE_HEADER_MASK_MAX_LEN
        {
            return Err(CryptoError::Internal)
        }

        let block = u32::from_le_bytes(sample[0..4].try_into().unwrap());
        let nonce: &[u8; HYPHAE_AEAD_NONCE_LEN] = sample[4..].try_into().unwrap();
        let mut cipher = ChaCha20::new(header_key.as_ref().into(), nonce.into());

        cipher.seek(block as u64 * 64);
        debug_assert_eq!(cipher.get_core().get_block_pos(), block);

        mask.zeroize();
        cipher.apply_keystream(mask);

        Ok(())
    }

    fn confidentiality_limit(&self) -> u64 {
        u64::MAX
    }

    fn integrity_limit(&self) -> u64 {
        2u64.pow(36)
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct AnyHash (RustCryptoHashProtocol);

#[derive(Default, Clone, Zeroize)]
enum RustCryptoHashProtocol {
    #[default]
    Uninitialized,
    Blake2s,
    Blake2b,
    Sha256,
    Sha512,
}

impl AnyHash {
    fn panic_uninitialized() -> ! {
        panic!("uninitialized hash backend");
    }

    fn hash_len(&self) -> usize {
        match self.0 {
            RustCryptoHashProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoHashProtocol::Blake2s |
            RustCryptoHashProtocol::Sha256 => 32,
            RustCryptoHashProtocol::Blake2b |
            RustCryptoHashProtocol::Sha512 => 64,
        }
    }
}

impl HashBackend for AnyHash {
    type Hash = [u8; 64];

    fn initialize(&mut self, hash_protocol: &str) -> Result<(), CryptoError> {
        if hash_protocol == "BLAKE2s" {
            self.0 = RustCryptoHashProtocol::Blake2s;
        } else if hash_protocol == "BLAKE2b" {
            self.0 = RustCryptoHashProtocol::Blake2b;
        } else if hash_protocol == "SHA256" {
            self.0 = RustCryptoHashProtocol::Sha256;
        } else if hash_protocol == "SHA512" {
            self.0 = RustCryptoHashProtocol::Sha512;
        } else {
            return Err(CryptoError::UnsupportedProtocol);
        }
        Ok(())
    }

    fn block_size(&self) -> usize {
        self.hash_len() * 2
    }

    fn zeros(&self) -> Self::Hash {
        [0u8; 64]
    }

    fn hash_into<'a> (&self, hash: &mut Self::Hash, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
        match self.0 {
            RustCryptoHashProtocol::Uninitialized => Self::panic_uninitialized(),
            RustCryptoHashProtocol::Blake2s => Blake2s.hash_into((&mut hash[0..32]).try_into().unwrap(), mix_hash, inputs),
            RustCryptoHashProtocol::Blake2b => Blake2b.hash_into(hash, mix_hash, inputs),
            RustCryptoHashProtocol::Sha256 => Sha256.hash_into((&mut hash[0..32]).try_into().unwrap(), mix_hash, inputs),
            RustCryptoHashProtocol::Sha512 => Sha512.hash_into(hash, mix_hash, inputs),
        }
    }

    fn hash_as_slice<'a> (&self, hash: &'a Self::Hash) -> &'a [u8] {
        let len = self.hash_len();
        &hash[0..len]
    }

    fn hash_as_mut_slice<'a> (&self, hash: &'a mut Self::Hash) -> &'a mut [u8] {
        let len = self.hash_len();
        &mut hash[0..len]
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct Blake2s;

impl HashBackend for Blake2s {
    type Hash = [u8; 32];

    fn initialize(&mut self, hash_protocol: &str) -> Result<(), CryptoError> {
        if hash_protocol == "BLAKE2s" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn block_size(&self) -> usize {
        64
    }

    fn zeros(&self) -> Self::Hash {
        Self::Hash::default()
    }

    fn hash_into<'a> (&self, hash: &mut Self::Hash, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
        hash_into_with_digest::<blake2::Blake2s256>(hash.into(), mix_hash, inputs);
    }

    fn hash_as_slice<'a> (&self, hash: &'a Self::Hash) -> &'a [u8] {
        hash.as_slice()
    }

    fn hash_as_mut_slice<'a> (&self, hash: &'a mut Self::Hash) -> &'a mut [u8] {
        hash.as_mut_slice()
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct Blake2b;

impl HashBackend for Blake2b {
    type Hash = [u8; 64];

    fn initialize(&mut self, hash_protocol: &str) -> Result<(), CryptoError> {
        if hash_protocol == "BLAKE2b" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn block_size(&self) -> usize {
        128
    }

    fn zeros(&self) -> Self::Hash {
        [0u8; 64]
    }

    fn hash_into<'a> (&self, hash: &mut Self::Hash, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
        hash_into_with_digest::<blake2::Blake2b512>(hash.into(), mix_hash, inputs);
    }

    fn hash_as_slice<'a> (&self, hash: &'a Self::Hash) -> &'a [u8] {
        hash.as_slice()
    }

    fn hash_as_mut_slice<'a> (&self, hash: &'a mut Self::Hash) -> &'a mut [u8] {
        hash.as_mut_slice()
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct Sha256;

impl HashBackend for Sha256 {
    type Hash = [u8; 32];

    fn initialize(&mut self, hash_protocol: &str) -> Result<(), CryptoError> {
        if hash_protocol == "SHA256" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn block_size(&self) -> usize {
        64
    }

    fn zeros(&self) -> Self::Hash {
        [0u8; 32]
    }

    fn hash_into<'a> (&self, hash: &mut Self::Hash, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
        hash_into_with_digest::<sha2::Sha256>(hash.into(), mix_hash, inputs);
    }

    fn hash_as_slice<'a> (&self, hash: &'a Self::Hash) -> &'a [u8] {
        hash.as_slice()
    }

    fn hash_as_mut_slice<'a> (&self, hash: &'a mut Self::Hash) -> &'a mut [u8] {
        hash.as_mut_slice()
    }
}

#[derive(Default, Clone, Zeroize)]
pub struct Sha512;

impl HashBackend for Sha512 {
    type Hash = [u8; 64];

    fn initialize(&mut self, hash_protocol: &str) -> Result<(), CryptoError> {
        if hash_protocol == "SHA512" {
            Ok(())
        } else {
            Err(CryptoError::InvalidProtocol)
        }
    }

    fn block_size(&self) -> usize {
        128
    }

    fn zeros(&self) -> Self::Hash {
        [0u8; 64]
    }

    fn hash_into<'a> (&self, hash: &mut Self::Hash, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
        hash_into_with_digest::<sha2::Sha512>(hash.into(), mix_hash, inputs);
    }

    fn hash_as_slice<'a> (&self, hash: &'a Self::Hash) -> &'a [u8] {
        hash.as_slice()
    }

    fn hash_as_mut_slice<'a> (&self, hash: &'a mut Self::Hash) -> &'a mut [u8] {
        hash.as_mut_slice()
    }
}

fn hash_into_with_digest<'a, D: Digest + FixedOutputReset> (hash: &mut Output<D>, mix_hash: bool, inputs: impl IntoIterator<Item = &'a [u8]>) {
    let mut digest = match mix_hash {
        true => D::new_with_prefix(&hash),
        false => D::new(),
    };
    inputs.into_iter().for_each(|input| Digest::update(&mut digest, input));
    Digest::finalize_into_reset(&mut digest, hash);
}

#[cfg(test)]
mod tests {
    use hmac::Mac;
    use rand_core::{OsRng, RngCore};

    use crate::crypto::backends::HashExt;

    use super::*;

    fn random_hash<H: HashBackend> (hash_impl: &H) -> H::Hash {
        let mut random_hash = hash_impl.zeros();
        OsRng.fill_bytes(hash_impl.hash_as_mut_slice(&mut random_hash));
        random_hash
    }

    #[test]
    fn rustcrypto_hmac() {
        let inputs = [b"hello".as_slice(), b"world".as_slice()];

        let key = random_hash(&Blake2s);
        let mut hmac = Blake2s.zeros();
        Blake2s.hmac(&key, &mut hmac, inputs.iter().copied());

        let mut hmac_rc_inst: hmac::SimpleHmac<blake2::Blake2s256> = <hmac::SimpleHmac<blake2::Blake2s256> as Mac>::new_from_slice(key.as_ref()).unwrap();
        inputs.into_iter().for_each(|input| hmac_rc_inst.update(input));
        let hmac_rc = hmac_rc_inst.finalize().into_bytes();
        assert_eq!(hmac.as_ref(), hmac_rc.as_slice());
    }
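
    // Consistency sketch: `AnyHash` initialized for SHA256 should dispatch to
    // the same computation as the concrete `Sha256` backend for a plain
    // (non-mix) hash over multiple input slices.
    #[test]
    fn rustcrypto_any_hash_sha256() {
        let inputs = [b"hello".as_slice(), b"world".as_slice()];

        let mut any_hash = AnyHash::default();
        assert!(any_hash.initialize("SHA256").is_ok());

        let mut output = any_hash.zeros();
        any_hash.hash_into(&mut output, false, inputs.iter().copied());

        let mut expected = Sha256.zeros();
        Sha256.hash_into(&mut expected, false, inputs.iter().copied());

        assert_eq!(any_hash.hash_as_slice(&output), expected.as_slice());
    }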

    #[test]
    fn rustcrypto_hkdf() {
        let key = random_hash(&Blake2s);
        let ikm = [b"hello".as_slice(), b"world".as_slice()];
        let ikm_rc = b"helloworld";
        let info = b"foobar".as_slice();

        let mut output1 = Blake2s.zeros();
        let mut output2 = [0u8; 16];
        let mut output3 = Blake2s.zeros();

        Blake2s.hkdf(&key, [output1.as_mut(), output2.as_mut(), output3.as_mut()], ikm.iter().copied(), info);

        let hk_inst_rc = hkdf::SimpleHkdf::<blake2::Blake2s256>::new(Some(key.as_ref()), ikm_rc);
        let mut output_rc = [0u8; 32 * 3];
        hk_inst_rc.expand(info, &mut output_rc).unwrap();

        assert_eq!(output1.as_ref(), &output_rc[0..32]);
        assert_eq!(output2.as_ref(), &output_rc[32..48]);
        assert_eq!(output3.as_ref(), &output_rc[64..]);
    }
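
    // Sketch: the per-suite nonce helpers place the 64-bit counter in the
    // last eight bytes of the nonce, big-endian for AESGCM and little-endian
    // for ChaChaPoly, with the leading bytes left at zero.
    #[test]
    fn rustcrypto_aead_nonce_layout() {
        let n = 0x0102_0304_0506_0708u64;

        let aes_nonce = AesGcm::nonce(n);
        let split = aes_nonce.len() - 8;
        assert!(aes_nonce[..split].iter().all(|&b| b == 0));
        assert_eq!(&aes_nonce[split..], &n.to_be_bytes()[..]);

        let chacha_nonce = ChaChaPoly::nonce(n);
        assert!(chacha_nonce[..split].iter().all(|&b| b == 0));
        assert_eq!(&chacha_nonce[split..], &n.to_le_bytes()[..]);
    }

    // Sketch: the dynamic backends accept exactly the protocol names this
    // module implements and reject anything else.
    #[test]
    fn rustcrypto_protocol_names() {
        assert!(AnyAead::default().initialize("AESGCM").is_ok());
        assert!(AnyAead::default().initialize("ChaChaPoly").is_ok());
        assert!(AnyAead::default().initialize("AESGCMSIV").is_err());

        assert!(AnyHash::default().initialize("BLAKE2s").is_ok());
        assert!(AnyHash::default().initialize("BLAKE2b").is_ok());
        assert!(AnyHash::default().initialize("SHA256").is_ok());
        assert!(AnyHash::default().initialize("SHA512").is_ok());
        assert!(AnyHash::default().initialize("SHA3").is_err());
    }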
}