1use mbedtls_sys::*;
10
11use crate::error::{codes, IntoResult, Result};
12
13mod serde;
14
// Base cipher algorithm identifier, mirroring mbedtls `cipher_id_t`.
// Identifies the primitive only (AES, DES, ...), independent of key size
// and mode of operation; combined with those in `Cipher::setup`.
define!(
    #[c_ty(cipher_id_t)]
    #[derive(Copy, Clone, Eq, PartialEq)]
    enum CipherId {
        None = CIPHER_ID_NONE,
        Null = CIPHER_ID_NULL,
        Aes = CIPHER_ID_AES,
        Des = CIPHER_ID_DES,
        // Triple-DES: the C constant is CIPHER_ID_3DES, but a Rust
        // identifier cannot start with a digit, hence `Des3`.
        Des3 = CIPHER_ID_3DES,
        Camellia = CIPHER_ID_CAMELLIA,
        Blowfish = CIPHER_ID_BLOWFISH,
        Arc4 = CIPHER_ID_ARC4,
        Aria = CIPHER_ID_ARIA,
        Chacha20 = CIPHER_ID_CHACHA20,
    }
);
31
impl From<cipher_id_t> for CipherId {
    /// Converts a raw mbedtls `cipher_id_t` value back into the
    /// corresponding `CipherId` variant (inverse of the `define!` mapping).
    ///
    /// # Panics
    /// Panics if `inner` is not one of the `CIPHER_ID_*` constants listed
    /// here, i.e. an mbedtls cipher id this binding does not cover.
    fn from(inner: cipher_id_t) -> Self {
        match inner {
            CIPHER_ID_NONE => CipherId::None,
            CIPHER_ID_NULL => CipherId::Null,
            CIPHER_ID_AES => CipherId::Aes,
            CIPHER_ID_DES => CipherId::Des,
            CIPHER_ID_3DES => CipherId::Des3,
            CIPHER_ID_CAMELLIA => CipherId::Camellia,
            CIPHER_ID_BLOWFISH => CipherId::Blowfish,
            CIPHER_ID_ARC4 => CipherId::Arc4,
            CIPHER_ID_ARIA => CipherId::Aria,
            CIPHER_ID_CHACHA20 => CipherId::Chacha20,
            _ => panic!("Invalid cipher_id_t"),
        }
    }
}
50
// Mode of operation, mirroring mbedtls `cipher_mode_t` (ECB, CBC, AEAD
// modes such as GCM/CCM/CHACHAPOLY, key-wrap modes KW/KWP, ...).
define!(
    #[c_ty(cipher_mode_t)]
    #[derive(Copy, Clone, Eq, PartialEq, Debug)]
    enum CipherMode {
        None = MODE_NONE,
        ECB = MODE_ECB,
        CBC = MODE_CBC,
        CFB = MODE_CFB,
        OFB = MODE_OFB,
        CTR = MODE_CTR,
        GCM = MODE_GCM,
        STREAM = MODE_STREAM,
        CCM = MODE_CCM,
        XTS = MODE_XTS,
        CHACHAPOLY = MODE_CHACHAPOLY,
        KW = MODE_KW,
        KWP = MODE_KWP,
    }
);
70
impl From<cipher_mode_t> for CipherMode {
    /// Converts a raw mbedtls `cipher_mode_t` value back into the
    /// corresponding `CipherMode` variant (inverse of the `define!` mapping).
    ///
    /// # Panics
    /// Panics if `inner` is not one of the `MODE_*` constants listed here.
    fn from(inner: cipher_mode_t) -> Self {
        match inner {
            MODE_NONE => CipherMode::None,
            MODE_ECB => CipherMode::ECB,
            MODE_CBC => CipherMode::CBC,
            MODE_CFB => CipherMode::CFB,
            MODE_OFB => CipherMode::OFB,
            MODE_CTR => CipherMode::CTR,
            MODE_GCM => CipherMode::GCM,
            MODE_STREAM => CipherMode::STREAM,
            MODE_CCM => CipherMode::CCM,
            MODE_XTS => CipherMode::XTS,
            MODE_CHACHAPOLY => CipherMode::CHACHAPOLY,
            MODE_KW => CipherMode::KW,
            MODE_KWP => CipherMode::KWP,
            _ => panic!("Invalid cipher_mode_t"),
        }
    }
}
92
// Fully-specified cipher suites, mirroring mbedtls `cipher_type_t`.
// Each variant pins down algorithm + key size + mode in one value
// (unlike `CipherId`/`CipherMode`, which are combined at setup time).
define!(
    #[c_ty(cipher_type_t)]
    enum CipherType {
        None = CIPHER_NONE,
        Null = CIPHER_NULL,
        Aes128Ecb = CIPHER_AES_128_ECB,
        Aes192Ecb = CIPHER_AES_192_ECB,
        Aes256Ecb = CIPHER_AES_256_ECB,
        Aes128Cbc = CIPHER_AES_128_CBC,
        Aes192Cbc = CIPHER_AES_192_CBC,
        Aes256Cbc = CIPHER_AES_256_CBC,
        Aes128Cfb128 = CIPHER_AES_128_CFB128,
        Aes192Cfb128 = CIPHER_AES_192_CFB128,
        Aes256Cfb128 = CIPHER_AES_256_CFB128,
        Aes128Ctr = CIPHER_AES_128_CTR,
        Aes192Ctr = CIPHER_AES_192_CTR,
        Aes256Ctr = CIPHER_AES_256_CTR,
        Aes128Gcm = CIPHER_AES_128_GCM,
        Aes192Gcm = CIPHER_AES_192_GCM,
        Aes256Gcm = CIPHER_AES_256_GCM,
        Camellia128Ecb = CIPHER_CAMELLIA_128_ECB,
        Camellia192Ecb = CIPHER_CAMELLIA_192_ECB,
        Camellia256Ecb = CIPHER_CAMELLIA_256_ECB,
        Camellia128Cbc = CIPHER_CAMELLIA_128_CBC,
        Camellia192Cbc = CIPHER_CAMELLIA_192_CBC,
        Camellia256Cbc = CIPHER_CAMELLIA_256_CBC,
        Camellia128Cfb128 = CIPHER_CAMELLIA_128_CFB128,
        Camellia192Cfb128 = CIPHER_CAMELLIA_192_CFB128,
        Camellia256Cfb128 = CIPHER_CAMELLIA_256_CFB128,
        Camellia128Ctr = CIPHER_CAMELLIA_128_CTR,
        Camellia192Ctr = CIPHER_CAMELLIA_192_CTR,
        Camellia256Ctr = CIPHER_CAMELLIA_256_CTR,
        Camellia128Gcm = CIPHER_CAMELLIA_128_GCM,
        Camellia192Gcm = CIPHER_CAMELLIA_192_GCM,
        Camellia256Gcm = CIPHER_CAMELLIA_256_GCM,
        DesEcb = CIPHER_DES_ECB,
        DesCbc = CIPHER_DES_CBC,
        DesEdeEcb = CIPHER_DES_EDE_ECB,
        DesEdeCbc = CIPHER_DES_EDE_CBC,
        DesEde3Ecb = CIPHER_DES_EDE3_ECB,
        DesEde3Cbc = CIPHER_DES_EDE3_CBC,
        BlowfishEcb = CIPHER_BLOWFISH_ECB,
        BlowfishCbc = CIPHER_BLOWFISH_CBC,
        BlowfishCfb64 = CIPHER_BLOWFISH_CFB64,
        BlowfishCtr = CIPHER_BLOWFISH_CTR,
        // Note: the variant name uses the spelled-out "Arcfour" while the
        // C constant uses ARC4; they are the same RC4 stream cipher.
        Arcfour128 = CIPHER_ARC4_128,
        Aes128Ccm = CIPHER_AES_128_CCM,
        Aes192Ccm = CIPHER_AES_192_CCM,
        Aes256Ccm = CIPHER_AES_256_CCM,
        Camellia128Ccm = CIPHER_CAMELLIA_128_CCM,
        Camellia192Ccm = CIPHER_CAMELLIA_192_CCM,
        Camellia256Ccm = CIPHER_CAMELLIA_256_CCM,
        Aria128Ecb = CIPHER_ARIA_128_ECB,
        Aria192Ecb = CIPHER_ARIA_192_ECB,
        Aria256Ecb = CIPHER_ARIA_256_ECB,
        Aria128Cbc = CIPHER_ARIA_128_CBC,
        Aria192Cbc = CIPHER_ARIA_192_CBC,
        Aria256Cbc = CIPHER_ARIA_256_CBC,
        Aria128Cfb128 = CIPHER_ARIA_128_CFB128,
        Aria192Cfb128 = CIPHER_ARIA_192_CFB128,
        Aria256Cfb128 = CIPHER_ARIA_256_CFB128,
        Aria128Ctr = CIPHER_ARIA_128_CTR,
        Aria192Ctr = CIPHER_ARIA_192_CTR,
        Aria256Ctr = CIPHER_ARIA_256_CTR,
        Aria128Gcm = CIPHER_ARIA_128_GCM,
        Aria192Gcm = CIPHER_ARIA_192_GCM,
        Aria256Gcm = CIPHER_ARIA_256_GCM,
        Aria128Ccm = CIPHER_ARIA_128_CCM,
        Aria192Ccm = CIPHER_ARIA_192_CCM,
        Aria256Ccm = CIPHER_ARIA_256_CCM,
        Aes128Ofb = CIPHER_AES_128_OFB,
        Aes192Ofb = CIPHER_AES_192_OFB,
        Aes256Ofb = CIPHER_AES_256_OFB,
        // XTS uses a double-length key, so only 128- and 256-bit variants
        // exist in mbedtls (no 192).
        Aes128Xts = CIPHER_AES_128_XTS,
        Aes256Xts = CIPHER_AES_256_XTS,
        Chacha20 = CIPHER_CHACHA20,
        Chacha20Poly1305 = CIPHER_CHACHA20_POLY1305,
        Aes128Kw = CIPHER_AES_128_KW,
        Aes192Kw = CIPHER_AES_192_KW,
        Aes256Kw = CIPHER_AES_256_KW,
        Aes128Kwp = CIPHER_AES_128_KWP,
        Aes192Kwp = CIPHER_AES_192_KWP,
        Aes256Kwp = CIPHER_AES_256_KWP,
    }
);
178
// Block-cipher padding scheme, mirroring mbedtls `cipher_padding_t`.
// The Rust variant names describe the standard; the C constants describe
// the byte pattern — the pairing below is the intended mapping.
define!(
    #[c_ty(cipher_padding_t)]
    #[derive(Serialize, Deserialize, Copy, Clone, Debug, Eq, PartialEq)]
    enum CipherPadding {
        Pkcs7 = PADDING_PKCS7,
        // ISO/IEC 7816-4: a 0x80 byte followed by zeros.
        IsoIec78164 = PADDING_ONE_AND_ZEROS,
        // ANSI X9.23: zeros followed by a length byte.
        AnsiX923 = PADDING_ZEROS_AND_LEN,
        Zeros = PADDING_ZEROS,
        None = PADDING_NONE,
    }
);
190
// Cipher direction, mirroring mbedtls `operation_t`; passed to
// `Cipher::set_key` to select encryption or decryption.
define!(
    #[c_ty(operation_t)]
    enum Operation {
        None = OPERATION_NONE,
        Decrypt = DECRYPT,
        Encrypt = ENCRYPT,
    }
);
199
// Safe wrapper around an mbedtls `cipher_context_t`. The `define!` macro
// generates the struct with an `inner` C context, wiring `cipher_init` as
// the constructor and `cipher_free` as the Drop handler so the C context
// is always released.
define!(
    #[c_ty(cipher_context_t)]
    #[repr(C)]
    struct Cipher;
    const init: fn() -> Self = cipher_init;
    const drop: fn(&mut Self) = cipher_free;
    impl<'a> Into<ptr> {}
);
208
impl Cipher {
    /// Creates a cipher context configured for the given algorithm, mode of
    /// operation and key length (in bits).
    ///
    /// Fails if mbedtls has no cipher matching the (id, key length, mode)
    /// triple. A key must still be supplied via [`set_key`] before use.
    pub fn setup(cipher_id: CipherId, cipher_mode: CipherMode, key_bit_len: u32) -> Result<Cipher> {
        let mut ret = Self::init();
        unsafe {
            cipher_setup(
                &mut ret.inner,
                // Looks up the static cipher_info table entry for this
                // combination; cipher_setup reports an error for a null/
                // unsupported entry.
                cipher_info_from_values(cipher_id.into(), key_bit_len as i32, cipher_mode.into()),
            )
            .into_result()?;
        }
        Ok(ret)
    }

    /// Binds `key` to the context for the given direction.
    /// mbedtls takes the key length in bits, hence `key.len() * 8`.
    pub fn set_key(&mut self, op: Operation, key: &[u8]) -> Result<()> {
        unsafe { cipher_setkey(&mut self.inner, key.as_ptr(), (key.len() * 8) as _, op.into()).into_result_discard() }
    }

    /// Selects the padding scheme used by block modes that pad (e.g. CBC).
    pub fn set_padding(&mut self, padding: CipherPadding) -> Result<()> {
        unsafe { cipher_set_padding_mode(&mut self.inner, padding.into()).into_result_discard() }
    }

    /// Sets the IV/nonce. The stored IV is also what the one-shot
    /// `*_auth` methods below hand to mbedtls.
    pub fn set_iv(&mut self, iv: &[u8]) -> Result<()> {
        unsafe { cipher_set_iv(&mut self.inner, iv.as_ptr(), iv.len()).into_result_discard() }
    }

    /// Resets the context's internal streaming state so a new message can
    /// be processed with the same key/IV configuration.
    pub fn reset(&mut self) -> Result<()> {
        unsafe { cipher_reset(&mut self.inner).into_result_discard() }
    }

    /// Feeds additional authenticated data (AAD) for AEAD modes; must be
    /// called before any `update` of the message itself.
    pub fn update_ad(&mut self, ad: &[u8]) -> Result<()> {
        unsafe { cipher_update_ad(&mut self.inner, ad.as_ptr(), ad.len()).into_result_discard() }
    }

    /// Encrypts/decrypts `in_data` into `out_data`, returning the number of
    /// bytes actually written (which may be less than `in_data.len()` while
    /// mbedtls buffers a partial block).
    pub fn update(&mut self, in_data: &[u8], out_data: &mut [u8]) -> Result<usize> {
        // ECB is handled one block per call (see `do_crypto`), so one block
        // of output space suffices; other modes may flush up to a full
        // buffered block in addition to the input, hence input + block.
        let required_size = if unsafe { *self.inner.cipher_info }.mode == MODE_ECB {
            self.block_size()
        } else {
            in_data.len() + self.block_size()
        };

        if out_data.len() < required_size {
            return Err(codes::CipherFullBlockExpected.into());
        }

        let mut olen = 0;
        unsafe {
            cipher_update(
                &mut self.inner,
                in_data.as_ptr(),
                in_data.len(),
                out_data.as_mut_ptr(),
                &mut olen,
            )
            .into_result()?;
        }
        Ok(olen)
    }

    /// Finalizes the operation, flushing any buffered data (and applying
    /// padding where configured). `out_data` must hold at least one block.
    pub fn finish(&mut self, out_data: &mut [u8]) -> Result<usize> {
        if out_data.len() < self.block_size() {
            return Err(codes::CipherFullBlockExpected.into());
        }

        let mut olen = 0;
        unsafe {
            cipher_finish(&mut self.inner, out_data.as_mut_ptr(), &mut olen).into_result()?;
        }
        Ok(olen)
    }

    /// Writes the authentication tag after an AEAD encryption (streaming API).
    pub fn write_tag(&mut self, tag: &mut [u8]) -> Result<()> {
        unsafe { cipher_write_tag(&mut self.inner, tag.as_mut_ptr(), tag.len()).into_result_discard() }
    }

    /// Verifies the authentication tag after an AEAD decryption (streaming
    /// API); returns an error on mismatch.
    pub fn check_tag(&mut self, tag: &[u8]) -> Result<()> {
        unsafe { cipher_check_tag(&mut self.inner, tag.as_ptr(), tag.len()).into_result_discard() }
    }

    /// Block size in bytes of the configured cipher.
    pub fn block_size(&self) -> usize {
        unsafe { (*self.inner.cipher_info).block_size as usize }
    }

    /// Default IV size in bytes of the configured cipher.
    pub fn iv_size(&self) -> usize {
        unsafe { (*self.inner.cipher_info).iv_size as usize }
    }

    /// The mode of operation this context was set up with.
    pub fn cipher_mode(&self) -> CipherMode {
        unsafe { (*self.inner.cipher_info).mode.into() }
    }

    /// Whether the configured mode is treated as authenticated by this
    /// wrapper. Only GCM and CCM are reported.
    // NOTE(review): MODE_CHACHAPOLY is also an AEAD mode but is not
    // reported here, so `decrypt_auth`'s length pre-check is skipped for
    // it — confirm whether that is intentional.
    pub fn is_authenticated(&self) -> bool {
        unsafe {
            if (*self.inner.cipher_info).mode == MODE_GCM || (*self.inner.cipher_info).mode == MODE_CCM {
                return true;
            } else {
                return false;
            }
        }
    }

    /// Fixes the parity bits of a DES key in place (each key byte's low bit
    /// is a parity bit in DES). Associated function; no context needed.
    pub fn set_parity(key: &mut [u8]) -> Result<()> {
        unsafe { des_key_set_parity(key.as_mut_ptr()) }
        Ok(())
    }

    /// One-shot (non-AEAD) encryption: resets the context and runs
    /// update/finish over `plain`. Returns the number of bytes written.
    pub fn encrypt(&mut self, plain: &[u8], cipher: &mut [u8]) -> Result<usize> {
        self.do_crypto(plain, cipher)
    }

    /// One-shot (non-AEAD) decryption counterpart of [`encrypt`].
    pub fn decrypt(&mut self, cipher: &[u8], plain: &mut [u8]) -> Result<usize> {
        self.do_crypto(cipher, plain)
    }

    /// One-shot AEAD encryption. `cipher_and_tag` receives ciphertext
    /// followed by a `tag_len`-byte tag, so it must hold at least
    /// `plain.len() + tag_len` bytes. Uses the IV previously stored via
    /// [`set_iv`]. Returns the total bytes written (ciphertext + tag).
    pub fn encrypt_auth(&mut self, ad: &[u8], plain: &[u8], cipher_and_tag: &mut [u8], tag_len: usize) -> Result<usize> {
        // checked_sub guards against tag_len > buffer length; map_or(true, ..)
        // treats that underflow as "too small" as well.
        if cipher_and_tag
            .len()
            .checked_sub(tag_len)
            .map_or(true, |cipher_len| cipher_len < plain.len())
        {
            return Err(codes::CipherBadInputData.into());
        }

        let iv = self.inner.iv;
        let iv_len = self.inner.iv_size;
        // In: output buffer capacity; out: bytes actually written.
        let mut cipher_len = cipher_and_tag.len();
        unsafe {
            cipher_auth_encrypt_ext(
                &mut self.inner,
                iv.as_ptr(),
                iv_len,
                ad.as_ptr(),
                ad.len(),
                plain.as_ptr(),
                plain.len(),
                cipher_and_tag.as_mut_ptr(),
                cipher_len,
                &mut cipher_len,
                tag_len,
            )
            .into_result()?
        };

        Ok(cipher_len)
    }

    /// One-shot AEAD decryption. `cipher_and_tag` is ciphertext followed by
    /// a `tag_len`-byte tag; `plain` must hold the ciphertext length.
    /// Returns the plaintext length, or an error on tag mismatch.
    pub fn decrypt_auth(&mut self, ad: &[u8], cipher_and_tag: &[u8], plain: &mut [u8], tag_len: usize) -> Result<usize> {
        // The output-size pre-check only applies to modes this wrapper
        // classifies as authenticated (GCM/CCM — see `is_authenticated`).
        if self.is_authenticated()
            && cipher_and_tag
                .len()
                .checked_sub(tag_len)
                .map_or(true, |cipher_len| plain.len() < cipher_len)
        {
            return Err(codes::CipherBadInputData.into());
        }

        let iv = self.inner.iv;
        let iv_len = self.inner.iv_size;
        // In: output buffer capacity; out: bytes actually written.
        let mut plain_len = plain.len();
        unsafe {
            cipher_auth_decrypt_ext(
                &mut self.inner,
                iv.as_ptr(),
                iv_len,
                ad.as_ptr(),
                ad.len(),
                cipher_and_tag.as_ptr(),
                cipher_and_tag.len(),
                plain.as_mut_ptr(),
                plain_len,
                &mut plain_len,
                tag_len,
            )
            .into_result()?
        };

        Ok(plain_len)
    }

    /// In-place AEAD encryption: `data` is overwritten with ciphertext and
    /// the tag is written to `tag`. Passing the same buffer as input and
    /// output relies on mbedtls supporting in-place operation here.
    pub fn encrypt_auth_inplace(&mut self, ad: &[u8], data: &mut [u8], tag: &mut [u8]) -> Result<usize> {
        let iv = self.inner.iv;
        let iv_len = self.inner.iv_size;
        let mut olen = data.len();
        unsafe {
            cipher_auth_encrypt(
                &mut self.inner,
                iv.as_ptr(),
                iv_len,
                ad.as_ptr(),
                ad.len(),
                data.as_ptr(),
                data.len(),
                data.as_mut_ptr(),
                &mut olen,
                tag.as_mut_ptr(),
                tag.len(),
            )
            .into_result()?
        };

        Ok(olen)
    }

    /// In-place AEAD decryption counterpart of [`encrypt_auth_inplace`];
    /// verifies `tag` and overwrites `data` with the plaintext.
    pub fn decrypt_auth_inplace(&mut self, ad: &[u8], data: &mut [u8], tag: &[u8]) -> Result<usize> {
        let iv = self.inner.iv;
        let iv_len = self.inner.iv_size;
        let mut plain_len = data.len();
        unsafe {
            cipher_auth_decrypt(
                &mut self.inner,
                iv.as_ptr(),
                iv_len,
                ad.as_ptr(),
                ad.len(),
                data.as_ptr(),
                data.len(),
                data.as_mut_ptr(),
                &mut plain_len,
                tag.as_ptr(),
                tag.len(),
            )
            .into_result()?
        };

        Ok(plain_len)
    }

    /// Shared body of `encrypt`/`decrypt`: reset, then update (and finish,
    /// for non-ECB modes). Returns the total bytes written.
    fn do_crypto(&mut self, in_data: &[u8], out_data: &mut [u8]) -> Result<usize> {
        self.reset()?;

        let mut total_len = 0;

        if unsafe { *self.inner.cipher_info }.mode == MODE_ECB {
            // ECB is processed one block per `update` call; no `finish` is
            // called, so each chunk is expected to be a full block
            // (a trailing partial block will be rejected by mbedtls).
            for chunk in in_data.chunks(self.block_size()) {
                let len = self.update(chunk, &mut out_data[total_len..])?;
                total_len += len;
            }
        } else {
            total_len = self.update(in_data, out_data)?;
            total_len += self.finish(&mut out_data[total_len..])?;
        }

        Ok(total_len)
    }

    /// Computes a CMAC over `data` with `key`, writing the tag to
    /// `out_data`, which must hold at least one cipher block (CMAC output
    /// is one block long). Key length is passed to mbedtls in bits.
    pub fn cmac(&mut self, key: &[u8], data: &[u8], out_data: &mut [u8]) -> Result<()> {
        if out_data.len() < self.block_size() {
            return Err(codes::CipherFullBlockExpected.into());
        }
        self.reset()?;
        unsafe {
            cipher_cmac(
                &*self.inner.cipher_info,
                key.as_ptr(),
                (key.len() * 8) as _,
                data.as_ptr(),
                data.len(),
                out_data.as_mut_ptr(),
            )
            .into_result()?;
        }
        Ok(())
    }
}
490
#[test]
fn no_overflow() {
    // Encrypting 16 bytes in CBC mode requires room for input plus one
    // block (32 bytes). Offer the cipher only a 16-byte window of a larger
    // buffer and verify it errors out instead of writing past the window.
    let mut cipher = Cipher::setup(CipherId::Aes, CipherMode::CBC, 128).unwrap();
    cipher.set_key(Operation::Encrypt, &[0u8; 16]).unwrap();
    cipher.set_iv(&[0u8; 16]).unwrap();
    let mut buf = [0u8; 48];
    let result = cipher.encrypt(&[0u8; 16][..], &mut buf[..16]);
    // Everything beyond the 16-byte window must be untouched.
    assert_eq!(buf[16..], [0u8; 32]);
    result.expect_err("Returned OK with too small buffer");
}
501
#[test]
fn one_part_ecb() {
    // AES-128-ECB known-answer test (FIPS-197 Appendix C.1 vector,
    // repeated so two blocks are encrypted in a single call).
    let mut ctx = Cipher::setup(CipherId::Aes, CipherMode::ECB, 128).unwrap();
    ctx.set_key(
        Operation::Encrypt,
        b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
    )
    .unwrap();
    let mut buf = [0u8; 48];
    let written = ctx.encrypt(b"\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", &mut buf).unwrap();
    // Both 16-byte blocks must come out, each matching the expected
    // ciphertext for the (identical) input block.
    assert_eq!(written, 32);
    assert_eq!(&buf[..written], b"\x69\xc4\xe0\xd8\x6a\x7b\x04\x30\xd8\xcd\xb7\x80\x70\xb4\xc5\x5a\x69\xc4\xe0\xd8\x6a\x7b\x04\x30\xd8\xcd\xb7\x80\x70\xb4\xc5\x5a");
}
515
#[test]
fn cmac_test() {
    // AES-128 CMAC known-answer check over a single 16-byte message.
    let mut ctx = Cipher::setup(CipherId::Aes, CipherMode::ECB, 128).unwrap();
    let mut mac = [0u8; 16];
    ctx.cmac(
        b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
        b"\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff",
        &mut mac,
    )
    .expect("Success in CMAC");
    assert_eq!(&mac, b"\x38\x7b\x36\x22\x8b\xa7\x77\x44\x5b\xaf\xa0\x36\x45\xb9\x40\x10");
}