//! # BIP-39 Mnemonic Codes
//!
//! Library for generating and parsing BIP-39 mnemonic codes and deriving
//! binary seeds from them.

#![deny(non_upper_case_globals)]
#![deny(non_camel_case_types)]
#![deny(non_snake_case)]
#![deny(unused_mut)]
#![deny(dead_code)]
#![deny(unused_imports)]
#![deny(missing_docs)]
#![cfg_attr(all(not(test), not(feature = "std")), no_std)]

#[cfg(any(test, feature = "std"))]
pub extern crate core;

extern crate bitcoin_hashes;
extern crate rand_core;

#[cfg(feature = "std")]
extern crate unicode_normalization;

#[cfg(feature = "rand")]
extern crate rand;
#[cfg(feature = "serde")]
pub extern crate serde;

use core::{fmt, str};

#[cfg(feature = "std")]
use std::borrow::Cow;
#[cfg(feature = "std")]
use std::error;

use bitcoin_hashes::{sha256, Hash};

#[cfg(feature = "std")]
use unicode_normalization::UnicodeNormalization;

#[cfg(feature = "zeroize")]
extern crate zeroize;
#[cfg(feature = "zeroize")]
use zeroize::{Zeroize, ZeroizeOnDrop};

#[macro_use]
mod internal_macros;
mod language;
mod pbkdf2;

pub use language::Language;

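/// The minimum number of words in a mnemonic.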
#[allow(unused)]
const MIN_NB_WORDS: usize = 12;

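/// The maximum number of words in a mnemonic.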
const MAX_NB_WORDS: usize = 24;

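/// Sentinel value used to mark unused slots in the internal word array.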
const EOF: u16 = u16::max_value();

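/// The set of possible languages a mnemonic could be in, returned in the
/// `Error::AmbiguousLanguages` variant.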
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub struct AmbiguousLanguages([bool; language::MAX_NB_LANGUAGES]);

impl AmbiguousLanguages {
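    /// Presents the possible languages as a slice of booleans, ordered like
    /// `Language::all()`.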
    pub fn as_bools(&self) -> &[bool; language::MAX_NB_LANGUAGES] {
        &self.0
    }

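    /// An iterator over the possible languages.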
    pub fn iter(&self) -> impl Iterator<Item = Language> + '_ {
        Language::all().iter().enumerate().filter(move |(i, _)| self.0[*i]).map(|(_, l)| *l)
    }

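    /// The possible languages collected into a vector.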
    #[cfg(feature = "std")]
    pub fn to_vec(&self) -> Vec<Language> {
        self.iter().collect()
    }
}

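/// A BIP-39 error.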
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum Error {
    /// Mnemonic has a word count that is not 12, 15, 18, 21 or 24.
    BadWordCount(usize),
    /// Mnemonic contains a word that is not in the word list
    /// (the payload is the index of that word).
    UnknownWord(usize),
    /// Entropy was not a multiple of 32 bits or between 128-256 bits in length.
    BadEntropyBitCount(usize),
    /// The mnemonic has an invalid checksum.
    InvalidChecksum,
    /// The mnemonic can be interpreted as multiple languages.
    /// Use the helper methods of the inner struct to inspect which languages are possible.
    AmbiguousLanguages(AmbiguousLanguages),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::BadWordCount(c) => {
                write!(f, "mnemonic has an invalid word count: {}", c)
            }
            Error::UnknownWord(i) => write!(f, "mnemonic contains an unknown word (word {})", i),
            Error::BadEntropyBitCount(c) => write!(
                f,
                "entropy was not between 128-256 bits or not a multiple of 32 bits: {} bits",
                c,
            ),
            Error::InvalidChecksum => write!(f, "the mnemonic has an invalid checksum"),
            Error::AmbiguousLanguages(a) => {
                write!(f, "ambiguous word list: ")?;
                for (i, lang) in a.iter().enumerate() {
                    if i == 0 {
                        write!(f, "{}", lang)?;
                    } else {
                        write!(f, ", {}", lang)?;
                    }
                }
                Ok(())
            }
        }
    }
}

#[cfg(feature = "std")]
impl error::Error for Error {}

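/// A mnemonic code.
///
/// The `FromStr` implementation parses a mnemonic sentence and detects its
/// language among the enabled ones.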
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "zeroize", derive(Zeroize, ZeroizeOnDrop))]
pub struct Mnemonic {
    /// The language the mnemonic's words are taken from.
    lang: Language,
    /// The indices of the words, with unused slots marked by `EOF`.
    words: [u16; MAX_NB_WORDS],
}

#[cfg(feature = "zeroize")]
impl zeroize::DefaultIsZeroes for Language {}

serde_string_impl!(Mnemonic, "a BIP-39 Mnemonic Code");

impl Mnemonic {
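    /// Ensures the content of the `Cow<str>` is in NFKD-normalized UTF-8,
    /// only allocating a new string when normalization is actually needed.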
    #[inline]
    #[cfg(feature = "std")]
    fn normalize_utf8_cow<'a>(cow: &mut Cow<'a, str>) {
        let is_nfkd = unicode_normalization::is_nfkd_quick(cow.as_ref().chars());
        if is_nfkd != unicode_normalization::IsNormalized::Yes {
            *cow = Cow::Owned(cow.as_ref().nfkd().to_string());
        }
    }

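    /// Creates a [Mnemonic] in the given language from the given entropy.
    ///
    /// The entropy must be between 128 and 256 bits and a multiple of 32 bits
    /// (i.e. 16, 20, 24, 28 or 32 bytes).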
    pub fn from_entropy_in(language: Language, entropy: &[u8]) -> Result<Mnemonic, Error> {
        const MAX_ENTROPY_BITS: usize = 256;
        const MIN_ENTROPY_BITS: usize = 128;
        const MAX_CHECKSUM_BITS: usize = 8;

        let nb_bytes = entropy.len();
        let nb_bits = nb_bytes * 8;

        if nb_bits % 32 != 0 {
            return Err(Error::BadEntropyBitCount(nb_bits));
        }
        if nb_bits < MIN_ENTROPY_BITS || nb_bits > MAX_ENTROPY_BITS {
            return Err(Error::BadEntropyBitCount(nb_bits));
        }

        let check = sha256::Hash::hash(&entropy);
        let mut bits = [false; MAX_ENTROPY_BITS + MAX_CHECKSUM_BITS];
        for i in 0..nb_bytes {
            for j in 0..8 {
                bits[i * 8 + j] = (entropy[i] & (1 << (7 - j))) > 0;
            }
        }
        for i in 0..nb_bytes / 4 {
            bits[8 * nb_bytes + i] = (check[i / 8] & (1 << (7 - (i % 8)))) > 0;
        }

        let mut words = [EOF; MAX_NB_WORDS];
        let nb_words = nb_bytes * 3 / 4;
        for i in 0..nb_words {
            let mut idx = 0;
            for j in 0..11 {
                if bits[i * 11 + j] {
                    idx += 1 << (10 - j);
                }
            }
            words[i] = idx;
        }

        Ok(Mnemonic {
            lang: language,
            words: words,
        })
    }

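    /// Creates a [Mnemonic] in English from the given entropy.
    /// See [Mnemonic::from_entropy_in] for the requirements on the entropy.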
    pub fn from_entropy(entropy: &[u8]) -> Result<Mnemonic, Error> {
        Mnemonic::from_entropy_in(Language::English, entropy)
    }

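    /// Generates a new [Mnemonic] in the given language with the given number
    /// of words, sourcing its entropy from the provided random number
    /// generator. The word count must be 12, 15, 18, 21 or 24.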
    pub fn generate_in_with<R>(
        rng: &mut R,
        language: Language,
        word_count: usize,
    ) -> Result<Mnemonic, Error>
    where
        R: rand_core::RngCore + rand_core::CryptoRng,
    {
        if is_invalid_word_count(word_count) {
            return Err(Error::BadWordCount(word_count));
        }

        let entropy_bytes = (word_count / 3) * 4;
        let mut entropy = [0u8; (MAX_NB_WORDS / 3) * 4];
        rand_core::RngCore::fill_bytes(rng, &mut entropy[0..entropy_bytes]);
        Mnemonic::from_entropy_in(language, &entropy[0..entropy_bytes])
    }

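    /// Generates a new [Mnemonic] in the given language with the given number
    /// of words, using the thread-local random number generator.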
    #[cfg(feature = "rand")]
    pub fn generate_in(language: Language, word_count: usize) -> Result<Mnemonic, Error> {
        Mnemonic::generate_in_with(&mut rand::thread_rng(), language, word_count)
    }

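    /// Generates a new English [Mnemonic] with the given number of words.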
    #[cfg(feature = "rand")]
    pub fn generate(word_count: usize) -> Result<Mnemonic, Error> {
        Mnemonic::generate_in(Language::English, word_count)
    }

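    /// Returns the language of the mnemonic.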
    pub fn language(&self) -> Language {
        self.lang
    }

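    /// Returns an iterator over the words of the mnemonic as static string
    /// slices from the word list.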
    pub fn word_iter(&self) -> impl Iterator<Item = &'static str> + Clone + '_ {
        let list = self.lang.word_list();
        self.words.iter().take_while(|w| **w != EOF).map(move |w| list[*w as usize])
    }

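    /// Determines the language of the mnemonic from an iterator over its
    /// words. Languages with a unique word list are recognized from the first
    /// word; otherwise every word is checked against the remaining candidates.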
    fn language_of_iter<'a, W: Iterator<Item = &'a str>>(words: W) -> Result<Language, Error> {
        let mut words = words.peekable();
        let langs = Language::all();
        {
            let first_word = words.peek().ok_or(Error::BadWordCount(0))?;
            if first_word.len() == 0 {
                return Err(Error::BadWordCount(0));
            }

            for language in langs.iter().filter(|l| l.unique_words()) {
                if language.find_word(first_word).is_some() {
                    return Ok(*language);
                }
            }
        }

        let mut possible = [false; language::MAX_NB_LANGUAGES];
        for (i, lang) in langs.iter().enumerate() {
            possible[i] = !lang.unique_words();
        }
        for (idx, word) in words.enumerate() {
            for (i, lang) in langs.iter().enumerate() {
                possible[i] &= lang.find_word(word).is_some();
            }

            let mut iter = possible.iter().zip(langs.iter()).filter(|(p, _)| **p).map(|(_, l)| l);

            match iter.next() {
                None => return Err(Error::UnknownWord(idx)),
                Some(remaining) => {
                    if iter.next().is_none() {
                        return Ok(*remaining);
                    }
                }
            }
        }

        Err(Error::AmbiguousLanguages(AmbiguousLanguages(possible)))
    }

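    /// Determines the language of the mnemonic sentence.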
    pub fn language_of<S: AsRef<str>>(mnemonic: S) -> Result<Language, Error> {
        Mnemonic::language_of_iter(mnemonic.as_ref().split_whitespace())
    }

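    /// Parses a mnemonic in the given language from a string that is already
    /// in normalized (NFKD) UTF-8.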
    pub fn parse_in_normalized(language: Language, s: &str) -> Result<Mnemonic, Error> {
        let nb_words = s.split_whitespace().count();
        if is_invalid_word_count(nb_words) {
            return Err(Error::BadWordCount(nb_words));
        }

        let mut words = [EOF; MAX_NB_WORDS];

        let mut bits = [false; MAX_NB_WORDS * 11];

        for (i, word) in s.split_whitespace().enumerate() {
            let idx = language.find_word(word).ok_or(Error::UnknownWord(i))?;

            words[i] = idx;

            for j in 0..11 {
                bits[i * 11 + j] = idx >> (10 - j) & 1 == 1;
            }
        }

        let mut entropy = [0u8; MAX_NB_WORDS / 3 * 4];
        let nb_bytes_entropy = nb_words / 3 * 4;
        for i in 0..nb_bytes_entropy {
            for j in 0..8 {
                if bits[i * 8 + j] {
                    entropy[i] += 1 << (7 - j);
                }
            }
        }
        let check = sha256::Hash::hash(&entropy[0..nb_bytes_entropy]);
        for i in 0..nb_bytes_entropy / 4 {
            if bits[8 * nb_bytes_entropy + i] != ((check[i / 8] & (1 << (7 - (i % 8)))) > 0) {
                return Err(Error::InvalidChecksum);
            }
        }

        Ok(Mnemonic {
            lang: language,
            words: words,
        })
    }

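    /// Parses a mnemonic in normalized (NFKD) UTF-8, detecting its language.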
    pub fn parse_normalized(s: &str) -> Result<Mnemonic, Error> {
        let lang = Mnemonic::language_of(s)?;
        Mnemonic::parse_in_normalized(lang, s)
    }

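    /// Parses a mnemonic in the given language, normalizing the input first.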
    #[cfg(feature = "std")]
    pub fn parse_in<'a, S: Into<Cow<'a, str>>>(
        language: Language,
        s: S,
    ) -> Result<Mnemonic, Error> {
        let mut cow = s.into();
        Mnemonic::normalize_utf8_cow(&mut cow);
        Ok(Mnemonic::parse_in_normalized(language, cow.as_ref())?)
    }

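    /// Parses a mnemonic, normalizing the input first and detecting its
    /// language.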
    #[cfg(feature = "std")]
    pub fn parse<'a, S: Into<Cow<'a, str>>>(s: S) -> Result<Mnemonic, Error> {
        let mut cow = s.into();
        Mnemonic::normalize_utf8_cow(&mut cow);

        let language = if Language::all().len() == 1 {
            Language::all()[0]
        } else {
            Mnemonic::language_of(cow.as_ref())?
        };

        Ok(Mnemonic::parse_in_normalized(language, cow.as_ref())?)
    }

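    /// Returns the number of words in the mnemonic.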
    pub fn word_count(&self) -> usize {
        self.words.iter().take_while(|w| **w != EOF).count()
    }

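    /// Converts the mnemonic to a 64-byte BIP-39 seed using the given
    /// passphrase, which must already be in normalized (NFKD) UTF-8.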
    pub fn to_seed_normalized(&self, normalized_passphrase: &str) -> [u8; 64] {
        const PBKDF2_ROUNDS: usize = 2048;
        const PBKDF2_BYTES: usize = 64;

        let mut seed = [0u8; PBKDF2_BYTES];
        pbkdf2::pbkdf2(
            self.word_iter(),
            normalized_passphrase.as_bytes(),
            PBKDF2_ROUNDS,
            &mut seed,
        );
        seed
    }

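    /// Converts the mnemonic to a 64-byte BIP-39 seed, normalizing the
    /// passphrase first.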
    #[cfg(feature = "std")]
    pub fn to_seed<'a, P: Into<Cow<'a, str>>>(&self, passphrase: P) -> [u8; 64] {
        let normalized_passphrase = {
            let mut cow = passphrase.into();
            Mnemonic::normalize_utf8_cow(&mut cow);
            cow
        };
        self.to_seed_normalized(normalized_passphrase.as_ref())
    }

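    /// Returns the entropy encoded by the mnemonic as a fixed-size byte array
    /// together with the number of bytes that are actually used.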
    pub fn to_entropy_array(&self) -> ([u8; 33], usize) {
        let language = Mnemonic::language_of_iter(self.word_iter()).unwrap();

        let mut entropy = [0; 33];
        let mut cursor = 0;
        let mut offset = 0;
        let mut remainder = 0;

        let nb_words = self.word_count();
        for word in self.word_iter() {
            let idx = language.find_word(word).expect("invalid mnemonic");

            remainder |= ((idx as u32) << (32 - 11)) >> offset;
            offset += 11;

            while offset >= 8 {
                entropy[cursor] = (remainder >> 24) as u8;
                cursor += 1;
                remainder <<= 8;
                offset -= 8;
            }
        }

        if offset != 0 {
            entropy[cursor] = (remainder >> 24) as u8;
        }

        let entropy_bytes = (nb_words / 3) * 4;
        (entropy, entropy_bytes)
    }

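    /// Returns the entropy encoded by the mnemonic as a vector of bytes.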
    #[cfg(feature = "std")]
    pub fn to_entropy(&self) -> Vec<u8> {
        let (arr, len) = self.to_entropy_array();
        arr[0..len].to_vec()
    }
}

impl fmt::Display for Mnemonic {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (i, word) in self.word_iter().enumerate() {
            if i > 0 {
                f.write_str(" ")?;
            }
            f.write_str(word)?;
        }
        Ok(())
    }
}

impl str::FromStr for Mnemonic {
    type Err = Error;

    fn from_str(s: &str) -> Result<Mnemonic, Error> {
        #[cfg(feature = "std")]
        {
            Mnemonic::parse(s)
        }
        #[cfg(not(feature = "std"))]
        {
            Mnemonic::parse_normalized(s)
        }
    }
}

fn is_invalid_word_count(word_count: usize) -> bool {
    word_count < MIN_NB_WORDS || word_count % 3 != 0 || word_count > MAX_NB_WORDS
}

#[cfg(test)]
mod tests {
    use super::*;

    use bitcoin_hashes::hex::FromHex;

    #[cfg(feature = "rand")]
    #[test]
    fn test_language_of() {
        for lang in Language::all() {
            let m = Mnemonic::generate_in(*lang, 24).unwrap();
            assert_eq!(*lang, Mnemonic::language_of_iter(m.word_iter()).unwrap());
            assert_eq!(
                *lang,
                Mnemonic::language_of_iter(m.to_string().split_whitespace()).unwrap()
            );
            assert_eq!(*lang, Mnemonic::language_of(m.to_string()).unwrap());
            assert_eq!(*lang, Mnemonic::language_of(&m.to_string()).unwrap());
        }
    }

    #[cfg(feature = "std")]
    #[test]
    fn test_ambiguous_languages() {
        let mut present = [false; language::MAX_NB_LANGUAGES];
        let mut present_vec = Vec::new();
        let mut alternate = true;
        for i in 0..Language::all().len() {
            present[i] = alternate;
            if alternate {
                present_vec.push(Language::all()[i]);
            }
            alternate = !alternate;
        }
        let amb = AmbiguousLanguages(present);
        assert_eq!(amb.to_vec(), present_vec);
        assert_eq!(amb.iter().collect::<Vec<_>>(), present_vec);
    }

    #[cfg(feature = "rand")]
    #[test]
    fn test_generate() {
        let _ = Mnemonic::generate(24).unwrap();
        let _ = Mnemonic::generate_in(Language::English, 24).unwrap();
        let _ = Mnemonic::generate_in_with(&mut rand::thread_rng(), Language::English, 24).unwrap();
    }

    #[cfg(feature = "rand")]
    #[test]
    fn test_generate_word_counts() {
        for word_count in [12, 15, 18, 21, 24].iter() {
            let _ = Mnemonic::generate(*word_count).unwrap();
        }
    }

    #[test]
    fn test_vectors_english() {
        let test_vectors = [
            (
                "00000000000000000000000000000000",
                "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about",
                "c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "legal winner thank year wave sausage worth useful legal winner thank yellow",
                "2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607",
            ),
            (
                "80808080808080808080808080808080",
                "letter advice cage absurd amount doctor acoustic avoid letter advice cage above",
                "d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8",
            ),
            (
                "ffffffffffffffffffffffffffffffff",
                "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong",
                "ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069",
            ),
            (
                "000000000000000000000000000000000000000000000000",
                "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent",
                "035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will",
                "f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd",
            ),
            (
                "808080808080808080808080808080808080808080808080",
                "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always",
                "107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65",
            ),
            (
                "ffffffffffffffffffffffffffffffffffffffffffffffff",
                "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when",
                "0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528",
            ),
            (
                "0000000000000000000000000000000000000000000000000000000000000000",
                "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art",
                "bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title",
                "bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87",
            ),
            (
                "8080808080808080808080808080808080808080808080808080808080808080",
                "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless",
                "c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f",
            ),
            (
                "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
                "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote",
                "dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad",
            ),
            (
                "9e885d952ad362caeb4efe34a8e91bd2",
                "ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic",
                "274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028",
            ),
            (
                "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
                "gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog",
                "628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac",
            ),
            (
                "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
                "hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length",
                "64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440",
            ),
            (
                "c0ba5a8e914111210f2bd131f3d5e08d",
                "scheme spot photo card baby mountain device kick cradle pact join borrow",
                "ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612",
            ),
            (
                "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
                "horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave",
                "fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d",
            ),
            (
                "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
                "panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside",
                "72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d",
            ),
            (
                "23db8160a31d3e0dca3688ed941adbf3",
                "cat swing flag economy stadium alone churn speed unique patch report train",
                "deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5",
            ),
            (
                "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
                "light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access",
                "4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02",
            ),
            (
                "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
                "all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform",
                "26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d",
            ),
            (
                "f30f8c1da665478f49b001d94c5fc452",
                "vessel ladder alter error federal sibling chat ability sun glass valve picture",
                "2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f",
            ),
            (
                "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
                "scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump",
                "7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88",
            ),
            (
                "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
                "void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold",
                "01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998",
            )
        ];

        for vector in &test_vectors {
            let entropy = Vec::<u8>::from_hex(&vector.0).unwrap();
            let mnemonic_str = vector.1;
            let seed = Vec::<u8>::from_hex(&vector.2).unwrap();

            let mnemonic = Mnemonic::from_entropy(&entropy).unwrap();

            assert_eq!(
                mnemonic,
                Mnemonic::parse_in_normalized(Language::English, mnemonic_str).unwrap(),
                "failed vector: {}",
                mnemonic_str
            );
            assert_eq!(
                mnemonic,
                Mnemonic::parse_normalized(mnemonic_str).unwrap(),
                "failed vector: {}",
                mnemonic_str
            );
            assert_eq!(
                &seed[..],
                &mnemonic.to_seed_normalized("TREZOR")[..],
                "failed vector: {}",
                mnemonic_str
            );

            #[cfg(feature = "std")]
            {
                assert_eq!(&mnemonic.to_string(), mnemonic_str, "failed vector: {}", mnemonic_str);
                assert_eq!(
                    mnemonic,
                    Mnemonic::parse_in(Language::English, mnemonic_str).unwrap(),
                    "failed vector: {}",
                    mnemonic_str
                );
                assert_eq!(
                    mnemonic,
                    Mnemonic::parse(mnemonic_str).unwrap(),
                    "failed vector: {}",
                    mnemonic_str
                );
                assert_eq!(
                    &seed[..],
                    &mnemonic.to_seed("TREZOR")[..],
                    "failed vector: {}",
                    mnemonic_str
                );
                assert_eq!(&entropy, &mnemonic.to_entropy(), "failed vector: {}", mnemonic_str);
                assert_eq!(
                    &entropy,
                    &mnemonic.to_entropy_array().0[0..entropy.len()],
                    "failed vector: {}",
                    mnemonic_str
                );
            }
        }
    }

    #[test]
    fn test_invalid_english() {
        assert_eq!(
            Mnemonic::parse_normalized(
                "getter advice cage absurd amount doctor acoustic avoid letter advice cage above",
            ),
            Err(Error::UnknownWord(0))
        );

        assert_eq!(
            Mnemonic::parse_normalized(
                "letter advice cagex absurd amount doctor acoustic avoid letter advice cage above",
            ),
            Err(Error::UnknownWord(2))
        );

        assert_eq!(
            Mnemonic::parse_normalized(
                "advice cage absurd amount doctor acoustic avoid letter advice cage above",
            ),
            Err(Error::BadWordCount(11))
        );

        assert_eq!(
            Mnemonic::parse_normalized(
                "primary advice cage absurd amount doctor acoustic avoid letter advice cage above",
            ),
            Err(Error::InvalidChecksum)
        );
    }

    #[test]
    fn test_invalid_entropy() {
        assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 17]), Err(Error::BadEntropyBitCount(136)));

        assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 4]), Err(Error::BadEntropyBitCount(32)));

        assert_eq!(Mnemonic::from_entropy(&vec![b'x'; 36]), Err(Error::BadEntropyBitCount(288)));
    }

    #[cfg(all(feature = "japanese", feature = "std"))]
    #[test]
    fn test_vectors_japanese() {
        let vectors = [
            (
                "00000000000000000000000000000000",
                "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あおぞら",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "a262d6fb6122ecf45be09c50492b31f92e9beb7d9a845987a02cefda57a15f9c467a17872029a9e92299b5cbdf306e3a0ee620245cbd508959b6cb7ca637bd55",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかめ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "aee025cbe6ca256862f889e48110a6a382365142f7d16f2b9545285b3af64e542143a577e9c144e101a6bdca18f8d97ec3366ebf5b088b1c1af9bc31346e60d9",
            ),
            (
                "80808080808080808080808080808080",
                "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あかちゃん",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "e51736736ebdf77eda23fa17e31475fa1d9509c78f1deb6b4aacfbd760a7e2ad769c714352c95143b5c1241985bcb407df36d64e75dd5a2b78ca5d2ba82a3544",
            ),
            (
                "ffffffffffffffffffffffffffffffff",
                "われる われる われる われる われる われる われる われる われる われる われる ろんぶん",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "4cd2ef49b479af5e1efbbd1e0bdc117f6a29b1010211df4f78e2ed40082865793e57949236c43b9fe591ec70e5bb4298b8b71dc4b267bb96ed4ed282c8f7761c",
            ),
            (
                "000000000000000000000000000000000000000000000000",
                "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あらいぐま",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "d99e8f1ce2d4288d30b9c815ae981edd923c01aa4ffdc5dee1ab5fe0d4a3e13966023324d119105aff266dac32e5cd11431eeca23bbd7202ff423f30d6776d69",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れいぎ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "eaaf171efa5de4838c758a93d6c86d2677d4ccda4a064a7136344e975f91fe61340ec8a615464b461d67baaf12b62ab5e742f944c7bd4ab6c341fbafba435716",
            ),
            (
                "808080808080808080808080808080808080808080808080",
                "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら いきなり",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "aec0f8d3167a10683374c222e6e632f2940c0826587ea0a73ac5d0493b6a632590179a6538287641a9fc9df8e6f24e01bf1be548e1f74fd7407ccd72ecebe425",
            ),
            (
                "ffffffffffffffffffffffffffffffffffffffffffffffff",
                "われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる りんご",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "f0f738128a65b8d1854d68de50ed97ac1831fc3a978c569e415bbcb431a6a671d4377e3b56abd518daa861676c4da75a19ccb41e00c37d086941e471a4374b95",
            ),
            (
                "0000000000000000000000000000000000000000000000000000000000000000",
                "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん いってい",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "23f500eec4a563bf90cfda87b3e590b211b959985c555d17e88f46f7183590cd5793458b094a4dccc8f05807ec7bd2d19ce269e20568936a751f6f1ec7c14ddd",
            ),
            (
                "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
                "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん まんきつ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "cd354a40aa2e241e8f306b3b752781b70dfd1c69190e510bc1297a9c5738e833bcdc179e81707d57263fb7564466f73d30bf979725ff783fb3eb4baa86560b05",
            ),
            (
                "8080808080808080808080808080808080808080808080808080808080808080",
                "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる うめる",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "6b7cd1b2cdfeeef8615077cadd6a0625f417f287652991c80206dbd82db17bf317d5c50a80bd9edd836b39daa1b6973359944c46d3fcc0129198dc7dc5cd0e68",
            ),
            (
                "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
                "われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる らいう",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "a44ba7054ac2f9226929d56505a51e13acdaa8a9097923ca07ea465c4c7e294c038f3f4e7e4b373726ba0057191aced6e48ac8d183f3a11569c426f0de414623",
            ),
            (
                "77c2b00716cec7213839159e404db50d",
                "せまい うちがわ あずき かろう めずらしい だんち ますく おさめる ていぼう あたる すあな えしゃく",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "344cef9efc37d0cb36d89def03d09144dd51167923487eec42c487f7428908546fa31a3c26b7391a2b3afe7db81b9f8c5007336b58e269ea0bd10749a87e0193",
            ),
            (
                "b63a9c59a6e641f288ebc103017f1da9f8290b3da6bdef7b",
                "ぬすむ ふっかつ うどん こうりつ しつじ りょうり おたがい せもたれ あつめる いちりゅう はんしゃ ごますり そんけい たいちょう らしんばん ぶんせき やすみ ほいく",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "b14e7d35904cb8569af0d6a016cee7066335a21c1c67891b01b83033cadb3e8a034a726e3909139ecd8b2eb9e9b05245684558f329b38480e262c1d6bc20ecc4",
            ),
            (
                "3e141609b97933b66a060dcddc71fad1d91677db872031e85f4c015c5e7e8982",
                "くのう てぬぐい そんかい すろっと ちきゅう ほあん とさか はくしゅ ひびく みえる そざい てんすう たんぴん くしょう すいようび みけん きさらぎ げざん ふくざつ あつかう はやい くろう おやゆび こすう",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "32e78dce2aff5db25aa7a4a32b493b5d10b4089923f3320c8b287a77e512455443298351beb3f7eb2390c4662a2e566eec5217e1a37467af43b46668d515e41b",
            ),
            (
                "0460ef47585604c5660618db2e6a7e7f",
                "あみもの いきおい ふいうち にげる ざんしょ じかん ついか はたん ほあん すんぽう てちがい わかめ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "0acf902cd391e30f3f5cb0605d72a4c849342f62bd6a360298c7013d714d7e58ddf9c7fdf141d0949f17a2c9c37ced1d8cb2edabab97c4199b142c829850154b",
            ),
            (
                "72f60ebac5dd8add8d2a25a797102c3ce21bc029c200076f",
                "すろっと にくしみ なやむ たとえる へいこう すくう きない けってい とくべつ ねっしん いたみ せんせい おくりがな まかい とくい けあな いきおい そそぐ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "9869e220bec09b6f0c0011f46e1f9032b269f096344028f5006a6e69ea5b0b8afabbb6944a23e11ebd021f182dd056d96e4e3657df241ca40babda532d364f73",
            ),
            (
                "2c85efc7f24ee4573d2b81a6ec66cee209b2dcbd09d8eddc51e0215b0b68e416",
                "かほご きうい ゆたか みすえる もらう がっこう よそう ずっと ときどき したうけ にんか はっこう つみき すうじつ よけい くげん もくてき まわり せめる げざい にげる にんたい たんそく ほそく",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "713b7e70c9fbc18c831bfd1f03302422822c3727a93a5efb9659bec6ad8d6f2c1b5c8ed8b0b77775feaf606e9d1cc0a84ac416a85514ad59f5541ff5e0382481",
            ),
            (
                "eaebabb2383351fd31d703840b32e9e2",
                "めいえん さのう めだつ すてる きぬごし ろんぱ はんこ まける たいおう さかいし ねんいり はぶらし",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "06e1d5289a97bcc95cb4a6360719131a786aba057d8efd603a547bd254261c2a97fcd3e8a4e766d5416437e956b388336d36c7ad2dba4ee6796f0249b10ee961",
            ),
            (
                "7ac45cfe7722ee6c7ba84fbc2d5bd61b45cb2fe5eb65aa78",
                "せんぱい おしえる ぐんかん もらう きあい きぼう やおや いせえび のいず じゅしん よゆう きみつ さといも ちんもく ちわわ しんせいじ とめる はちみつ",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "1fef28785d08cbf41d7a20a3a6891043395779ed74503a5652760ee8c24dfe60972105ee71d5168071a35ab7b5bd2f8831f75488078a90f0926c8e9171b2bc4a",
            ),
            (
                "4fa1a8bc3e6d80ee1316050e862c1812031493212b7ec3f3bb1b08f168cabeef",
                "こころ いどう きあつ そうがんきょう へいあん せつりつ ごうせい はいち いびき きこく あんい おちつく きこえる けんとう たいこ すすめる はっけん ていど はんおん いんさつ うなぎ しねま れいぼう みつかる",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "43de99b502e152d4c198542624511db3007c8f8f126a30818e856b2d8a20400d29e7a7e3fdd21f909e23be5e3c8d9aee3a739b0b65041ff0b8637276703f65c2",
            ),
            (
                "18ab19a9f54a9274f03e5209a2ac8a91",
                "うりきれ さいせい じゆう むろん とどける ぐうたら はいれつ ひけつ いずれ うちあわせ おさめる おたく",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "3d711f075ee44d8b535bb4561ad76d7d5350ea0b1f5d2eac054e869ff7963cdce9581097a477d697a2a9433a0c6884bea10a2193647677977c9820dd0921cbde",
            ),
            (
                "18a2e1d81b8ecfb2a333adcb0c17a5b9eb76cc5d05db91a4",
                "うりきれ うねる せっさたくま きもち めんきょ へいたく たまご ぜっく びじゅつかん さんそ むせる せいじ ねくたい しはらい せおう ねんど たんまつ がいけん",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "753ec9e333e616e9471482b4b70a18d413241f1e335c65cd7996f32b66cf95546612c51dcf12ead6f805f9ee3d965846b894ae99b24204954be80810d292fcdd",
            ),
            (
                "15da872c95a13dd738fbf50e427583ad61f18fd99f628c417a61cf8343c90419",
                "うちゅう ふそく ひしょ がちょう うけもつ めいそう みかん そざい いばる うけとる さんま さこつ おうさま ぱんつ しひょう めした たはつ いちぶ つうじょう てさぎょう きつね みすえる いりぐち かめれおん",
                "㍍ガバヴァぱばぐゞちぢ十人十色",
                "346b7321d8c04f6f37b49fdf062a2fddc8e1bf8f1d33171b65074531ec546d1d3469974beccb1a09263440fc92e1042580a557fdce314e27ee4eabb25fa5e5fe",
            )
        ];

        for vector in &vectors {
            let entropy = Vec::<u8>::from_hex(&vector.0).unwrap();
            let mnemonic_str = vector.1;
            let passphrase = vector.2;
            let seed = Vec::<u8>::from_hex(&vector.3).unwrap();

            let mnemonic = Mnemonic::from_entropy_in(Language::Japanese, &entropy).unwrap();

            assert_eq!(seed, &mnemonic.to_seed(passphrase)[..], "failed vector: {}", mnemonic_str);
            let rt = Mnemonic::parse_in(Language::Japanese, mnemonic.to_string())
                .expect(&format!("vector: {}", mnemonic_str));
            assert_eq!(seed, &rt.to_seed(passphrase)[..]);

            let mnemonic = Mnemonic::parse_in(Language::Japanese, mnemonic_str)
                .expect(&format!("vector: {}", mnemonic_str));
            assert_eq!(seed, &mnemonic.to_seed(passphrase)[..], "failed vector: {}", mnemonic_str);
        }
    }
}