//! Utility functions related to XOR encryption / decryption.
//!
//! Contains a mixed bag of functions for XOR encrypting and decrypting data, scoring
//! candidate plaintexts against English letter frequencies, and estimating likely key
//! sizes via normalized Hamming distances.
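//!
//! A minimal usage sketch is shown below (marked `ignore` because the crate name
//! `xor_utils` is an assumption; substitute the real crate name):
//!
//! ```ignore
//! use std::io::{Cursor, Read};
//! use xor_utils::Xor;
//!
//! // XOR three bytes against a single-byte key.
//! let reader: &mut Read = &mut Cursor::new(vec![0x41u8, 0x42, 0x43]);
//! let encrypted = reader.xor(vec![0xFFu8]);
//! assert_eq!(encrypted, vec![0xBEu8, 0xBD, 0xBC]);
//! ```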

extern crate hamming;

#[macro_use]
extern crate log;

use std::io::{Read, BufReader};
use std::fs::File;
use hamming::distance;
use std::collections::HashMap;

pub trait Xor {
    /// Creates an XOR-encrypted copy of the data using the provided key.
    ///
    /// The key is repeated (cycled) if it is shorter than the data.
    fn xor(&mut self, key_bytes : Vec<u8>) -> Vec<u8>;
}

fn xor(reader: &mut Read, key_bytes : Vec<u8>) -> Vec<u8> {
    let mut key_idx = 0;
    let mut warning_shown = false;
    let mut encoded_bytes: Vec<u8> = Vec::new();

    // Iterate each chunk of input data and XOR it against the provided key.
    loop {
        let mut data = [0; 1024];
        let num_read = reader.read(&mut data[..]).unwrap();

        if num_read == 0 {
            break;
        }

        let data_bytes = &data[0 .. num_read];

        for b in data_bytes {
            let k = key_bytes[key_idx];
            let e = b ^ k;

            encoded_bytes.push(e);

            key_idx += 1;

            if key_idx >= key_bytes.len() {
                // Wrap around to the start of the key.
                key_idx = 0;

                if !warning_shown {
                    warning_shown = true;
                    warn!("Key was shorter than the data and had to be re-used to encode it fully; use a key at least as long as the data to be secure.");
                }
            }
        }
    }

    encoded_bytes
}

impl<'a, R: Read> Xor for &'a mut R {
    fn xor(&mut self, key_bytes : Vec<u8>) -> Vec<u8> {
        xor(self, key_bytes)
    }
}

impl Xor for Read {
    fn xor(&mut self, key_bytes : Vec<u8>) -> Vec<u8> {
        xor(self, key_bytes)
    }
}

pub trait Score {
    /// Calculates a relative "score" for an item, indicating how likely it is that the
    /// item represents English text.
    ///
    /// Higher scores mean the item is more likely to be text.
    fn score(&self) -> f32;
}

pub trait ScoreAgainstDictionary {
    /// Calculates a relative "score" for an item, indicating how likely it is that the
    /// item represents English text.
    ///
    /// The provided word list is used to increase the score of the item if it contains
    /// any of the words in the list.
    fn score_with_words(&self, words_list : Vec<String>) -> f32;
}

impl Score for char {
    fn score(&self) -> f32 {
        score_character(*self)
    }
}

impl Score for String {
    fn score(&self) -> f32 {
        let mut sum = 0.0f32;

        for c in self.chars() {
            sum += score_character(c);
        }

        sum
    }
}

impl ScoreAgainstDictionary for String {
    fn score_with_words(&self, words_list : Vec<String>) -> f32 {
        let mut sum = 0.0f32;

        // Score each character.
        sum += self.score();

        // Score each word.
        sum += score_words(self, words_list);

        sum
    }
}

/// Loads all lines in the given file, lowercases them, and sorts them by length
/// (longest first).
///
/// Assumes the file is a newline-separated list of words.
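///
/// A minimal usage sketch (marked `ignore`: the path is only illustrative and the crate
/// name `xor_utils` is an assumption):
///
/// ```ignore
/// use xor_utils::load_words_list;
///
/// // Words come back lowercased and sorted longest-first; a missing file yields an
/// // empty list.
/// let words = load_words_list("/usr/share/dict/words");
/// ```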
pub fn load_words_list(path : &str) -> Vec<String> {

    // Will hold all the words in the dictionary file.
    let mut dictionary_lines : Vec<String> = Vec::new();

    match File::open(path) {
        Ok(file) => {
            // Read all the words and push them to the dictionary vector.
            let mut reader = BufReader::new(file);
            let mut dictionary_data = String::new();
            let _ = reader.read_to_string(&mut dictionary_data);

            for line in dictionary_data.lines() {
                let word = line.to_lowercase();
                dictionary_lines.push(word);
            }

            // Sort the dictionary in order of word length, largest words to smallest words.
            dictionary_lines.sort_by(|a, b| {
                let x = a.len();
                let y = b.len();

                y.cmp(&x)
            });
        },
        Err(err) => {
            error!("Failed to open dictionary file '{}' because: {:?}", path, err);
        }
    }

    dictionary_lines
}


/// Generates all combinations of ASCII characters of the supplied length.
///
/// This can be used to brute-force every possible ASCII key of a given character length.
/// Note that the number of keys grows as 128^length, so this is only practical for very
/// small lengths (values of 5 or more overflow the `u32` arithmetic used here).
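///
/// A minimal usage sketch (marked `ignore` because the crate name `xor_utils` is an
/// assumption):
///
/// ```ignore
/// use xor_utils::gen_ascii_keys;
///
/// // All 128 single-character ASCII keys, in code-point order.
/// let keys = gen_ascii_keys(1);
/// assert_eq!(keys.len(), 128);
/// assert_eq!(keys[97], "a");
/// ```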
pub fn gen_ascii_keys(length : u32) -> Vec<String> {
    let mut keys : Vec<String> = Vec::new();
    let max = 128u32.pow(length);

    for i in 0..max {
        let mut value = i;
        let mut key = String::new();

        for j in (0..length).rev() {
            let digit = value / 128u32.pow(j);
            value = value - digit * 128u32.pow(j);
            key.push((digit as u8) as char);
        }

        keys.push(key);
    }

    keys
}

/// Calculates the average normalized Hamming distance for the given input bytes.
///
/// For each keysize from 1 to `max_keysize`, the average normalized Hamming distance is
/// calculated by:
///
/// 1. Taking the next 2 adjacent chunks of the input, each keysize bytes long
/// 2. Calculating the Hamming distance between these 2 chunks
/// 3. Normalizing the Hamming distance by dividing it by the keysize
/// 4. Repeating 1-3 until no full pair of chunks remains
/// 5. Taking the mean of the normalized Hamming distances calculated above
///
/// Returns a HashMap that maps each keysize to its average normalized Hamming distance.
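///
/// A minimal usage sketch (marked `ignore` because the crate name `xor_utils` is an
/// assumption):
///
/// ```ignore
/// use xor_utils::avg_normalized_hamming_distance;
///
/// // The input repeats with a period of 8 bytes, so a keysize of 8 gives a normalized
/// // Hamming distance of 0 between adjacent chunks.
/// let input = b"aaaabbbbaaaabbbbaaaabbbbaaaabbbb".to_vec();
/// let distances = avg_normalized_hamming_distance(&input, 10);
/// assert_eq!(distances[&8], 0.0);
/// ```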
pub fn avg_normalized_hamming_distance(input : &Vec<u8>, max_keysize : usize) -> HashMap<usize, f32> {

    let mut keysize_to_avg_hamming_dist = HashMap::new();

    for keysize in 1..(max_keysize+1) {

        let mut chunks = input.chunks(keysize);
        let mut num_chunks_compared = 0;
        let mut average_hamming_dist = 0.0_f32;

        // Calculate the mean normalized hamming distance over a
        // number of samples to try to improve accuracy.
        loop {

            let left_chunk = chunks.next();
            let right_chunk = chunks.next();

            if left_chunk.is_none() {
                break;
            }
            if right_chunk.is_none() {
                break;
            }

            let left = left_chunk.unwrap();
            let right = right_chunk.unwrap();

            if left.len() != right.len() {
                break;
            }

            let hamming_dist = distance(left, right);
            let normalized_hamming = hamming_dist as f32 / keysize as f32;
            average_hamming_dist += normalized_hamming;

            debug!("{:4.3} is the normalized hamming distance for keysize {} and block {}", normalized_hamming, keysize, num_chunks_compared);

            num_chunks_compared += 1;
        }

        if num_chunks_compared != 0 {
            average_hamming_dist = average_hamming_dist / num_chunks_compared as f32;
            keysize_to_avg_hamming_dist.insert(keysize, average_hamming_dist);
        } else {
            debug!("Not enough data in input file to check a keysize of '{}'", keysize);
        }
    }

    keysize_to_avg_hamming_dist
}

fn score_words(words : &String, dictionary : Vec<String>) -> f32 {
    let mut score : f32 = 0.0;

    // Check if the input contains a word from the dictionary.
    // Each time a word is matched it's removed from the input so there isn't double
    // counting of words.
    //
    // For each word the score is increased by 3 * e ^ word_length.
    // In this way large words contribute exponentially more to the overall score.
    let mut cloned_input = words.clone();
    for word in dictionary {
        if cloned_input.contains(word.as_str()) {
            let adjustment = 3.0 * (word.len() as f32).exp();
            score = score + adjustment;

            cloned_input = cloned_input.replacen(word.as_str(), "", 1);
        }
    }

    score
}


fn score_character(c : char) -> f32 {
    let character_scores = get_char_score_map();

    // Characters not present in the frequency map contribute nothing to the score.
    match character_scores.get(&c) {
        Some(value) => *value,
        None => 0.00,
    }
}

// Creates a map of English letter frequencies where:
// key      - character
// value    - relative frequency (%) of that character in English text
fn get_char_score_map() -> HashMap<char, f32> {
    let mut character_scores = HashMap::new();

    character_scores.insert('e', 12.702);
    character_scores.insert('t', 9.056);
    character_scores.insert('a', 8.167);
    character_scores.insert('o', 7.507);
    character_scores.insert('i', 6.966);
    character_scores.insert('n', 6.749);
    character_scores.insert('s', 6.327);
    character_scores.insert('h', 6.094);
    character_scores.insert('r', 5.987);
    character_scores.insert('d', 4.253);
    character_scores.insert('l', 4.025);
    character_scores.insert('c', 2.782);
    character_scores.insert('u', 2.758);
    character_scores.insert('m', 2.406);
    character_scores.insert('w', 2.360);
    character_scores.insert('f', 2.228);
    character_scores.insert('g', 2.015);
    character_scores.insert('y', 1.974);
    character_scores.insert('p', 1.929);
    character_scores.insert('b', 1.492);
    character_scores.insert('v', 0.978);
    character_scores.insert('k', 0.772);
    character_scores.insert('j', 0.153);
    character_scores.insert('x', 0.150);
    character_scores.insert('q', 0.095);
    character_scores.insert('z', 0.074);

    character_scores
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;

    #[test]
    fn xor_works() {

        // Data is twice as long as the key.
        let data : Vec<u8>  = vec![0b11111111u8, 0b11111111u8, 0b00001111u8, 0b10101010u8, 0b11111111u8, 0b11111111u8, 0b00001111u8, 0b10101010u8];
        let key : Vec<u8>   = vec![0b11111111u8, 0b00000000u8, 0b11110000u8, 0b01010101u8];

        let reader : &mut Read = &mut Cursor::new(data);

        let cipher = reader.xor(key);

        assert_eq!(0b00000000u8, cipher[0]);
        assert_eq!(0b11111111u8, cipher[1]);
        assert_eq!(0b11111111u8, cipher[2]);
        assert_eq!(0b11111111u8, cipher[3]);
        assert_eq!(0b00000000u8, cipher[4]);
        assert_eq!(0b11111111u8, cipher[5]);
        assert_eq!(0b11111111u8, cipher[6]);
        assert_eq!(0b11111111u8, cipher[7]);
    }
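
    // Illustrative sanity check: frequency-based scoring should rate "eee" above "zzz",
    // since 'e' is far more common in English text than 'z'.
    #[test]
    fn score_prefers_common_characters() {
        let common = String::from("eee");
        let rare = String::from("zzz");

        assert!(common.score() > rare.score());
    }

    // Illustrative sanity check: matching dictionary words should raise the score above
    // the plain character-frequency score.
    #[test]
    fn score_with_words_rewards_dictionary_matches() {
        let text = String::from("hello world");
        let words = vec![String::from("hello"), String::from("world")];

        assert!(text.score_with_words(words) > text.score());
    }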

}