use crate::{
    constants::DESCRIBE_MAX,
    hash::{Hash, HashPrimitive},
    table::general::{Bucket, HashTables},
    utils::{all_eq, increase_capacity},
    DataPoint, DataPointSlice, Error, Result,
};
use fnv::{FnvHashMap as HashMap, FnvHashSet};
use serde::{Deserialize, Serialize};
use std::iter::FromIterator;

/// Indexable vector storage.
/// Indexes are stored in the hash tables; the original vectors can be looked up in this data structure.
#[derive(Debug, Deserialize, Serialize)]
pub struct VecStore {
    pub map: Vec<DataPoint>,
}

impl VecStore {
    fn push(&mut self, d: DataPoint) -> u32 {
        self.map.push(d);
        (self.map.len() - 1) as u32
    }

    fn position(&self, d: &DataPointSlice) -> Option<u32> {
        self.map.iter().position(|x| all_eq(x, d)).map(|x| x as u32)
    }

    fn get(&self, idx: u32) -> &DataPoint {
        &self.map[idx as usize]
    }

    fn increase_storage(&mut self, size: usize) {
        increase_capacity(size, &mut self.map);
    }
}
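
// A minimal sketch of `VecStore`'s id semantics, kept as a unit test:
// `push` returns the index of the stored vector, `position` finds that
// index again by value, and `get` resolves the index back to the vector.
// This assumes `DataPoint` is an owned `Vec` of floats, as the rest of
// this module suggests.
#[cfg(test)]
mod vec_store_tests {
    use super::*;

    #[test]
    fn test_push_position_get_roundtrip() {
        let mut store = VecStore { map: vec![] };
        // The first pushed vector gets index 0.
        let idx = store.push(vec![1., 2.]);
        assert_eq!(idx, 0);
        // A linear search by value recovers the same index.
        assert_eq!(store.position(&[1., 2.]), Some(0));
        // And the index resolves back to the stored vector.
        assert_eq!(store.get(0), &vec![1., 2.]);
    }
}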

/// In-memory backend for [LSH](struct.LSH.html).
#[derive(Deserialize, Serialize)]
pub struct MemoryTable {
    hash_tables: Vec<HashMap<Hash, Bucket>>,
    n_hash_tables: usize,
    pub vec_store: VecStore,
    only_index_storage: bool,
    counter: u32,
}

impl MemoryTable {
    fn remove_idx(&mut self, idx: u32, hash: &Hash, hash_table: usize) -> Result<()> {
        let tbl = &mut self.hash_tables[hash_table];
        match tbl.get_mut(hash) {
            None => Err(Error::NotFound),
            Some(bucket) => {
                bucket.remove(&idx);
                Ok(())
            }
        }
    }

    fn insert_idx(&mut self, idx: u32, hash: Hash, hash_table: usize) {
        let tbl = &mut self.hash_tables[hash_table];
        let bucket = tbl.entry(hash).or_insert_with(FnvHashSet::default);
        bucket.insert(idx);
    }
}

impl HashTables for MemoryTable {
    fn new(n_hash_tables: usize, only_index_storage: bool, _: &str) -> Result<Box<Self>> {
        // TODO: Check the average number of vectors in the buckets.
        // this way the capacity can be approximated by the number of DataPoints that will
        // be stored.
        let hash_tables = vec![HashMap::default(); n_hash_tables];
        let vector_store = VecStore { map: vec![] };
        let m = MemoryTable {
            hash_tables,
            n_hash_tables,
            vec_store: vector_store,
            only_index_storage,
            counter: 0,
        };
        Ok(Box::new(m))
    }

    fn put(&mut self, hash: Hash, d: &DataPointSlice, hash_table: usize) -> Result<u32> {
        // Store hash and id/idx
        let idx = self.counter;
        self.insert_idx(idx, hash, hash_table);

        // Every unique vector is inserted into all N hash tables, but the
        // vector itself is only stored once, on the insert into hash table 0.
        // The counter (the id) is incremented after we've updated the last
        // hash table. Note that the two conditions are independent, so the
        // counter still advances when there is only a single hash table.
        if (hash_table == 0) && (!self.only_index_storage) {
            self.vec_store.push(d.to_vec());
        }
        if hash_table == self.n_hash_tables - 1 {
            self.counter += 1
        }
        Ok(idx)
    }

    /// Expensive operation, as we need to do a linear search over all data points.
    fn delete(&mut self, hash: &Hash, d: &DataPointSlice, hash_table: usize) -> Result<()> {
        // First find the data point in the VecStore
        let idx = match self.vec_store.position(d) {
            None => return Ok(()),
            Some(idx) => idx,
        };
        // Note: the data point remains in the VecStore, as shrinking the vector
        // would mean re-hashing all data points.
        self.remove_idx(idx, hash, hash_table)
    }

    fn update_by_idx(
        &mut self,
        old_hash: &Hash,
        new_hash: Hash,
        idx: u32,
        hash_table: usize,
    ) -> Result<()> {
        self.remove_idx(idx, old_hash, hash_table)?;
        self.insert_idx(idx, new_hash, hash_table);
        Ok(())
    }

    /// Query the whole bucket of indexes stored under the given hash.
    fn query_bucket(&self, hash: &Hash, hash_table: usize) -> Result<Bucket> {
        let tbl = &self.hash_tables[hash_table];
        match tbl.get(hash) {
            None => Err(Error::NotFound),
            Some(bucket) => Ok(bucket.clone()),
        }
    }

    fn idx_to_datapoint(&self, idx: u32) -> Result<&DataPoint> {
        Ok(self.vec_store.get(idx))
    }

    fn increase_storage(&mut self, size: usize) {
        increase_capacity(size, &mut self.hash_tables);
        self.vec_store.increase_storage(size);
    }

    fn describe(&self) -> Result<String> {
        let mut lengths = vec![];
        let mut max_len = 0;
        let mut min_len = usize::MAX;
        let mut set: FnvHashSet<HashPrimitive> = FnvHashSet::default();
        // iterator over hash tables 0..L
        for map in self.hash_tables.iter() {
            // iterator over all hashes
            // zip to truncate at the describe maximum
            for ((k, v), _) in map.iter().zip(0..DESCRIBE_MAX) {
                let len = v.len();
                let hash_values: FnvHashSet<HashPrimitive> =
                    FnvHashSet::from_iter(k.iter().copied());
                set = set.union(&hash_values).copied().collect();
                lengths.push(len);
                if len > max_len {
                    max_len = len
                }
                if len < min_len {
                    min_len = len
                }
            }
        }

        // Mean and (population) standard deviation of the bucket sizes.
        let avg = lengths.iter().sum::<usize>() as f32 / lengths.len() as f32;
        let var = lengths
            .iter()
            .map(|&v| (avg - v as f32).powi(2))
            .sum::<f32>()
            / lengths.len() as f32;
        let std_dev = var.sqrt();

        let mut out = format!("No. of tables: {}\n", self.n_hash_tables);
        out.push_str(&format!("Unique hash values:\n{:?}\n", set));
        out.push_str("\nHash collisions:\n");
        out.push_str(&format!("avg:\t{:?}\n", avg));
        out.push_str(&format!("std-dev:\t{:?}\n", std_dev));
        out.push_str(&format!("min:\t{:?}\n", min_len));
        out.push_str(&format!("max:\t{:?}\n", max_len));

        Ok(out)
    }

    fn get_unique_hash_int(&self) -> FnvHashSet<HashPrimitive> {
        let mut hash_numbers = FnvHashSet::default();

        for ht in &self.hash_tables {
            // Only inspect the first 100 hashes per table to keep this cheap.
            for (hash, _) in ht.iter().take(100) {
                for &v in hash {
                    hash_numbers.insert(v);
                }
            }
        }
        hash_numbers
    }
}
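
// A minimal sketch of the `put` protocol documented above, kept as a unit
// test: one id is handed out per unique vector, the vector itself is only
// stored on the insert into hash table 0, and the counter advances after
// the last table. The hash values below are arbitrary; this assumes
// `Hash` is a `Vec` of integer `HashPrimitive`s, as `describe` suggests.
#[cfg(test)]
mod memory_table_tests {
    use super::*;

    #[test]
    fn test_put_assigns_one_id_per_vector() {
        let mut tbl = MemoryTable::new(2, false, "").unwrap();
        // The same vector is inserted once per hash table under the same id.
        let v = vec![1., 2.];
        assert_eq!(tbl.put(vec![1], &v, 0).unwrap(), 0);
        assert_eq!(tbl.put(vec![2], &v, 1).unwrap(), 0);
        // After the last table the counter advances, so the next vector
        // receives a fresh id.
        let w = vec![3., 4.];
        assert_eq!(tbl.put(vec![1], &w, 0).unwrap(), 1);
        assert_eq!(tbl.put(vec![5], &w, 1).unwrap(), 1);
        // Both vectors hashed to `vec![1]` in table 0, so they collide
        // in the same bucket.
        let bucket = tbl.query_bucket(&vec![1], 0).unwrap();
        assert!(bucket.contains(&0) && bucket.contains(&1));
    }
}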

impl std::fmt::Debug for MemoryTable {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "hash_tables:\nhash, \t buckets")?;
        for ht in self.hash_tables.iter() {
            writeln!(f, "{:?}", ht)?;
        }
        Ok(())
    }
}
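
// A minimal sketch of the deletion semantics noted in `delete`, kept as a
// unit test: deleting removes the index from its bucket, but the vector
// stays resolvable through `idx_to_datapoint`, since shrinking the
// `VecStore` would shift the indexes of every other data point.
#[cfg(test)]
mod delete_semantics_tests {
    use super::*;

    #[test]
    fn test_delete_keeps_vector_storage() {
        let mut tbl = MemoryTable::new(1, false, "").unwrap();
        let v = vec![1., 2.];
        let idx = tbl.put(vec![7], &v, 0).unwrap();
        tbl.delete(&vec![7], &v, 0).unwrap();
        // The index is gone from the bucket...
        assert!(!tbl.query_bucket(&vec![7], 0).unwrap().contains(&idx));
        // ...but the vector itself can still be looked up.
        assert_eq!(tbl.idx_to_datapoint(idx).unwrap(), &v);
    }
}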