1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
use std::io::{BufRead, Write};
use std::mem;
use std::slice::from_raw_parts_mut;

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use failure::{err_msg, Error};
use ndarray::{Array2, Axis};

use crate::embeddings::Embeddings;
use crate::norms::NdNorms;
use crate::storage::{CowArray, NdArray, Storage, StorageViewMut};
use crate::util::l2_normalize_array;
use crate::vocab::{SimpleVocab, Vocab};
/// Reading of embeddings in the word2vec binary format.
pub trait ReadWord2Vec<R>
where
Self: Sized,
R: BufRead,
{
/// Read embeddings from `reader` in the word2vec binary format.
///
/// The embeddings are l2-normalized after reading; the norms from
/// before normalization are stored alongside the embeddings.
fn read_word2vec_binary(reader: &mut R) -> Result<Self, Error>;
}
impl<R> ReadWord2Vec<R> for Embeddings<SimpleVocab, NdArray>
where
    R: BufRead,
{
    /// Read word2vec-format embeddings and l2-normalize them in place.
    fn read_word2vec_binary(reader: &mut R) -> Result<Self, Error> {
        let raw = Embeddings::read_word2vec_binary_raw(reader)?;
        let (_, vocab, mut storage, _) = raw.into_parts();

        // Normalize every embedding in place, keeping the original norms
        // so callers can reconstruct the unnormalized embeddings.
        let norms = NdNorms(l2_normalize_array(storage.view_mut()));

        Ok(Embeddings::new(None, vocab, storage, norms))
    }
}
/// Reading of raw (unnormalized) embeddings in the word2vec binary format.
pub(crate) trait ReadWord2VecRaw<R>
where
Self: Sized,
R: BufRead,
{
/// Read embeddings from `reader` in the word2vec binary format,
/// without normalizing them.
fn read_word2vec_binary_raw(reader: &mut R) -> Result<Self, Error>;
}
impl<R> ReadWord2VecRaw<R> for Embeddings<SimpleVocab, NdArray>
where
R: BufRead,
{
fn read_word2vec_binary_raw(reader: &mut R) -> Result<Self, Error> {
let n_words = read_number(reader, b' ')?;
let embed_len = read_number(reader, b'\n')?;
let mut matrix = Array2::zeros((n_words, embed_len));
let mut words = Vec::with_capacity(n_words);
for idx in 0..n_words {
let word = read_string(reader, b' ')?;
let word = word.trim();
words.push(word.to_owned());
let mut embedding = matrix.index_axis_mut(Axis(0), idx);
{
let mut embedding_raw = match embedding.as_slice_mut() {
Some(s) => unsafe { typed_to_bytes(s) },
None => return Err(err_msg("Matrix not contiguous")),
};
reader.read_exact(&mut embedding_raw)?;
}
}
Ok(Embeddings::new_without_norms(
None,
SimpleVocab::new(words),
NdArray(matrix),
))
}
}
/// Read bytes up to `delim` and parse them as an ASCII decimal `usize`.
fn read_number(reader: &mut BufRead, delim: u8) -> Result<usize, Error> {
    let digits = read_string(reader, delim)?;
    let number = digits.parse::<usize>()?;
    Ok(number)
}
/// Read bytes up to and including `delim`, returning them as a `String`
/// with the delimiter stripped.
///
/// Returns an error when the bytes are not valid UTF-8.
fn read_string(reader: &mut BufRead, delim: u8) -> Result<String, Error> {
    let mut buf = Vec::new();
    reader.read_until(delim, &mut buf)?;
    // `read_until` stops at EOF without appending the delimiter; only
    // strip it when it is actually present. Popping unconditionally
    // would silently drop a data byte from truncated input (e.g. a
    // truncated header "100" would parse as 10).
    if buf.last() == Some(&delim) {
        buf.pop();
    }
    Ok(String::from_utf8(buf)?)
}
/// View a typed slice as a mutable byte slice over the same memory.
///
/// # Safety
///
/// The caller must ensure that any bytes written through the returned
/// slice leave every element of `slice` in a valid state; this is only
/// sound for plain-old-data element types (e.g. `f32`) with no padding.
/// The returned slice aliases `slice` and must not outlive it (the
/// borrow checker enforces the lifetime via the elided lifetimes here).
unsafe fn typed_to_bytes<T>(slice: &mut [T]) -> &mut [u8] {
from_raw_parts_mut(
slice.as_mut_ptr() as *mut u8,
slice.len() * mem::size_of::<T>(),
)
}
/// Writing of embeddings in the word2vec binary format.
pub trait WriteWord2Vec<W>
where
W: Write,
{
/// Write the embeddings to `w` in the word2vec binary format.
///
/// When `unnormalize` is true, the embeddings are rescaled by their
/// stored norms before writing, undoing the l2-normalization.
fn write_word2vec_binary(&self, w: &mut W, unnormalize: bool) -> Result<(), Error>;
}
impl<W, V, S> WriteWord2Vec<W> for Embeddings<V, S>
where
W: Write,
V: Vocab,
S: Storage,
{
fn write_word2vec_binary(&self, w: &mut W, unnormalize: bool) -> Result<(), Error>
where
W: Write,
{
writeln!(w, "{} {}", self.vocab().len(), self.dims())?;
for (word, embed_norm) in self.iter_with_norms() {
write!(w, "{} ", word)?;
let embed = if unnormalize {
CowArray::Owned(embed_norm.into_unnormalized())
} else {
embed_norm.embedding
};
for v in embed.as_view() {
w.write_f32::<LittleEndian>(*v)?;
}
w.write_all(&[0x0a])?;
}
Ok(())
}
}