1use fnv::FnvHashMap;
12mod dbfile;
13pub mod graph;
14pub mod table;
15
16use self::dbfile::DBFile;
17use crate::cogs;
18
19use std::cell::RefCell;
20use std::fmt;
21use std::fs;
22use std::path;
23use std::path::PathBuf;
24use std::process;
25use std::sync::mpsc;
26use std::thread;
27
/// In-memory view of a single EUPS database directory.
struct DBImpl {
    // Directory this database was loaded from.
    directory: path::PathBuf,
    // tag name -> product name -> chain DBFile (indexed this way in
    // `get_versions_from_tag`).
    tag_to_product_info: FnvHashMap<String, FnvHashMap<String, DBFile>>,
    // product name -> version string -> version DBFile (indexed this way in
    // `get_table_from_version`).
    product_to_version_info: FnvHashMap<String, FnvHashMap<String, DBFile>>,
    // product name -> list of tags declared for that product.
    product_to_tags: FnvHashMap<String, Vec<String>>,
}
37
/// Iterator over the databases held by a `DB`: yields the system database
/// first, then the user database when one exists (see the `Iterator` impl).
struct DBIter<'a> {
    // The `DB` whose databases are being iterated.
    inner: &'a DB,
    // Progress marker: 0 = system db next, 1 = user db next, else exhausted.
    pos: usize,
}
46
47impl<'a> Iterator for DBIter<'a> {
50 type Item = &'a DBImpl;
51
52 fn next(&mut self) -> Option<Self::Item> {
53 match self.pos {
56 0 => {
58 self.pos += 1;
59 Some(&self.inner.system_db)
60 }
61 1 => {
67 self.pos += 1;
68 self.inner.user_db.as_ref()
70 }
71 _ => None,
73 }
74 }
75}
76
/// Controls which database files have their contents eagerly read and
/// parsed while the database is being scanned (see `build_db`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DBLoadControl {
    /// Preload the contents of `*.version` files.
    Versions,
    /// Preload the contents of `*.chain` (tag) files.
    Tags,
    /// Preload both version and tag files.
    All,
}
84
/// Top-level handle over the system EUPS database, an optional per-user
/// database, and the product stack they describe.
pub struct DB {
    // Always-present system database.
    system_db: DBImpl,
    // Optional user database; `None` when no user path is configured.
    user_db: Option<DBImpl>,
    // Root directory of the product stack; table paths are built under it.
    stack_root: path::PathBuf,
    // Memoized parsed tables keyed by (product, version); populated and
    // consulted by `get_table_from_version`.
    cache: RefCell<FnvHashMap<(String, String), table::Table>>,
}
94
95impl fmt::Debug for DB {
97 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
98 write!(f, "Database at {:?}\n", self.system_db.directory)?;
99 if let Some(ref user_db) = self.user_db {
100 write!(f, "User database at {:?}\n", user_db.directory)?;
101 }
102 write!(f, "Stack at {:?}\n", self.stack_root)
103 }
104}
105
106impl DB {
107 pub fn new(
113 db_path: Option<&str>,
114 user_tag_root: Option<&str>,
115 def_stack_root: Option<&str>,
116 preload: Option<DBLoadControl>,
117 ) -> DB {
118 let eups_path = match db_path {
121 Some(path) => PathBuf::from(path),
122 None => cogs::get_eups_path_from_env(),
123 };
124
125 let (directory, product_to_info, tags_to_info, product_to_tags) =
126 build_db(eups_path.clone(), preload.clone());
127 let system_db = DBImpl {
128 directory: directory,
129 tag_to_product_info: tags_to_info,
130 product_to_version_info: product_to_info,
131 product_to_tags: product_to_tags,
132 };
133
134 let user_db_path = match user_tag_root {
136 Some(user_path) => Some(PathBuf::from(user_path)),
137 None => cogs::get_user_path_from_home(),
138 };
139
140 let user_db = match user_db_path {
141 Some(user_path) => {
142 let (directory, product_to_info, tags_to_info, product_to_tags) =
143 build_db(user_path, preload);
144 Some(DBImpl {
145 directory: directory,
146 tag_to_product_info: tags_to_info,
147 product_to_version_info: product_to_info,
148 product_to_tags: product_to_tags,
149 })
150 }
151 None => None,
152 };
153
154 let stack_root = match def_stack_root {
156 Some(path) => path::PathBuf::from(path),
157 None => path::PathBuf::from(eups_path)
158 .parent()
159 .unwrap_or_else(|| {
160 println!("problem creating stack root");
161 process::exit(1);
162 })
163 .to_path_buf(),
164 };
165 let cache = RefCell::new(FnvHashMap::default());
166
167 DB {
169 system_db,
170 user_db,
171 stack_root,
172 cache,
173 }
174 }
175
176 pub fn get_all_products(&self) -> Vec<String> {
178 let return_vec: Vec<String> = vec![];
181 self.iter()
182 .fold(return_vec, |mut acc: Vec<String>, db: &DBImpl| {
183 acc.extend(db.product_to_tags.keys().map(|x| x.clone()));
184 acc
185 })
186 }
187
188 pub fn get_db_directories(&self) -> Vec<PathBuf> {
190 let mut paths = Vec::new();
191 for db in self.iter() {
192 paths.push(db.directory.clone());
193 }
194 paths
195 }
196
197 pub fn product_versions(&self, product: &String) -> Vec<String> {
199 let mut product_versions = vec![];
200 for db in self.iter() {
201 if db.product_to_version_info.contains_key(product) {
202 product_versions.extend(
203 db.product_to_version_info[product]
204 .keys()
205 .map(|x| x.clone()),
206 );
207 }
208 }
209 product_versions
210 }
211
212 pub fn product_tags(&self, product: &String) -> Vec<String> {
214 let mut tags_vec = vec![];
215 for db in self.iter() {
216 if db.product_to_tags.contains_key(product) {
217 tags_vec.extend(db.product_to_tags[product].clone());
218 }
219 }
220 tags_vec
221 }
222
223 pub fn get_table_from_version(
225 &self,
226 product: &String,
227 version: &String,
228 ) -> Option<table::Table> {
229 {
232 let cache_borrow = self.cache.borrow();
233 let table_option = cache_borrow.get(&(product.clone(), version.clone()));
234 if let Some(table_from_cache) = table_option {
235 return Some(table_from_cache.clone());
236 }
237 }
238
239 let mut tables_vec: Vec<Option<(path::PathBuf, path::PathBuf)>> = vec![];
240
241 for db in self.iter() {
242 if db.product_to_version_info.contains_key(product)
243 && db.product_to_version_info[product].contains_key(version)
244 {
245 let prod_dir =
246 db.product_to_version_info[product][version].entry(&"PROD_DIR".to_string());
247 let mut ups_dir =
248 db.product_to_version_info[product][version].entry(&"UPS_DIR".to_string());
249 if prod_dir.is_none() || ups_dir.is_none() {
250 tables_vec.push(None);
251 continue;
252 }
253 let mut total = self.stack_root.clone();
254 let mut product_clone = product.clone();
255 product_clone.push_str(".table");
256 total.push(prod_dir.unwrap());
257 let total_only_prod = total.clone();
258 if ups_dir.as_ref().unwrap() == "none" {
260 ups_dir = Some(String::from("ups"));
261 }
262 total.push(ups_dir.unwrap());
263 total.push(product_clone);
264 tables_vec.push(Some((total_only_prod, total)));
265 }
266 }
267
268 match tables_vec.len() {
269 x if x > 0 => {
270 let (prod_dir, total) = tables_vec.remove(x - 1).unwrap();
271 let resolved_table = table::Table::new(product.clone(), total, prod_dir).ok();
272 self.cache.borrow_mut().insert(
273 (product.clone(), version.clone()),
274 resolved_table.as_ref().unwrap().clone(),
275 );
276 resolved_table
277 }
278 _ => None,
279 }
280 }
281
282 pub fn get_flavors_from_version(&self, product: &String, version: &String) -> Vec<String> {
284 let mut flavors = Vec::new();
285 for db in self.iter() {
286 if db.product_to_version_info.contains_key(product)
287 && db.product_to_version_info[product].contains_key(version)
288 {
289 let flavor_option =
290 db.product_to_version_info[product][version].entry(&String::from("FLAVOR"));
291 if let Some(flav) = flavor_option {
292 flavors.push(flav);
293 }
294 }
295 }
296 flavors
297 }
298
299 pub fn get_versions_from_tag(&self, product: &String, tag: Vec<&String>) -> Vec<String> {
301 let mut versions_vec: Vec<String> = vec![];
303 for db in self.iter() {
305 for t in &tag {
306 if db.tag_to_product_info.contains_key(t.clone()) {
307 let ref tag_map = db.tag_to_product_info[t.clone()];
308 if let Some(product_file) = tag_map.get(product) {
309 let version = product_file.entry(&"VERSION".to_string());
310 versions_vec.push(version.unwrap());
311 break;
312 }
313 }
314 }
315 }
316 versions_vec
317 }
318
319 pub fn get_table_from_tag(&self, product: &String, tag: Vec<&String>) -> Option<table::Table> {
321 let versions_vec = self.get_versions_from_tag(product, tag);
322 match versions_vec.len() {
328 x if x > 0 => {
329 let mut res: Option<table::Table> = None;
330 for ver in versions_vec.iter().rev() {
331 res = self.get_table_from_version(product, ver);
332 if res.is_some() {
335 break;
336 }
337 }
338 res
339 }
340 _ => None,
341 }
342 }
343
344 fn iter<'a>(&'a self) -> DBIter<'a> {
347 DBIter {
348 inner: self,
349 pos: 0,
350 }
351 }
352
353 pub fn has_product(&self, product: &String) -> bool {
355 for db in self.iter() {
357 if db.product_to_version_info.contains_key(product) {
358 return true;
359 } else if db.product_to_tags.contains_key(product) {
360 return true;
361 }
362 }
363 return false;
364 }
365}
366
367fn build_db(
373 eups_path: PathBuf,
374 load_options: Option<DBLoadControl>,
375) -> (
376 path::PathBuf,
377 FnvHashMap<String, FnvHashMap<String, DBFile>>,
378 FnvHashMap<String, FnvHashMap<String, DBFile>>,
379 FnvHashMap<String, Vec<String>>,
380) {
381 let (name_tx, name_rx) = mpsc::channel::<(String, path::PathBuf)>();
383 let (tag_tx, tag_rx) = mpsc::channel::<(String, path::PathBuf)>();
384 let (worker1_tx, worker1_rx) = mpsc::channel::<path::PathBuf>();
385 let (worker2_tx, worker2_rx) = mpsc::channel::<path::PathBuf>();
386
387 let worker_tx_vec = vec![worker1_tx, worker2_tx];
389 let worker_rx_vec = vec![worker1_rx, worker2_rx];
390
391 let (mut load_version, mut load_tag) = (false, false);
392 match load_options {
393 Some(DBLoadControl::Versions) => {
394 load_version = true;
395 }
396 Some(DBLoadControl::Tags) => {
397 load_tag = true;
398 }
399 Some(DBLoadControl::All) => {
400 load_version = true;
401 load_tag = true;
402 }
403 None => (),
404 }
405
406 let names_thread = thread::spawn(move || {
407 let mut product_hash: FnvHashMap<String, FnvHashMap<String, DBFile>> =
409 FnvHashMap::default();
410
411 let mut tx_vec = vec![];
413 let mut threads_vec = vec![];
414 for _ in 0..2 {
415 let (tx, rx) = mpsc::channel::<(String, String, path::PathBuf, bool)>();
416 tx_vec.push(tx);
417 threads_vec.push(thread::spawn(move || {
418 let mut dbfiles = vec![];
419 for (version, product, path, preload) in rx {
420 dbfiles.push((version, product, DBFile::new(path, preload)));
421 }
422 dbfiles
423 }));
424 }
425 {
427 let mut tx_vec_cycle = tx_vec.iter().cycle();
428 for (product, file) in name_rx {
429 let version;
430 {
433 let version_file_name = file.file_name().unwrap().to_str().unwrap();
434 let version_str: Vec<&str> = version_file_name.split(".version").collect();
435 version = String::from(version_str[0]);
436 }
437 tx_vec_cycle
438 .next()
439 .unwrap()
440 .send((version, product, file, load_version))
441 .unwrap();
442 }
443 }
444 drop(tx_vec);
446 for thread in threads_vec {
447 let result = thread.join().unwrap();
448 for (version, product, dbfile) in result {
449 let version_hash = product_hash.entry(product).or_insert(FnvHashMap::default());
450 version_hash.insert(version, dbfile);
451 }
452 }
453 product_hash
454 });
455
456 let tags_thread = thread::spawn(move || {
457 let mut tags_hash: FnvHashMap<String, FnvHashMap<String, DBFile>> = FnvHashMap::default();
459 let mut product_to_tags: FnvHashMap<String, Vec<String>> = FnvHashMap::default();
460 let mut tx_vec = vec![];
463 let mut threads_vec = vec![];
464 for _ in 0..2 {
465 let (tx, rx) = mpsc::channel::<(String, String, path::PathBuf, bool)>();
466 tx_vec.push(tx);
467 threads_vec.push(thread::spawn(move || {
468 let mut dbfiles = vec![];
469 for (product, tag, path, preload) in rx {
470 dbfiles.push((product, tag, DBFile::new(path, preload)));
471 }
472 dbfiles
473 }));
474 }
475 {
476 let mut tx_vec_cycle = tx_vec.iter().cycle();
477
478 for (product, file) in tag_rx {
479 let tag;
480 {
483 let tag_file_name = file.file_name().unwrap().to_str().unwrap();
484 let tag_str: Vec<&str> = tag_file_name.split(".chain").collect();
485 tag = String::from(tag_str[0]);
486 }
487 let tags_vec = product_to_tags.entry(product.clone()).or_insert(Vec::new());
488 tags_vec.push(tag.clone());
489 tx_vec_cycle
490 .next()
491 .unwrap()
492 .send((product, tag, file, load_tag))
493 .unwrap();
494 }
495 }
496 drop(tx_vec);
498 for thread in threads_vec {
499 let result = thread.join().unwrap();
500 for (product, tag, dbfile) in result {
501 let product_hash = tags_hash.entry(tag).or_insert(FnvHashMap::default());
502 product_hash.insert(product, dbfile);
503 }
504 }
505 (tags_hash, product_to_tags)
506 });
507
508 let mut worker_threads = vec![];
511 for reciver in worker_rx_vec {
512 let name_tx_clone = mpsc::Sender::clone(&name_tx);
513 let tag_tx_clone = mpsc::Sender::clone(&tag_tx);
514
515 worker_threads.push(thread::spawn(move || {
516 for entry in reciver {
517 if !entry.is_dir() {
518 continue;
519 }
520 let entry_name = String::from(entry.file_name().unwrap().to_str().unwrap());
521 let contents = fs::read_dir(entry).expect("problem in worker thread read_dir");
522 for file in contents {
523 let obj = file.unwrap();
524 let obj_name = obj.file_name().to_str().unwrap().to_string();
525 let message = (entry_name.clone(), obj.path().clone());
526 if obj_name.ends_with(".version") {
527 name_tx_clone.send(message).unwrap();
528 } else if obj_name.ends_with(".chain") {
529 tag_tx_clone.send(message).unwrap();
530 }
531 }
532 }
533 }));
534 }
535
536 {
538 let mut worker_iter = worker_tx_vec.iter().cycle();
541 for entry in fs::read_dir(eups_path.clone()).expect("issue in main list") {
542 worker_iter
543 .next()
544 .unwrap()
545 .send(entry.unwrap().path())
546 .unwrap();
547 }
548 }
549
550 drop(worker_tx_vec);
552
553 for thread in worker_threads {
555 thread.join().unwrap();
556 }
557
558 drop(name_tx);
560 drop(tag_tx);
561
562 let product_to_info = names_thread.join().unwrap();
564 let (tags_to_info, product_to_tags) = tags_thread.join().unwrap();
565
566 (eups_path, product_to_info, tags_to_info, product_to_tags)
567}