pub(crate) mod entry;
pub(crate) mod group;
pub(crate) mod iconid;
#[cfg(feature = "merge")]
pub(crate) mod merge;
pub(crate) mod meta;
pub(crate) mod node;
#[cfg(feature = "totp")]
pub(crate) mod otp;

pub use crate::db::{
    entry::{AutoType, AutoTypeAssociation, Entry, History, Value},
    group::Group,
    meta::{BinaryAttachment, BinaryAttachments, CustomIcons, Icon, MemoryProtection, Meta},
    node::*,
};
use chrono::NaiveDateTime;
use std::{collections::HashMap, str::FromStr};
use uuid::Uuid;

#[cfg(feature = "totp")]
pub use crate::db::otp::{TOTP, TOTPAlgorithm};

#[cfg(feature = "merge")]
use crate::db::merge::{MergeError, MergeEvent, MergeEventType, MergeLog};

#[cfg(feature = "merge")]
use std::collections::VecDeque;

use crate::{
    config::DatabaseConfig,
    db::iconid::IconId,
    error::{DatabaseIntegrityError, DatabaseOpenError, ParseColorError},
    format::{
        DatabaseVersion,
        kdb::parse_kdb,
        kdbx3::{decrypt_kdbx3, parse_kdbx3},
        kdbx4::{decrypt_kdbx4, parse_kdbx4},
    },
    key::DatabaseKey,
};

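/// A decrypted KeePass database: its configuration, header attachments, the
/// tree of groups and entries rooted at `root`, deletion records and metadata.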
#[derive(Debug)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct Database {
    pub config: DatabaseConfig,

    pub header_attachments: Vec<HeaderAttachment>,

    pub root: SerializableNodePtr,

    pub deleted_objects: DeletedObjects,

    pub meta: Meta,
}

impl Clone for Database {
    fn clone(&self) -> Self {
        Self {
            config: self.config.clone(),
            header_attachments: self.header_attachments.clone(),
            root: self.root.borrow().duplicate().into(),
            deleted_objects: self.deleted_objects.clone(),
            meta: self.meta.clone(),
        }
    }
}

impl PartialEq for Database {
    fn eq(&self, other: &Self) -> bool {
        self.config == other.config
            && self.header_attachments == other.header_attachments
            && self.deleted_objects == other.deleted_objects
            && self.meta == other.meta
            && node_is_equals_to(&self.root, &other.root)
    }
}

impl Eq for Database {}

impl Database {
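    /// Read, decrypt and parse a KeePass database from a reader. The reader is
    /// consumed to the end before parsing begins.
    ///
    /// A minimal usage sketch (assuming the crate root re-exports `Database`
    /// and `DatabaseKey`; the file path is only an example):
    ///
    /// ```ignore
    /// let mut file = std::fs::File::open("tests/resources/test_db_with_password.kdbx")?;
    /// let key = DatabaseKey::new().with_password("demopass");
    /// let db = Database::open(&mut file, key)?;
    /// ```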
    pub fn open(source: &mut dyn std::io::Read, key: DatabaseKey) -> Result<Database, DatabaseOpenError> {
        let mut data = Vec::new();
        source.read_to_end(&mut data)?;

        Database::parse(data.as_ref(), key)
    }

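    /// Parse a database from an in-memory buffer, dispatching on the detected
    /// format version. KDB, KDBX3 and KDBX4 are supported; KDBX2 is not.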
    pub fn parse(data: &[u8], key: DatabaseKey) -> Result<Database, DatabaseOpenError> {
        let database_version = DatabaseVersion::parse(data)?;

        match database_version {
            DatabaseVersion::KDB(_) => parse_kdb(data, &key),
            DatabaseVersion::KDB2(_) => Err(DatabaseOpenError::UnsupportedVersion),
            DatabaseVersion::KDB3(_) => parse_kdbx3(data, &key),
            DatabaseVersion::KDB4(_) => parse_kdbx4(data, &key),
        }
    }

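    /// Save the database to a writer. Only KDBX4 output is supported; older
    /// versions return `DatabaseSaveError::UnsupportedVersion`.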
    #[cfg(feature = "save_kdbx4")]
    pub fn save(&self, destination: &mut dyn std::io::Write, key: DatabaseKey) -> Result<(), crate::error::DatabaseSaveError> {
        use crate::error::DatabaseSaveError;
        use crate::format::kdbx4::dump_kdbx4;

        match self.config.version {
            DatabaseVersion::KDB(_) => Err(DatabaseSaveError::UnsupportedVersion),
            DatabaseVersion::KDB2(_) => Err(DatabaseSaveError::UnsupportedVersion),
            DatabaseVersion::KDB3(_) => Err(DatabaseSaveError::UnsupportedVersion),
            DatabaseVersion::KDB4(_) => dump_kdbx4(self, &key, destination),
        }
    }

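    /// Decrypt a KDBX3 or KDBX4 database and return the raw decrypted XML
    /// document instead of a parsed `Database`.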
    pub fn get_xml(source: &mut dyn std::io::Read, key: DatabaseKey) -> Result<Vec<u8>, DatabaseOpenError> {
        let mut data = Vec::new();
        source.read_to_end(&mut data)?;

        let database_version = DatabaseVersion::parse(data.as_ref())?;

        let data = match database_version {
            DatabaseVersion::KDB(_) => return Err(DatabaseOpenError::UnsupportedVersion),
            DatabaseVersion::KDB2(_) => return Err(DatabaseOpenError::UnsupportedVersion),
            DatabaseVersion::KDB3(_) => decrypt_kdbx3(data.as_ref(), &key)?.2,
            DatabaseVersion::KDB4(_) => decrypt_kdbx4(data.as_ref(), &key)?.3,
        };

        Ok(data)
    }

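    /// Read just enough bytes from the reader to identify the database format
    /// version, without decrypting any content.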
    pub fn get_version(source: &mut dyn std::io::Read) -> Result<DatabaseVersion, DatabaseIntegrityError> {
        let mut data = vec![0; DatabaseVersion::get_version_header_size()];
        _ = source.read(&mut data)?;
        DatabaseVersion::parse(data.as_ref())
    }

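    /// Create an empty database with the given configuration and a single root
    /// group named "Root".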
    pub fn new(config: DatabaseConfig) -> Database {
        Self {
            config,
            header_attachments: Vec::new(),
            root: rc_refcell_node(Group::new("Root")).into(),
            deleted_objects: DeletedObjects::default(),
            meta: Meta::new(),
        }
    }

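    /// Collect the UUIDs of all ancestor groups of `node`, starting with its
    /// direct parent and walking up to the root.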
    pub fn node_get_parents(&self, node: &NodePtr) -> Vec<Uuid> {
        let mut parents = Vec::new();
        let mut parent_uuid = node.borrow().get_parent();
        while let Some(uuid) = parent_uuid {
            parents.push(uuid);
            let parent_node = search_node_by_uuid_with_specific_type::<Group>(&self.root, uuid);
            parent_uuid = parent_node.and_then(|node| node.borrow().get_parent());
        }
        parents
    }

    pub fn set_recycle_bin_enabled(&mut self, enabled: bool) {
        self.meta.set_recycle_bin_enabled(enabled);
    }

    pub fn recycle_bin_enabled(&self) -> bool {
        self.meta.recycle_bin_enabled()
    }

    pub fn node_is_recycle_bin(&self, node: &NodePtr) -> bool {
        let uuid = node.borrow().get_uuid();
        node_is_group(node) && self.get_recycle_bin().is_some_and(|bin| bin.borrow().get_uuid() == uuid)
    }

    pub fn node_is_in_recycle_bin(&self, node: Uuid) -> bool {
        if let Some(node) = search_node_by_uuid(&self.root, node) {
            let parents = self.node_get_parents(&node);
            self.get_recycle_bin()
                .map(|bin| bin.borrow().get_uuid())
                .is_some_and(|uuid| parents.contains(&uuid))
        } else {
            false
        }
    }

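    /// Return the recycle bin group, if the recycle bin is enabled and the
    /// group recorded in `meta.recyclebin_uuid` exists among the root's direct
    /// children.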
    pub fn get_recycle_bin(&self) -> Option<NodePtr> {
        if !self.recycle_bin_enabled() {
            return None;
        }
        let uuid = self.meta.recyclebin_uuid?;
        group_get_children(&self.root).and_then(|children| {
            children
                .into_iter()
                .find(|child| child.borrow().get_uuid() == uuid && node_is_group(child))
        })
    }

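    /// Create a "Recycle Bin" group under the root and record its UUID in the
    /// metadata. Fails if the recycle bin is disabled or already exists.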
    pub fn create_recycle_bin(&mut self) -> crate::Result<NodePtr> {
        use crate::error::Error;
        if !self.recycle_bin_enabled() {
            return Err(Error::RecycleBinDisabled);
        }
        if self.get_recycle_bin().is_some() {
            return Err(Error::RecycleBinAlreadyExists);
        }
        let recycle_bin = rc_refcell_node(Group::new("Recycle Bin"));
        recycle_bin.borrow_mut().set_icon_id(Some(IconId::RECYCLE_BIN));
        self.meta.recyclebin_uuid = Some(recycle_bin.borrow().get_uuid());
        let count = group_get_children(&self.root).ok_or("")?.len();
        group_add_child(&self.root, recycle_bin.clone(), count)?;
        Ok(recycle_bin)
    }

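    /// Remove the node with the given UUID. If the recycle bin is enabled, the
    /// node is moved into the recycle bin (creating it if necessary) unless it
    /// already lives there or is the recycle bin itself; otherwise the node is
    /// removed outright. In both cases the UUID is recorded in `deleted_objects`.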
    pub fn remove_node_by_uuid(&mut self, uuid: Uuid) -> crate::Result<NodePtr> {
        if !self.recycle_bin_enabled() {
            let node = group_remove_node_by_uuid(&self.root, uuid)?;
            self.deleted_objects.add(uuid);
            return Ok(node);
        }
        let node_in_recycle_bin = self.node_is_in_recycle_bin(uuid);
        let recycle_bin = self.get_recycle_bin().ok_or("").or_else(|_| self.create_recycle_bin())?;
        let recycle_bin_uuid = recycle_bin.borrow().get_uuid();
        let node = group_remove_node_by_uuid(&self.root, uuid)?;
        self.deleted_objects.add(uuid);
        if uuid != recycle_bin_uuid && !node_in_recycle_bin {
            group_add_child(&recycle_bin, node.clone(), 0)?;
        }
        self.meta.set_recycle_bin_changed();
        Ok(node)
    }

    pub fn search_node_by_uuid(&self, uuid: Uuid) -> Option<NodePtr> {
        search_node_by_uuid(&self.root, uuid)
    }

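    /// Create a new default node of type `T` and attach it to the group with
    /// the given UUID, falling back to the root group if no such group exists.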
    fn create_new_node<T: Node + Default>(&self, parent: Uuid, index: usize) -> crate::Result<NodePtr> {
        let new_node = rc_refcell_node(T::default());
        let parent = search_node_by_uuid_with_specific_type::<Group>(&self.root, parent)
            .or_else(|| Some(self.root.clone().into()))
            .ok_or("No parent node")?;
        with_node_mut::<Group, _, _>(&parent, |parent| {
            parent.add_child(new_node.clone(), index);
        });
        Ok(new_node)
    }

    pub fn create_new_entry(&self, parent: Uuid, index: usize) -> crate::Result<NodePtr> {
        self.create_new_node::<Entry>(parent, index)
    }

    pub fn create_new_group(&self, parent: Uuid, index: usize) -> crate::Result<NodePtr> {
        self.create_new_node::<Group>(parent, index)
    }

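    /// Merge another database into this one: the group trees are merged first,
    /// then deletions recorded in `other.deleted_objects` are replayed. Returns
    /// a `MergeLog` describing the events and warnings produced along the way.
    ///
    /// A minimal sketch (assuming `db` and `other_db` were opened elsewhere):
    ///
    /// ```ignore
    /// let merge_log = db.merge(&other_db)?;
    /// ```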
    #[cfg(feature = "merge")]
    pub fn merge(&mut self, other: &Database) -> Result<MergeLog, MergeError> {
        let mut log = MergeLog::default();
        log.append(&self.merge_group(&[], &other.root, false)?);
        log.append(&self.merge_deletions(other)?);
        Ok(log)
    }

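    /// Apply the deletions recorded in `other` to this database: entries are
    /// removed when their last modification predates the recorded deletion
    /// time, and groups are removed once they contain no surviving children.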
    #[cfg(feature = "merge")]
    fn merge_deletions(&mut self, other: &Database) -> Result<MergeLog, MergeError> {
        let is_in_deleted_queue = |uuid: Uuid, deleted_groups_queue: &VecDeque<DeletedObject>| -> bool {
            for deleted_object in deleted_groups_queue {
                if deleted_object.uuid == uuid {
                    return true;
                }
            }
            false
        };
        let mut log = MergeLog::default();
        let mut new_deleted_objects = self.deleted_objects.clone();
        for deleted_object in &other.deleted_objects.objects {
            if new_deleted_objects.contains(deleted_object.uuid) {
                continue;
            }
            let entry_location = match Self::find_node_location(&self.root, deleted_object.uuid) {
                Some(l) => l,
                None => continue,
            };
            let parent_group = Group::find_group(&self.root, &entry_location).ok_or(MergeError::FindGroupError(entry_location))?;

            let entry = match Group::find_entry(&parent_group, &[deleted_object.uuid]) {
                Some(e) => e,
                None => continue,
            };

            let entry_last_modification = match with_node::<Entry, _, _>(&entry, |e| e.get_times().get_last_modification()).unwrap() {
                Some(t) => t,
                None => {
                    log.warnings.push(format!(
                        "Entry {} did not have a last modification timestamp",
                        entry.borrow().downcast_ref::<Entry>().unwrap().uuid
                    ));
                    Times::now()
                }
            };
            if entry_last_modification < deleted_object.deletion_time {
                with_node_mut::<Group, _, _>(&parent_group, |pg| pg.remove_node(deleted_object.uuid)).unwrap()?;
                log.events.push(MergeEvent {
                    event_type: MergeEventType::EntryDeleted,
                    node_uuid: deleted_object.uuid,
                });
                new_deleted_objects.objects.push(deleted_object.clone());
            }
        }
        let mut deleted_groups_queue: VecDeque<DeletedObject> = vec![].into();
        for deleted_object in &other.deleted_objects.objects {
            if new_deleted_objects.contains(deleted_object.uuid) {
                continue;
            }
            deleted_groups_queue.push_back(deleted_object.clone());
        }
        while !deleted_groups_queue.is_empty() {
            let deleted_object = deleted_groups_queue.pop_front().unwrap();
            if new_deleted_objects.contains(deleted_object.uuid) {
                continue;
            }
            let group_location = match Self::find_node_location(&self.root, deleted_object.uuid) {
                Some(l) => l,
                None => continue,
            };
            let parent_group = Group::find_group(&self.root, &group_location).ok_or(MergeError::FindGroupError(group_location))?;

            let group = match Group::find_group(&parent_group, &[deleted_object.uuid]) {
                Some(g) => g,
                None => {
                    continue;
                }
            };
            if !with_node::<Group, _, _>(&group, |g| g.entries()).unwrap().is_empty() {
                continue;
            }
            if !with_node::<Group, _, _>(&group, |g| g.groups())
                .unwrap()
                .iter()
                .filter(|&g| !is_in_deleted_queue(g.borrow().get_uuid(), &deleted_groups_queue))
                .collect::<Vec<_>>()
                .is_empty()
            {
                deleted_groups_queue.push_back(deleted_object.clone());
                continue;
            }
            if !with_node::<Group, _, _>(&group, |g| g.groups()).unwrap().is_empty() {
                continue;
            }
            let group_last_modification = match with_node::<Group, _, _>(&group, |g| g.get_times().get_last_modification()).unwrap() {
                Some(t) => t,
                None => {
                    log.warnings.push(format!(
                        "Group {} did not have a last modification timestamp",
                        group.borrow().downcast_ref::<Group>().unwrap().uuid
                    ));
                    Times::now()
                }
            };
            if group_last_modification < deleted_object.deletion_time {
                with_node_mut::<Group, _, _>(&parent_group, |pg| pg.remove_node(deleted_object.uuid)).unwrap()?;
                log.events.push(MergeEvent {
                    event_type: MergeEventType::GroupDeleted,
                    node_uuid: deleted_object.uuid,
                });
                new_deleted_objects.objects.push(deleted_object.clone());
            }
        }
        self.deleted_objects = new_deleted_objects;
        Ok(log)
    }

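    /// Find the node with the given UUID and return its location as the path
    /// of group UUIDs from the root down to its parent group (an empty path
    /// means it is a direct child of the root). Returns `None` if absent.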
    #[cfg(feature = "merge")]
    pub(crate) fn find_node_location(root: &NodePtr, id: Uuid) -> Option<Vec<Uuid>> {
        for node in &group_get_children(root).unwrap_or_default() {
            let node_uuid = node.borrow().get_uuid();
            if node_is_entry(node) {
                if node_uuid == id {
                    return Some(vec![]);
                }
            } else if node_is_group(node) {
                if node_uuid == id {
                    return Some(vec![]);
                }
                #[allow(unused_mut)]
                if let Some(mut location) = Group::find_node_location(node, id) {
                    return Some(location);
                }
            }
        }
        None
    }

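    /// Recursively merge `current_group` from the other database into the
    /// matching group of this database, creating, updating and relocating
    /// entries and subgroups as needed. When `is_in_deleted_group` is set, the
    /// subtree belongs to a deleted group and its contents are neither created
    /// nor relocated on this side.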
    #[cfg(feature = "merge")]
    fn merge_group(&self, current_group_path: &[Uuid], current_group: &NodePtr, is_in_deleted_group: bool) -> Result<MergeLog, MergeError> {
        let mut log = MergeLog::default();
        if let Some(destination_group_location) = Self::find_node_location(&self.root, current_group.borrow().get_uuid()) {
            let mut destination_group_path = destination_group_location.clone();
            destination_group_path.push(current_group.borrow().get_uuid());
            let destination_group =
                Group::find_group(&self.root, &destination_group_path).ok_or(MergeError::FindGroupError(destination_group_path))?;
            let group_update_merge_events = Group::merge_with(&destination_group, current_group)?;
            log.append(&group_update_merge_events);
        }
        for other_entry in &with_node::<Group, _, _>(current_group, |g| g.entries()).unwrap() {
            let other_entry_uuid = other_entry.borrow().get_uuid();
            let destination_entry_location = Self::find_node_location(&self.root, other_entry_uuid);
            if let Some(destination_entry_location) = destination_entry_location {
                let mut existing_entry_location = destination_entry_location.clone();
                existing_entry_location.push(other_entry_uuid);
                let existing_entry = Group::find_entry(&self.root, &existing_entry_location)
                    .ok_or(MergeError::FindEntryError(existing_entry_location.clone()))?
                    .borrow()
                    .duplicate();
                if current_group_path.last() != destination_entry_location.last() && !is_in_deleted_group {
                    let source_location_changed_time =
                        match with_node::<Entry, _, _>(other_entry, |e| e.get_times().get_location_changed()).unwrap() {
                            Some(t) => t,
                            None => {
                                log.warnings
                                    .push(format!("Entry {other_entry_uuid} did not have a location updated timestamp"));
                                Times::epoch()
                            }
                        };
                    let destination_location_changed =
                        match with_node::<Entry, _, _>(&existing_entry, |e| e.get_times().get_location_changed()).unwrap() {
                            Some(t) => t,
                            None => {
                                log.warnings
                                    .push(format!("Entry {} did not have a location updated timestamp", other_entry_uuid));
                                Times::now()
                            }
                        };
                    if source_location_changed_time > destination_location_changed {
                        log.events.push(MergeEvent {
                            event_type: MergeEventType::EntryLocationUpdated,
                            node_uuid: other_entry_uuid,
                        });
                        self.relocate_node(
                            other_entry_uuid,
                            &destination_entry_location,
                            current_group_path,
                            source_location_changed_time,
                        )?;
                        existing_entry_location = current_group_path.to_owned();
                        existing_entry_location.push(other_entry_uuid);
                        with_node_mut::<Entry, _, _>(&existing_entry, |e| {
                            e.get_times_mut().set_location_changed(Some(source_location_changed_time));
                        });
                    }
                }
                if !has_diverged_from(&existing_entry, other_entry) {
                    continue;
                }
                let (merged_entry, entry_merge_log) = Entry::merge(&existing_entry, other_entry)?;
                let merged_entry = match merged_entry {
                    Some(m) => m,
                    None => continue,
                };
                if node_is_equals_to(&existing_entry, &merged_entry) {
                    continue;
                }
                let existing_entry =
                    Group::find_entry(&self.root, &existing_entry_location).ok_or(MergeError::FindEntryError(existing_entry_location))?;
                Entry::entry_replaced_with(&existing_entry, &merged_entry);
                log.events.push(MergeEvent {
                    event_type: MergeEventType::EntryUpdated,
                    node_uuid: merged_entry.borrow().get_uuid(),
                });
                log.append(&entry_merge_log);
                continue;
            }
            if self.deleted_objects.contains(other_entry_uuid) {
                continue;
            }
            if is_in_deleted_group {
                continue;
            }
            let new_entry = other_entry.borrow().duplicate();
            let new_entry_parent_group =
                Group::find_group(&self.root, current_group_path).ok_or(MergeError::FindGroupError(current_group_path.to_owned()))?;

            group_add_child(&new_entry_parent_group, new_entry.clone(), 0).unwrap();
            log.events.push(MergeEvent {
                event_type: MergeEventType::EntryCreated,
                node_uuid: new_entry.borrow().get_uuid(),
            });
        }
        for other_group in &current_group.borrow().downcast_ref::<Group>().unwrap().groups() {
            let mut new_group_location = current_group_path.to_owned();
            let other_group_uuid = other_group.borrow().get_uuid();
            new_group_location.push(other_group_uuid);
            if self.deleted_objects.contains(other_group_uuid) || is_in_deleted_group {
                let new_merge_log = self.merge_group(&new_group_location, other_group, true)?;
                log.append(&new_merge_log);
                continue;
            }
            let destination_group_location = Self::find_node_location(&self.root, other_group_uuid);
            if let Some(destination_group_location) = &destination_group_location {
                if current_group_path != destination_group_location {
                    let mut existing_group_location = destination_group_location.clone();
                    existing_group_location.push(other_group_uuid);
                    let existing_group = Group::find_group(&self.root, &existing_group_location)
                        .ok_or(MergeError::FindGroupError(existing_group_location))?;
                    let existing_group_location_changed =
                        match with_node::<Group, _, _>(&existing_group, |g| g.get_times().get_location_changed()).unwrap() {
                            Some(t) => t,
                            None => {
                                let uuid = existing_group.borrow().get_uuid();
                                log.warnings.push(format!("Group {uuid} did not have a location changed timestamp"));
                                Times::now()
                            }
                        };
                    let other_group_location_changed =
                        match with_node::<Group, _, _>(other_group, |g| g.get_times().get_location_changed()).unwrap() {
                            Some(t) => t,
                            None => {
                                log.warnings
                                    .push(format!("Group {other_group_uuid} did not have a location changed timestamp"));
                                Times::epoch()
                            }
                        };
                    if existing_group_location_changed < other_group_location_changed {
                        self.relocate_node(
                            other_group_uuid,
                            destination_group_location,
                            current_group_path,
                            other_group_location_changed,
                        )?;
                        log.events.push(MergeEvent {
                            event_type: MergeEventType::GroupLocationUpdated,
                            node_uuid: other_group_uuid,
                        });
                        let new_merge_log = self.merge_group(&new_group_location, other_group, is_in_deleted_group)?;
                        log.append(&new_merge_log);
                        continue;
                    }
                }
                let new_merge_log = self.merge_group(&new_group_location, other_group, is_in_deleted_group)?;
                log.append(&new_merge_log);
                continue;
            }
            let new_group = other_group.borrow().duplicate();
            with_node_mut::<Group, _, _>(&new_group, |g| g.reset_children(vec![])).unwrap();
            log.events.push(MergeEvent {
                event_type: MergeEventType::GroupCreated,
                node_uuid: new_group.borrow().get_uuid(),
            });
            let new_group_parent_group =
                Group::find_group(&self.root, current_group_path).ok_or(MergeError::FindGroupError(current_group_path.to_owned()))?;
            with_node_mut::<Group, _, _>(&new_group_parent_group, |g| g.add_child(new_group, 0)).unwrap();
            let new_merge_log = self.merge_group(&new_group_location, other_group, is_in_deleted_group)?;
            log.append(&new_merge_log);
        }
        Ok(log)
    }
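
    /// Move the node with the given UUID from the `from` group to the `to`
    /// group and stamp it with the provided `LocationChanged` timestamp.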
    #[cfg(feature = "merge")]
    fn relocate_node(
        &self,
        node_uuid: Uuid,
        from: &[Uuid],
        to: &[Uuid],
        new_location_changed_timestamp: NaiveDateTime,
    ) -> Result<(), MergeError> {
        let source_group = Group::find_group(&self.root, from).ok_or(MergeError::FindGroupError(from.to_owned()))?;
        let relocated_node = with_node_mut::<Group, _, _>(&source_group, |s| s.remove_node(node_uuid)).unwrap()?;
        relocated_node
            .borrow_mut()
            .get_times_mut()
            .set_location_changed(Some(new_location_changed_timestamp));

        let destination_group = Group::find_group(&self.root, to).ok_or(MergeError::FindGroupError(to.to_owned()))?;
        group_add_child(&destination_group, relocated_node, 0).unwrap();
        Ok(())
    }
}

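/// Check whether two nodes of the same kind (entry/entry or group/group) have
/// diverged from each other. Nodes of differing kinds are never considered
/// diverged.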
#[cfg(feature = "merge")]
pub(crate) fn has_diverged_from(node: &NodePtr, other_node: &NodePtr) -> bool {
    if let Some(entry) = node.borrow().downcast_ref::<Entry>() {
        if let Some(other_entry) = other_node.borrow().downcast_ref::<Entry>() {
            return entry._has_diverged_from(other_entry);
        }
    }
    if let Some(group) = node.borrow().downcast_ref::<Group>() {
        if let Some(other_group) = other_node.borrow().downcast_ref::<Group>() {
            return group._has_diverged_from(other_group);
        }
    }
    false
}

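/// Timestamp and expiry bookkeeping shared by entries and groups, stored as a
/// map from time tag names (e.g. `LastModificationTime`) to timestamps.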
#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct Times {
    pub(crate) expires: bool,

    pub(crate) usage_count: usize,

    pub(crate) times: HashMap<String, NaiveDateTime>,
}

pub const EXPIRY_TIME_TAG_NAME: &str = "ExpiryTime";
pub const LAST_MODIFICATION_TIME_TAG_NAME: &str = "LastModificationTime";
pub const CREATION_TIME_TAG_NAME: &str = "CreationTime";
pub const LAST_ACCESS_TIME_TAG_NAME: &str = "LastAccessTime";
pub const LOCATION_CHANGED_TAG_NAME: &str = "LocationChanged";

impl Times {
    fn get(&self, key: &str) -> Option<NaiveDateTime> {
        self.times.get(key).copied()
    }

    fn set(&mut self, key: &str, time: Option<NaiveDateTime>) {
        if let Some(time) = time {
            self.times.insert(key.to_string(), time);
        } else {
            self.times.remove(key);
        }
    }

    pub fn get_expires(&self) -> bool {
        self.expires
    }

    pub fn set_expires(&mut self, expires: bool) {
        self.expires = expires;
    }

    pub fn get_usage_count(&self) -> usize {
        self.usage_count
    }

    pub fn set_usage_count(&mut self, usage_count: usize) {
        self.usage_count = usage_count;
    }

    pub fn get_expiry_time(&self) -> Option<NaiveDateTime> {
        self.get(EXPIRY_TIME_TAG_NAME)
    }

    pub fn set_expiry_time(&mut self, time: Option<NaiveDateTime>) {
        self.set(EXPIRY_TIME_TAG_NAME, time);
    }

    pub fn get_last_modification(&self) -> Option<NaiveDateTime> {
        self.get(LAST_MODIFICATION_TIME_TAG_NAME)
    }

    pub fn set_last_modification(&mut self, time: Option<NaiveDateTime>) {
        self.set(LAST_MODIFICATION_TIME_TAG_NAME, time);
    }

    pub fn get_creation(&self) -> Option<NaiveDateTime> {
        self.get(CREATION_TIME_TAG_NAME)
    }

    pub fn set_creation(&mut self, time: Option<NaiveDateTime>) {
        self.set(CREATION_TIME_TAG_NAME, time);
    }

    pub fn get_last_access(&self) -> Option<NaiveDateTime> {
        self.get(LAST_ACCESS_TIME_TAG_NAME)
    }

    pub fn set_last_access(&mut self, time: Option<NaiveDateTime>) {
        self.set(LAST_ACCESS_TIME_TAG_NAME, time);
    }

    pub fn get_location_changed(&self) -> Option<NaiveDateTime> {
        self.get(LOCATION_CHANGED_TAG_NAME)
    }

    pub fn set_location_changed(&mut self, time: Option<NaiveDateTime>) {
        self.set(LOCATION_CHANGED_TAG_NAME, time);
    }

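    /// The current UTC time, truncated to whole seconds.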
    pub fn now() -> NaiveDateTime {
        let now = chrono::Utc::now().timestamp();
        chrono::DateTime::from_timestamp(now, 0).unwrap().naive_utc()
    }

    pub fn epoch() -> NaiveDateTime {
        chrono::DateTime::from_timestamp(0, 0).unwrap().naive_utc()
    }

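    /// Fresh timestamps for a newly created node: every time field is set to
    /// the current time and `expires` is false.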
    pub fn new() -> Times {
        let mut response = Times::default();
        let now = Some(Times::now());
        response.set_creation(now);
        response.set_last_modification(now);
        response.set_last_access(now);
        response.set_location_changed(now);
        response.set_expiry_time(now);
        response.set_expires(false);
        response
    }
}

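/// Arbitrary custom key/value items, keyed by item name.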
#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct CustomData {
    pub items: HashMap<String, CustomDataItem>,
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct CustomDataItem {
    pub value: Option<Value>,
    pub last_modification_time: Option<NaiveDateTime>,
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct CustomDataItemDenormalized {
    pub key: String,
    pub custom_data_item: CustomDataItem,
}

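/// A binary attachment carried in the database header: a flags byte plus the
/// raw attachment content.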
#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct HeaderAttachment {
    pub flags: u8,
    pub content: Vec<u8>,
}

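/// Records of nodes that have been deleted from the database, consulted when
/// merging to replay deletions.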
#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct DeletedObjects {
    pub objects: Vec<DeletedObject>,
}

impl DeletedObjects {
    pub fn contains(&self, uuid: Uuid) -> bool {
        self.objects.iter().any(|deleted_object| deleted_object.uuid == uuid)
    }

    pub fn add(&mut self, uuid: Uuid) {
        let deletion_time = Times::now();
        if let Some(item) = self.objects.iter_mut().find(|item| item.uuid == uuid) {
            item.deletion_time = deletion_time;
        } else {
            self.objects.push(DeletedObject { uuid, deletion_time });
        }
    }
}

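/// A single deletion record: the UUID of the removed node and when it was
/// removed.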
#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "serialization", derive(serde::Serialize))]
pub struct DeletedObject {
    pub uuid: Uuid,
    pub deletion_time: NaiveDateTime,
}

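/// An RGB color, displayed and parsed in `#rrggbb` hex notation.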
#[derive(Debug, Default, PartialEq, Eq, Clone, Copy)]
pub struct Color {
    pub r: u8,
    pub g: u8,
    pub b: u8,
}

#[cfg(feature = "serialization")]
impl serde::Serialize for Color {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}

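// Parses colors of the form "#RRGGBB"; any other length or a missing '#' is
// rejected with a ParseColorError.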
impl FromStr for Color {
    type Err = ParseColorError;

    fn from_str(str: &str) -> Result<Self, Self::Err> {
        if !str.starts_with('#') || str.len() != 7 {
            return Err(ParseColorError(str.to_string()));
        }

        let var = u64::from_str_radix(str.trim_start_matches('#'), 16).map_err(|_e| ParseColorError(str.to_string()))?;

        let r = ((var >> 16) & 0xff) as u8;
        let g = ((var >> 8) & 0xff) as u8;
        let b = (var & 0xff) as u8;

        Ok(Self { r, g, b })
    }
}

impl std::fmt::Display for Color {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "#{:02x}{:02x}{:02x}", self.r, self.g, self.b)
    }
}

#[cfg(test)]
mod database_tests {
    use crate::{
        Result,
        db::{Database, DatabaseKey},
    };
    #[cfg(feature = "save_kdbx4")]
    use crate::{config::DatabaseConfig, db::Entry};
    use std::fs::File;

    #[test]
    fn test_xml() -> Result<()> {
        let key = DatabaseKey::new().with_password("demopass");
        let mut f = File::open("tests/resources/test_db_with_password.kdbx")?;
        let xml = Database::get_xml(&mut f, key)?;

        assert!(xml.len() > 100);

        Ok(())
    }

    #[test]
    fn test_open_invalid_version_header_size() {
        assert!(Database::parse(&[], DatabaseKey::new().with_password("testing")).is_err());
        assert!(Database::parse(&[0, 0, 0, 0, 0, 0, 0, 0], DatabaseKey::new().with_password("testing")).is_err());
        assert!(Database::parse(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], DatabaseKey::new().with_password("testing")).is_err());
    }

    #[cfg(feature = "save_kdbx4")]
    #[test]
    fn test_save() -> Result<()> {
        use crate::{
            db::Group,
            db::{group_add_child, rc_refcell_node},
        };

        let db = Database::new(DatabaseConfig::default());

        group_add_child(&db.root, rc_refcell_node(Entry::default()), 0).unwrap();
        group_add_child(&db.root, rc_refcell_node(Entry::default()), 1).unwrap();
        group_add_child(&db.root, rc_refcell_node(Entry::default()), 2).unwrap();

        let group = rc_refcell_node(Group::new("my group"));
        group_add_child(&group, rc_refcell_node(Entry::default()), 0).unwrap();
        group_add_child(&group, rc_refcell_node(Entry::default()), 1).unwrap();
        group_add_child(&db.root, group, 3).unwrap();

        let mut buffer = Vec::new();
        let key = DatabaseKey::new().with_password("testing");

        db.save(&mut buffer, key.clone())?;

        let db_loaded = Database::open(&mut buffer.as_slice(), key)?;

        assert_eq!(db, db_loaded);
        Ok(())
    }
}