1use lazy_db::*;
2use crate::home_dir;
3use crate::list;
4use crate::unwrap_opt;
5use soulog::*;
6use std::fs;
7use std::path::PathBuf;
8use std::path::Path;
9use crate::entry::Entry;
10use crate::moc::MOC;
11
/// Handle to the on-disk diary archive: the underlying `LazyDB` database plus
/// the identity/versioning metadata stored inside it.
pub struct Archive {
    /// Underlying lazy database rooted at `~/<home>/archive`.
    database: LazyDB,
    /// Random identity of this archive; backups must match it to be loaded.
    uid: u64,
    /// Iteration version: bumped on every commit; used to compare archive ages.
    pub itver: u16,
}
17
18impl Archive {
    /// Creates a brand-new archive at `<home>/archive`, seeding it with a
    /// random `uid`, an iteration version (`itver`) of 0 and empty
    /// sorted/unsorted order stacks.
    ///
    /// Crashes the logger (fatal) if an archive already exists at that path.
    pub fn init(mut logger: impl Logger) -> Self {
        let path = home_dir().join("archive");
        let path_string = path.to_string_lossy();
        if path.exists() {
            log!((logger.error) Init("Archive '{path_string}' already exists, try wiping it before initialising again") as Fatal);
            return logger.crash()
        }

        log!((logger) Init("Initialising a new archive at '{path_string}'..."));
        let database = if_err!((logger) [Init, err => ("While initialising database: {err:?}")] retry LazyDB::init(&path));

        // Effectively random u64 identity: finish a freshly-seeded RandomState
        // hasher, avoiding an extra RNG dependency.
        let uid = {
            use std::collections::hash_map::RandomState;
            use std::hash::{BuildHasher, Hasher};
            RandomState::new().build_hasher().finish()
        };
        let itver = 0u16;

        log!((logger) Init("Writing uid and itver to archive..."));
        if_err!((logger) [Init, err => ("While writing uid: {err:?}")] retry write_database!((&database) uid = new_u64(uid)));
        if_err!((logger) [Init, err => ("While writing itver: {err:?}")] retry write_database!((&database) itver = new_u16(itver)));

        log!((logger) Init("Initialising sorted and unsorted entry containers..."));
        // Both order stacks start with a length of 0 (empty).
        if_err!((logger) [Init, err => ("While writing stack length: {err:?}")] retry write_database!((&database) /order/sorted::length = new_u16(0)));
        if_err!((logger) [Init, err => ("While writing stack length: {err:?}")] retry write_database!((&database) /order/unsorted::length = new_u16(0)));

        log!((logger.vital) Init("Successfully initialised archive '{path_string}'") as Log);
        Self {
            database,
            uid,
            itver,
        }
    }
54
55 #[inline]
57 pub fn load(logger: impl Logger) -> Self {
58 let path = home_dir().join("archive");
59 Self::load_dir(path, logger)
60 }
61
    /// Loads an archive from `path`, reading its stored `uid` and `itver`.
    ///
    /// If no archive directory exists there, a fresh one is initialised
    /// instead (see [`Archive::init`]).
    pub fn load_dir(path: PathBuf, mut logger: impl Logger) -> Self {
        let path_string = path.to_string_lossy();
        log!((logger) Archive("Loading archive '{path_string}'..."));

        if !path.is_dir() {
            log!((logger.vital) Archive("Archive '{path_string}' not found; initialising a new one...") as Inconvenience);
            return Self::init(logger)
        };

        let database = if_err!((logger) [Archive, err => ("While loading archive '{path_string}': {err:?}")] retry LazyDB::load_dir(&path));
        log!((logger) Archive("Loading uid and itver of archive..."));
        // Immediately-invoked closures let `?` chain the search and collect calls.
        let uid = if_err!((logger) [Archive, err => ("While loading archive uid: {err:?}")] retry (|| search_database!((&database) uid)?.collect_u64())());
        let itver = if_err!((logger) [Archive, err => ("While loading archive itver: {err:?}")] retry (|| search_database!((&database) itver)?.collect_u16())());

        log!((logger.verbose) Archive("Successfully loaded archive at '{path_string}'") as Log);
        log!((logger) Archive(""));

        Self {
            database,
            uid,
            itver,
        }
    }
87
    /// Rolls the archive back to the automatic backup (`<home>/backup.ldb`)
    /// taken before the last commit. Crashes fatally when no backup exists.
    ///
    /// Note: this only undoes an unsuccessful (corrupting) commit — it cannot
    /// revert a commit that completed successfully.
    pub fn rollback(force: bool, mut logger: impl Logger) {
        log!((logger) RollBack("Rolling back to last backup..."));
        log!((logger.vital) RollBack("Rollback cannot revert successful commits; only unsuccessful ones that corrupt the archive.") as Warning);
        let path = home_dir().join("backup.ldb");
        if !path.is_file() {
            log!((logger.error) RollBack("No recent backups made; cannot rollback") as Fatal);
            return logger.crash();
        }
        // Validity checks (uid/itver) and the actual restore are delegated.
        Self::load_backup(path, force, logger.hollow());
        log!((logger.vital) RollBack("Successfully rolled back to last backup") as Log);
    }
99
    /// Compiles the archive at `<home>/archive` into a single backup file at
    /// `out_path`. Crashes fatally if no archive exists yet.
    pub fn backup(out_path: impl AsRef<Path>, mut logger: impl Logger) {
        let out_path = out_path.as_ref();
        let path = home_dir().join("archive");
        let path_string = path.to_string_lossy();
        let out_string = out_path.to_string_lossy();

        log!((logger) Backup("Backing up archive '{path_string}' as '{out_string}'..."));

        if !path.is_dir() {
            log!((logger.error) Backup("Archive does not exist, run `diary-cli init` to create a new one before you can back it up.") as Fatal);
            return logger.crash();
        }

        // Load the on-disk database and compile (serialise) it into the backup file.
        let database = if_err!((logger) [Backup, err => ("While backing up archive: {err:?}")] retry LazyDB::load_dir(&path));
        if_err!((logger) [Backup, err => ("While backing up archive: {err:?}")] retry database.compile(out_path));
        log!((logger.vital) Backup("Successfully backed up archive '{path_string}' as '{out_string}'") as Log);
        log!((logger) Backup(""));
    }
119
    /// Restores the archive from the compiled backup file at `path`.
    ///
    /// When an archive is already loaded, the backup is first decompiled into
    /// a scratch directory so its `uid`/`itver` can be compared against the
    /// current archive: a uid mismatch or an older itver aborts unless `force`
    /// is set. Only after the checks pass is the existing archive deleted and
    /// the backup decompiled into its place.
    pub fn load_backup(path: impl AsRef<Path>, force: bool, mut logger: impl Logger) {
        let path = path.as_ref();
        let archive = home_dir().join("archive");
        let archive_string = archive.to_string_lossy();
        let path_string = path.to_string_lossy();

        log!((logger) Backup("Loading archive backup '{path_string}'..."));

        if !path.is_file() {
            log!((logger.error) Backup("Backup file '{path_string}' does not exist") as Fatal);
            return logger.crash();
        }

        if archive.is_dir() {
            log!((logger.vital) Backup("Detected that there is already a loaded archive at '{archive_string}'") as Inconvenience);
            let old = Archive::load(logger.hollow());
            if force {
                log!((logger.vital) Backup("Forcefully loading backup; this may result in archive data loss") as Warning);
            }

            // Decompile into a scratch directory purely to read the backup's metadata.
            let new = home_dir().join("new");
            if_err!((logger) [Backup, err => ("While decompiling backup '{path_string}': {err:?}")] retry LazyDB::decompile(path, &new));
            let new = Archive::load_dir(new, logger.hollow());

            // Scratch copy no longer needed: uid/itver are already in memory.
            // Removal failure is deliberately ignored (best-effort cleanup).
            let _ = std::fs::remove_dir_all(new.database.path());
            if new.uid != old.uid && !force {
                log!((logger.error) Backup("Cannot load backup as it is a backup of a different archive (uids don't match)") as Fatal);
                log!((logger.vital) Backup("If you still want to load it (deleting your current archive in the process) then run the same command but with `-f` to force it.") as Warning);
                return logger.crash();
            }

            if old.itver == new.itver && !force {
                log!((logger.vital) Backup("Detected that backup is the same age as the currently loaded archive (itver is the same)") as Warning);
            }

            if old.itver > new.itver && !force {
                log!((logger.error) Backup("Cannot load backup as it is older than the currently loaded archive (itver is less)") as Fatal);
                log!((logger.vital) Backup("If you still want to load it (losing un-backed changes in the process) then run the same command but with `-f` to force it.") as Warning);
                return logger.crash();
            }

            // All checks passed (or forced): drop the old archive before restoring.
            let _ = std::fs::remove_dir_all(&archive);
        }

        if_err!((logger) [Backup, err => ("While decompiling backup '{path_string}': {err:?}")] retry LazyDB::decompile(path, &archive));
        log!((logger.vital) Backup("Successfully loaded backup '{path_string}'") as Log);
    }
174
175 pub fn wipe(self, mut logger: impl Logger) {
177 let expected = "I, as the user, confirm that I fully understand that I am wiping my ENTIRE archive and that this action is permanent and irreversible";
179 log!((logger.vital) Wipe("To confirm with wiping your ENTIRE archive PERMANENTLY enter the phrase below (without quotes):") as Log);
180 if_err!((logger) [Wipe, err => ("Entered phrase incorrect, please retry")] retry {
181 log!((logger.vital) Wipe("\"{expected}\"") as Log);
182 let input = logger.ask("Wipe", "Enter the phrase");
183 if &input[0..input.len() - 1] != expected { Err(()) }
184 else { Ok(()) }
185 });
186
187 log!((logger) Wipe("Wiping archive..."));
188
189 let path = home_dir().join("archive");
190 if !path.exists() {
192 log!((logger.vital) Wipe("Archive '{}' doesn't exist; doing nothing", path.to_string_lossy()) as Inconvenience);
193 return;
194 }
195
196 if_err!((logger) [Wipe, err => ("While wiping archive: {err:?}")] retry std::fs::remove_dir_all(&path));
198 log!((logger.vital) Wipe("Successfully wiped archive! Run `diary-cli init` to init a new archive\n") as Log);
199 }
200
201 pub fn commit(&self, config: impl AsRef<Path>, mut logger: impl Logger) {
202 let config = config.as_ref();
203 let path = home_dir().join("archive");
204 let path_string = path.to_string_lossy();
205
206 if !path.is_dir() {
208 log!((logger.error) Commit("Archive '{path_string}' doesn't exist! Run `diary-cli init` before you can commit") as Fatal);
209 return logger.crash();
210 }
211
212 let config_string = config.to_string_lossy();
214 if !config.is_file() {
215 log!((logger.error) Commit("Entry config file '{config_string}' doesn't exist") as Fatal);
216 return logger.crash();
217 }
218
219 let _ = std::fs::remove_file(home_dir().join("backup.ldb")); Self::backup(home_dir().join("backup.ldb"), logger.hollow());
222
223 log!((logger) Commit("Parsing toml at '{}'", config.to_string_lossy()));
225 let entry = if_err!((logger) [Commit, err => ("While reading the entry config file: {err:?}")] retry std::fs::read_to_string(config));
226 let entry = if_err!((logger) [Commit, err => ("While parsing entry config toml: {err:?}")] {entry.parse::<toml::Table>()} crash {
227 log!((logger.error) Commit("{err:#?}") as Fatal);
228 logger.crash()
229 });
230
231
232 let is_moc = entry.get("is-moc")
234 .map(|x| unwrap_opt!((x.as_bool()) with logger, format: Commit("`is-moc` attribute of config file '{config_string}' must be boolean")))
235 .unwrap_or(false);
236
237 if is_moc {
238 let container = if_err!((logger) [Commit, err => ("While loading archive as container: {err:?}")] retry search_database!((self.database) /mocs/));
239 log!((logger) Commit("Detected that config file '{config_string}' is an moc (map of contents)"));
240 MOC::new(entry, &config_string, container, logger.hollow());
241 } else {
242 let container = if_err!((logger) [Commit, err => ("While loading archive as container: {err:?}")] retry search_database!((self.database) /entries/));
243 log!((logger) Commit("Detected that config file '{config_string}' is an entry"));
244
245 let entry = Entry::new(entry, &config_string, container, logger.hollow());
247 log!((logger) Commit("Adding entry to unsorted stack..."));
248 list::push(
249 |file| LazyData::new_string(file, &entry.uid),
250 &if_err!((logger) [Commit, err => ("While loaded unsorted stack: {err:?}")] retry search_database!((self.database) /order/unsorted)),
251 logger.hollow(),
252 );
253 }
254
255 log!((logger) Commit("Updating archive itver..."));
257 if_err!((logger) [Commit, err => ("While update archive itver: {err:?}")] retry write_database!((self.database) itver = new_u16(self.itver + 1)));
258
259 log!((logger.vital) Commit("Successfully commited config to archive") as Log);
260 }
261
    /// Borrows the underlying `LazyDB` database.
    #[inline]
    pub fn database(&self) -> &LazyDB {
        &self.database
    }
266
267 #[inline]
268 pub fn database_exists(&self, path: impl AsRef<Path>) -> bool {
269 self.database().path().join(path).exists()
270 }
271
272 pub fn get_entry(&self, uid: String, mut logger: impl Logger) -> Option<Entry> {
273 if !self.database_exists(format!("entries/{uid}")) {
274 log!((logger.error) Archive("Entry of uid `{uid}` does not exist") as Fatal);
275 return logger.crash();
276 }
277
278 match search_database!((self.database) /entries/(&uid)) {
279 Ok(x) => Some(Entry::load_lazy(uid, x)),
280 Err(err) => match err {
281 LDBError::DirNotFound(..) => None,
282 _ => {
283 log!((logger.error) Archive("While getting entry '{uid}': {err:?}") as Fatal);
284 logger.crash()
285 }
286 }
287 }
288 }
289
290 pub fn get_moc(&self, uid: String, mut logger: impl Logger) -> Option<MOC> {
291 if !self.database_exists(format!("mocs/{uid}")) {
292 log!((logger.error) Archive("Moc of uid `{uid}` does not exist") as Fatal);
293 return logger.crash();
294 }
295
296 match search_database!((self.database) /mocs/(&uid)) {
297 Ok(x) => Some(MOC::load_lazy(uid, x)),
298 Err(err) => match err {
299 LDBError::DirNotFound(..) => None,
300 _ => {
301 log!((logger.error) Archive("While getting moc '{uid}': {err:?}") as Fatal);
302 logger.crash()
303 }
304 }
305 }
306 }
307
    /// Loads every entry stored under `<archive>/entries`.
    ///
    /// Returns an empty `Vec` (with a vital notice) when the directory does
    /// not exist yet; crashes fatally on unreadable directory contents.
    pub fn list_entries(&self, mut logger: impl Logger) -> Vec<Entry> {
        let path = self.database.path().join("entries");

        if !path.is_dir() {
            log!((logger.vital) Entries("Path '{}' does not exist; doing nothing", path.to_string_lossy()) as Inconvenience);
            return Vec::with_capacity(0);
        }

        // Each closure below mutably captures its own logger handle, hence the clones.
        let mut logger1 = logger.hollow();
        let logger2 = logger.hollow();
        let dir = if_err!((logger) [Entries, err => ("While reading directory {}'s contents: {err:?}", path.to_string_lossy())] retry fs::read_dir(&path));
        dir.into_iter()
            .map(|x| if_err!((logger) [Entries, err => ("While reading dir element: {err:?}")] {x} crash logger.crash()))
            // Only sub-directories are entries; skip stray files.
            .filter(|x| if_err!((logger1) [Entries, err => ("While reading dir element: {err:?}")] {x.file_type()} crash logger1.crash()).is_dir())
            // Directory name doubles as the entry uid; unwrap is expected to hold
            // since the directory was just observed to exist.
            .map(|x| self.get_entry(x.file_name().to_string_lossy().to_string(), logger2.hollow()).unwrap())
            .collect()
    }
325
    /// Loads every moc (map of contents) stored under `<archive>/mocs`.
    ///
    /// Returns an empty `Vec` (with a vital notice) when the directory does
    /// not exist yet; crashes fatally on unreadable directory contents.
    pub fn list_mocs(&self, mut logger: impl Logger) -> Vec<MOC> {
        let path = self.database.path().join("mocs");

        if !path.is_dir() {
            log!((logger.vital) MOCs("Path '{}' does not exist; doing nothing", path.to_string_lossy()) as Inconvenience);
            return Vec::with_capacity(0);
        }

        // Each closure below mutably captures its own logger handle, hence the clones.
        let mut logger1 = logger.hollow();
        let logger2 = logger.hollow();
        let dir = if_err!((logger) [MOCs, err => ("While reading directory {}'s contents: {err:?}", path.to_string_lossy())] retry fs::read_dir(&path));
        dir.into_iter()
            .map(|x| if_err!((logger) [MOCs, err => ("While reading dir element: {err:?}")] {x} crash logger.crash()))
            // Only sub-directories are mocs; skip stray files.
            .filter(|x| if_err!((logger1) [MOCs, err => ("While reading dir element: {err:?}")] {x.file_type()} crash logger1.crash()).is_dir())
            // Directory name doubles as the moc uid; unwrap is expected to hold
            // since the directory was just observed to exist.
            .map(|x| self.get_moc(x.file_name().to_string_lossy().to_string(), logger2.hollow()).unwrap())
            .collect()
    }
343}