simuldb 0.11.1

Database with backend- and format-agnostic data storage for simulation results coupled with metadata
//! This library provides backend- and format-agnostic data storage for simulation results, coupled with metadata about the [Software] used and the simulation [Run].
//!
//! The main use case is the following:
//!
//! 1. generate data on a cluster and save it with the JSON backend
//! 1. transfer the data to a Neo4j backend
//! 1. use Neo4j directly to select data
//!
//! The main goal is therefore a simple solution for writing data; there are no plans to support advanced search or query features.
//!
//! The database does not store the data itself, only the associated metadata.
//!
//! Currently two backends are included:
//! * [Json](db::json::Json), which saves everything in JSON files
//! * [Neo4j](db::neo4j::Neo4j), which uses a Neo4j database as backend (write only)
//!
//! Custom backends can be implemented via the [Database](db::Database) and [DatabaseSession](db::DatabaseSession) traits.
//! Sessions are meant to associate [Dataset]s with a specific [Run] of a [Software].
//! [Dataset]s are references to data stored in a file of any arbitrary format.
//!
//! ## Features
//!
//! * `json` enables the [Json](db::json::Json) backend
//! * `neo4j` enables the [Neo4j](db::neo4j::Neo4j) backend
//! * `sha` enables [sha2] support for automatic hash calculation
//! * `arbitrary` enables support for [arbitrary] (required for tests)
//!
//! ## Example
//!
//! This creates a [Json](db::json::Json)-based [Database](db::Database) and writes some arbitrary data to it.
//! Note that in order to create a session, the [vergen_session] macro will usually suffice.
//!
//! ```
//! use std::io::Write;
//! use serde::Serialize;
//! use simuldb::prelude::*;
//!
//! // Define a metadata type
//! #[derive(Debug, Serialize)]
//! struct Metadata {
//!     a: usize,
//!     b: String,
//! }
//!
//! fn main() -> Result<(), Box<dyn std::error::Error>> {
//!     # std::env::set_current_dir(format!("{}/..", env!("CARGO_MANIFEST_DIR")))?; // change to top level directory
//!     // Create or open database
//!     let mut json = Json::new("output/json");
//!
//!     // Start new session which will contain references to the datasets
//!     let software = Software::new("example", "1.0", "now");
//!     let run = Run::new("now");
//!     let session = Session::new(software, run);
//!     let mut json_session = json.add_session(session)?;
//!
//!     // Create a directory for the result data
//!     std::fs::create_dir_all("output/data")?;
//!
//!     // Generate some data and add it to the database
//!     for a in 0_usize..10 {
//!         // A DatasetWriter can be used to automatically calculate
//!         // the hash of a file and create a Dataset from it
//!         let mut writer = DatasetWriter::new("output/data")?;
//!
//!         // Write some data to the output file
//!         writeln!(writer, "a^2 = {}", a.pow(2))?;
//!
//!         // Generate metadata to associate with it
//!         let metadata = Metadata {
//!             a,
//!             b: "squaring".to_string(),
//!         };
//!
//!         // Add the corresponding dataset to the database
//!         let dataset = writer.finalize(metadata)?;
//!         json_session.add_dataset(&dataset)?;
//!     }
//!
//!     Ok(())
//! }
//! ```

use std::{
    collections::BTreeMap,
    fmt::Debug,
    fs::File,
    hash::{Hash, Hasher},
    io::Read,
    path::Path,
    sync::OnceLock,
};

#[cfg(feature = "arbitrary")]
use arbitrary::Arbitrary;
pub use chrono;
use log::*;
#[cfg(feature = "neo4j")]
pub use neo4rs;
#[cfg(feature = "py")]
use pyo3::prelude::*;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use value::Map;

use crate::{
    error::{Error, Result},
    value::{Value, ValueSerializer},
};

/// Commonly used structs, traits and macros
pub mod prelude {
    #[cfg(feature = "json")]
    pub use crate::db::json::Json;
    #[cfg(feature = "neo4j")]
    pub use crate::db::neo4j::Neo4j;
    #[cfg(feature = "sha")]
    pub use crate::db::DatasetWriter;
    pub use crate::{
        db::{Database, DatabaseSession},
        vergen_session, Dataset, Run, Session, Software,
    };
}

pub mod db;
pub mod error;
pub mod value;

#[cfg(test)]
mod testutils;

/// Generate version for usage with [vergen_session]
#[macro_export]
#[cfg(feature = "git")]
macro_rules! vergen_version {
    () => {{
        concat!(env!("CARGO_PKG_VERSION"), "-", env!("VERGEN_GIT_SHA")).to_string()
    }};
}
/// Generate version for usage with [vergen_session]
#[macro_export]
#[cfg(not(feature = "git"))]
macro_rules! vergen_version {
    () => {{
        env!("CARGO_PKG_VERSION").to_string()
    }};
}

/// Generate session object automatically by extracting data from environment variables provided by Cargo and vergen
///
/// This requires setting up a suitable build script that emits `VERGEN_BUILD_TIMESTAMP` (see the examples folder).
/// If the `git` feature is set, the version number will also contain the short SHA hash of the current commit (this requires `VERGEN_GIT_SHA`).
///
/// The following build script can be used to set up the required configuration:
///
/// ```rust,ignore
/// use vergen::EmitBuilder;
///
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
///     println!("cargo:rerun-if-changed=build.rs");
///     EmitBuilder::builder()
///         .build_timestamp()
///         .git_sha(true)
///         .emit()?;
///     Ok(())
/// }
/// ```
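///
/// With the build script in place, a session can be created and registered with a
/// database (a minimal sketch, assuming `json` is an open [Json](db::json::Json) database):
///
/// ```rust,ignore
/// let session = simuldb::vergen_session!();
/// let mut json_session = json.add_session(session)?;
/// ```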
#[macro_export]
macro_rules! vergen_session {
    () => {{
        ::simuldb::Session {
            software: ::simuldb::Software {
                name: env!("CARGO_CRATE_NAME").to_string(),
                version: ::simuldb::vergen_version!(),
                compile_time: env!("VERGEN_BUILD_TIMESTAMP").to_string(),
            },
            run: ::simuldb::Run {
                date: ::simuldb::chrono::offset::Utc::now().to_rfc3339(),
            },
        }
    }};
}

/// Combination of [Software] and [Run] metadata associated with the data of one run
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
#[cfg_attr(feature = "py", pyclass(set_all, get_all))]
pub struct Session {
    /// Used software
    pub software: Software,
    /// Run information
    pub run: Run,
}

impl Session {
    /// Create new Session
    pub fn new(software: Software, run: Run) -> Self {
        Self { software, run }
    }

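    /// Calculate a hash of this session using `highway::HighwayHasher`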
    pub fn get_hash(&self) -> u64 {
        let mut hasher = highway::HighwayHasher::default();
        self.hash(&mut hasher);
        hasher.finish()
    }
}

#[cfg(feature = "py")]
#[pymethods]
impl Session {
    #[new]
    fn py_new(software: Software, run: Run) -> Self {
        Self::new(software, run)
    }

    fn __repr__(&self) -> String {
        format!(
            "Session(software={}-{}, run={})",
            self.software.name, self.software.version, self.run.date
        )
    }

    #[pyo3(name = "hash")]
    fn py_hash(&self) -> String {
        let hash = self.get_hash();
        format!("{:016X}", hash)
    }
}

/// [Run] metadata containing information about the start time of a run
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
#[cfg_attr(feature = "py", pyclass(set_all, get_all))]
pub struct Run {
    /// Start time of run
    pub date: String,
}

impl Run {
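    /// Create new [Run]
    ///
    /// # Arguments
    ///
    /// * `date` start time of the run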
    pub fn new<S>(date: S) -> Self
    where
        S: ToString,
    {
        Self {
            date: date.to_string(),
        }
    }
}

#[cfg(feature = "py")]
#[pymethods]
impl Run {
    #[new]
    fn py_new(date: &str) -> Self {
        Self::new(date)
    }

    fn __repr__(&self) -> String {
        format!("Run(date={})", self.date)
    }
}

/// Version information about the [Software] used
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
#[cfg_attr(feature = "py", pyclass(set_all, get_all))]
pub struct Software {
    /// Name of software
    pub name: String,
    /// Version of software
    pub version: String,
    /// Compilation time
    pub compile_time: String,
}

impl Software {
    /// Create new [Software]
    ///
    /// # Arguments
    ///
    /// * `name` name of software
    /// * `version` version of software
    /// * `compile_time` compilation time
    pub fn new<S, T, U>(name: S, version: T, compile_time: U) -> Self
    where
        S: ToString,
        T: ToString,
        U: ToString,
    {
        Self {
            name: name.to_string(),
            version: version.to_string(),
            compile_time: compile_time.to_string(),
        }
    }
}

#[cfg(feature = "py")]
#[pymethods]
impl Software {
    #[new]
    fn py_new(name: &str, version: &str, compile_time: &str) -> Self {
        Self::new(name, version, compile_time)
    }

    fn __repr__(&self) -> String {
        format!(
            "Software(name={}, version={}, compile_time={})",
            self.name, self.version, self.compile_time
        )
    }
}

static HOST: OnceLock<Host> = OnceLock::new();

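/// Information about the host machine a [Dataset] was created on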
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, PartialOrd, Ord)]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
#[cfg_attr(feature = "py", pyclass(set_all, get_all))]
pub struct Host {
    /// Hostname
    pub hostname: String,
}

impl Host {
    /// Create new [Host]
    ///
    /// # Arguments
    ///
    /// * `hostname` hostname of the machine
    pub fn new<S>(hostname: S) -> Self
    where
        S: ToString,
    {
        Self {
            hostname: hostname.to_string(),
        }
    }

    /// Obtain host information
    ///
    /// The information is stored in a static [OnceLock] to avoid recalculation
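    ///
    /// A minimal usage sketch:
    ///
    /// ```rust,ignore
    /// let host = Host::get_once()?;
    /// println!("running on {}", host.hostname);
    /// ```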
    pub fn get_once<'a>() -> Result<&'a Self> {
        HOST.get().map(Ok).unwrap_or_else(|| {
            let hostname = ::hostname::get()?.to_string_lossy().into_owned();

            Ok(HOST.get_or_init(|| Self { hostname }))
        })
    }
}

#[cfg(feature = "py")]
#[pymethods]
impl Host {
    #[new]
    fn py_new(hostname: &str) -> Self {
        Self::new(hostname)
    }

    #[staticmethod]
    fn get() -> Result<Self> {
        Self::get_once().cloned()
    }

    fn __repr__(&self) -> String {
        format!("Host(hostname={})", self.hostname)
    }
}

/// Reference to one data file
///
/// This stores an `id` to identify the file, a `hash` to verify it, and some metadata associated with the data.
/// Note that this does not specify exactly where the file is stored.
///
/// By default [sha2] is used to generate a SHA-512 hash of the file if the `sha` feature is enabled.
/// Manually calculating the hash and using `from_hash` allows for other hash functions as long as the hash is representable as [`Vec<u8>`](Vec).
///
/// Every type that is serializable into a [Map] can be used as metadata.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord)]
#[cfg_attr(feature = "arbitrary", derive(Arbitrary))]
#[cfg_attr(feature = "py", pyclass)]
pub struct Dataset {
    /// Id of file
    pub id: Uuid,
    /// Hash of file
    pub hash: Vec<u8>,
    /// Host information
    pub host: Option<Host>,
    /// Metadata associated with dataset
    metadata: Map,
}

/// Generate [Map] object from serializable metadata.
///
/// This will serialize it as [Value] first and then extract the [Map].
/// Unit types are also supported and will be serialized as an empty map.
/// If `metadata` serializes to anything except a map, an error of kind [UnsupportedMetadata](error::Error::UnsupportedMetadata) is returned.
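///
/// A minimal sketch of serializing a small struct (not compiled as a doctest):
///
/// ```rust,ignore
/// use serde::Serialize;
///
/// #[derive(Debug, Serialize)]
/// struct Metadata {
///     a: usize,
/// }
///
/// let map = simuldb::serialize_metadata(Metadata { a: 1 })?;
/// assert!(map.contains_key("a"));
/// ```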
pub fn serialize_metadata<S: Serialize + Debug>(metadata: S) -> Result<Map> {
    match metadata.serialize(ValueSerializer) {
        Ok(Value::Map(map)) => Ok(map),
        Ok(Value::Unit) => Ok(BTreeMap::new()),
        Ok(_) => Err(Error::UnsupportedMetadata(format!(
            "{metadata:?} is not a map",
        ))),
        Err(e) => Err(Error::UnsupportedMetadata(format!("{e}"))),
    }
}

impl Dataset {
    /// Generate dataset object from externally calculated `hash` with automatic
    /// host information gathering
    ///
    /// If no id is specified, a random one will be generated
    ///
    /// # Arguments
    /// * `hash`: hash of the data file
    /// * `metadata`: associated metadata, has to be serializable to a [Map]
    /// * `id`: id of the file
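    ///
    /// A minimal sketch (assuming `hash_bytes` holds an externally computed hash,
    /// `metadata` serializes to a map and `session` is an open database session):
    ///
    /// ```rust,ignore
    /// let dataset = Dataset::from_hash(hash_bytes, metadata, None)?;
    /// session.add_dataset(&dataset)?;
    /// ```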
    pub fn from_hash<S: Serialize + Debug>(
        hash: Vec<u8>,
        metadata: S,
        id: Option<Uuid>,
    ) -> Result<Self> {
        let id = id.unwrap_or_else(Uuid::new_v4);

        debug!("Serializing metadata: {metadata:?}");
        let metadata = serialize_metadata(metadata)?;
        let host = Some(Host::get_once()?.clone());
        Ok(Dataset {
            id,
            hash,
            metadata,
            host,
        })
    }

    /// Generate dataset object by calculating the SHA-512 hash of an existing
    /// file, with automatic host information gathering
    ///
    /// If no id is specified, a random one will be generated
    ///
    /// # Arguments
    /// * `path`: path to the data file
    /// * `metadata`: associated metadata, has to be serializable to a [Map]
    /// * `id`: id of the file
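    ///
    /// A minimal sketch (assuming the `sha` feature is enabled, `metadata` serializes
    /// to a map and the path points to an existing data file):
    ///
    /// ```rust,ignore
    /// let dataset = Dataset::from_file("output/data/result.dat", metadata, None)?;
    /// ```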
    #[cfg(feature = "sha")]
    pub fn from_file<P: AsRef<Path>, S: Serialize + Debug>(
        path: P,
        metadata: S,
        id: Option<Uuid>,
    ) -> Result<Self> {
        use sha2::{Digest, Sha512};

        let id = id.unwrap_or_else(Uuid::new_v4);

        let hash = {
            let mut hasher = Sha512::new();
            let mut buffer = Vec::new();
            File::open(path)?.read_to_end(&mut buffer)?;
            hasher.update(buffer);
            hasher.finalize().as_slice().to_vec()
        };

        debug!("Serializing metadata: {metadata:?}");
        let metadata = serialize_metadata(metadata)?;
        let host = Some(Host::get_once()?.clone());
        Ok(Dataset {
            id,
            hash,
            metadata,
            host,
        })
    }

    /// Generate dataset object from externally calculated `hash`
    ///
    /// If no id is specified, a random one will be generated
    ///
    /// # Arguments
    /// * `hash`: hash of the data file
    /// * `metadata`: associated metadata, has to be serializable to a [Map]
    /// * `id`: id of the file
    /// * `host`: optional host information
    pub fn from_hash_with_host<S: Serialize + Debug>(
        hash: Vec<u8>,
        metadata: S,
        id: Option<Uuid>,
        host: Option<Host>,
    ) -> Result<Self> {
        let id = id.unwrap_or_else(Uuid::new_v4);

        debug!("Serializing metadata: {metadata:?}");
        let metadata = serialize_metadata(metadata)?;
        Ok(Dataset {
            id,
            hash,
            metadata,
            host,
        })
    }

    /// Generate dataset object by calculating the SHA-512 hash of an existing
    /// file
    ///
    /// If no id is specified, a random one will be generated
    ///
    /// # Arguments
    /// * `path`: path to the data file
    /// * `metadata`: associated metadata, has to be serializable to a [Map]
    /// * `id`: id of the file
    /// * `host`: optional host information
    #[cfg(feature = "sha")]
    pub fn from_file_with_host<P: AsRef<Path>, S: Serialize + Debug>(
        path: P,
        metadata: S,
        id: Option<Uuid>,
        host: Option<Host>,
    ) -> Result<Self> {
        use sha2::{Digest, Sha512};

        let id = id.unwrap_or_else(Uuid::new_v4);

        let hash = {
            let mut hasher = Sha512::new();
            let mut buffer = Vec::new();
            File::open(path)?.read_to_end(&mut buffer)?;
            hasher.update(buffer);
            hasher.finalize().as_slice().to_vec()
        };

        debug!("Serializing metadata: {metadata:?}");
        let metadata = serialize_metadata(metadata)?;
        Ok(Dataset {
            id,
            hash,
            metadata,
            host,
        })
    }
}

#[cfg(feature = "py")]
#[pymethods]
impl Dataset {
    #[new]
    fn py_new(
        id: Option<&str>,
        metadata: Option<BTreeMap<String, Value>>,
        hash: Option<&str>,
        path: Option<&str>,
    ) -> Result<Dataset> {
        let id = id.map(Uuid::parse_str).transpose()?;
        match (hash, path) {
            (Some(hash), None) => Self::from_hash(hex::decode(hash)?, metadata, id),
            (None, Some(path)) => Self::from_file(path, metadata, id),
            _ => Err(Error::Other(
                "Exactly one of hash and path has to be specified".to_string(),
            )),
        }
    }

    fn __repr__(&self) -> String {
        format!("Dataset(id={}, hash={})", self.id, hex::encode(&self.hash))
    }

    #[getter]
    fn get_id(&self) -> String {
        self.id.to_string()
    }

    #[getter]
    fn get_hash(&self) -> String {
        hex::encode(&self.hash)
    }

    #[getter]
    fn get_metadata(&self) -> Result<Value> {
        Ok(Value::Map(serialize_metadata(&self.metadata)?))
    }
}

// TODO test for python
#[cfg(feature = "py")]
#[pymodule]
fn simuldb(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_class::<Session>()?;
    m.add_class::<Software>()?;
    m.add_class::<Run>()?;
    m.add_class::<Dataset>()?;

    #[cfg(feature = "json")]
    {
        m.add_class::<db::json::Json>()?;
        m.add_class::<db::json::JsonSession>()?;
    }

    #[cfg(feature = "neo4j")]
    {
        m.add_class::<db::neo4j::Neo4j>()?;
        m.add_class::<db::neo4j::Neo4jSession>()?;
    }

    Ok(())
}