#![doc(
html_logo_url = "https://raw.githubusercontent.com/nav-solutions/.github/master/logos/logo2.jpg"
)]
#![doc = include_str!("../README.md")]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![allow(clippy::type_complexity)]
extern crate num_derive;
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde;
extern crate gnss_rs as gnss;
extern crate num;
pub mod constants;
pub mod error;
pub mod frequency;
pub mod header;
pub mod matcher;
pub mod observable;
pub mod production;
pub mod record;
pub mod station;
mod epoch;
#[cfg(test)]
mod tests;
use std::{
fs::File,
io::{BufReader, BufWriter, Read, Write},
path::Path,
str::FromStr,
};
use itertools::Itertools;
#[cfg(feature = "flate2")]
use flate2::{read::GzDecoder, write::GzEncoder, Compression as GzCompression};
use hifitime::prelude::{Duration, Epoch};
use crate::{
error::{FormattingError, ParsingError},
header::Header,
matcher::Matcher,
production::ProductionAttributes,
record::{ClockOffset, Record},
station::GroundStation,
};
/// File comments: one `String` per COMMENT line encountered in the file.
pub type Comments = Vec<String>;
/// One-stop re-export of the most commonly used types:
/// `use <crate>::prelude::*` brings everything needed for
/// typical parsing / formatting workflows into scope.
pub mod prelude {
// crate-local types
pub use crate::{
error::{FormattingError, ParsingError},
frequency::Frequency,
header::{Antenna, Header, Receiver, Version},
matcher::Matcher,
observable::Observable,
production::ProductionAttributes,
record::{
ClockOffset, EpochFlag, Key, Measurements, Observation, ObservationKey, Record, SNR,
},
station::GroundStation,
Comments, DORIS,
};
// re-exported dependencies, so downstream users do not need
// to depend on `gnss-rs` / `hifitime` directly
pub use gnss::prelude::{Constellation, DOMESTrackingPoint, COSPAR, DOMES, SV};
pub use hifitime::{Duration, Epoch, Polynomial, TimeScale, TimeSeries};
}
/// Formats `content` into standardized DORIS/RINEX line(s):
/// the payload is laid out in 60-byte columns (space padded),
/// each line terminated by `marker` starting exactly at column 60.
/// Content longer than 60 bytes is wrapped onto as many lines as needed.
pub(crate) fn fmt_doris(content: &str, marker: &str) -> String {
    // Short (or empty) content fits on a single padded line.
    if content.len() < 60 {
        return format!("{:<60}{}", content, marker);
    }
    let total = content.len();
    (0..total)
        .step_by(60)
        .map(|start| {
            let end = (start + 60).min(total);
            format!("{:<60}{}", &content[start..end], marker)
        })
        .collect::<Vec<_>>()
        .join("\n")
}
/// Formats `content` as RINEX COMMENT line(s): 60-byte padded payload
/// columns with the "COMMENT" marker at column 60, wrapping handled
/// by [fmt_doris].
pub(crate) fn fmt_comment(content: &str) -> String {
fmt_doris(content, "COMMENT")
}
/// A parsed DORIS file: [Header] section plus measurements [Record],
/// with optional file-production attributes recovered from a
/// standardized file name (see `from_file`).
#[derive(Clone, Default, Debug, PartialEq)]
pub struct DORIS {
/// [Header] section: metadata that describes the following record.
pub header: Header,
/// [Record]: the actual measurements.
pub record: Record,
/// Production attributes, parsed from the file name when it follows
/// the standardized naming convention; `None` otherwise.
pub production: Option<ProductionAttributes>,
}
impl DORIS {
    /// Builds a new [DORIS] from a [Header] and a [Record],
    /// without any production attributes.
    pub fn new(header: Header, record: Record) -> DORIS {
        DORIS {
            header,
            record,
            production: None,
        }
    }
/// Copies this [DORIS], replacing the [Header] section with `header`.
///
/// Fix: previously this reset `production` to `None`, which was
/// inconsistent with [Self::with_record] (that preserves it) and
/// silently discarded file-production attributes.
pub fn with_header(&self, header: Header) -> Self {
    Self {
        header,
        record: self.record.clone(),
        production: self.production.clone(),
    }
}
/// Replaces the [Header] section in place.
///
/// Fix: `header` is already owned, so the previous `.clone()`
/// allocated a full copy only to drop the original (redundant clone).
pub fn replace_header(&mut self, header: Header) {
    self.header = header;
}
/// Copies this [DORIS], replacing the measurements [Record] with `record`;
/// header and production attributes are preserved.
pub fn with_record(&self, record: Record) -> Self {
    let header = self.header.clone();
    let production = self.production.clone();
    DORIS {
        header,
        record,
        production,
    }
}
/// Replaces the measurements [Record] in place.
///
/// Fix: `record` is already owned, so the previous `.clone()`
/// allocated a full copy only to drop the original (redundant clone).
pub fn replace_record(&mut self, record: Record) {
    self.record = record;
}
/// Parses a [DORIS] structure from any buffered readable interface.
/// The [Header] section is consumed first, then the [Record];
/// production attributes are left unset (they come from file names only).
///
/// ## Errors
/// Returns a [ParsingError] when either section is invalid.
pub fn parse<R: Read>(reader: &mut BufReader<R>) -> Result<Self, ParsingError> {
    let mut header = Header::parse(reader)?;
    // Record parsing may need mutable access to the header (e.g. updates).
    let record = Record::parse(&mut header, reader)?;
    Ok(Self {
        header,
        record,
        production: None,
    })
}
pub fn format<W: Write>(&self, writer: &mut BufWriter<W>) -> Result<(), FormattingError> {
self.header.format(writer)?;
self.record.format(writer, &self.header)?;
writer.flush()?;
Ok(())
}
/// Parses a [DORIS] from a local readable (plain text) file.
/// When the file name follows the standardized convention,
/// production attributes are attached to the returned structure.
///
/// ## Errors
/// Returns a [ParsingError] when the file cannot be opened or parsed.
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<DORIS, ParsingError> {
    let path = path.as_ref();
    // Best effort: interpret the file name as standardized
    // production attributes (idiomatic `and_then`/`ok` instead of
    // the former match + if-let pyramid).
    let file_attributes = path
        .file_name()
        .and_then(|name| ProductionAttributes::from_str(&name.to_string_lossy()).ok());
    let fd = File::open(path)?;
    let mut reader = BufReader::new(fd);
    let mut doris = Self::parse(&mut reader)?;
    doris.production = file_attributes;
    Ok(doris)
}
/// Dumps this [DORIS] into a local file, in plain readable format.
///
/// ## Errors
/// Returns a [FormattingError] when the file cannot be created or written.
pub fn to_file<P: AsRef<Path>>(&self, path: P) -> Result<(), FormattingError> {
    let mut writer = BufWriter::new(File::create(path)?);
    self.format(&mut writer)
}
/// Parses a [DORIS] from a local gzip-compressed file.
/// When the file name follows the standardized convention,
/// production attributes are attached to the returned structure.
///
/// ## Errors
/// Returns a [ParsingError] when the file cannot be opened or parsed.
#[cfg(feature = "flate2")]
#[cfg_attr(docsrs, doc(cfg(feature = "flate2")))]
pub fn from_gzip_file<P: AsRef<Path>>(path: P) -> Result<DORIS, ParsingError> {
    let path = path.as_ref();
    // Best effort: interpret the file name as standardized
    // production attributes (idiomatic `and_then`/`ok` instead of
    // the former match + if-let pyramid).
    let file_attributes = path
        .file_name()
        .and_then(|name| ProductionAttributes::from_str(&name.to_string_lossy()).ok());
    let fd = File::open(path)?;
    let reader = GzDecoder::new(fd);
    let mut reader = BufReader::new(reader);
    let mut doris = Self::parse(&mut reader)?;
    doris.production = file_attributes;
    Ok(doris)
}
/// Dumps this [DORIS] into a local gzip-compressed file
/// (fixed compression level 5, as before).
///
/// ## Errors
/// Returns a [FormattingError] when the file cannot be created or written.
#[cfg(feature = "flate2")]
#[cfg_attr(docsrs, doc(cfg(feature = "flate2")))]
pub fn to_gzip_file<P: AsRef<Path>>(&self, path: P) -> Result<(), FormattingError> {
    let fd = File::create(path)?;
    let encoder = GzEncoder::new(fd, GzCompression::new(5));
    let mut writer = BufWriter::new(encoder);
    self.format(&mut writer)
}
/// Returns true if this [DORIS] is the result of a merge operation,
/// i.e. the header carries a "FILE MERGE" comment.
///
/// Idiom: manual scan-and-return loop replaced by `Iterator::any`.
pub fn is_merged(&self) -> bool {
    self.header
        .comments
        .iter()
        .any(|comment| comment == "FILE MERGE")
}
/// Returns the first [GroundStation] (from the header station list)
/// that satisfies `matcher`, if any.
pub fn ground_station<'a>(&self, matcher: Matcher<'a>) -> Option<GroundStation> {
    // `find` keeps the first match — same result as the former
    // filter + reduce(first) chain.
    self.header
        .ground_stations
        .iter()
        .find(|station| station.matches(&matcher))
        .cloned()
}
/// Iterates over the unique satellite [ClockOffset]s found in the record,
/// tagged with their [Epoch].
pub fn satellite_clock_offset_iter(
    &self,
) -> Box<dyn Iterator<Item = (Epoch, ClockOffset)> + '_> {
    Box::new(
        self.record
            .measurements
            .iter()
            .filter_map(|(key, measurements)| {
                // only keep epochs for which a clock offset was resolved
                measurements
                    .satellite_clock_offset
                    .map(|clock_offset| (key.epoch, clock_offset))
            })
            .unique(),
    )
}
/// Builds the sampling-interval histogram of this record:
/// yields each distinct [Duration] between consecutive epochs,
/// along with its population (number of occurrences), in order
/// of first appearance.
pub fn sampling_histogram(&self) -> Box<dyn Iterator<Item = (Duration, usize)> + '_> {
    let mut histogram: Vec<(Duration, usize)> = Vec::new();
    // pair each epoch with its successor to obtain the interval
    let deltas = self
        .record
        .epochs_iter()
        .zip(self.record.epochs_iter().skip(1))
        .map(|((former, _), (latter, _))| latter - former);
    for dt in deltas {
        match histogram.iter_mut().find(|(delta, _)| *delta == dt) {
            Some((_, population)) => *population += 1,
            None => histogram.push((dt, 1)),
        }
    }
    Box::new(histogram.into_iter())
}
/// Returns the dominant sampling period of this record, i.e. the
/// epoch interval that occurs most often in [Self::sampling_histogram].
///
/// Bug fix: the previous `sorted().reduce(|k, _| k)` simply returned
/// the *smallest* interval (tuples sort by `Duration` first),
/// regardless of how often it occurred — not the dominant one.
/// We now select the histogram entry with the largest population.
pub fn dominant_sampling_period(&self) -> Option<Duration> {
    self.sampling_histogram()
        .max_by_key(|(_, population)| *population)
        .map(|(dt, _)| dt)
}
/// Returns the standardized file name for this [DORIS].
/// Uses the attached production attributes when available; otherwise
/// rebuilds it from the satellite name (5 chars, 'X'-padded),
/// the 2-digit year (offset from 2000) and the day of year,
/// both taken from the header's time of first observation (0 if absent).
pub fn standard_filename(&self) -> String {
    if let Some(attributes) = &self.production {
        return attributes.to_string();
    }
    let sat_len = self.header.satellite.len();
    // truncate to at most 5 characters, then right-pad with 'X'
    let mut sat_name = self.header.satellite[..sat_len.min(5)].to_string();
    while sat_name.len() < 5 {
        sat_name.push('X');
    }
    let (year, doy) = match self.header.time_of_first_observation {
        Some(epoch) => (epoch.year() - 2000, epoch.day_of_year().round() as u32),
        None => (0, 0),
    };
    format!("{}{:02}{:03}", sat_name, year, doy)
}
/// Returns a copy of `self` with `rhs` observations subtracted
/// (see [Self::substract_mut]); `self` is left untouched.
pub fn substract(&self, rhs: &Self) -> Self {
    let mut difference = self.clone();
    difference.substract_mut(rhs);
    difference
}
/// Subtracts `rhs` observations from `self`, in place.
/// Only observations present in *both* records are kept (their values
/// become `self - rhs`); epochs or observations absent from `rhs`
/// are dropped, as are epochs left with no observation at all.
pub fn substract_mut(&mut self, rhs: &Self) {
    self.record.measurements.retain(|key, measurements| {
        match rhs.record.measurements.get(key) {
            Some(rhs_measurements) => {
                measurements.observations.retain(|obs_key, observation| {
                    match rhs_measurements.observations.get(obs_key) {
                        Some(rhs_obs) => {
                            observation.value -= rhs_obs.value;
                            true
                        },
                        None => false,
                    }
                });
                // drop epochs that no longer carry any observation
                !measurements.observations.is_empty()
            },
            None => false,
        }
    });
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fmt_comment;
// Comments shorter than 60 bytes must be padded so the "COMMENT"
// marker lands exactly at column 60.
#[test]
fn fmt_comments_singleline() {
for desc in [
"test",
"just a basic comment",
"just another lengthy comment blahblabblah",
] {
let comment = fmt_comment(desc);
assert!(
comment.len() >= 60,
"comments should be at least 60 byte long"
);
assert_eq!(
comment.find("COMMENT"),
Some(60),
"comment marker should located @ 60"
);
}
}
// Long comments must wrap onto ceil(len / 60) lines, each line
// carrying the "COMMENT" marker at column 60.
#[test]
fn fmt_wrapped_comments() {
for desc in ["just trying to form a very lengthy comment that will overflow since it does not fit in a single line",
"just trying to form a very very lengthy comment that will overflow since it does fit on three very meaningful lines. Imazdmazdpoakzdpoakzpdokpokddddddddddddddddddaaaaaaaaaaaaaaaaaaaaaaa"] {
let nb_lines = num_integer::div_ceil(desc.len(), 60);
let comments = fmt_comment(desc);
assert_eq!(comments.lines().count(), nb_lines);
for line in comments.lines() {
assert!(line.len() >= 60, "comment line should be at least 60 byte long");
assert_eq!(line.find("COMMENT"), Some(60), "comment marker should located @ 60");
}
}
}
// Observable lists must wrap with the "SYS / # / OBS TYPES" marker
// repeated on every continuation line.
#[test]
fn fmt_observables_v3() {
for (desc, expected) in [
("R 9 C1C L1C S1C C2C C2P L2C L2P S2C S2P",
"R 9 C1C L1C S1C C2C C2P L2C L2P S2C S2P SYS / # / OBS TYPES"),
("G 18 C1C L1C S1C C2P C2W C2S C2L C2X L2P L2W L2S L2L L2X S2P S2W S2S S2L S2X",
"G 18 C1C L1C S1C C2P C2W C2S C2L C2X L2P L2W L2S L2L L2X SYS / # / OBS TYPES
S2P S2W S2S S2L S2X SYS / # / OBS TYPES"),
] {
assert_eq!(fmt_doris(desc, "SYS / # / OBS TYPES"), expected);
}
}
}